Dataset Viewer
id (string, 2-8 chars) | text (string, 16-264k chars) | dataset_id (string, 1 class)
---|---|---
3467324
|
<reponame>aivazis/ampcor
# -*- coding: utf-8 -*-
#
# <NAME> <<EMAIL>>
# parasim
# (c) 1998-2021 all rights reserved
#
# pull the action protocol
from ..shells import action
# and the base panel
from ..shells import command
# pull in the command decorator
from .. import foundry
# commands
@foundry(implements=action, tip="estimate an offset field given a pair of raster images")
def offsets():
# get the action
from .Offsets import Offsets
# borrow its docstring
__doc__ = Offsets.__doc__
# and publish it
return Offsets
# temporary vis
@foundry(implements=action, tip="visualize the correlation surface")
def mdy():
# get the action
from .Gamma import Gamma
# borrow its docstring
__doc__ = Gamma.__doc__
# and publish it
return Gamma
# help
@foundry(implements=action, tip="display information about this application")
def about():
# get the action
from .About import About
# borrow its docstring
__doc__ = About.__doc__
# and publish it
return About
@foundry(implements=action, tip="display configuration information about this application")
def config():
# get the action
from .Config import Config
# borrow its docstring
__doc__ = Config.__doc__
# and publish it
return Config
@foundry(implements=action, tip="display debugging information about this application")
def debug():
# get the action
from .Debug import Debug
# borrow its docstring
__doc__ = Debug.__doc__
# and publish it
return Debug
# command completion; no tip so it doesn't show up on the help panel
@foundry(implements=action)
def complete():
# get the action
from .Complete import Complete
# and publish it
return Complete
# end of file
|
StarcoderdataPython
|
1873136
|
<reponame>amcclead7336/Enterprise_Data_Science_Final<filename>venv/lib/python3.8/site-packages/vsts/test/v4_0/models/suite_entry.py<gh_stars>0
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
class SuiteEntry(Model):
"""SuiteEntry.
:param child_suite_id: Id of child suite in a suite
:type child_suite_id: int
:param sequence_number: Sequence number for the test case or child suite in the suite
:type sequence_number: int
:param suite_id: Id for the suite
:type suite_id: int
:param test_case_id: Id of a test case in a suite
:type test_case_id: int
"""
_attribute_map = {
'child_suite_id': {'key': 'childSuiteId', 'type': 'int'},
'sequence_number': {'key': 'sequenceNumber', 'type': 'int'},
'suite_id': {'key': 'suiteId', 'type': 'int'},
'test_case_id': {'key': 'testCaseId', 'type': 'int'}
}
def __init__(self, child_suite_id=None, sequence_number=None, suite_id=None, test_case_id=None):
super(SuiteEntry, self).__init__()
self.child_suite_id = child_suite_id
self.sequence_number = sequence_number
self.suite_id = suite_id
self.test_case_id = test_case_id
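# --- Usage sketch (not part of the generated file) ---
# A minimal, hedged example of how a model like this is typically serialized with msrest;
# the attribute values and the client-models mapping below are assumptions for illustration.
if __name__ == '__main__':
    from msrest import Serializer
    entry = SuiteEntry(child_suite_id=12, sequence_number=1, suite_id=7, test_case_id=345)
    serializer = Serializer({'SuiteEntry': SuiteEntry})
    # _attribute_map drives the snake_case -> camelCase key conversion
    print(serializer.body(entry, 'SuiteEntry'))
    # e.g. {'childSuiteId': 12, 'sequenceNumber': 1, 'suiteId': 7, 'testCaseId': 345}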
|
StarcoderdataPython
|
322006
|
<filename>istio/datadog_checks/istio/metrics.py
# (C) Datadog, Inc. 2020 - Present
# All rights reserved
# Licensed under Simplified BSD License (see LICENSE)
GENERIC_METRICS = {
'go_gc_duration_seconds': 'go.gc_duration_seconds',
'go_goroutines': 'go.goroutines',
'go_info': 'go.info',
'go_memstats_alloc_bytes': 'go.memstats.alloc_bytes',
'go_memstats_alloc_bytes_total': 'go.memstats.alloc_bytes_total',
'go_memstats_buck_hash_sys_bytes': 'go.memstats.buck_hash_sys_bytes',
'go_memstats_frees_total': 'go.memstats.frees_total',
'go_memstats_gc_cpu_fraction': 'go.memstats.gc_cpu_fraction',
'go_memstats_gc_sys_bytes': 'go.memstats.gc_sys_bytes',
'go_memstats_heap_alloc_bytes': 'go.memstats.heap_alloc_bytes',
'go_memstats_heap_idle_bytes': 'go.memstats.heap_idle_bytes',
'go_memstats_heap_inuse_bytes': 'go.memstats.heap_inuse_bytes',
'go_memstats_heap_objects': 'go.memstats.heap_objects',
'go_memstats_heap_released_bytes': 'go.memstats.heap_released_bytes',
'go_memstats_heap_sys_bytes': 'go.memstats.heap_sys_bytes',
'go_memstats_last_gc_time_seconds': 'go.memstats.last_gc_time_seconds',
'go_memstats_lookups_total': 'go.memstats.lookups_total',
'go_memstats_mallocs_total': 'go.memstats.mallocs_total',
'go_memstats_mcache_inuse_bytes': 'go.memstats.mcache_inuse_bytes',
'go_memstats_mcache_sys_bytes': 'go.memstats.mcache_sys_bytes',
'go_memstats_mspan_inuse_bytes': 'go.memstats.mspan_inuse_bytes',
'go_memstats_mspan_sys_bytes': 'go.memstats.mspan_sys_bytes',
'go_memstats_next_gc_bytes': 'go.memstats.next_gc_bytes',
'go_memstats_other_sys_bytes': 'go.memstats.other_sys_bytes',
'go_memstats_stack_inuse_bytes': 'go.memstats.stack_inuse_bytes',
'go_memstats_stack_sys_bytes': 'go.memstats.stack_sys_bytes',
'go_memstats_sys_bytes': 'go.memstats.sys_bytes',
'go_threads': 'go.threads',
'process_cpu_seconds_total': 'process.cpu_seconds_total',
'process_max_fds': 'process.max_fds',
'process_open_fds': 'process.open_fds',
'process_resident_memory_bytes': 'process.resident_memory_bytes',
'process_start_time_seconds': 'process.start_time_seconds',
'process_virtual_memory_bytes': 'process.virtual_memory_bytes',
}
CITADEL_METRICS = {
'citadel_secret_controller_csr_err_count': 'secret_controller.csr_err_count',
'citadel_secret_controller_secret_deleted_cert_count': ('secret_controller.secret_deleted_cert_count'),
'citadel_secret_controller_svc_acc_created_cert_count': ('secret_controller.svc_acc_created_cert_count'),
'citadel_secret_controller_svc_acc_deleted_cert_count': ('secret_controller.svc_acc_deleted_cert_count'),
'citadel_server_authentication_failure_count': 'server.authentication_failure_count',
'citadel_server_citadel_root_cert_expiry_timestamp': ('server.citadel_root_cert_expiry_timestamp'),
'citadel_server_csr_count': 'server.csr_count',
'citadel_server_csr_parsing_err_count': 'server.csr_parsing_err_count',
'citadel_server_id_extraction_err_count': 'server.id_extraction_err_count',
'citadel_server_success_cert_issuance_count': 'server.success_cert_issuance_count',
'citadel_server_root_cert_expiry_timestamp': 'server.root_cert_expiry_timestamp',
}
GALLEY_METRICS = {
'endpoint_no_pod': 'endpoint_no_pod',
'galley_mcp_source_clients_total': 'mcp_source.clients_total',
'galley_runtime_processor_event_span_duration_milliseconds': ('runtime_processor.event_span_duration_milliseconds'),
'galley_runtime_processor_events_processed_total': 'runtime_processor.events_processed_total',
'galley_runtime_processor_snapshot_events_total': 'runtime_processor.snapshot_events_total',
'galley_runtime_processor_snapshot_lifetime_duration_milliseconds': (
'runtime_processor.snapshot_lifetime_duration_milliseconds'
),
'galley_runtime_processor_snapshots_published_total': ('runtime_processor.snapshots_published_total'),
'galley_runtime_state_type_instances_total': 'runtime_state_type_instances_total',
'galley_runtime_strategy_on_change_total': 'runtime_strategy.on_change_total',
'galley_runtime_strategy_timer_max_time_reached_total': ('runtime_strategy.timer_max_time_reached_total'),
'galley_runtime_strategy_timer_quiesce_reached_total': 'runtime_strategy.quiesce_reached_total',
'galley_runtime_strategy_timer_resets_total': 'runtime_strategy.timer_resets_total',
'galley_source_kube_dynamic_converter_success_total': ('source_kube.dynamic_converter_success_total'),
'galley_source_kube_event_success_total': 'source_kube.event_success_total',
'galley_validation_cert_key_updates': 'validation.cert_key_updates',
'galley_validation_config_load': 'validation.config_load',
'galley_validation_config_updates': 'validation.config_update',
'galley_validation_passed': 'validation.passed',
# These metrics support Istio 1.5
'galley_validation_config_update_error': 'validation.config_update_error',
}
MESH_METRICS = {
# These metrics support Istio 1.5
'istio_request_duration_milliseconds': 'request.duration.milliseconds',
# These metrics support Istio 1.0
'istio_requests_total': 'request.count',
'istio_request_duration_seconds': 'request.duration',
'istio_request_bytes': 'request.size',
'istio_response_bytes': 'response.size',
# These metrics support Istio 0.8
'istio_request_count': 'request.count',
'istio_request_duration': 'request.duration',
'istio_request_size': 'request.size',
'istio_response_size': 'response.size',
# TCP metrics
'istio_tcp_connections_closed_total': 'tcp.connections_closed.total',
'istio_tcp_connections_opened_total': 'tcp.connections_opened.total',
'istio_tcp_received_bytes_total': 'tcp.received_bytes.total',
'istio_tcp_sent_bytes_total': 'tcp.send_bytes.total',
}
MIXER_METRICS = {
# Pre 1.1 metrics
'grpc_server_handled_total': 'grpc.server.handled_total',
'grpc_server_handling_seconds': 'grpc.server.handling_seconds',
'grpc_server_msg_received_total': 'grpc.server.msg_received_total',
'grpc_server_msg_sent_total': 'grpc.server.msg_sent_total',
'grpc_server_started_total': 'grpc.server.started_total',
'mixer_adapter_dispatch_count': 'adapter.dispatch_count',
'mixer_adapter_dispatch_duration': 'adapter.dispatch_duration',
'mixer_adapter_old_dispatch_count': 'adapter.old_dispatch_count',
'mixer_adapter_old_dispatch_duration': 'adapter.old_dispatch_duration',
'mixer_config_resolve_actions': 'config.resolve_actions',
'mixer_config_resolve_count': 'config.resolve_count',
'mixer_config_resolve_duration': 'config.resolve_duration',
'mixer_config_resolve_rules': 'config.resolve_rules',
# 1.1 metrics
'grpc_io_server_completed_rpcs': 'grpc_io_server.completed_rpcs',
'grpc_io_server_received_bytes_per_rpc': 'grpc_io_server.received_bytes_per_rpc',
'grpc_io_server_sent_bytes_per_rpc': 'grpc_io_server.sent_bytes_per_rpc',
'grpc_io_server_server_latency': 'grpc_io_server.server_latency',
'mixer_config_attributes_total': 'config.attributes_total',
'mixer_config_handler_configs_total': 'config.handler_configs_total',
'mixer_config_instance_configs_total': 'config.instance_configs_total',
'mixer_config_rule_configs_total': 'config.rule_configs_total',
'mixer_dispatcher_destinations_per_request': 'dispatcher.destinations_per_request',
'mixer_dispatcher_instances_per_request': 'dispatcher.instances_per_request',
'mixer_handler_daemons_total': 'handler.daemons_total',
'mixer_handler_new_handlers_total': 'handler.new_handlers_total',
'mixer_mcp_sink_reconnections': 'mcp_sink.reconnections',
'mixer_mcp_sink_request_acks_total': 'mcp_sink.request_acks_total',
'mixer_runtime_dispatches_total': 'runtime.dispatches_total',
'mixer_runtime_dispatch_duration_seconds': 'runtime.dispatch_duration_seconds',
}
PILOT_METRICS = {
'pilot_conflict_inbound_listener': 'conflict.inbound_listener',
'pilot_conflict_outbound_listener_http_over_current_tcp': ('conflict.outbound_listener.http_over_current_tcp'),
'pilot_conflict_outbound_listener_tcp_over_current_http': ('conflict.outbound_listener.tcp_over_current_http'),
'pilot_conflict_outbound_listener_tcp_over_current_tcp': ('conflict.outbound_listener.tcp_over_current_tcp'),
'pilot_destrule_subsets': 'destrule_subsets',
'pilot_duplicate_envoy_clusters': 'duplicate_envoy_clusters',
'pilot_eds_no_instances': 'eds_no_instances',
'pilot_endpoint_not_ready': 'endpoint_not_ready',
'pilot_invalid_out_listeners': 'invalid_out_listeners',
'pilot_mcp_sink_reconnections': 'mcp_sink.reconnections',
'pilot_mcp_sink_recv_failures_total': 'mcp_sink.recv_failures_total',
'pilot_mcp_sink_request_acks_total': 'mcp_sink.request_acks_total',
'pilot_no_ip': 'no_ip',
'pilot_proxy_convergence_time': 'proxy_convergence_time',
'pilot_rds_expired_nonce': 'rds_expired_nonce',
'pilot_services': 'services',
'pilot_total_xds_internal_errors': 'total_xds_internal_errors',
'pilot_total_xds_rejects': 'total_xds_rejects',
'pilot_virt_services': 'virt_services',
'pilot_vservice_dup_domain': 'vservice_dup_domain',
'pilot_xds': 'xds',
'pilot_xds_eds_instances': 'xds.eds_instances',
'pilot_xds_push_context_errors': 'xds.push.context_errors',
'pilot_xds_push_timeout': 'xds.push.timeout',
'pilot_xds_push_timeout_failures': 'xds.push.timeout_failures',
'pilot_xds_pushes': 'xds.pushes',
'pilot_xds_write_timeout': 'xds.write_timeout',
'pilot_xds_rds_reject': 'pilot.xds.rds_reject',
'pilot_xds_eds_reject': 'pilot.xds.eds_reject',
'pilot_xds_cds_reject': 'pilot.xds.cds_reject',
'pilot_xds_lds_reject': 'pilot.xds.lds_reject',
}
ISTIOD_METRICS = {
# Maintain namespace compatibility from legacy components
# Generic metrics
'go_gc_duration_seconds': 'go.gc_duration_seconds',
'go_goroutines': 'go.goroutines',
'go_info': 'go.info',
'go_memstats_alloc_bytes': 'go.memstats.alloc_bytes',
'go_memstats_alloc_bytes_total': 'go.memstats.alloc_bytes_total',
'go_memstats_buck_hash_sys_bytes': 'go.memstats.buck_hash_sys_bytes',
'go_memstats_frees_total': 'go.memstats.frees_total',
'go_memstats_gc_cpu_fraction': 'go.memstats.gc_cpu_fraction',
'go_memstats_gc_sys_bytes': 'go.memstats.gc_sys_bytes',
'go_memstats_heap_alloc_bytes': 'go.memstats.heap_alloc_bytes',
'go_memstats_heap_idle_bytes': 'go.memstats.heap_idle_bytes',
'go_memstats_heap_inuse_bytes': 'go.memstats.heap_inuse_bytes',
'go_memstats_heap_objects': 'go.memstats.heap_objects',
'go_memstats_heap_released_bytes': 'go.memstats.heap_released_bytes',
'go_memstats_heap_sys_bytes': 'go.memstats.heap_sys_bytes',
'go_memstats_last_gc_time_seconds': 'go.memstats.last_gc_time_seconds',
'go_memstats_lookups_total': 'go.memstats.lookups_total',
'go_memstats_mallocs_total': 'go.memstats.mallocs_total',
'go_memstats_mcache_inuse_bytes': 'go.memstats.mcache_inuse_bytes',
'go_memstats_mcache_sys_bytes': 'go.memstats.mcache_sys_bytes',
'go_memstats_mspan_inuse_bytes': 'go.memstats.mspan_inuse_bytes',
'go_memstats_mspan_sys_bytes': 'go.memstats.mspan_sys_bytes',
'go_memstats_next_gc_bytes': 'go.memstats.next_gc_bytes',
'go_memstats_other_sys_bytes': 'go.memstats.other_sys_bytes',
'go_memstats_stack_inuse_bytes': 'go.memstats.stack_inuse_bytes',
'go_memstats_stack_sys_bytes': 'go.memstats.stack_sys_bytes',
'go_memstats_sys_bytes': 'go.memstats.sys_bytes',
'go_threads': 'go.threads',
'process_cpu_seconds_total': 'process.cpu_seconds_total',
'process_max_fds': 'process.max_fds',
'process_open_fds': 'process.open_fds',
'process_resident_memory_bytes': 'process.resident_memory_bytes',
'process_start_time_seconds': 'process.start_time_seconds',
'process_virtual_memory_bytes': 'process.virtual_memory_bytes',
'pilot_conflict_inbound_listener': 'pilot.conflict.inbound_listener',
'pilot_conflict_outbound_listener_http_over_current_tcp': (
'pilot.conflict.outbound_listener.http_over_current_tcp'
),
'pilot_conflict_outbound_listener_tcp_over_current_http': (
'pilot.conflict.outbound_listener.tcp_over_current_http'
),
'pilot_conflict_outbound_listener_tcp_over_current_tcp': ('pilot.conflict.outbound_listener.tcp_over_current_tcp'),
'pilot_destrule_subsets': 'pilot.destrule_subsets',
'pilot_duplicate_envoy_clusters': 'pilot.duplicate_envoy_clusters',
'pilot_eds_no_instances': 'pilot.eds_no_instances',
'pilot_endpoint_not_ready': 'pilot.endpoint_not_ready',
'pilot_invalid_out_listeners': 'pilot.invalid_out_listeners',
'pilot_mcp_sink_reconnections': 'pilot.mcp_sink.reconnections',
'pilot_mcp_sink_recv_failures_total': 'pilot.mcp_sink.recv_failures_total',
'pilot_mcp_sink_request_acks_total': 'pilot.mcp_sink.request_acks_total',
'pilot_no_ip': 'pilot.no_ip',
'pilot_proxy_convergence_time': 'pilot.proxy_convergence_time',
'pilot_rds_expired_nonce': 'pilot.rds_expired_nonce',
'pilot_services': 'pilot.services',
'pilot_total_xds_internal_errors': 'pilot.total_xds_internal_errors',
'pilot_total_xds_rejects': 'pilot.total_xds_rejects',
'pilot_virt_services': 'pilot.virt_services',
'pilot_vservice_dup_domain': 'pilot.vservice_dup_domain',
'pilot_xds': 'pilot.xds',
'pilot_xds_eds_instances': 'pilot.xds.eds_instances',
'pilot_xds_push_context_errors': 'pilot.xds.push.context_errors',
'pilot_xds_push_timeout': 'pilot.xds.push.timeout',
'pilot_xds_push_timeout_failures': 'pilot.xds.push.timeout_failures',
'pilot_xds_pushes': 'pilot.xds.pushes',
'pilot_xds_write_timeout': 'pilot.xds.write_timeout',
'pilot_xds_rds_reject': 'pilot.xds.rds_reject',
'pilot_xds_eds_reject': 'pilot.xds.eds_reject',
'pilot_xds_cds_reject': 'pilot.xds.cds_reject',
'pilot_xds_lds_reject': 'pilot.xds.lds_reject',
'grpc_server_handled_total': 'grpc.server.handled_total',
'grpc_server_handling_seconds': 'grpc.server.handling_seconds',
'grpc_server_msg_received_total': 'grpc.server.msg_received_total',
'grpc_server_msg_sent_total': 'grpc.server.msg_sent_total',
'grpc_server_started_total': 'grpc.server.started_total',
'grpc_io_server_completed_rpcs': 'mixer.grpc_io_server.completed_rpcs',
'grpc_io_server_received_bytes_per_rpc': 'mixer.grpc_io_server.received_bytes_per_rpc',
'grpc_io_server_sent_bytes_per_rpc': 'mixer.grpc_io_server.sent_bytes_per_rpc',
'grpc_io_server_server_latency': 'mixer.grpc_io_server.server_latency',
'mixer_config_attributes_total': 'mixer.config.attributes_total',
'mixer_config_handler_configs_total': 'mixer.config.handler_configs_total',
'mixer_config_instance_configs_total': 'mixer.config.instance_configs_total',
'mixer_config_rule_configs_total': 'mixer.config.rule_configs_total',
'mixer_dispatcher_destinations_per_request': 'mixer.dispatcher.destinations_per_request',
'mixer_dispatcher_instances_per_request': 'mixer.dispatcher.instances_per_request',
'mixer_handler_daemons_total': 'mixer.handler.daemons_total',
'mixer_handler_new_handlers_total': 'mixer.handler.new_handlers_total',
'mixer_mcp_sink_reconnections': 'mixer.mcp_sink.reconnections',
'mixer_mcp_sink_request_acks_total': 'mixer.mcp_sink.request_acks_total',
'mixer_runtime_dispatches_total': 'mixer.runtime.dispatches_total',
'mixer_runtime_dispatch_duration_seconds': 'mixer.runtime.dispatch_duration_seconds',
'endpoint_no_pod': 'galley.endpoint_no_pod',
'galley_mcp_source_clients_total': 'galley.mcp_source.clients_total',
'galley_runtime_processor_event_span_duration_milliseconds': (
'galley.runtime_processor.event_span_duration_milliseconds'
),
'galley_runtime_processor_events_processed_total': 'galley.runtime_processor.events_processed_total',
'galley_runtime_processor_snapshot_events_total': 'galley.runtime_processor.snapshot_events_total',
'galley_runtime_processor_snapshot_lifetime_duration_milliseconds': (
'galley.runtime_processor.snapshot_lifetime_duration_milliseconds'
),
'galley_runtime_processor_snapshots_published_total': ('galley.runtime_processor.snapshots_published_total'),
'galley_runtime_state_type_instances_total': 'galley.runtime_state_type_instances_total',
'galley_runtime_strategy_on_change_total': 'galley.runtime_strategy.on_change_total',
'galley_runtime_strategy_timer_max_time_reached_total': ('galley.runtime_strategy.timer_max_time_reached_total'),
'galley_runtime_strategy_timer_quiesce_reached_total': 'galley.runtime_strategy.quiesce_reached_total',
'galley_runtime_strategy_timer_resets_total': 'galley.runtime_strategy.timer_resets_total',
'galley_source_kube_dynamic_converter_success_total': ('galley.source_kube.dynamic_converter_success_total'),
'galley_source_kube_event_success_total': 'galley.source_kube.event_success_total',
'galley_validation_config_load': 'galley.validation.config_load',
'galley_validation_config_updates': 'galley.validation.config_update',
'citadel_secret_controller_csr_err_count': 'citadel.secret_controller.csr_err_count',
'citadel_secret_controller_secret_deleted_cert_count': ('citadel.secret_controller.secret_deleted_cert_count'),
'citadel_secret_controller_svc_acc_created_cert_count': ('citadel.secret_controller.svc_acc_created_cert_count'),
'citadel_secret_controller_svc_acc_deleted_cert_count': ('citadel.secret_controller.svc_acc_deleted_cert_count'),
'citadel_server_authentication_failure_count': 'citadel.server.authentication_failure_count',
'citadel_server_citadel_root_cert_expiry_timestamp': ('citadel.server.citadel_root_cert_expiry_timestamp'),
'citadel_server_csr_count': 'citadel.server.csr_count',
'citadel_server_csr_parsing_err_count': 'citadel.server.csr_parsing_err_count',
'citadel_server_id_extraction_err_count': 'citadel.server.id_extraction_err_count',
'citadel_server_success_cert_issuance_count': 'citadel.server.success_cert_issuance_count',
# These metrics support Istio 1.5
'galley_validation_config_update_error': 'galley.validation.config_update_error',
'citadel_server_root_cert_expiry_timestamp': 'citadel.server.root_cert_expiry_timestamp',
'galley_validation_passed': 'galley.validation.passed',
'galley_validation_failed': 'galley.validation.failed',
'pilot_conflict_outbound_listener_http_over_https': 'pilot.conflict.outbound_listener.http_over_https',
'pilot_inbound_updates': 'pilot.inbound_updates',
'pilot_k8s_cfg_events': 'pilot.k8s.cfg_events',
'pilot_k8s_reg_events': 'pilot.k8s.reg_events',
'pilot_proxy_queue_time': 'pilot.proxy_queue_time',
'pilot_push_triggers': 'pilot.push.triggers',
'pilot_xds_eds_all_locality_endpoints': 'pilot.xds.eds_all_locality_endpoints',
'pilot_xds_push_time': 'pilot.xds.push.time',
'process_virtual_memory_max_bytes': 'process.virtual_memory_max_bytes',
'sidecar_injection_requests_total': 'sidecar_injection.requests_total',
'sidecar_injection_success_total': 'sidecar_injection.success_total',
'sidecar_injection_failure_total': 'sidecar_injection.failure_total',
'sidecar_injection_skip_total': 'sidecar_injection.skip_total',
}
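# --- Usage sketch (not part of the shipped check) ---
# A minimal illustration of what these dicts encode: each maps a raw Prometheus sample name
# to the metric name the check submits. The helper and the 'istio.mixer.' namespace below
# are illustrative assumptions, not the check's actual implementation.
def _remap(raw_name, mapping, namespace='istio.mixer.'):
    # return the namespaced Datadog metric name for a raw Prometheus metric, if it is known
    mapped = mapping.get(raw_name)
    return namespace + mapped if mapped is not None else None

if __name__ == '__main__':
    merged = dict(GENERIC_METRICS, **MIXER_METRICS)
    print(_remap('mixer_runtime_dispatches_total', merged))  # -> istio.mixer.runtime.dispatches_total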
|
StarcoderdataPython
|
8169604
|
# https://hackernoon.com/gradient-boosting-and-xgboost-90862daa6c77
import pandas as pd
from sklearn.datasets import load_iris
from sklearn.model_selection import train_test_split
from sklearn.ensemble import GradientBoostingClassifier
from sklearn.metrics import classification_report
import numpy as np
import matplotlib.pyplot as plt
iris_dataset = load_iris()
# print(iris_dataset.data)
# print(iris_dataset.target_names)
# print(iris_dataset.target)
# print(iris_dataset.feature_names)
features = pd.DataFrame(iris_dataset.data)
features.columns = iris_dataset.feature_names
print(features.head())
labels = pd.DataFrame(iris_dataset.target)
print(labels.head())
print(features.info())
print(labels.info())
print("features size ", features.shape)
print("labels size ", labels.shape)
train_features, test_features, train_labels, test_labels = train_test_split(
features, labels, test_size=0.3, stratify=labels
)
print("train features size ", train_features.shape)
print("train labels size ", train_labels.shape)
print("test features size ", test_features.shape)
print("test labels size ", test_labels.shape)
# learning_rate=0.1, n_estimators=100, loss='deviance',
model = GradientBoostingClassifier()
model.fit(train_features, train_labels.values.ravel())  # ravel the single-column DataFrame to a 1-D array
print("train score", model.score(train_features, train_labels))
print(classification_report(train_labels, model.predict(train_features)))
print("test score", model.score(test_features, test_labels))
print(classification_report(test_labels, model.predict(test_features)))
"""
train score 1.0
precision recall f1-score support
0 1.00 1.00 1.00 37
1 1.00 1.00 1.00 35
2 1.00 1.00 1.00 33
accuracy 1.00 105
macro avg 1.00 1.00 1.00 105
weighted avg 1.00 1.00 1.00 105
test score 0.9333333333333333
precision recall f1-score support
0 1.00 1.00 1.00 13
1 0.88 0.93 0.90 15
2 0.94 0.88 0.91 17
accuracy 0.93 45
macro avg 0.94 0.94 0.94 45
weighted avg 0.93 0.93 0.93 45
"""
X = iris_dataset.data[:, :2] # Sepal length, sepal width
y = iris_dataset.target
h = 0.02 # Step size
color_dict = {0: "blue", 1: "lightgreen", 2: "red"}
colors = [color_dict[i] for i in y]
model.fit(X, y)
x_min, x_max = X[:, 0].min() - 1, X[:, 0].max() + 1
y_min, y_max = X[:, 1].min() - 1, X[:, 1].max() + 1
xx, yy = np.meshgrid(np.arange(x_min, x_max, h), np.arange(y_min, y_max, h))
plt.figure()
Z = model.predict(np.c_[xx.ravel(), yy.ravel()])
Z = Z.reshape(xx.shape)
plt.contourf(xx, yy, Z, cmap=plt.cm.coolwarm, alpha=0.8)
plt.scatter(X[:, 0], X[:, 1], c=colors)
plt.xlabel(iris_dataset.feature_names[0])
plt.ylabel(iris_dataset.feature_names[1])
plt.xlim(xx.min(), xx.max())
plt.ylim(yy.min(), yy.max())
plt.xticks()
plt.yticks()
plt.show()
|
StarcoderdataPython
|
6413789
|
<reponame>dave-tucker/hp-sdn-client
#!/usr/bin/env python
#
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import json
# Python3 compatibility
try:
import urllib.parse as urllib
except ImportError:
import urllib
def raise_errors(response):
if response.status_code == 400:
raise_400(response)
elif response.status_code == 404:
raise_404(response)
elif response.status_code == 500:
raise_500(response)
else:
# let requests raise the error
response.raise_for_status()
def raise_400(response):
data = response.json()
if "Invalid JSON format" in data['message']:
raise InvalidJson(response.request.url,
json.dumps(response.request.body),
data['message'])
elif "IllegalArgumentException" in data['error']:
raise IllegalArgument()
elif "VersionMismatchException" in data['error']:
dpid = urllib.unquote(response.request.url.split('/')[7])
required_version = data['message'][-3:]
raise VersionMismatch(dpid, required_version)
else:
response.raise_for_status()
def raise_404(response):
data = response.json()
if "NotFoundException" in data['error']:
raise NotFound(data['message'])
else:
response.raise_for_status()
def raise_500(response):
data = response.json()
if "IllegalStateException" in data['error']:
raise OpenflowProtocolError()
else:
response.raise_for_status()
class HpsdnclientError(Exception):
"""Base class for Flare API errors"""
pass
class InvalidJson(HpsdnclientError):
def __init__(self, url, request_body, message):
self.url = url
self.request_body = request_body
self.message = message
super(InvalidJson, self).__init__(message)
class VersionMismatch(HpsdnclientError):
def __init__(self, dpid, required_version):
self.dpid = dpid
self.required_version = required_version
message = ("This feature is not supported on DPID {0}. " +
"It requires OpenFlow version {1}").format(dpid,
required_version)
super(VersionMismatch, self).__init__(message)
class IllegalArgument(HpsdnclientError):
def __init__(self, arguments=None):
super(IllegalArgument, self).__init__()
self.arguments = arguments
class NotFound(HpsdnclientError):
def __init__(self, message):
super(NotFound, self).__init__(message)
class OpenflowProtocolError(HpsdnclientError):
def __init__(self):
message = ("Something bad happened at the OpenFlow protocol layer." +
" This could be because this feature is not implemented " +
"on this device")
super(OpenflowProtocolError, self).__init__(message)
class DatatypeError(HpsdnclientError):
def __init__(self, received, expected):
self.received = received
self.expected = expected
message = "Received: {0} Expected: {1}".format(received, expected)
super(DatatypeError, self).__init__(message)
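# --- Usage sketch (not part of the library) ---
# A hedged example of how these helpers are typically wired around a REST call;
# the controller URL and the use of `requests` here are assumptions for illustration.
if __name__ == '__main__':
    import requests
    response = requests.get('https://controller:8443/sdn/v2.0/of/datapaths', verify=False)
    if not response.ok:
        # maps 400/404/500 response bodies onto the typed exceptions defined above,
        # falling back to requests' own HTTPError for anything unrecognised
        raise_errors(response)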
|
StarcoderdataPython
|
8093794
|
<gh_stars>10-100
# ===============================================================================
# Copyright 2021 ross
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ===============================================================================
from traits.api import Enum
from pychron.options.options import FigureOptions
from pychron.options.views.regression_views import VIEWS
from pychron.pychron_constants import MAIN, APPEARANCE
class RegressionOptions(FigureOptions):
regressor = Enum("NewYork")
def initialize(self):
self.subview_names = [MAIN, APPEARANCE]
def _get_subview(self, name):
return VIEWS[name]
# ============= EOF =============================================
|
StarcoderdataPython
|
3516081
|
<gh_stars>0
class ErrorLog:
def __init__(self, _servername, _timestamp, _type, _msg):
self.servername = _servername
self.timestamp = _timestamp
self.typ = _type
self.msg = _msg
def get_servername(self):
return self.servername
def get_timestamp(self):
# TODO
return self.timestamp
def get_type(self):
return self.typ
def get_msg(self):
return self.msg
|
StarcoderdataPython
|
5088990
|
import torch
def log_likelihood(nbhd_means, feature_means, feature_vars, k, past_comps = []):
# given neighborhood expression data, construct likelihood function
# should work in pytorch
n_samples = nbhd_means.shape[0]
nbhd_means = torch.tensor(nbhd_means).double()
feature_means = torch.tensor(feature_means).double()
feature_vars = torch.tensor(feature_vars).double()
def f(coefs):
coefs_orth = coefs
for i,comp in enumerate(past_comps):
#project orthogonally
coefs_orth = coefs_orth - torch.dot(coefs_orth, comp) * comp
observed_means = torch.matmul(nbhd_means, torch.reshape(coefs_orth, (-1, 1)))
# mean and variance of metagene defined by coef, under
# independent gene hypothesis
# variance gets scaled down due to averaging.
theoretical_mean = torch.dot(feature_means, coefs_orth)
theoretical_var = torch.div(torch.dot(torch.pow(coefs_orth, 2), feature_vars), float(k))
# print(theoretical_mean)
# print(theoretical_var)
result = (-1 * n_samples / 2.) * torch.log(theoretical_var) - torch.div(
torch.sum(torch.pow(observed_means - theoretical_mean, 2)),
2 * theoretical_var)
return (result)
return (f)
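# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of maximising the returned likelihood with autograd;
# the synthetic shapes, learning rate and iteration count are illustrative assumptions.
if __name__ == '__main__':
    import numpy as np
    rng = np.random.default_rng(0)
    nbhd = rng.normal(size=(200, 30))        # 200 neighborhoods x 30 genes
    f = log_likelihood(nbhd, nbhd.mean(axis=0), nbhd.var(axis=0), k=15)
    coefs = torch.randn(30, dtype=torch.float64, requires_grad=True)
    optimizer = torch.optim.Adam([coefs], lr=0.05)
    for _ in range(200):
        optimizer.zero_grad()
        loss = -f(coefs)                      # maximise the log-likelihood
        loss.backward()
        optimizer.step()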
|
StarcoderdataPython
|
390346
|
from django.urls import path
from . import views as v
app_name = 'core'
urlpatterns = [
path('', v.index, name='index'),
path('form_submit', v.form_submit, name='form_submit'),
path('api/pokemon/<slug:slug>', v.get_pokemon, name='get_pokemon'),
]
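# --- Usage sketch (not part of the original module) ---
# A hedged illustration of how the namespaced route resolves (the slug value is an assumption,
# and it presumes this urlconf is included at the project root):
# >>> from django.urls import reverse
# >>> reverse('core:get_pokemon', kwargs={'slug': 'pikachu'})
# '/api/pokemon/pikachu'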
|
StarcoderdataPython
|
47806
|
# encoding: utf-8
import os
import re
import sys
import gzip
import time
import json
import socket
import random
import weakref
import datetime
import functools
import threading
import collections
import urllib.error
import urllib.parse
import urllib.request
import collections.abc
import json_dict
from . import utils
class ProxyURLRefreshError(Exception):
pass
class AliveProxiesNotFound(Exception):
pass
class NoFreeProxies(Exception):
pass
def _get_missing(target, source):
"""Возвращает присутствующие в `target`, но отсутствующие в `source` элементы
"""
old_target = set(target)
new_target = old_target.intersection(source)
return old_target.difference(new_target)
def _build_opener(proxy=None):
if proxy is not None:
parsed = urllib.parse.urlparse(proxy)
handler = urllib.request.ProxyHandler({parsed.scheme: proxy})
return urllib.request.build_opener(handler)
else:
return urllib.request.build_opener()
class Proxies:
default_opener = _build_opener()
def __init__(
self,
proxies=None,
proxies_url=None,
proxies_url_gateway=None,
proxies_file=None,
options=None,
):
"""
@param proxies: list of proxy server addresses
@param proxies_url: URL of a proxy server list
@param proxies_file: path to a file containing the proxy server list
@param options: additional options
"""
if options is None:
options = {}
shuffle = options.get('shuffle', False)
if proxies is not None:
proxies = list(proxies)
if shuffle:
random.shuffle(proxies)
auto_refresh_period = options.get('auto_refresh_period')
if auto_refresh_period:
auto_refresh_period = datetime.timedelta(**auto_refresh_period)
blacklist = utils.get_json_dict(json_dict.JsonLastUpdatedOrderedDict, filename=options.get('blacklist'))
cooling_down = utils.get_json_dict(json_dict.JsonOrderedDict, filename=options.get('cooldown'))
stats = utils.get_json_dict(json_dict.JsonDict, filename=options.get('stats'))
if proxies_url_gateway:
url_opener = _build_opener(proxies_url_gateway)
else:
url_opener = None
self._url_opener = url_opener
self._proxies = proxies
self.proxies_url = proxies_url
self.proxies_file = proxies_file
self._shuffle = shuffle
self.slice = options.get('slice')
self.force_type = options.get('type')
self.auto_refresh_period = auto_refresh_period
self._blacklist = blacklist
self._cooling_down = cooling_down
self._stats = stats
self._cleanup_lock = threading.RLock()
self._last_auto_refresh = None
self._auto_refresh_lock = threading.Lock()
self._load_lock = threading.Lock()
self._modified_at = time.perf_counter()
self.__pool = None
self._smart_holdout_start = options.get('smart_holdout_start')
self._options = options
if self._proxies is not None:
proxies = set(self._proxies)
self._cleanup_internals(proxies)
@property
def proxies(self):
if self._proxies is None:
with self._load_lock:
# We are past the race window; double-check that loading is actually still needed
if self._proxies is None:
self._proxies = self._load()
self._cleanup_internals(self._proxies)
self._modified_at = time.perf_counter()
return self._proxies
def _load(self):
if self.proxies_url:
proxies = self.read_url(self.proxies_url, opener=self._url_opener)
elif self.proxies_file:
proxies = self.read_file(self.proxies_file)
else:
raise NotImplementedError(
"Can't load proxies: "
"please specify one of the sources ('proxies_url' or 'proxies_file')"
)
if self.slice:
proxies = proxies[slice(*self.slice)]
if self.force_type:
new_type = self.force_type + '://' # `socks` format
proxies = [
re.sub(r'^(?:(.*?)://)?', new_type, proxy)
for proxy in proxies
]
if self._shuffle:
random.shuffle(proxies)
return proxies
def _cleanup_internals(self, proxies):
with self._cleanup_lock:
self._cleanup_blacklist(proxies)
self._cleanup_cooling_down(proxies)
self._cleanup_stats(proxies)
def _cleanup_cooling_down(self, proxies):
for proxy in _get_missing(self._cooling_down, proxies):
self._cooling_down.pop(proxy)
def _cleanup_blacklist(self, proxies):
for proxy in _get_missing(self._blacklist, proxies):
self._blacklist.pop(proxy)
def _cleanup_stats(self, proxies):
for proxy in _get_missing(self._stats, proxies):
self._stats.pop(proxy)
def _get_options(self, *options, missing_ok=True):
if missing_ok:
return {k: self._options.get(k) for k in options}
else:
return {k: self._options[k] for k in options}
@classmethod
def read_string(cls, string, sep=','):
return list(x for x in map(str.strip, string.split(sep)) if x)
@classmethod
def read_url(cls, url, sep='\n', retry=10, sleep_range=(2, 10), timeout=2, opener=None):
if opener is None:
opener = cls.default_opener
while True:
try:
resp = opener.open(url, timeout=timeout)
break
except (urllib.error.HTTPError, socket.timeout):
if not retry:
raise
retry -= 1
time.sleep(random.randint(*sleep_range))
content = resp.read()
if resp.headers.get('Content-Encoding', 'identity') == 'gzip':
content = gzip.decompress(content)
charset = resp.headers.get_content_charset('utf-8')
content = content.decode(charset)
return cls.read_string(content, sep=sep)
@classmethod
def read_file(cls, file_name, sep='\n'):
with open(file_name) as f:
return cls.read_string(f.read(), sep=sep)
def refresh(self):
if not self.proxies_url and not self.proxies_file:
return
try:
self._proxies = self._load()
self._cleanup_internals(self._proxies)
except urllib.error.HTTPError:
import problems
problems.handle(ProxyURLRefreshError, extra={'url': self.proxies_url})
else:
self._modified_at = time.perf_counter()
def _auto_refresh(self):
if self.proxies_file:
with self._auto_refresh_lock:
modification_time = datetime.datetime.fromtimestamp(os.stat(self.proxies_file).st_mtime)
if modification_time == self._last_auto_refresh:
return
self.refresh()
self._last_auto_refresh = modification_time
elif self.proxies_url:
if self.auto_refresh_period is None:
return
with self._auto_refresh_lock:
now = datetime.datetime.now()
if self._last_auto_refresh is not None:
if now - self._last_auto_refresh < self.auto_refresh_period:
return
self.refresh()
self._last_auto_refresh = now
def get_random_address(self):
self._auto_refresh()
return random.choice(self.proxies)
def get_pool(self):
if self.__pool is None:
with self._cleanup_lock:  # optimization: reuse the already existing lock
# We are past the race window; double-check that the pool actually still needs to be created
if self.__pool is None:
options = self._get_options('default_holdout', 'default_bad_holdout', 'force_defaults')
if self._smart_holdout_start is not None:
options['smart_holdout'] = True
options['smart_holdout_start'] = self._smart_holdout_start
options.update(self._get_options('smart_holdout_min', 'smart_holdout_max'))
self.__pool = _Pool(
self, self._cooling_down, self._blacklist, self._stats, self._cleanup_lock,
**options
)
return self.__pool
@classmethod
def from_cfg_string(cls, cfg_string):
"""Возвращает список прокси с тем исключением что список опций берется автоматически.
Формат: json
Доступные опции:
type ('socks5', 'http'; для полного списка типов см. модуль socks):
все прокси будут автоматически промаркированы этип типом
slice (tuple c аргументами для builtins.slice):
будет взят только указанный фрагмент списка прокси-серверов
auto_refresh_period (dict): {'days': ..., 'hours': ..., 'minutes': ...}
как часто необходимо обновлять список прокси-серверов (только для `url` и `file`)
url_gateway:
адрес proxy, через которые будет загружаться список прокси по url
(url, file, list) - может быть именем файла, ссылкой или списком в формате json
Параметры slice и force_type являются необязательными
Примеры:
option = {"list": ["127.0.0.1:3128"]}
option = {"list": ["127.0.0.1:3128", "127.0.0.1:9999"]}
option = {"file": "./my_new_proxies.txt", "type": "socks5"}
option = {"url": "http://example.com/get/proxy_list/", "slice": [35, null], "type": "http"}
option = {"url": "http://example.com/get/proxy_list/", "auto_refresh_period": {"days": 1}}
option = {"url": "http://example.com/get/proxy_list/", "url_gateway": "http://proxy.example.com:9999"}
"""
cfg = json.loads(cfg_string)
proxies = cfg.pop('list', None)
proxies_url = cfg.pop('url', None)
proxies_url_gateway = cfg.pop('url_gateway', None)
proxies_file = cfg.pop('file', None)
return cls(
proxies=proxies,
proxies_url=proxies_url,
proxies_url_gateway=proxies_url_gateway,
proxies_file=proxies_file,
options=cfg
)
class _Pool:
def __init__(
self, proxies: "`Proxies` instance", cooling_down, blacklist, stats, _cleanup_lock=None,
smart_holdout=False, smart_holdout_start=None, smart_holdout_min=None, smart_holdout_max=None,
default_holdout=None, default_bad_holdout=None, force_defaults=False,
):
if smart_holdout:
if smart_holdout_start in (None, 0):
raise RuntimeError("Вы должны указать начальное время охлаждения")
if smart_holdout_max is None:
smart_holdout_max = float('inf')
self._used = set()
self._cond = threading.Condition(lock=_cleanup_lock)
self._free = collections.deque(
p for p in proxies.proxies
if (
p not in blacklist and
p not in cooling_down
)
)
self._proxies = proxies
self._cooling_down = cooling_down
self._blacklist = blacklist
self._stats = stats
self._smart_holdout = smart_holdout
self._smart_holdout_start = smart_holdout_start
self._smart_holdout_min = smart_holdout_min or 0
self._smart_holdout_max = smart_holdout_max
self._default_holdout = default_holdout
self._default_bad_holdout = default_bad_holdout
self._force_defaults = force_defaults
self._proxies_modified_at = proxies._modified_at
@property
def _size(self):
return len(self._free) + len(self._used) + len(self._cooling_down) + len(self._blacklist)
def _cool_released(self):
now = time.time()
cooled = []
for proxy, holdout in self._cooling_down.items():
if now >= holdout:
cooled.append(proxy)
for proxy in cooled:
self._cooling_down.pop(proxy, None)
if proxy not in self._blacklist:
self._free.append(proxy)
def _is_proxies_changed(self):
self._proxies._auto_refresh()
return self._proxies._modified_at != self._proxies_modified_at
def _remove_outdated(self):
# the proxy list has changed; keep only the entries that are still current
full_list = set(self._proxies.proxies)
for proxy in _get_missing(self._blacklist, full_list):
self._blacklist.pop(proxy, None)
for proxy in _get_missing(self._cooling_down, full_list):
self._cooling_down.pop(proxy, None)
for proxy in _get_missing(self._used, full_list):
self._used.remove(proxy)
for proxy in _get_missing(self._stats, full_list):
self._stats.pop(proxy, None)
free = set(
p for p in full_list
if (
p not in self._used and
p not in self._blacklist and
p not in self._cooling_down
)
)
old_free = set(self._free)
new_free = old_free.intersection(free)
if old_free.difference(new_free):
self._free.clear()
self._free.extend(new_free)
self._proxies_modified_at = self._proxies._modified_at
def _update_stats(self, proxy, bad=False, holdout=None):
proxy_stat = self._stats.get(proxy) or {}
ok, fail = proxy_stat.get('uptime', (0, 0))
if not bad:
ok += 1
else:
fail += 1
proxy_stat['uptime'] = ok, fail
proxy_stat['last_holdout'] = holdout
if (
not bad or
(
holdout is not None and
holdout >= (proxy_stat.get('last_good_holdout') or 0)
)
):
proxy_stat['last_good_holdout'] = holdout
# a universal way to signal that the stats were updated,
# since without calling .save this also works with a plain dict (not only with JsonDict)
self._stats[proxy] = proxy_stat
def _get_next_holdout(self, proxy, bad=False):
"""Рассчитывает время охлаждения.
@param proxy: прокси, для которого необходимо вычислить
@param bad: True - вычисляем охлаждение для неудачи, иначе False
@return: рекомендуемое время охлаждения в секундах или None, если недостаточно данных
"""
# Алгоритм основан на бинарном поиске,
# в отличии от которого нам не известна верхняя граница
proxy_stat = self._stats.get(proxy)
if proxy_stat is None:
return None
last_holdout = proxy_stat['last_holdout']
last_good_holdout = proxy_stat.get('last_good_holdout', 0)
lo = last_holdout  # the previous cooldown time (lower bound)
if bad:
# We got "banned" ...
if lo < last_good_holdout:
# ... fall back to the previous good value ...
holdout = last_good_holdout
else:
# ... or push the bound further out
holdout = lo * 2
else:
# go back toward the previous bound (lo / 2),
# but with a small offset - to the middle of the interval [(lo / 2), lo]
holdout = lo * 0.75
return holdout
def acquire(self, timeout=None):
start = time.perf_counter()
with self._cond:
while True:
if self._is_proxies_changed():
self._remove_outdated()
self._cool_released()
if self._free:
proxy = self._free.popleft()
self._used.add(proxy)
return proxy
if self._blacklist:
# Return the most stable proxy from the blacklist. The ban may have been lifted.
def _uptime(p):
uptime = float('inf')
p_stat = self._stats.get(p)
if p_stat is not None:
ok, failed = p_stat.get('uptime', (0, 0))
if failed != 0:
uptime = ok // failed
else:
uptime = ok
return uptime
proxy = next((
p for p in sorted(self._blacklist, key=_uptime, reverse=True)
if p not in self._cooling_down
), None)
if proxy is not None:
self._blacklist.pop(proxy)
self._used.add(proxy)
return proxy
else:
# All blacklisted proxies are currently cooling down
pass
if self._cooling_down:
self._cond.wait(1)
else:
self._cond.wait(timeout)
if timeout is not None:
if time.perf_counter() - start > timeout:
raise NoFreeProxies
def release(self, proxy, bad=False, holdout=None, bad_reason=None):
"""Возвращает прокси в пул
@param proxy: прокси
@param holdout (сек): None - вернуть сразу, иначе прокси не будет использован до истечения указанного интервала
"""
with self._cond:
is_outdated = proxy not in self._used
if is_outdated:
# Most likely the proxy is no longer in the current list
# and has already been removed from it
return
self._used.remove(proxy)
if holdout is None or self._force_defaults:
holdout = self._default_holdout if not bad else self._default_bad_holdout
if self._smart_holdout:
_holdout = (
self._get_next_holdout(proxy, bad=bad) or
holdout or
self._smart_holdout_start
)
# Do not let the bound drop too low
if _holdout < self._smart_holdout_min:
holdout = self._smart_holdout_min
elif _holdout > self._smart_holdout_max:
holdout = self._smart_holdout_max
else:
holdout = max(self._smart_holdout_min, _holdout)
if holdout is not None:
self._cooling_down[proxy] = time.time() + holdout
if bad:
self._blacklist[proxy] = bad_reason
elif holdout is None:
# the proxy does not need a cooldown
self._free.append(proxy)
self._cond.notify()
self._update_stats(proxy, bad=bad, holdout=holdout)
class IChain:
def switch(self, bad=False, holdout=None, bad_reason=None, lazy=False):
raise NotImplementedError
def get_adapter(self):
raise NotImplementedError
def get_handler(self):
raise NotImplementedError
def get_path(self):
raise NotImplementedError
def wrap_session(self, session):
raise NotImplementedError
def wrap_module(self, module, all_threads=False):
"""
Attempts to replace a module's socket library with a SOCKS socket.
This will only work on modules that import socket directly into the
namespace; most of the Python Standard Library falls into this category.
"""
import socks
import socks.monkey_socket
routes = socks.RoutingTable.from_addresses(self.get_path())
if not all_threads:
socks.monkey_socket.socks_wrap_module_thread(routes, module)
else:
socks.monkey_socket.socks_wrap_module_global(routes, module)
class Chain(IChain):
"""
Not thread-safe.
"""
def __init__(self, proxies, proxy_gw=None, use_pool=False, pool_acquire_timeout=None):
"""
@param proxies: list of proxy server addresses
@param proxy_gw: the proxy server that should stand at the head of the chain
(all requests to the other proxy servers will pass through it)
@param use_pool: use the proxy list as a pool
@param pool_acquire_timeout (sec.): if a free proxy cannot be acquired within this period,
a `NoFreeProxies` exception is raised; None - wait until a free address becomes available
"""
if not isinstance(proxies, Proxies) and isinstance(proxies, collections.abc.Sequence):  # collections.Sequence was removed in Python 3.10
proxies = Proxies(proxies)
if use_pool:
pool = proxies.get_pool()
else:
pool = None
self.proxies = proxies
self.proxy_gw = proxy_gw
self._proxies_pool = pool
self._current_pool_proxy = None
self._pool_acquire_timeout = pool_acquire_timeout
self.__path = []
# fix http://bugs.python.org/issue23841
if sys.version_info >= (3, 4, 0):
self.finalizer = weakref.finalize(self, self.finalize)
else:
self.finalizer = None
def __del__(self):
if self.finalizer is None:
self.finalize()
def finalize(self):
if self._proxies_pool is not None:
self._release_pool_proxy()
def _build_path(self, proxy):
path = []
if self.proxy_gw:
path.append(self.proxy_gw)
path.append(proxy)
return path
def _release_pool_proxy(self, bad=False, holdout=None, bad_reason=None):
if self._current_pool_proxy:
proxy = self._current_pool_proxy
self._current_pool_proxy = None
self._proxies_pool.release(proxy, bad=bad, holdout=holdout, bad_reason=bad_reason)
def _acquire_pool_proxy(self):
proxy = self._proxies_pool.acquire(timeout=self._pool_acquire_timeout)
self._current_pool_proxy = proxy
return proxy
def _get_proxy(self):
if self._proxies_pool is not None:
return self._acquire_pool_proxy()
else:
return self.proxies.get_random_address()
@property
def _path(self):
if not self.__path:
self.__path = self._build_path(self._get_proxy())
return self.__path
def get_path(self):
return self._path
def switch(self, bad=False, holdout=None, bad_reason=None, lazy=False):
self.__path.clear()
if self._proxies_pool is not None:
self._release_pool_proxy(bad, holdout, bad_reason)
if not lazy:
self.__path = self._build_path(self._get_proxy())
def get_adapter(self):
import socks.adapters
return socks.adapters.ChainedProxyHTTPAdapter(chain=self._path)
def get_handler(self):
import socks.handlers
return socks.handlers.ChainProxyHandler(chain=self._path)
def wrap_session(self, session):
adapter = self.get_adapter()
session.mount('http://', adapter)
session.mount('https://', adapter)
return session
@classmethod
def from_config(cls, cfg):
proxy_cfg_string = cfg.get('Прокси')
if proxy_cfg_string is None:
return None
proxy_gw = cfg.get('Шлюз')
proxies = Proxies.from_cfg_string(proxy_cfg_string)
return cls(proxies, proxy_gw=proxy_gw)
class MultiChain(IChain):
def __init__(self, *proxies_all, use_pool=True, pool_acquire_timeout=None):
if use_pool:
pool_kw = {'use_pool': True, 'pool_acquire_timeout': 1}
else:
pool_kw = {}
self._pool_acquire_timeout = pool_acquire_timeout
self._chains = collections.deque(
Chain(p, gw, **pool_kw)
for p, gw in self._unwrap_proxies_all(proxies_all)
)
@staticmethod
def _unwrap_proxies_all(proxies_all):
for p in proxies_all:
if isinstance(p, tuple):
# (Proxies, Gateway)
p, gw = p
else:
# Proxies
p, gw = p, None
yield p, gw
def _self_auto_rotate(func):
@functools.wraps(func)
def wrapped(self, *args, **kw):
start = time.perf_counter()
while True:
try:
return func(self, *args, **kw)
except NoFreeProxies:
self._rotate() # FIXME: cycle rotate is normal?
if (
self._pool_acquire_timeout is not None and
time.perf_counter() - start > self._pool_acquire_timeout
):
raise
return wrapped
@property
def _current(self):
return self._chains[-1]
def get_path(self):
return self._current.get_path()
def _rotate(self):
self._chains.rotate(1)
def switch(self, bad=False, holdout=None, bad_reason=None, lazy=False):
self._current.switch(bad=bad, holdout=holdout, bad_reason=bad_reason, lazy=True)
self._rotate()
if not lazy:
self._enforce_current_path_build()
@_self_auto_rotate
def _enforce_current_path_build(self):
_ = self._current._path # FIXME: ugly enforce path building after switching
@_self_auto_rotate
def get_adapter(self):
return self._current.get_adapter()
@_self_auto_rotate
def get_handler(self):
return self._current.get_handler()
@_self_auto_rotate
def wrap_session(self, session):
return self._current.wrap_session(session)
@_self_auto_rotate
def wrap_module(self, module):
return self._current.wrap_module(module)
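# --- Usage sketch (not part of the original module) ---
# A minimal, hedged example of wiring a pooled Chain into a requests.Session; the proxy
# addresses, timeouts and the use of `requests` are illustrative assumptions, and the
# adapter relies on this project's own `socks` package being importable.
if __name__ == '__main__':
    import requests
    proxies = Proxies.from_cfg_string('{"list": ["http://127.0.0.1:3128", "http://127.0.0.1:9999"]}')
    chain = Chain(proxies, use_pool=True, pool_acquire_timeout=30)
    session = chain.wrap_session(requests.Session())
    try:
        session.get('https://example.com', timeout=10)
        chain.switch()                       # release the proxy back to the pool, pick a new one
    except requests.RequestException:
        chain.switch(bad=True, holdout=60)   # cool the failing proxy down for a minute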
|
StarcoderdataPython
|
6528648
|
<filename>python_examples/util.py
from itertools import islice
import numpy as np
def data_generator(files, batch_size, n_classes):
while 1:
lines = []
for file in files:
with open(file,'r',encoding='utf-8') as f:
header = f.readline() # ignore the header
while True:
temp = len(lines)
lines += list(islice(f,batch_size-temp))
if len(lines)!=batch_size:
break
idxs = []
vals = []
##
y_idxs = []
y_vals = []
y_batch = np.zeros([batch_size,n_classes], dtype=float)
count = 0
for line in lines:
itms = line.strip().split(' ')
##
y_idxs = [int(itm) for itm in itms[0].split(',')]
for i in range(len(y_idxs)):
y_batch[count,y_idxs[i]] = 1.0/len(y_idxs)
# y_batch[count,y_idxs[i]] = 1.0
##
idxs += [(count,int(itm.split(':')[0])) for itm in itms[1:]]
vals += [float(itm.split(':')[1]) for itm in itms[1:]]
count += 1
lines = []
yield (idxs, vals, y_batch)
def data_generator_tst(files, batch_size):
while 1:
lines = []
for file in files:
with open(file,'r',encoding='utf-8') as f:
header = f.readline() # ignore the header
while True:
temp = len(lines)
lines += list(islice(f,batch_size-temp))
if len(lines)!=batch_size:
break
idxs = []
vals = []
##
y_batch = [None for i in range(len(lines))]
count = 0
for line in lines:
itms = line.strip().split(' ')
##
y_batch[count] = [int(itm) for itm in itms[0].split(',')]
##
idxs += [(count,int(itm.split(':')[0])) for itm in itms[1:]]
vals += [float(itm.split(':')[1]) for itm in itms[1:]]
count += 1
lines = []
yield (idxs, vals, y_batch)
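# --- Usage sketch (not part of the original module) ---
# A hedged illustration of the input format these generators assume (the first line of each
# file is skipped as a header; every other line is extreme-classification / libsvm style):
#     "12,345 0:0.5 7:1.25 19:0.75"   -> comma-separated labels, then feature_index:value pairs
# The file name, batch size and label count below are assumptions for illustration.
if __name__ == '__main__':
    gen = data_generator(['train.txt'], batch_size=128, n_classes=670091)
    idxs, vals, y_batch = next(gen)  # sparse (row, feature) indices, their values, dense label matrix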
|
StarcoderdataPython
|
1662948
|
import re
import random
import requests
import table
import user_agent_list
from bs4 import BeautifulSoup
class HtmlPage:
user_agent_number = 7345
def __init__(self, url):
self.url = url
def get_html(self, creds, proxy_pass):
have_a_try = 3
if not proxy_pass:
while have_a_try:
t = table.Table('proxy_list', creds=creds)
user_agent = user_agent_list.get_user_agent(int(random.random() * self.user_agent_number))
user_agent_dict = {'user-agent': user_agent}
table_exist = t.table_check()
if not table_exist:
print("Proxy table corrupted.")
return False
tab_length = t.table_len()
try:
proxy = t.table_read(int(random.random() * (tab_length[0] - 1)) + 1)
proxy_dict = {proxy[1]: proxy[2]}
except TypeError:
print("Fatal error in proxy list.")
return False
try:
result = requests.get(str.rstrip(self.url), headers=user_agent_dict, proxies=proxy_dict)
result.raise_for_status()
return result.text
except(requests.RequestException, ValueError):
print("Bad proxy. One more try.")
have_a_try -= 1
print("Network error. Update proxy list.")
else:
while have_a_try:
try:
result = requests.get(str.rstrip(self.url))
result.raise_for_status()
return result.text
except(requests.RequestException, ValueError):
have_a_try -= 1
print("Network error. Can't get html.")
return False
def get_wb_page(self, creds, proxy_pass):
html = self.get_html(creds, proxy_pass)
if html:
soup = BeautifulSoup(html, 'html.parser')
articles = {}
for index in soup.findAll('div', class_="dtList i-dtList j-card-item"):
article_number = re.search(r'\d+', index.get('data-catalogercod1s'))
articles[article_number[0]] = index.find('a')['href']
return articles
return False
|
StarcoderdataPython
|
12822
|
from django.urls import reverse
from rest_framework import status
from .base import BaseTestCase
class FollowTestCase(BaseTestCase):
"""Testcases for following a user."""
def test_follow_user_post(self):
"""Test start following a user."""
url = reverse('follow', kwargs={'username': 'test2'})
response = self.client.post(url, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_post_follow_already_followed_user(self):
"""Test start following a user you already follow."""
url = reverse('follow', kwargs={'username': 'test2'})
self.client.post(url, HTTP_AUTHORIZATION=self.auth_header)
response = self.client.post(url, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_follow_missing_user_post(self):
"""Test trying to start following a missing user."""
url = reverse('follow', kwargs={'username': 'joel'})
response = self.client.post(url, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
def test_delete_follow(self):
"""Test unfollowing a user"""
url = reverse('follow', kwargs={'username': 'test2'})
self.client.post(url, HTTP_AUTHORIZATION=self.auth_header)
response = self.client.delete(url, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_delete_follow_of_not_followed_user(self):
"""Test unfollowing a user you are not following"""
url = reverse('follow', kwargs={'username': 'test2'})
response = self.client.delete(url, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_list_followers_of_user(self):
"""Test list followers of a user"""
url_followers = reverse('getfollowers', kwargs={'username': 'test2'})
self.client.get(url_followers, HTTP_AUTHORIZATION=self.auth_header)
url_follow = reverse('follow', kwargs={'username': 'test2'})
self.client.post(url_follow, HTTP_AUTHORIZATION=self.auth_header)
response = self.client.get(url_followers, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_list_user_is_following(self):
"""Test list users the user is following"""
url_following = reverse('getfollowing', kwargs={'username': 'test1'})
self.client.get(url_following, HTTP_AUTHORIZATION=self.auth_header)
url_follow = reverse('follow', kwargs={'username': 'test2'})
self.client.post(url_follow, HTTP_AUTHORIZATION=self.auth_header)
response = self.client.get(url_following, HTTP_AUTHORIZATION=self.auth_header)
self.assertEqual(response.status_code, status.HTTP_200_OK)
|
StarcoderdataPython
|
9676225
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
# Author: <NAME>
# Description : FFT Baseline Correction
import sys, os
import numpy as np
import pandas as pd
import matplotlib.pyplot as plt
from matplotlib.widgets import Slider, Button, SpanSelector
from matplotlib import gridspec
import scipy.fftpack
from datetime import datetime
from dateutil.parser import parse
plt.rcParams['toolbar'] = 'None'
def calculate_y_limits(ys):
ymin = np.nanmin(ys)
ymax = np.nanmax(ys)
ydiff = ymax-ymin
return((ymin-0.1*ydiff, ymax+0.1*ydiff))
def crawl_info_file(filename):
with open(filename, "r") as file:
data = file.read()
blocks = [block.split("\n") for block in data.split("\n\n")]
db_dict={}
db_dict["filename"] = os.path.basename(filename)
if db_dict["filename"].find("fat.")!=-1:
db_dict["status"] = 0
elif db_dict["filename"][0]=="B":
db_dict["status"] = +2
else:
db_dict["status"] = +1
#Block 0
#General
db_dict["experimentalist"] = blocks[0][0].split(" measured",1)[0]
db_dict["molecule"] = blocks[0][0].split(": ",1)[1]
[db_dict["datestart"],db_dict["datestop"]]=blocks[0][1].replace("Measurement started on ","").split(" and finished at ")
db_dict["measurementtype"] = blocks[0][2].split(": ",1)[1]
for i in ["datestart", "datestop"]:
db_dict[i]=str(parse(db_dict[i]))
#Block 1
#Probe Synthesizer
db_dict["freqstart"] = float(blocks[1][1].split(": ",1)[1].replace("MHz",""))
db_dict["freqcenter"] = float(blocks[1][2].split(": ",1)[1].replace("MHz",""))
db_dict["freqstop"] = float(blocks[1][3].split(": ",1)[1].replace("MHz",""))
db_dict["span"] = float(blocks[1][4].split(": ",1)[1].replace("MHz",""))
db_dict["steps"] = float(blocks[1][5].split(": ",1)[1])
db_dict["stepsize"] = float(blocks[1][6].split(": ",1)[1].replace("kHz",""))
db_dict["datapoints"] = float(blocks[1][7].split(": ",1)[1])
db_dict["mfactor"] = float(blocks[1][8].split(": ",1)[1])
db_dict["fmdeviation"] = float(blocks[1][9].split(": ",1)[1].replace("kHz/V",""))
db_dict["probepower"] = float(blocks[1][10].split(": ",1)[1].replace("dBm",""))
#Block 2
#Pump Synthesizer
#Block 3
#Lock In
db_dict["delaytime"] = float(blocks[3][1].split(": ",1)[1].replace("ms",""))
db_dict["timeconstant"] = float(blocks[3][2].split(": ",1)[1].replace("ms",""))
db_dict["averagedpoints"]= float(blocks[3][3].split(": ",1)[1])
db_dict["averagediter"] = float(blocks[3][4].split(": ",1)[1])
db_dict["oscfreq"] = float(blocks[3][5].split(": ",1)[1].replace("Hz",""))
db_dict["oscamplitude"] = float(blocks[3][6].split(": ",1)[1].replace("V",""))
db_dict["ADCslope"] = float(blocks[3][7].split(": ",1)[1].replace("dB/ocatve",""))
db_dict["ACgain"] = float(blocks[3][8].split(": ",1)[1].replace("dB",""))
#Block 3
#Pressure
db_dict["totFMmod"] = float(blocks[4][0].split("= ",1)[1].replace("kHz",""))
if blocks[4][1].split(": ",1)[0]=="Pressure":
db_dict["pressurestart"]= "pressure not available"
db_dict["pressureend"] = "pressure not available"
else:
db_dict["pressurestart"]= blocks[4][1].split(": ",1)[1].replace("mbar","")
db_dict["pressureend"] = blocks[4][2].split(": ",1)[1].replace("mbar","")
for i in ("pressurestart", "pressureend"):
if db_dict[i].find("pressure not available")!=-1:
db_dict[i]=-1
else:
db_dict[i]=float(db_dict[i])
return(db_dict)
def decrease_standingwave(data_in, save_data):
global xs, ys, ys_corr, duration, filename, current_index, x_range, data
data = data_in
def onclick(event):
if event.inaxes == ax1:
if event.button == 1:
if isinstance(event.xdata,np.float64):
cut_off_slider.set_val(event.xdata)
def onzoom(vmin, vmax, i):
global x_range, fft_range
if vmin == vmax:
return
elif i < 2:
fft_range = [vmin, vmax]
else:
x_range = [vmin, vmax]
update_plot(rescale = False)
def press(key):
global xs, ys, ys_corr, duration, filename, current_index, data
if key=="left":
cut_off_slider.set_val(cut_off_slider.val-0.01)
elif key=="shift+left":
cut_off_slider.set_val(cut_off_slider.val-0.1)
elif key=="right":
cut_off_slider.set_val(cut_off_slider.val+0.01)
elif key=="shift+right":
cut_off_slider.set_val(cut_off_slider.val+0.1)
elif key=="up":
cut_off_slider.set_val(cut_off_slider.val+0.15)
elif key=="shift+up":
cut_off_slider.set_val(cut_off_slider.val+0.2)
elif key=="down":
cut_off_slider.set_val(cut_off_slider.val-0.15)
elif key=="shift+down":
cut_off_slider.set_val(cut_off_slider.val-0.2)
elif key in [" ", "space", "enter"]:
save_data(xs, ys_corr, filename)
current_index += 1
if current_index >= len(data):
current_index = len(data)-1
update_plot()
elif key in ["ctrl+left"]:
current_index -= 1
if current_index < 0:
current_index = 0
update_plot()
elif key in ["ctrl+q"]:
# fig.canvas.mpl_disconnect(cid_1)
fig.canvas.mpl_disconnect(cid_2)
plt.close()
elif key in ["escape", "ctrl+right"]:
current_index += 1
if current_index >= len(data):
current_index = len(data)-1
update_plot()
elif key in ["ctrl+r"]:
update_plot()
elif key in ["ctrl+s"]:
try:
import tkinter as tk
from tkinter import filedialog
root = tk.Tk()
root.withdraw()
filename = filedialog.asksaveasfilename()
root.destroy()
except Exception as E:
print(str(E))
filename = input("Enter filename: ")
plt.savefig(filename)
elif key in ["ctrl+o"]:
data = get_data()
current_index = 0
update_plot()
def update_plot(rescale = True):
global xs, ys, ys_corr, duration, filename, current_index, x_range, fft_range, data
if len(data) == 0 or current_index > len(data):
return
xs, ys, duration, filename = data[current_index]
cutoff_freq = cut_off_slider.val
fft_ys = scipy.fftpack.rfft(ys)
fft_xs = scipy.fftpack.rfftfreq(len(ys), duration/len(xs))
fft_cut = [fft_ys[i] if fft_xs[i] > cutoff_freq else 0 for i in range(len(fft_ys))]
fft_bas = [fft_ys[i] if fft_xs[i] < cutoff_freq else 0 for i in range(len(fft_ys))]
ys_corr = scipy.fftpack.irfft(fft_cut)
ys_base = scipy.fftpack.irfft(fft_bas)
ax1.lines[0].set_data(fft_xs, fft_ys)
ax2.lines[0].set_data(xs, ys)
ax2.lines[1].set_data(xs, ys_base)
ax3.lines[0].set_data(xs, ys_corr)
if rescale == True:
x_range = [np.nanmin(xs), np.nanmax(xs)]
tmp = (np.nanmin(fft_xs), np.nanmax(fft_xs))
tmp_p = (tmp[1]-tmp[0])*0.05
fft_range = (tmp[0]-tmp_p, tmp[1]+tmp_p)
mask = [x_range[0] <= x <= x_range[1] for x in xs]
y_range = calculate_y_limits(ys[mask])
y_range_corr = calculate_y_limits(ys_corr[mask])
y_range = (min(y_range[0], y_range_corr[0]), max(y_range[1], y_range_corr[1]))
y_range_fft = calculate_y_limits(fft_ys)
cut_off_slider.valmin = fft_range[0]
cut_off_slider.valmax = fft_range[1]
cut_off_slider.ax.set_xlim(fft_range)
ax1.set_xlim(fft_range)
ax1.set_ylim(y_range_fft)
ax2.set_xlim(x_range)
ax2.set_ylim(y_range)
ax3.set_xlim(x_range)
ax3.set_ylim(y_range)
ax3.set_xticks(np.linspace(*x_range, 5))
ax3.set_xticklabels([f"{x:.2f}" for x in np.linspace(*x_range, 5)])
line.set_xdata(cutoff_freq)
title_ax.set_title(f"{current_index+1}/{len(data)}: {os.path.basename(filename)}", ha="center")
fig.canvas.draw_idle()
current_index = 0
cutoff_freq = 0.0
x_range = [0, 0]
fft_range = [0, 0]
fig= plt.figure()
gs = gridspec.GridSpec(9, 12, height_ratios = [0.25, 0.5, 1, 0.5, 1, 1, 0.5, 0.5, 0.5], hspace = 0, wspace=0)
title_ax = fig.add_subplot(gs[0, :])
title_ax.axis("off")
title_ax.set_title("Press 'Replace Files' to open files")
ax0 = fig.add_subplot(gs[1, :])
cut_off_slider = Slider(ax0, "Cut-Off", 0, 1, valinit=cutoff_freq)
cut_off_slider.on_changed(lambda a: update_plot(rescale=False))
ax1 = fig.add_subplot(gs[2, :])
ax1.plot([], [], color="green", label="FFT Coefficients")
ax1.legend(loc = "upper right")
line = ax1.axvline(x=cutoff_freq, color="red", ls="--")
tmp_ax = fig.add_subplot(gs[3, :])
tmp_ax.axis("off")
ax2 = fig.add_subplot(gs[4, :])
ax2.plot([], [], color="#6ebeff", label="Original Spectrum")
ax2.plot([], [], color="#FF0266", label="Baseline", linewidth=3, alpha=0.3)
ax2.get_xaxis().set_visible(False)
ax2.legend(loc = "upper right")
ax3 = fig.add_subplot(gs[5, :], sharex=ax2)
ax3.plot([], [], color="#0336FF", label="Corrected Spectrum")
ax3.legend(loc = "upper right")
tmp_ax = fig.add_subplot(gs[6, :])
tmp_ax.axis("off")
buttons = [("Reset Zoom", "ctrl+r"), ("Previous", "ctrl+left"), ("Next", "ctrl+right"), ("Save", "enter")]
buttons_nsi = [("Quit", "ctrl+q"), ("Save Figure", "ctrl+s"), ("Replace Files", "ctrl+o")]
refs = {}
for i, (text, key) in enumerate(buttons):
tmp_ax = fig.add_subplot(gs[7, 3*i:3*(i+1)])
tmp_button = Button(tmp_ax, text)
tmp_button.on_clicked(lambda a, key=key: press(key))
refs[key] = tmp_button
for i, (text, key) in enumerate(buttons_nsi):
tmp_ax = fig.add_subplot(gs[8, 4*i:4*(i+1)])
tmp_button = Button(tmp_ax, text)
tmp_button.on_clicked(lambda a, key=key: press(key))
refs[key] = tmp_button
update_plot()
cid_1 = fig.canvas.mpl_connect('button_press_event', onclick) # Is now done by span selectors
cid_2 = fig.canvas.mpl_connect('key_press_event', lambda event: press(event.key))
rectprops = dict(facecolor='blue', alpha=0.5)
span_selectors = {}
for i, ax in enumerate((ax0, ax1, ax2, ax3)):
span_selectors[i] = SpanSelector(ax, lambda vmax, vmin, index=i: onzoom(vmax, vmin, index), 'horizontal',rectprops=rectprops, useblit=True, button = 3)
fig.tight_layout()
plt.show()
def get_data():
# Get files
try:
import tkinter as tk
from tkinter import filedialog
root = tk.Tk()
root.withdraw()
filenames = filedialog.askopenfilename(multiple=True)
root.destroy()
except Exception as E:
filenames = input("Enter filenames: ").split(",")
filenames = list(set(filenames))
data = []
# Fill data array
for filename in filenames:
# Get x- and y-data
df = pd.read_csv(filename, sep="\t", skip_blank_lines=True, dtype=np.float64, names=(["x", "y"]))
xs = df["x"].to_numpy()
ys = df["y"].to_numpy()
# Get duration if possible, otherwise set to 30
fname, extension = os.path.splitext(filename)
try:
db_dict=crawl_info_file(fname+".info")
date_start=parse(db_dict["datestart"])
date_stop=parse(db_dict["datestop"])
duration=(date_stop-date_start).total_seconds()/2
except Exception as E:
duration = 30
data.append((xs, ys, duration, filename))
return(data)
if __name__ == '__main__':
# Set up what should happen with corrected data, here just save to file
def save_data(xs, ys, filename):
fname, extension = os.path.splitext(filename)
df = pd.DataFrame({"x": xs, "y":ys})
df.to_csv(fname + "FFT" + extension, header=False, index=False, sep="\t")
# Start main function
decrease_standingwave(get_data(), save_data)
|
StarcoderdataPython
|
3201542
|
##########################################################################
# Geometry data
##
class GeometryData:
""" Class which holds the geometry data of a ObjId
"""
def __init__(self, subdetid = 0, discriminator = ()):
self.subdetid = subdetid
self.discriminator = discriminator
# ObjId names from Alignment/CommonAlignment/interface/StructureType.h
data = {-1: GeometryData(), # notfound
0: GeometryData(), # invalid
1: GeometryData(), # AlignableDetUnit
2: GeometryData(), # AlignableDet
3: GeometryData(1), # TPBModule
4: GeometryData(1, ("Half", "Layer", "Rod")), # TPBLadder
5: GeometryData(1, ("Half", "Layer")), # TPBLayer
6: GeometryData(1, ("Half",)), # TPBHalfBarrel
7: GeometryData(1), # TPBBarrel
8: GeometryData(2), # TPEModule
9: GeometryData(2, ("Side", "Half", "Layer", "Blade", "Panel")), # TPEPanel
10: GeometryData(2, ("Side", "Half", "Layer", "Blade")), # TPEBlade
11: GeometryData(2, ("Side", "Half", "Layer")), # TPEHalfDisk
12: GeometryData(2, ("Side", "Half")), # TPEHalfCylinder
13: GeometryData(2, ("Side",)), # TPEEndcap
14: GeometryData(3), # TIBModule
15: GeometryData(3), # TIBString
16: GeometryData(3, ("Side", "Layer", "Half", "OuterInner")), # TIBSurface
17: GeometryData(3, ("Side", "Layer", "Half")), # TIBHalfShell
18: GeometryData(3, ("Side", "Layer")), # TIBLayer
19: GeometryData(3, ("Side",)), # TIBHalfBarrel
20: GeometryData(3), # TIBBarrel
21: GeometryData(4), # TIDModule
22: GeometryData(4, ("Side", "Layer", "Ring", "OuterInner")), # TIDSide
23: GeometryData(4, ("Side", "Layer", "Ring")), # TIDRing
24: GeometryData(4, ("Side", "Layer")), # TIDDisk
25: GeometryData(4, ("Side",)), # TIDEndcap
26: GeometryData(5), # TOBModule
27: GeometryData(5, ("Side", "Layer", "Rod")), # TOBRod
28: GeometryData(5, ("Side", "Layer")), # TOBLayer
29: GeometryData(5, ("Side",)), # TOBHalfBarrel
30: GeometryData(5), # TOBBarrel
31: GeometryData(6), # TECModule
32: GeometryData(6, ("Side", "Layer", "OuterInner", "Petal", "Ring")), # TECRing
33: GeometryData(6, ("Side", "Layer", "OuterInner", "Petal")), # TECPetal
34: GeometryData(6, ("Side", "Layer", "OuterInner")), # TECSide
35: GeometryData(6, ("Side", "Layer")), # TECDisk
36: GeometryData(6, ("Side",)), # TECEndcap
37: GeometryData(), # Pixel
38: GeometryData(), # Strip
39: GeometryData(), # Tracker
100: GeometryData(), # AlignableDTBarrel
101: GeometryData(), # AlignableDTWheel
102: GeometryData(), # AlignableDTStation
103: GeometryData(), # AlignableDTChamber
104: GeometryData(), # AlignableDTSuperLayer
105: GeometryData(), # AlignableDTLayer
106: GeometryData(), # AlignableCSCEndcap
107: GeometryData(), # AlignableCSCStation
108: GeometryData(), # AlignableCSCRing
109: GeometryData(), # AlignableCSCChamber
110: GeometryData(), # AlignableCSCLayer
111: GeometryData(), # AlignableMuon
112: GeometryData(), # Detector
1000: GeometryData(), # Extras
1001: GeometryData(), # BeamSpot
}
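# Example lookup (illustrative, not part of the original file): a TPBLadder
# (ObjId 4) belongs to subdetector 1 and is addressed by the hierarchy
# data[4].subdetid == 1 and data[4].discriminator == ("Half", "Layer", "Rod").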
|
StarcoderdataPython
|
1770434
|
from os import path
import sys
sys.path.append(path.join(path.dirname(__file__), path.pardir, path.pardir))
|
StarcoderdataPython
|
109858
|
import pandas as pd
import seaborn as sns
from datetime import datetime
import matplotlib.patches as patches
from ..common import log
from ..util.completion import completion_idx_has_data
def completion_plot(completion, modalities, start, end, freq,
ax=None, cmap=None, x_tick_mult=24, x_tick_fmt="%y-%m-%d %H:%M",
enrolment_date=None, events=None, event_radius=0):
""" Plots the completion array as a heatmap
Parameters
__________
completion: np.ndarray
Completion 2d array for a participant
modalities: list of str
A list of the modalities, used to label the Y-axis
start: pd.Timedelta / np.datetime64
end: pd.Timedelta / np.datetime64
freq: str
The timedelta string associated with the given completion array
ax: matplotlib axes
cmap: Colormap
x_tick_mult: int
Multiplier for x ticks, will draw a tick every x_tick_mult hours.
x_tick_fmt: str
enrolment_date: pd.Timedelta / np.datetime64
events: list of tuples of strings
A list of event timestamp and label pairs, both as strings
Draws a vertical line at each event, colored as:
green: data from all modalities is present at the event
orange: data from at least one modality is present at the event
red: no data is present at the event
If event_radius is specified the color checks include the whole event circumference.
event_radius: float
Radius around each event in multiples of freq
Returns
_______
ax: matplotlib axes
"""
td = pd.Timedelta(freq)
ax = sns.heatmap(completion, cmap=cmap, cbar=False, xticklabels=int(x_tick_mult*(pd.Timedelta('1h')/td)), ax=ax)
# Gaps
N_x = completion.shape[1]
N_y = completion.shape[0]
for i in range(N_y-1):
ax.hlines(i+1, 0, N_x, color='white', linewidth=2)
# Outline
ax.vlines(0, 0, N_y, color='black', linewidth=2)
ax.vlines(N_x, 0, N_y, color='black', linewidth=2)
ax.hlines(0, 0, N_x, color='black', linewidth=2)
ax.hlines(N_y, 0, N_x, color='black', linewidth=2)
# Events
if events:
for e_stamp_str,e_label in events:
try:
e_stamp_unix = float(e_stamp_str)
e_stamp = pd.Timestamp(e_stamp_unix, unit='s').tz_localize('UTC')
except:
# TODO: review usage of tz_localize()
e_stamp = pd.Timestamp(datetime.strptime(e_stamp_str, '%Y-%m-%d %H:%M:%S')).tz_localize('CET').tz_convert('UTC')
if e_stamp < start or e_stamp > end: continue
log.debug("Event at {}: {}".format(e_stamp, e_label))
e_idx = (e_stamp - start)//td
e_slice = None
if event_radius:
e_start = max(0, (e_stamp - start - (td * event_radius))//td)
e_end = min(N_x-1, (e_stamp - start + (td * event_radius))//td)
e_slice = slice(e_start, e_end+1)
rect = patches.Rectangle((e_start, 0), e_end - e_start, N_y, linewidth=0.5, edgecolor='k', alpha=0.25, zorder=9)
ax.add_patch(rect)
has_all = completion_idx_has_data(completion, e_slice if e_slice else e_idx, requirement_function=all)
has_any = completion_idx_has_data(completion, e_slice if e_slice else e_idx, requirement_function=any)
if has_all: ax.vlines(e_idx, 0, N_y, color='green', linewidth=2, zorder=10)
elif has_any: ax.vlines(e_idx, 0, N_y, color='orange', linewidth=2, zorder=10)
else: ax.vlines(e_idx, 0, N_y, color='red', linewidth=2, zorder=10)
# Enrolment
if enrolment_date:
enrolment_idx = (enrolment_date - start)//td
ax.vlines(enrolment_idx-1, 0, N_y, color='red', linewidth=0.5)
# Labels
ax.set_ylabel('Data topic')
ax.set_xlabel('Date')
xticks = ax.get_xticks()
ax.set_xticklabels([(start + (tm * td)).strftime(x_tick_fmt) for tm in xticks], rotation=0)
ax.set_yticklabels([ m.split('_', 2)[-1] for m in modalities ], rotation=0)
return ax
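if __name__ == "__main__":
    # Minimal usage sketch, not part of the original module: the modality names,
    # time range and random completion values below are illustrative assumptions,
    # and the relative imports above mean the module must be run via `python -m`.
    import numpy as np
    import matplotlib.pyplot as plt

    start = pd.Timestamp("2021-01-01", tz="UTC")
    end = pd.Timestamp("2021-01-03", tz="UTC")
    freq = "1h"
    modalities = ["questionnaire_phq8", "android_phone_battery_level"]
    n_bins = (end - start) // pd.Timedelta(freq)
    completion = np.random.rand(len(modalities), n_bins)
    ax = completion_plot(completion, modalities, start, end, freq)
    plt.show()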
|
StarcoderdataPython
|
4993721
|
<reponame>parsoyaarihant/CS726-Project-2048-Using-RL<gh_stars>0
import random
import logic
import constants as c
class GameGrid():
def __init__(self):
self.commands = {c.KEY_UP: logic.up, c.KEY_DOWN: logic.down,
c.KEY_LEFT: logic.left, c.KEY_RIGHT: logic.right,
c.KEY_UP_ALT: logic.up, c.KEY_DOWN_ALT: logic.down,
c.KEY_LEFT_ALT: logic.left,
c.KEY_RIGHT_ALT: logic.right}
self.actions = [c.KEY_UP, c.KEY_DOWN, c.KEY_RIGHT, c.KEY_LEFT]
self.grid_cells = []
self.init_matrix()
def gen(self):
return random.randint(0, c.GRID_LEN - 1)
def init_matrix(self):
self.matrix = logic.new_game(4)
self.matrix = logic.add_two(self.matrix)
self.matrix = logic.add_two(self.matrix)
def step(self, key):
if isinstance(key, int):
key = self.actions[key]
if key in self.commands:
self.matrix, done = self.commands[key](self.matrix)
if done:
self.matrix = logic.add_two(self.matrix)
done = False
# Check if game is completed
if logic.game_state(self.matrix) == 'win':
done = True
if logic.game_state(self.matrix) == 'lose':
done = True
return self.matrix, logic.reward(self.matrix), done, ""
def generate_next(self):
index = (self.gen(), self.gen())
while self.matrix[index[0]][index[1]] != 0:
index = (self.gen(), self.gen())
self.matrix[index[0]][index[1]] = 2
def get_state(self):
return self.matrix
def key_down(self, event):
key = repr(event.char)
if event.keycode == 114:
self.reset()
return
self.step(key)
self.display_state()
def action_space(self):
# return possible action
return self.actions[random.randint(0, 3)]
def reset(self):
# resets the game to initial state
self.init_matrix()
def display_state(self):
print()
for i in self.matrix:
print(i)
def reward(self):
return logic.reward(self.matrix)
def highest_score(self):
return logic.highest_score(self.matrix)
gamegrid = GameGrid()
#print(gamegrid.display_state())
#gamegrid.perform_action(c.KEY_UP)
#print(gamegrid.display_state())
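# Example random rollout (illustrative sketch, not part of the original file):
# done = False
# while not done:
#     state, reward, done, _ = gamegrid.step(gamegrid.action_space())
# print(gamegrid.highest_score(), gamegrid.reward())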
|
StarcoderdataPython
|
6582731
|
<filename>psydac/linalg/tests/test_pcg.py
import numpy as np
import pytest
#===============================================================================
@pytest.mark.parametrize( 'n', [8, 16] )
@pytest.mark.parametrize( 'p', [2, 3] )
def test_pcg(n, p):
"""
Test preconditioned Conjugate Gradient algorithm on a banded linear system
with 2*p + 1 diagonals (tridiagonal when p = 1).
Parameters
----------
n : int
Dimension of linear system (number of rows = number of columns).
p : int
Number of non-zero diagonals above and below the main diagonal.
"""
from psydac.linalg.iterative_solvers import pcg, jacobi
from psydac.linalg.stencil import StencilVectorSpace, StencilMatrix, StencilVector
from psydac.linalg.basic import LinearSolver
#---------------------------------------------------------------------------
# PARAMETERS
#---------------------------------------------------------------------------
# ... Vector Spaces
V = StencilVectorSpace([n], [p], [False])
e = V.ends[0]
s = V.starts[0]
# Build banded matrix with 2p+1 diagonals: must be symmetric and positive definite
# Here we assign value 2*p on main diagonal and -1 on other diagonals
A = StencilMatrix(V, V)
A[:,-p:0 ] = -1
A[:, 0:1 ] = 2*p
A[:, 1:p+1] = -1
A.remove_spurious_entries()
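# For illustration (not part of the original test): with n=4 and p=2 the
# stencil above corresponds to the dense matrix
#   [[ 4, -1, -1,  0],
#    [-1,  4, -1, -1],
#    [-1, -1,  4, -1],
#    [ 0, -1, -1,  4]]
# i.e. 2*p on the diagonal and -1 on the p sub- and super-diagonals,
# clipped at the (non-periodic) boundary.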
# Build exact solution
xe = StencilVector(V)
xe[s:e+1] = np.random.random(e+1-s)
# Tolerance for success: L2-norm of error in solution
tol = 1e-10
#---------------------------------------------------------------------------
# TEST
#---------------------------------------------------------------------------
# Title
print()
print("="*80)
print("SERIAL TEST: solve linear system A*x = b using preconditioned conjugate gradient")
print("="*80)
print()
# Manufacture right-hand-side vector from exact solution
b = A.dot(xe)
class LocallyOnlyJacobiSolver(LinearSolver):
@property
def space(self):
return V
def solve(self, rhs, out=None, transposed=False):
# (don't care about out or any other parameter here; it's only used locally)
return jacobi(A, rhs)
# Solve linear system using PCG (and CG)
# also does an interface test for the Jacobi preconditioner
x0, info0 = pcg( A, b, pc= None, tol=1e-12 )
x1, info1 = pcg( A, b, pc= "jacobi", tol=1e-12 )
x1b, info1b = pcg( A, b, pc= jacobi, tol=1e-12 )
x1c, info1c = pcg( A, b, pc= LocallyOnlyJacobiSolver(), tol=1e-12 )
x2, info2 = pcg( A, b, pc= "weighted_jacobi", tol=1e-12 )
# Verify correctness of calculation: L2-norm of error
err0 = x0-xe
err_norm0 = np.linalg.norm(err0.toarray())
err1 = x1-xe
err_norm1 = np.linalg.norm(err1.toarray())
err2 = x2-xe
err_norm2 = np.linalg.norm(err2.toarray())
#---------------------------------------------------------------------------
# TERMINAL OUTPUT
#---------------------------------------------------------------------------
print()
print( 'A =', A.toarray(), sep='\n' )
print( 'b =', b.toarray())
print( 'x1 =', x1.toarray())
print( 'x2 =', x2.toarray())
print( 'xe =', xe.toarray())
print( 'info1 (Jac) =', info1 )
print( 'info2 (w-Jac)=', info2 )
print()
print( "-"*40 )
print( "L2-norm of error in (PCG + Jacobi) solution = {:.2e}".format(err_norm1))
print( "L2-norm of error in solution (PCG + weighted Jacobi) solution = {:.2e}".format(err_norm2))
if err_norm0 < tol and err_norm1 < tol and err_norm2 < tol:
print( "PASSED" )
else:
print( "FAIL" )
print( "-"*40 )
#---------------------------------------------------------------------------
# PYTEST
#---------------------------------------------------------------------------
assert err_norm0 < tol and err_norm1 < tol and err_norm2 < tol
assert info1 == info1b and info1 == info1c
|
StarcoderdataPython
|
3533318
|
from mathlib.math import CustomMath
def test_sum_two_arguments():
first = 2
second = 11
custom_math = CustomMath()
result = custom_math.sum(first,second)
assert result == (first+second)
|
StarcoderdataPython
|
8000389
|
<gh_stars>1-10
"""
Create doc-doc edges
Steps:
1. Load all entities with their relations
2. Load relevant relations
3. Create adjacency matrix for word-word relations
4. Count number of relation between two documents
5. Weight relations and set a doc-doc edge weight
"""
from collections import defaultdict
from math import log
import pandas as pd
from tqdm import tqdm
from helper import file_utils as file, io_utils as io
from loader.wiki_api import get_safely
from os.path import exists
# Data loader
def get_vocab_ids():
entity2id_df = file.get_entity2id()
unmapped_entities = entity2id_df[entity2id_df["wikiID"] == "-1"].index
entity2id_df.drop(unmapped_entities, inplace=True)
return entity2id_df["wikiID"].to_numpy()
def get_triples(filtered=False):
return file.get_filtered_triples() if filtered else file.get_all_triples()
def get_documents():
cleaned_sentences = list(map(lambda doc: doc.split(" "), file.get_cleaned_sentences()))
return cleaned_sentences
# Create triple documents
def create_triples():
# Creates triples based on the vocab entities and relations (unfiltered)
triples = []
all_entities = file.get_vocab_entities()
for entity in all_entities.keys():
for relation in get_safely(all_entities, [entity, "relations"]).keys():
for relation_value in get_safely(all_entities, [entity, "relations", relation]).keys():
result = get_safely(all_entities, [entity, "relations", relation, relation_value])
if not isinstance(result, dict) or result.get("id") is None:
continue
else:
triple = [all_entities[entity]["id"], relation, result.get("id")]
triples.append(triple)
file.save_all_triples(triples)
return triples
def filter_triples():
# Adjust filters in `analyze_properties` and save them to the filtered_relations.csv
triples = get_triples()
relevant_relations = file.get_filtered_relations()
vocab_ids = get_vocab_ids()
old_size = triples.shape[0]
# Filter out all triples which are not contained in `filtered_relations.csv`
irrelevant_triples = triples[~triples["relation"].isin(relevant_relations)].index
triples.drop(irrelevant_triples, inplace=True)
# Filter out all triples which don't lead to another word from the node
unmatched_triples = triples[~triples["entity2"].isin(vocab_ids)].index
triples.drop(unmatched_triples, inplace=True)
# Filter out all relation to itself
self_relations = triples[triples["entity1"] == triples["entity2"]].index
triples.drop(self_relations, inplace=True)
# Drop duplicate relations
triples.drop_duplicates(inplace=True)
file.save_filtered_triples(triples)
print(f"Filtered out {old_size - triples.shape[0]} irrelevant triples...")
def setup_triples():
# Creates a filtered and unfiltered triples CVS file
create_triples()
filter_triples()
def generate_doc2relations():
setup_triples()
doc_nouns_norm = file.get_normalized_nouns() # Array with all nouns per doc // must be split
relations_array = []
ids = file.get_doc2id()
filtered_triples = get_triples(filtered=True) # Triples
for doc_index, doc in enumerate(doc_nouns_norm):
if doc == "":
relations_array.append("-")
continue
# All ID's of the normalized nouns in the current document
doc_ids = ids[ids["doc"] == doc_index]["wikiID"].tolist()
# Graph edges pointing to other entities
triples_out = filtered_triples[filtered_triples["entity1"].isin(doc_ids)]
triples_in = filtered_triples[filtered_triples["entity2"].isin(doc_ids)]
triples_in.columns = ["entity2", "relations", "entity1"]
triples_total = pd.concat([triples_out, triples_in])
all_outgoing_relations = triples_total["relations"].tolist()
if len(all_outgoing_relations) == 0:
all_outgoing_relations = "-"
relations_array.append(all_outgoing_relations)
file.save_doc2relations([" ".join(elem) for elem in relations_array])
# Adjacency matrices
def create_doc2doc_edges():
# if exists(io.get_document_triples_path()):
# print("Document triples pickle file adready exists, will not be created again")
# generate_idf_scores()
# apply_idf()
# return
generate_doc2relations()
generate_idf_scores()
doc_nouns_norm = file.get_normalized_nouns() # Array with all nouns per doc // must be split
filtered_triples = get_triples(filtered=True) # Triples
ids = file.get_doc2id()
triples = []
filtered_out_items = 0
with tqdm(total=len(doc_nouns_norm)) as bar:
for doc_index, doc in enumerate(doc_nouns_norm):
if doc == "":
bar.update(1)
continue
# All ID's of the normalized nouns in the current document
doc_ids = ids[ids["doc"] == doc_index]["wikiID"].tolist()
assert len(doc_ids) <= len(doc.split(" ")), f"{len(doc.split(' '))} vs. {len(doc_ids)}"
# Graph edges pointing to other entities
triples_out = filtered_triples[filtered_triples["entity1"].isin(doc_ids)]
triples_in = filtered_triples[filtered_triples["entity2"].isin(doc_ids)]
triples_in.columns = ["entity2", "relations", "entity1"]
triples_total = pd.concat([triples_out, triples_in])
doc_pointers = {}
for index, row in triples_total.iterrows():
entity1 = row["entity1"]
relation = row["relations"]
entity2 = row["entity2"]
# Look in which documents entity2 appears
pointer = ids[ids["wikiID"] == entity2]["doc"].tolist()
assert entity1 in doc_ids
for doc_id in pointer:
# Ignore doc2doc edges to doc itself
if doc_id <= doc_index:
continue
if doc_id in doc_pointers:
doc_pointers[doc_id].append(relation)
else:
doc_pointers[doc_id] = [relation]
for key in doc_pointers.keys():
# Filter out all docs with length below 2
if len(doc_pointers[key]) > 1:
triples.append([doc_index, key, len(doc_pointers[key]), "+".join(doc_pointers[key])])
bar.update(1)
data = pd.DataFrame(triples)
data.columns = ["doc1", "doc2", "relations", "detail"]
print(f"Highest number of relations between two docs: {max(data['relations'])}")
print(f"Created {len(triples)} doc2doc edges (filtered by threshold: {filtered_out_items})")
file.save_document_triples(data)
apply_idf()
def generate_idf_scores():
print("Generate IDF scores...")
doc_relations = file.get_doc2relations()
num_docs = len(doc_relations)
doc_word_freq = defaultdict(int)
relation_doc_freq = {}
relation_doc_freq_wiki = {}
relations_in_docs = defaultdict(set)
row = []
col = []
weight = []
weight_wiki = []
for i, rels in enumerate(doc_relations):
relations = rels.split()
for rel in relations:
relations_in_docs[rel].add(i)
doc_word_str = (i, rel)
doc_word_freq[doc_word_str] += 1
all_relations = file.get_all_relations()
for rel, doc_list in relations_in_docs.items():
count = all_relations[all_relations["ID"] == rel]["count"].tolist()
assert len(count) <= 1, (count, rel)
if len(count) == 1:
relation_doc_freq_wiki[rel] = count[0]
else:
relation_doc_freq_wiki[rel] = 0
relation_doc_freq[rel] = len(doc_list)
for i, rels in enumerate(doc_relations):
relations = rels.split()
doc_rel_set = set()
for rel in relations:
if rel in doc_rel_set or rel == "-":
continue
freq = doc_word_freq[(i, rel)]
row.append(i)
col.append(rel)
idf = log(1.0 * num_docs / relation_doc_freq[rel])
# Source: https://www.wikidata.org/wiki/Wikidata:Statistics on 17.12.2020 at 12:20
idf_wiki = log(1.0 * 91559495 / relation_doc_freq_wiki[rel])
weight.append(freq * idf)
weight_wiki.append(freq * idf_wiki)
doc_rel_set.add(rel)
data = pd.DataFrame({"doc": row, "relation": col, "idf": weight, "idf_wiki": weight_wiki})
file.save_doc2idf(data)
def apply_idf():
print("Applying IDF...")
doc_triples = file.get_document_triples()
idf = file.get_doc2idf()
data = []
with tqdm(total=doc_triples.shape[0]) as bar:
for index, row in doc_triples.iterrows():
doc1 = row["doc1"]
doc2 = row["doc2"]
relations = row["detail"].split("+")
score = 0
wiki_score = 0
for rel in relations:
scores = idf[(idf["relation"] == rel) & (idf["doc"] == doc1)][["idf", "idf_wiki"]]
idf_score = scores["idf"].tolist()
idf_wiki_score = scores["idf_wiki"].tolist()
assert len(idf_score) == 1 and len(idf_wiki_score) == 1
score += idf_score[0]
wiki_score += idf_wiki_score[0]
data.append([doc1, doc2, len(relations), score, wiki_score])
bar.update(1)
dataframe = pd.DataFrame(data)
dataframe.columns = ["doc1", "doc2", "count", "idf", "idf_wiki"]
normalize(dataframe)
def normalize(data):
base_edges = file.get_original_edges()
pmi_factor = base_edges[base_edges["edge_type"] == "pmi"]["weight"].max()
idf_factor = base_edges[base_edges["edge_type"] == "idf"]["weight"].max()
idf_max = data["idf"].max()
idf_min = data["idf"].min()
idf_wiki_max = data["idf_wiki"].max()
idf_wiki_min = data["idf_wiki"].min()
count_max = data["count"].max()
count_min = data["count"].min()
all = []
for index, row in data.iterrows():
doc1 = row["doc1"]
doc2 = row["doc2"]
count = row["count"]
idf_score = row["idf"]
idf_wiki_score = row["idf_wiki"]
count_norm = apply_normalization(count, count_min, count_max, idf_factor)
count_norm_pmi = apply_normalization(count, count_min, count_max, pmi_factor)
idf_norm = apply_normalization(idf_score, idf_min, idf_max, idf_factor)
idf_wiki_norm = apply_normalization(idf_wiki_score, idf_wiki_min, idf_wiki_max, idf_factor)
idf_norm_pmi = apply_normalization(idf_score, idf_min, idf_max, pmi_factor)
idf_wiki_norm_pmi = apply_normalization(idf_wiki_score, idf_wiki_min, idf_wiki_max, pmi_factor)
result = [doc1, doc2, count, idf_score, idf_wiki_score, count_norm, count_norm_pmi, idf_norm, idf_wiki_norm, idf_norm_pmi, idf_wiki_norm_pmi]
all.append(result)
df = pd.DataFrame(all)
df.columns = ["doc1", "doc2", "count", "idf", "idf_wiki", "count_norm", "count_norm_pmi", "idf_norm", "idf_wiki_norm", "idf_norm_pmi", "idf_wiki_norm_pmi"]
file.save_document_triples_metrics(df)
def apply_normalization(value, min, max, factor):
return ((value - min) / (max - min)) * factor
# windows_relation = []
# windows_document = []
#
#
# def windows_relation_base(cond_index=-1, window_size=15):
# global windows_relation
# triples = file.get_document_triples("mr")
# tmp = []
# for index, row in triples.iterrows():
# if index > 20:
# break
# if index <= cond_index:
# continue
# relations = row["detail"].split("+")
# doc_length = len(relations)
# if doc_length <= window_size:
# [tmp.append(r) for r in relations]
# else:
# assert False
#
# if len(tmp) >= window_size:
# windows_relation.append(tmp)
# tmp = []
# windows_relation_base(cond_index+1)
#
#
# def windows_document_base(cond_index=-1, window_size=15):
# global windows_document
# triples = file.get_document_triples("mr")
# tmp = []
# counter = 0
# for index, row in triples.iterrows():
# if index > 20:
# break
# if index <= cond_index:
# continue
# relations = row["detail"].split("+")
#
# if counter < window_size:
# [tmp.append(r) for r in relations]
#
# elif counter == window_size:
# windows_document.append(tmp)
# windows_document_base(cond_index+1)
# break
#
# elif index == triples.shape[0] - window_size
#
# counter += 1
#
# def generate_pmi(windows):
# number_sliding_windows = len(windows) #W
# counter = {}
# for window in windows:
# for relation in window:
# for x in windows:
# contained = relation in x
# if contained:
# if relation in counter:
# counter[relation] += 1
# else:
# counter[relation] = 1
# print(counter)
#
#
if __name__ == '__main__':
create_doc2doc_edges()
|
StarcoderdataPython
|
9746323
|
from django.contrib import admin
from common.actions import make_export_action
from search.models.alias import Alias
from search.models import SuggestionLog
from search.models.session_alias import SessionAlias
class AliasAdmin(admin.ModelAdmin):
list_display = ('id', 'alias', 'target')
actions = make_export_action("Export Alias to CSV")
class SuggestionAdmin(admin.ModelAdmin):
search_fields = ['search_query', 'session_id']
list_filter = ['num_suggestions']
list_display = ['session_hash', 'search_query', 'num_suggestions', 'created_at']
actions = make_export_action("Export Suggestions to CSV")
def session_hash(self, obj):
return obj.session_id[:6]
def get_queryset(self, request):
return SuggestionLog.objects.exclude(session_id__isnull=True).exclude(session_id__exact='')
class SessionAliasAdmin(admin.ModelAdmin):
list_display = ('id', 'alias', 'session')
search_fields = ('alias',)
admin.site.register(Alias, AliasAdmin)
admin.site.register(SuggestionLog, SuggestionAdmin)
admin.site.register(SessionAlias, SessionAliasAdmin)
|
StarcoderdataPython
|
11299897
|
<filename>L1Trigger/L1TCalorimeter/python/hackConditions_cff.py
#
# hachConditions.py Load ES Producers for any conditions not yet in GT...
#
# The intention is that this file should shrink with time as conditions are added to GT.
#
import FWCore.ParameterSet.Config as cms
import sys
from Configuration.Eras.Modifier_run2_HI_specific_cff import run2_HI_specific
#from Configuration.Eras.Era_Run2_2016_pA_cff import Run2_2016_pA
from Configuration.Eras.Modifier_pA_2016_cff import pA_2016
#
# Legacy Trigger: No Hacks Needed
#
from Configuration.Eras.Modifier_stage1L1Trigger_cff import stage1L1Trigger
from Configuration.Eras.Modifier_stage2L1Trigger_cff import stage2L1Trigger
def _load(process, f):
process.load(f)
#
# Stage-1 Trigger
#
# Switch between HI and PP calo configuration:
modifyL1TCalorimeterHackConditions_stage1HI = (stage1L1Trigger & ~stage2L1Trigger & run2_HI_specific).makeProcessModifier(lambda p: _load(p, "L1Trigger.L1TCalorimeter.caloConfigStage1HI_cfi"))
modifyL1TCalorimeterHackConditions_stage1PP = (stage1L1Trigger & ~stage2L1Trigger & ~run2_HI_specific).makeProcessModifier(lambda p: _load(p, "L1Trigger.L1TCalorimeter.caloConfigStage1PP_cfi"))
# Override Calo Scales:
modifyL1TCalorimeterHackConditions_stage1Common = (stage1L1Trigger & ~stage2L1Trigger).makeProcessModifier(lambda p: _load(p, "L1Trigger.L1TCalorimeter.caloScalesStage1_cff"))
# CaloParams is in the DB for Stage-1
#
# Stage-2 Trigger
#
modifyL1TCalorimeterHackConditions_stage2PA = (stage2L1Trigger & pA_2016).makeProcessModifier(lambda p: _load(p, "L1Trigger.L1TCalorimeter.caloStage2Params_2016_v3_3_1_HI_cfi"))
modifyL1TCalorimeterHackConditions_stage2PP = (stage2L1Trigger & ~pA_2016).makeProcessModifier(lambda p: _load(p, "L1Trigger.L1TCalorimeter.caloStage2Params_2017_v1_8_4_cfi"))
# What about CaloConfig? Related: How will we switch PP/HH?
#
|
StarcoderdataPython
|
1678043
|
<reponame>HSunboy/hue<filename>desktop/core/ext-py/eventlet-0.21.0/eventlet/hubs/poll.py<gh_stars>1-10
import errno
import sys
from eventlet import patcher
select = patcher.original('select')
time = patcher.original('time')
from eventlet.hubs.hub import BaseHub, READ, WRITE, noop
from eventlet.support import get_errno, clear_sys_exc_info
EXC_MASK = select.POLLERR | select.POLLHUP
READ_MASK = select.POLLIN | select.POLLPRI
WRITE_MASK = select.POLLOUT
class Hub(BaseHub):
def __init__(self, clock=None):
super(Hub, self).__init__(clock)
self.poll = select.poll()
# poll.modify is new to 2.6
try:
self.modify = self.poll.modify
except AttributeError:
self.modify = self.poll.register
def add(self, evtype, fileno, cb, tb, mac):
listener = super(Hub, self).add(evtype, fileno, cb, tb, mac)
self.register(fileno, new=True)
return listener
def remove(self, listener):
super(Hub, self).remove(listener)
self.register(listener.fileno)
def register(self, fileno, new=False):
mask = 0
if self.listeners[READ].get(fileno):
mask |= READ_MASK | EXC_MASK
if self.listeners[WRITE].get(fileno):
mask |= WRITE_MASK | EXC_MASK
try:
if mask:
if new:
self.poll.register(fileno, mask)
else:
try:
self.modify(fileno, mask)
except (IOError, OSError):
self.poll.register(fileno, mask)
else:
try:
self.poll.unregister(fileno)
except (KeyError, IOError, OSError):
# raised if we try to remove a fileno that was
# already removed/invalid
pass
except ValueError:
# fileno is bad, issue 74
self.remove_descriptor(fileno)
raise
def remove_descriptor(self, fileno):
super(Hub, self).remove_descriptor(fileno)
try:
self.poll.unregister(fileno)
except (KeyError, ValueError, IOError, OSError):
# raised if we try to remove a fileno that was
# already removed/invalid
pass
def do_poll(self, seconds):
# poll.poll expects integral milliseconds
return self.poll.poll(int(seconds * 1000.0))
def wait(self, seconds=None):
readers = self.listeners[READ]
writers = self.listeners[WRITE]
if not readers and not writers:
if seconds:
time.sleep(seconds)
return
try:
presult = self.do_poll(seconds)
except (IOError, select.error) as e:
if get_errno(e) == errno.EINTR:
return
raise
SYSTEM_EXCEPTIONS = self.SYSTEM_EXCEPTIONS
if self.debug_blocking:
self.block_detect_pre()
# Accumulate the listeners to call back to prior to
# triggering any of them. This is to keep the set
# of callbacks in sync with the events we've just
# polled for. It prevents one handler from invalidating
# another.
callbacks = set()
for fileno, event in presult:
if event & READ_MASK:
callbacks.add((readers.get(fileno, noop), fileno))
if event & WRITE_MASK:
callbacks.add((writers.get(fileno, noop), fileno))
if event & select.POLLNVAL:
self.remove_descriptor(fileno)
continue
if event & EXC_MASK:
callbacks.add((readers.get(fileno, noop), fileno))
callbacks.add((writers.get(fileno, noop), fileno))
for listener, fileno in callbacks:
try:
listener.cb(fileno)
except SYSTEM_EXCEPTIONS:
raise
except:
self.squelch_exception(fileno, sys.exc_info())
clear_sys_exc_info()
if self.debug_blocking:
self.block_detect_post()
|
StarcoderdataPython
|
11263779
|
# class Node:
# def __init__(self,value,next= None):
# self.value = value
# self.next = next
# class LinkedList:
# def __init__(self, head= None):
# self.head = head
# def __str__(self):
# current = self.head
# output = ""
# while current is not None:
# output += f"{{ {current.value} }} -> "
# current = current.next
# return output + "None"
# def append(self, value):
# newnode = Node(value)
# if self.head:
# current = self.head
# while current.next:
# current = current.next
# current.next = newnode
# else:
# self.head = newnode
# Interleave the nodes of two linked lists in place; returns the first list.
def zipLists(a, b):
cur1, cur2 = a.head, b.head
while cur1 and cur2:
save1 = cur1.next
save2 = cur2.next
cur1.next = cur2
cur2.next = save1
cur1 = save1
cur2 = save2
return a
# next1,cur1.next = cur1.next, cur2
# if next1:
# next2, cur2.next = cur2.next, next1
# cur1, cur2 = next1, next2
# return a
|
StarcoderdataPython
|
6521537
|
"""Generating spectra with Fluctuating Gunn Peterson Approximation (FGPA)
- The code is MPI-parallel and works on Illustris and MP-Gadget snapshots. The packages needed are:
- astropy
- fake_spectra
To get the FGPA spectra, refer to the helper script at
https://github.com/mahdiqezlou/LyTomo_Watershed/tree/dist/helper_scripts/FGPA
which follows the steps below :
1. From density.py use Gadget() to construct DM density
field on a grid of the desired size. For FGPA, the grid cells should on average
have 1 particle per cell.
2. Save the results from the previous step in the savedir directory. The density
is saved across several files, depending on the number of ranks used.
3. Run get_noiseless_map() or get_sample_spectra() functions here and pass the
directories for the density field above as savedir argument.
4. The output is a single hdf5 file containing either the full true flux map or the
random spectra sample.
"""
import os
import glob
import numpy as np
import h5py
from astropy.cosmology import Planck15 as cosmo
from scipy.ndimage import gaussian_filter1d
import fake_spectra.fluxstatistics as fs
from . import spectra_mocking as sm
from . import density
class Fgpa:
"""A class for FGPA method"""
def __init__(self, MPI, comm, z, boxsize, Ngrids, Npix, SmLD, SmLV, savedir, fix_mean_flux=True,
mean_flux=None, gamma=1.46, T0=1.94*10**4):
"""
Params :
comm : instance of the MPI communicator
z : redshift
savedir : the directory containing the density map
Ngrids : int, the size of the x and y dimensions of the desired map
Npix : number of desired pixels along the final map
gamma : The slope in temperature-density relation
T0 : Temperature at mean density
"""
# Initialize the MPI communication
self.MPI = MPI
self.comm = comm
self.rank = self.comm.Get_rank()
self.z = z
self.boxsize = boxsize
self.Ngrids = Ngrids
self.Npix = Npix
self.SmLD = SmLD
self.SmLV = SmLV
self.savedir = savedir
self.fix_mean_flux = fix_mean_flux
self.mean_flux = mean_flux
# Thermal parameters of the IGM
self.gamma= gamma
self.T0 = T0
# Physical constants
# Lya constants do not matter at all, since we fix the mean absorption
self.lambda_Lya = 1215.67
self.sigma_Lya = 1
self.mp = 1.67*10**(-27) # proton mass in Kg
self.kB=1.38*10**(-23) # Boltzman const in SI
def get_sample_spectra(self, num, seed=13, savefile='spectra_z2.4_FGPA_n1.hdf5'):
"""Get a sample of spectra to be used for mock map reconstruction
num : Number of desired random spectra
savefile : The name for hdf5 file to save the spectra file
"""
tau_conv = self.get_tau_conv()
if seed is not None:
cofm = self.get_cofm(num=num, Nvoxels=tau_conv.shape[0], seed=seed).astype(int)
else:
x, y = np.meshgrid(np.arange(self.Ngrids), np.arange(self.Ngrids))
cofm = np.zeros(shape=(self.Ngrids*self.Ngrids,2), dtype=int)
cofm[:,0] = np.ravel(x)
cofm[:,1] = np.ravel(y)
del x, y
ind = np.where(tau_conv!= -1)
tau_sampled = np.zeros(shape=(cofm.shape[0], tau_conv.shape[2]))
# Find the sample spectra on this rank
ind_cofm = np.where(np.isin(cofm[:,0], ind[0])*np.isin(cofm[:,1], ind[1]))[0]
tau_sampled[ind_cofm] = tau_conv[cofm[ind_cofm,0], cofm[ind_cofm,1],:]
### MPI part
# Make sure the data is contiguous in memeory
tau_sampled = np.ascontiguousarray(tau_sampled, np.float64)
# Add the results from all ranks
self.comm.Allreduce(self.MPI.IN_PLACE, tau_sampled, op=self.MPI.SUM)
# Scale the tau to get mean flux right
if self.fix_mean_flux:
if self.mean_flux is None:
mean_flux = sm.get_mean_flux(z=self.z)
else:
mean_flux =self.mean_flux
print('mean flux is ', mean_flux, flush=True)
scale = fs.mean_flux(tau_sampled, mean_flux_desired=mean_flux)
else:
scale=1
tau_sampled *= scale
if self.rank==0 :
print('Scaling tau with :', scale)
# Change cofm to kpc/h to record on spectra
cofm = cofm.astype(float)*(self.boxsize*1000/tau_conv.shape[0])
if self.rank == 0 :
with h5py.File(self.savedir+savefile,'w') as fw:
# We need all Header info to load the file with fake_spectra
# Some attrs are copied from hydro spectra, a more stable way
# should be implemented
fw.create_group('Header')
fw['Header'].attrs.create('redshift', self.z)
fw['Header'].attrs.create('box', self.boxsize*1000)
fw['Header'].attrs.create('discarded', 0)
fw['Header'].attrs.create('hubble', 0.6774)
fw['Header'].attrs.create('nbins', tau_sampled.shape[1])
fw['Header'].attrs.create('npart', np.array([0, 15625000000, 0, 0, 0, 0]))
fw['Header'].attrs.create('omegab', 0.04757289217927339)
fw['Header'].attrs.create('omegal', 0.6911)
fw['Header'].attrs.create('omegam', 0.3089)
fw['tau/H/1/1215'] = tau_sampled
fw.create_group('spectra')
fw['spectra/axis'] = 3*np.ones(shape=(cofm.shape[0],))
fw['spectra/cofm'] = cofm
fw['colden/H/1'] = np.zeros(shape=(1,))
fw.create_group('density_Weight_debsity')
fw.create_group('num_important')
fw.create_group('velocity')
fw.create_group('temperature')
fw.create_group('tau_obs')
def get_cofm(self, num, Nvoxels, seed):
""" A copy of fake_spectra.rand_spectra.get_cofm() to replicate the
spectra used for the hydro analysis.
seed : the seed for random sample
num : number of spectra
Nvoxels: the number of voxels along each side of the simulation
"""
np.random.seed(seed)
cofm = Nvoxels*np.random.random_sample((num,3))
return cofm
def get_noiseless_map(self, savefile='FGPA_flux_z2.4.hdf5'):
"""Calculate the true map on a mesh grid of size (Ngrids*Ngrids*Npix)
savefile : The name for hdf5 file to save final map on
"""
tau_conv = self.get_tau_conv()
### The work here is not very well balanced among ranks
if self.rank == 0:
if self.fix_mean_flux:
if self.mean_flux is None:
mean_flux = sm.get_mean_flux(z=self.z)
else:
mean_flux = self.mean_flux
print('mean flux is ', mean_flux, flush=True)
scale = fs.mean_flux(tau_conv, mean_flux_desired=mean_flux)
else :
scale = 1
### Resampling pixels along spectra
flux_conv = self.resample_flux(scale*tau_conv)
del tau_conv
with h5py.File(self.savedir+savefile,'w') as fw:
fw['map'] = flux_conv
self.comm.Barrier()
def resample_flux(self, tau):
"""
Resample spectra to get Npix pixels along line of sight. It is done by averaging the flux over
few consecutive pixels.
Params :
tau : Optical depth.
"""
Nz = tau.shape[2]
# Number of adjacent pixels along spectrum need to be averaged over
addpix = int(Nz / self.Npix)
flux = np.zeros(shape=(tau.shape[0], tau.shape[1], self.Npix), dtype=np.float64)
for t in range(self.Npix):
flux[:,:,t] = np.sum(np.exp(-tau[:,:,t*addpix:(t+1)*addpix]), axis=2)/addpix
flux = gaussian_filter1d(flux, sigma=1, mode='wrap')
return flux
def get_tau_conv(self):
"""
Calculate tau in redshift space
Convolving tau in real space with an approximation of the Voigt profile (a Gaussian profile)
Returns :
tau_conv : convoluted optical depth
"""
import glob
import os
from . import mpi4py_helper
fnames = glob.glob(os.path.join(self.savedir,'*_densfield.hdf5'))
fnames = mpi4py_helper.distribute_files(comm=self.comm, fnames=fnames)
tau_conv = None
c=0
for fn in fnames:
c+=1
print(self.rank, fn, flush=True)
if not os.path.exists(fn):
raise IOError('File '+fn+' does not exist!')
with h5py.File(fn,'r') as f:
if tau_conv is None:
# nbodykit does not break the data along the z direction, so Nz is the
# size of the initial density map in all 3 dimensions
Nz = f['DM/dens'][:].shape[2]
dvbin = cosmo.H(self.z).value*self.boxsize/(cosmo.h*Nz*(1+self.z))
up = np.arange(Nz)*dvbin
# Approx position of the desired sightlines. The approximation should be ok
# for FGPA since the density map has very fine voxels
x, y = int(Nz/self.Ngrids)*np.arange(self.Ngrids), int(Nz/self.Ngrids)*np.arange(self.Ngrids)
# Which sightlines are on this rank
indx = np.where(np.isin(x, f['DM/x'][:]))[0]
if indx.size == 0:
# Some ranks may not hold any sightlines at all
print('The sightline coordinates are not on density grids on file ', fn, flush=True)
print("The y desnity grid coordinates are = ", f['DM/y'][:], flush=True)
continue
xstart, xend = indx[0], indx[-1]
indy = np.where(np.isin(y, f['DM/y'][:]))[0]
if indy.size == 0:
# Some ranks may not hold any sightlines at all
print('The sightline coordinates are not on density grids on file ', fn, flush=True)
print("The y desnity grid coordinates are = ", f['DM/y'][:], flush=True)
continue
ystart, yend = indy[0], indy[-1]
print('Sightlines on Rank =', self.rank, (int(xstart), int(xend)), (int(ystart), int(yend)) ,flush=True)
# i, j are indices for the final flux map (Ngrids * Ngrids)
tau_conv = np.zeros(shape=(indx.size, indy.size, Nz))
for i in range(indx.size):
if self.rank ==1:
print(str(int(100*c/len(fnames)))+'%', flush=True )
# Indices on f['DM/dens'] map
ic = x[indx[i]] - f['DM/x'][0]
for j in range(indy.size):
# Indices on f['DM/dens'] map
jc = y[indy[j]] - f['DM/y'][0]
dens = f['DM/dens'][ic,jc,:]
tau_real = self.get_tau_real(f['DM/dens'][ic,jc,:])
# Peculiar velocity addition
ind = np.where((dens != 0))
vel_pec = np.zeros_like(f['DM/pz'][ic,jc,:])
# Convert momentum to velocity
vel_pec[ind] = f['DM/pz'][ic,jc,:][ind]/dens[ind]
vel_pec = gaussian_filter1d(vel_pec, self.SmLV)
dens = gaussian_filter1d(dens, self.SmLD)
u0 = up + vel_pec
btherm = self.get_btherm(dens)
# To avoide devision by 0, if b_threm == 0, pass a nonzero value since
# tau_real is 0 in that voxel anyway, tau_conv will be 0.
btherm[np.where(btherm==0)] = 1.0
for k in range(Nz):
dvel = np.abs(up[k]-u0)
# Periodic Boundary
indv = np.where(dvel > dvbin*Nz/2)
dvel[indv] = dvbin*Nz - dvel[indv]
Voight = (1/btherm)*np.exp(-(dvel/btherm)**2)
tau_conv[i,j,k] = np.sum(tau_real*Voight*dvbin)
# save the tau_conv on file for density files containing the desired sightlines
with h5py.File(fn.replace('densfield','fgpa_LessMem'), 'w') as fw:
fw['tau_conv'] = tau_conv
fw['indx'] = indx
fw['indy'] = indy
self.comm.Barrier()
if self.rank==0:
# Read the saved tau_conv files
tau_conv = self.add_up_tau_conv(Nz=Nz)
else:
tau_conv = None
self.comm.Barrier()
print('Rank', self.rank, 'is done with tau_conv', flush=True)
return tau_conv
def add_up_tau_conv(self, Nz):
"""Add individual tau_conv files to form the full map"""
tau_conv = -1*np.ones(shape=(self.Ngrids, self.Ngrids, Nz))
tau_files = glob.glob(os.path.join(self.savedir,'*_fgpa_LessMem.hdf5'))
for fn in tau_files:
with h5py.File(fn,'r') as f:
indx = f['indx'][:]
indy = f['indy'][:]
indx, indy = np.meshgrid(indx,indy, indexing='ij')
tau_conv[indx, indy, :] = f['tau_conv'][:]
assert np.all(tau_conv != -1)
return tau_conv
def get_tau_real(self, Delta):
""" Get tau in real space
The amplitude needs to get fixed with mean
observed flux or 1D power
z : redshift
"""
return (self.lambda_Lya*self.sigma_Lya/cosmo.H(self.z).value)*self.get_nHI(Delta)
def get_nHI(self, Delta):
""" Calculate Neutral Hydrogen Density
The amplitude needs to get fixed with mean flux
"""
return Delta**(2-0.7*(self.gamma -1))
def get_btherm(self, Delta):
""" Thermal Doppler parameter in km/s"""
return np.sqrt(2*self.kB*self.get_Temp(Delta)/self.mp)/1000
def get_Temp(self, Delta):
""" Temperature density relation
Delta : (1 + delta_b)
"""
return self.T0*Delta**(self.gamma-1)
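if __name__ == '__main__':
    # Minimal driver sketch (an assumption, not part of the original module):
    # the redshift, box size, grid sizes, smoothing lengths and savedir below
    # are placeholders, and the relative imports above require running this
    # as part of the package (e.g. via `python -m` under mpirun).
    from mpi4py import MPI
    comm = MPI.COMM_WORLD
    fgpa = Fgpa(MPI, comm, z=2.4, boxsize=205, Ngrids=205, Npix=205,
                SmLD=1, SmLV=1, savedir='./densfield/')
    fgpa.get_noiseless_map(savefile='FGPA_flux_z2.4.hdf5')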
|
StarcoderdataPython
|
1652657
|
from flask import Flask
# from config import Config
app = Flask(__name__)
from application import routes
|
StarcoderdataPython
|
9793758
|
<gh_stars>1-10
# Copyright 2016 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from benchmarks import media_router_dialog_metric
from benchmarks import media_router_cpu_memory_metric
from telemetry.page import page_test
class MediaRouterDialogTest(page_test.PageTest):
"""Performs a measurement of Media Route dialog latency."""
def __init__(self):
super(MediaRouterDialogTest, self).__init__()
self._metric = media_router_dialog_metric.MediaRouterDialogMetric()
def DidNavigateToPage(self, page, tab):
self._metric.Start(page, tab)
def ValidateAndMeasurePage(self, page, tab, results):
self._metric.Stop(page, tab)
self._metric.AddResults(tab, results)
class MediaRouterCPUMemoryTest(page_test.PageTest):
"""Performs a measurement of Media Route CPU/memory usage."""
def __init__(self):
super(MediaRouterCPUMemoryTest, self).__init__()
self._metric = media_router_cpu_memory_metric.MediaRouterCPUMemoryMetric()
def ValidateAndMeasurePage(self, page, tab, results):
self._metric.AddResults(tab, results)
|
StarcoderdataPython
|
1643201
|
<gh_stars>1-10
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
import os.path
MPATH = "44'/77'/"
WIF_PREFIX = 212 # 212 = d4
MAGIC_BYTE = 30
TESTNET_WIF_PREFIX = 239
TESTNET_MAGIC_BYTE = 139
DEFAULT_PROTOCOL_VERSION = 70913
MINIMUM_FEE = 0.0001 # minimum QMC/kB
starting_width = 933
starting_height = 666
APPDATA_DIRNAME = ".QMCTorrentTool"
home_dir = os.path.expanduser('~')
user_dir = os.path.join(home_dir, APPDATA_DIRNAME)
log_File = os.path.join(user_dir, 'lastLogs.html')
masternodes_File = 'masternodes.json'
rpc_File = 'rpcServer.json'
cache_File = 'cache.json'
DEFAULT_RPC_CONF = {
"rpc_ip": "127.0.0.1",
"rpc_port": 55777,
"rpc_user": "myUsername",
"rpc_password": "<PASSWORD>"
}
DEFAULT_MN_CONF = {
"name": "",
"ip": "",
"port": 51472,
"mnPrivKey": "",
"isTestnet": 0,
"isHardware": True,
"hwAcc": 0,
"collateral": {}
}
DEFAULT_CACHE = {
"lastAddress": "",
"window_width": starting_width,
"window_height": starting_height,
"splitter_sizes": [342, 133],
"mnList_order": {},
"useSwiftX": False,
"votingMasternodes": [],
"votingDelayCheck": False,
"votingDelayNeg": 0,
"votingDelayPos": 300
}
|
StarcoderdataPython
|
124186
|
from pathlib import Path
import configparser
from logger import logger
def change_config(**options):
"""takes arbitrary keyword arguments and
writes their values into the config"""
# overwrite values
for k, v in options.items():
config.set('root', k, v)
# write back, but without the mandatory header
config_string = '\n'.join(['{}={}'.format(k, v)
for (k, v) in config['root'].items()])
with open(str(config_path), 'w') as f:
f.write(config_string)
f.write('\n')
def get_config(key):
return config['root'][key]
# load config file for both server.py and fader.py
config = None
config_path = None
try:
config_path = Path(Path(__file__).resolve().parent,
Path('../config')).resolve()
with open(str(config_path), 'r') as f:
config = configparser.RawConfigParser()
config.read_string('[root]\n' + f.read())
if not 'raspberry_port' in config['root']:
# for the port I just went with some random unassigned port from this list:
# https://www.iana.org/assignments/service-names-port-numbers/service-names-port-numbers.xhtml?search=Unassigned
change_config(raspberry_port=3546)
if not 'raspberry_ip' in config['root']:
# 0.0.0.0 works if you send requests from another local machine to the raspberry
# 'localhost' would only allow requests from within the raspberry
change_config(raspberry_ip='0.0.0.0')
except FileNotFoundError:
logger.warning(
'config file could not be found! falling back to default port 3546')
|
StarcoderdataPython
|
6501686
|
<filename>arachnado/downloadermiddlewares/droprequests.py
# -*- coding: utf-8 -*-
from __future__ import absolute_import
import warnings
from scrapy.exceptions import IgnoreRequest
class DropRequestsMiddleware:
"""
Downloader middleware to drop a requests if a certain condition is met.
It calls ``spider.should_drop_request(request)`` method to check if a
request should be downloaded or dropped; spider must implement this method.
"""
def __init__(self, stats):
self.stats = stats
@classmethod
def from_crawler(cls, crawler):
return cls(stats=crawler.stats)
def process_request(self, request, spider):
if not hasattr(spider, 'should_drop_request'):
return
if not callable(spider.should_drop_request):
warnings.warn('spider %s has "should_drop_request" attribute, '
'but it is not callable' % spider)
return
if spider.should_drop_request(request):
self.stats.inc_value("DropRequestsMiddleware/dropped")
raise IgnoreRequest()
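# Example spider hook (illustrative, not part of Arachnado itself): a spider
# using this middleware could implement, e.g.,
#
#     def should_drop_request(self, request):
#         return "/private/" in request.url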
|
StarcoderdataPython
|