content
stringlengths
66
45k
language
stringclasses
11 values
license
stringclasses
14 values
path
stringlengths
20
176
annotation_id
stringlengths
36
36
pii
stringlengths
2
19.6k
pii_modified
stringlengths
2
19.6k
class Hyperspec < Formula desc "Common Lisp ANSI-standard Hyperspec" homepage "http://www.lispworks.com/documentation/common-lisp.html" url "ftp://ftp.lispworks.com/pub/software_tools/reference/HyperSpec-7-0.tar.gz" version "7.0" sha256 "1ac1666a9dc697dbd8881262cad4371bcd2e9843108b643e2ea93472ba85d7c3" bottle :unneeded def install doc.install Dir["*"] end def caveats; <<-EOS.undent To use this copy of the HyperSpec with SLIME, put the following in you .emacs intialization file: (eval-after-load "slime" '(progn (setq common-lisp-hyperspec-root "#{HOMEBREW_PREFIX}/share/doc/hyperspec/HyperSpec/") (setq common-lisp-hyperspec-symbol-table (concat common-lisp-hyperspec-root "Data/Map_Sym.txt")) (setq common-lisp-hyperspec-issuex-table (concat common-lisp-hyperspec-root "Data/Map_IssX.txt")))) EOS end test do assert (doc/"HyperSpec-README.text").exist? end end
Ruby
BSD-2-Clause
AGWA-forks/homebrew/Library/Formula/hyperspec.rb
5130d6a5-7d6d-4bd2-ae1c-3fcdd0419491
[]
[]
#!/usr/bin/env python # -*- coding: utf-8 -*- # # Copyright 2015 Felix Wunsch, Communications Engineering Lab (CEL) / Karlsruhe Institute of Technology (KIT) <wunsch.felix@googlemail.com>. # # This is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3, or (at your option) # any later version. # # This software is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this software; see the file COPYING. If not, write to # the Free Software Foundation, Inc., 51 Franklin Street, # Boston, MA 02110-1301, USA. # from gnuradio import gr, gr_unittest from gnuradio import blocks import ieee802_15_4_swig as ieee802_15_4 import numpy as np class qa_dqpsk_soft_demapper_cc (gr_unittest.TestCase): def setUp (self): self.tb = gr.top_block () def tearDown (self): self.tb = None def test_001_t (self): # set up fg pi=np.pi data_in = [0, pi/2, pi, -pi/2, pi/2, -pi/2, -pi/2, 0, 0, pi, pi/2, pi/2] data_in = [np.exp(1j*i) for i in data_in] data_in = [i*np.exp(1j*pi/4) for i in data_in] self.src = blocks.vector_source_c(data_in) self.dqpsk = ieee802_15_4.dqpsk_soft_demapper_cc(framelen=6) self.snk = blocks.vector_sink_c(1) self.tb.connect(self.src, self.dqpsk, self.snk) self.tb.run () # check data data_out = self.snk.data() ref = [0, pi/2, pi, -pi/2, pi/2, pi, -pi/2, 0, 0, pi, pi, pi/2] ref = np.array([np.exp(1j*i) for i in ref]) print "angle in:", np.angle(data_in)/pi*180 print "angle out:", np.angle(data_out)/pi*180 print "angle ref:", np.angle(ref)/pi*180 self.assertFloatTuplesAlmostEqual(ref, data_out, 5) if __name__ == '__main__': gr_unittest.run(qa_dqpsk_soft_demapper_cc)
Python
BSD-2-Clause
xueyuecanfeng/C-LQI/gr-ieee802-15-4/python/qa_dqpsk_soft_demapper_cc.py
4828e517-f808-4a8c-9022-607d17688618
[{"tag": "EMAIL", "value": "wunsch.felix@googlemail.com", "start": 160, "end": 187, "context": " (CEL) / Karlsruhe Institute of Technology (KIT) <wunsch.felix@googlemail.com>.\n# \n# This is free software; you can redistribut"}, {"tag": "NAME", "value": "Felix Wunsch", "start": 66, "end": 78, "context": "ython\n# -*- coding: utf-8 -*-\n# \n# Copyright 2015 Felix Wunsch, Communications Engineering Lab (CEL) / Karlsruhe"}]
[{"tag": "EMAIL", "value": "wunsch.felix@googlemail.com", "start": 160, "end": 187, "context": " (CEL) / Karlsruhe Institute of Technology (KIT) <wunsch.felix@googlemail.com>.\n# \n# This is free software; you can redistribut"}, {"tag": "NAME", "value": "Felix Wunsch", "start": 66, "end": 78, "context": "ython\n# -*- coding: utf-8 -*-\n# \n# Copyright 2015 Felix Wunsch, Communications Engineering Lab (CEL) / Karlsruhe"}]
/** * angular-strap * @version v2.3.12 - 2018-05-03 * @link http://mgcrea.github.io/angular-strap * @author Olivier Louvignes <olivier@mg-crea.com> (https://github.com/mgcrea) * @license MIT License, http://www.opensource.org/licenses/MIT */ 'use strict';angular.module('mgcrea.ngStrap.tab').run(['$templateCache',function(e){e.put('tab/tab.tpl.html','<ul class="nav" ng-class="$navClass" role="tablist"><li role="presentation" ng-repeat="$pane in $panes track by $index" ng-class="[ $isActive($pane, $index) ? $activeClass : \'\', $pane.disabled ? \'disabled\' : \'\' ]"><a role="tab" data-toggle="tab" ng-click="!$pane.disabled && $setActive($pane.name || $index)" data-index="{{ $index }}" ng-bind-html="$pane.title" aria-controls="$pane.title" ng-keypress="$onKeyPress($event, $pane.name || $index)" href=""></a></li></ul><div ng-transclude class="tab-content"></div>')}]);
JavaScript
MIT
oncompass/angular-strap/dist/modules/tab.tpl.min.js
0afd8bdc-9451-4fa2-8e1e-c3ad33d013f9
[{"tag": "EMAIL", "value": "olivier@mg-crea.com", "start": 131, "end": 150, "context": "ub.io/angular-strap\n * @author Olivier Louvignes <olivier@mg-crea.com> (https://github.com/mgcrea)\n * @license MIT Lice"}, {"tag": "USERNAME", "value": "mgcrea", "start": 172, "end": 178, "context": "uvignes <olivier@mg-crea.com> (https://github.com/mgcrea)\n * @license MIT License, http://www.opensource.o"}, {"tag": "NAME", "value": "Olivier Louvignes", "start": 112, "end": 129, "context": " http://mgcrea.github.io/angular-strap\n * @author Olivier Louvignes <olivier@mg-crea.com> (https://github.com/mgcrea)"}]
[{"tag": "EMAIL", "value": "olivier@mg-crea.com", "start": 131, "end": 150, "context": "ub.io/angular-strap\n * @author Olivier Louvignes <olivier@mg-crea.com> (https://github.com/mgcrea)\n * @license MIT Lice"}, {"tag": "USERNAME", "value": "mgcrea", "start": 172, "end": 178, "context": "uvignes <olivier@mg-crea.com> (https://github.com/mgcrea)\n * @license MIT License, http://www.opensource.o"}, {"tag": "NAME", "value": "Olivier Louvignes", "start": 112, "end": 129, "context": " http://mgcrea.github.io/angular-strap\n * @author Olivier Louvignes <olivier@mg-crea.com> (https://github.com/mgcrea)"}]
/* * Copyright (c) 2018, The OpenThread Authors. * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * 1. Redistributions of source code must retain the above copyright * notice, this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright * notice, this list of conditions and the following disclaimer in the * documentation and/or other materials provided with the distribution. * 3. Neither the name of the copyright holder nor the * names of its contributors may be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE * ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE * LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR * CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF * SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) * ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE * POSSIBILITY OF SUCH DAMAGE. */ /** * @file * This file implements FTD-specific mesh forwarding of IPv6/6LoWPAN messages. 
*/ #if OPENTHREAD_FTD #include "mesh_forwarder.hpp" #include "common/locator-getters.hpp" #include "common/logging.hpp" #include "meshcop/meshcop.hpp" #include "net/ip6.hpp" #include "net/tcp.hpp" #include "net/udp6.hpp" namespace ot { otError MeshForwarder::SendMessage(Message &aMessage) { Mle::MleRouter &mle = Get<Mle::MleRouter>(); otError error = OT_ERROR_NONE; Neighbor * neighbor; aMessage.SetOffset(0); aMessage.SetDatagramTag(0); mSendQueue.Enqueue(aMessage); switch (aMessage.GetType()) { case Message::kTypeIp6: { Ip6::Header ip6Header; aMessage.Read(0, sizeof(ip6Header), &ip6Header); if (ip6Header.GetDestination().IsMulticast()) { // For traffic destined to multicast address larger than realm local, generally it uses IP-in-IP // encapsulation (RFC2473), with outer destination as ALL_MPL_FORWARDERS. So here if the destination // is multicast address larger than realm local, it should be for indirection transmission for the // device's sleepy child, thus there should be no direct transmission. if (!ip6Header.GetDestination().IsMulticastLargerThanRealmLocal()) { // schedule direct transmission aMessage.SetDirectTransmission(); } if (aMessage.GetSubType() != Message::kSubTypeMplRetransmission) { if (ip6Header.GetDestination() == mle.GetLinkLocalAllThreadNodesAddress() || ip6Header.GetDestination() == mle.GetRealmLocalAllThreadNodesAddress()) { // destined for all sleepy children for (Child &child : Get<ChildTable>().Iterate(Child::kInStateValidOrRestoring)) { if (!child.IsRxOnWhenIdle()) { mIndirectSender.AddMessageForSleepyChild(aMessage, child); } } } else { // destined for some sleepy children which subscribed the multicast address. 
for (Child &child : Get<ChildTable>().Iterate(Child::kInStateValidOrRestoring)) { if (mle.IsSleepyChildSubscribed(ip6Header.GetDestination(), child)) { mIndirectSender.AddMessageForSleepyChild(aMessage, child); } } } } } else if ((neighbor = mle.GetNeighbor(ip6Header.GetDestination())) != nullptr && !neighbor->IsRxOnWhenIdle() && !aMessage.GetDirectTransmission()) { // destined for a sleepy child Child &child = *static_cast<Child *>(neighbor); mIndirectSender.AddMessageForSleepyChild(aMessage, child); } else { // schedule direct transmission aMessage.SetDirectTransmission(); } break; } case Message::kTypeSupervision: { Child *child = Get<Utils::ChildSupervisor>().GetDestination(aMessage); OT_ASSERT((child != nullptr) && !child->IsRxOnWhenIdle()); mIndirectSender.AddMessageForSleepyChild(aMessage, *child); break; } default: aMessage.SetDirectTransmission(); break; } mScheduleTransmissionTask.Post(); return error; } void MeshForwarder::HandleResolved(const Ip6::Address &aEid, otError aError) { Message * cur, *next; Ip6::Address ip6Dst; bool enqueuedMessage = false; for (cur = mResolvingQueue.GetHead(); cur; cur = next) { next = cur->GetNext(); if (cur->GetType() != Message::kTypeIp6) { continue; } cur->Read(Ip6::Header::GetDestinationOffset(), sizeof(ip6Dst), &ip6Dst); if (ip6Dst == aEid) { mResolvingQueue.Dequeue(*cur); if (aError == OT_ERROR_NONE) { mSendQueue.Enqueue(*cur); enqueuedMessage = true; } else { LogMessage(kMessageDrop, *cur, nullptr, aError); cur->Free(); } } } if (enqueuedMessage) { mScheduleTransmissionTask.Post(); } } otError MeshForwarder::EvictMessage(Message::Priority aPriority) { otError error = OT_ERROR_NOT_FOUND; PriorityQueue *queues[] = {&mResolvingQueue, &mSendQueue}; Message * evict = nullptr; // search for a lower priority message to evict (choose lowest priority message among all queues) for (PriorityQueue *queue : queues) { for (uint8_t priority = 0; priority < aPriority; priority++) { for (Message *message = 
queue->GetHeadForPriority(static_cast<Message::Priority>(priority)); message; message = message->GetNext()) { if (message->GetPriority() != priority) { break; } if (message->GetDoNotEvict()) { continue; } evict = message; aPriority = static_cast<Message::Priority>(priority); break; } } } if (evict != nullptr) { ExitNow(error = OT_ERROR_NONE); } for (uint8_t priority = aPriority; priority < Message::kNumPriorities; priority++) { // search for an equal or higher priority indirect message to evict for (Message *message = mSendQueue.GetHeadForPriority(aPriority); message; message = message->GetNext()) { if (message->GetPriority() != priority) { break; } if (message->GetDoNotEvict()) { continue; } if (message->IsChildPending()) { evict = message; ExitNow(error = OT_ERROR_NONE); } } } exit: if (error == OT_ERROR_NONE) { RemoveMessage(*evict); } return error; } void MeshForwarder::RemoveMessages(Child &aChild, Message::SubType aSubType) { Mle::MleRouter &mle = Get<Mle::MleRouter>(); Message * nextMessage; for (Message *message = mSendQueue.GetHead(); message; message = nextMessage) { nextMessage = message->GetNext(); if ((aSubType != Message::kSubTypeNone) && (aSubType != message->GetSubType())) { continue; } if (mIndirectSender.RemoveMessageFromSleepyChild(*message, aChild) != OT_ERROR_NONE) { switch (message->GetType()) { case Message::kTypeIp6: { Ip6::Header ip6header; IgnoreReturnValue(message->Read(0, sizeof(ip6header), &ip6header)); if (&aChild == static_cast<Child *>(mle.GetNeighbor(ip6header.GetDestination()))) { message->ClearDirectTransmission(); } break; } case Message::kType6lowpan: { Lowpan::MeshHeader meshHeader; IgnoreError(meshHeader.ParseFrom(*message)); if (&aChild == static_cast<Child *>(mle.GetNeighbor(meshHeader.GetDestination()))) { message->ClearDirectTransmission(); } break; } default: break; } } if (!message->IsChildPending() && !message->GetDirectTransmission()) { if (mSendMessage == message) { mSendMessage = nullptr; } 
mSendQueue.Dequeue(*message); message->Free(); } } } void MeshForwarder::RemoveDataResponseMessages(void) { Ip6::Header ip6Header; for (Message *message = mSendQueue.GetHead(); message; message = message->GetNext()) { if (message->GetSubType() != Message::kSubTypeMleDataResponse) { continue; } message->Read(0, sizeof(ip6Header), &ip6Header); if (!(ip6Header.GetDestination().IsMulticast())) { for (Child &child : Get<ChildTable>().Iterate(Child::kInStateAnyExceptInvalid)) { IgnoreError(mIndirectSender.RemoveMessageFromSleepyChild(*message, child)); } } if (mSendMessage == message) { mSendMessage = nullptr; } mSendQueue.Dequeue(*message); LogMessage(kMessageDrop, *message, nullptr, OT_ERROR_NONE); message->Free(); } } void MeshForwarder::SendMesh(Message &aMessage, Mac::TxFrame &aFrame) { uint16_t fcf; // initialize MAC header fcf = Mac::Frame::kFcfFrameData | Mac::Frame::kFcfPanidCompression | Mac::Frame::kFcfDstAddrShort | Mac::Frame::kFcfSrcAddrShort | Mac::Frame::kFcfAckRequest | Mac::Frame::kFcfSecurityEnabled; Get<Mac::Mac>().UpdateFrameControlField(aMessage.IsTimeSync(), fcf); aFrame.InitMacHeader(fcf, Mac::Frame::kKeyIdMode1 | Mac::Frame::kSecEncMic32); aFrame.SetDstPanId(Get<Mac::Mac>().GetPanId()); aFrame.SetDstAddr(mMacDest.GetShort()); aFrame.SetSrcAddr(mMacSource.GetShort()); // write payload OT_ASSERT(aMessage.GetLength() <= aFrame.GetMaxPayloadLength()); aMessage.Read(0, aMessage.GetLength(), aFrame.GetPayload()); aFrame.SetPayloadLength(aMessage.GetLength()); mMessageNextOffset = aMessage.GetLength(); } otError MeshForwarder::UpdateMeshRoute(Message &aMessage) { otError error = OT_ERROR_NONE; Lowpan::MeshHeader meshHeader; Neighbor * neighbor; uint16_t nextHop; IgnoreError(meshHeader.ParseFrom(aMessage)); nextHop = Get<Mle::MleRouter>().GetNextHop(meshHeader.GetDestination()); if (nextHop != Mac::kShortAddrInvalid) { neighbor = Get<Mle::MleRouter>().GetNeighbor(nextHop); } else { neighbor = 
Get<Mle::MleRouter>().GetNeighbor(meshHeader.GetDestination()); } if (neighbor == nullptr) { ExitNow(error = OT_ERROR_DROP); } mMacDest.SetShort(neighbor->GetRloc16()); mMacSource.SetShort(Get<Mac::Mac>().GetShortAddress()); mAddMeshHeader = true; mMeshDest = meshHeader.GetDestination(); mMeshSource = meshHeader.GetSource(); exit: return error; } otError MeshForwarder::UpdateIp6RouteFtd(Ip6::Header &ip6Header, Message &aMessage) { Mle::MleRouter &mle = Get<Mle::MleRouter>(); otError error = OT_ERROR_NONE; Neighbor * neighbor; if (aMessage.GetOffset() > 0) { mMeshDest = aMessage.GetMeshDest(); } else if (mle.IsRoutingLocator(ip6Header.GetDestination())) { uint16_t rloc16 = ip6Header.GetDestination().GetIid().GetLocator(); VerifyOrExit(mle.IsRouterIdValid(Mle::Mle::RouterIdFromRloc16(rloc16)), error = OT_ERROR_DROP); mMeshDest = rloc16; } else if (mle.IsAnycastLocator(ip6Header.GetDestination())) { uint16_t aloc16 = ip6Header.GetDestination().GetIid().GetLocator(); if (aloc16 == Mle::kAloc16Leader) { mMeshDest = Mle::Mle::Rloc16FromRouterId(mle.GetLeaderId()); } else if (aloc16 <= Mle::kAloc16DhcpAgentEnd) { uint16_t agentRloc16; uint8_t routerId; VerifyOrExit((Get<NetworkData::Leader>().GetRlocByContextId( static_cast<uint8_t>(aloc16 & Mle::kAloc16DhcpAgentMask), agentRloc16) == OT_ERROR_NONE), error = OT_ERROR_DROP); routerId = Mle::Mle::RouterIdFromRloc16(agentRloc16); // if agent is active router or the child of the device if ((Mle::Mle::IsActiveRouter(agentRloc16)) || (Mle::Mle::Rloc16FromRouterId(routerId) == mle.GetRloc16())) { mMeshDest = agentRloc16; } else { // use the parent of the ED Agent as Dest mMeshDest = Mle::Mle::Rloc16FromRouterId(routerId); } } else if (aloc16 <= Mle::kAloc16ServiceEnd) { SuccessOrExit(error = GetDestinationRlocByServiceAloc(aloc16, mMeshDest)); } else if (aloc16 <= Mle::kAloc16CommissionerEnd) { SuccessOrExit(error = MeshCoP::GetBorderAgentRloc(Get<ThreadNetif>(), mMeshDest)); } #if (OPENTHREAD_CONFIG_THREAD_VERSION >= 
OT_THREAD_VERSION_1_2) else if (aloc16 == Mle::kAloc16BackboneRouterPrimary) { VerifyOrExit(Get<BackboneRouter::Leader>().HasPrimary(), error = OT_ERROR_DROP); mMeshDest = Get<BackboneRouter::Leader>().GetServer16(); } #endif else { // TODO: support for Neighbor Discovery Agent ALOC ExitNow(error = OT_ERROR_DROP); } } else if ((neighbor = mle.GetNeighbor(ip6Header.GetDestination())) != nullptr) { mMeshDest = neighbor->GetRloc16(); } else if (Get<NetworkData::Leader>().IsOnMesh(ip6Header.GetDestination())) { SuccessOrExit(error = Get<AddressResolver>().Resolve(ip6Header.GetDestination(), mMeshDest)); } else { IgnoreError(Get<NetworkData::Leader>().RouteLookup(ip6Header.GetSource(), ip6Header.GetDestination(), nullptr, &mMeshDest)); } VerifyOrExit(mMeshDest != Mac::kShortAddrInvalid, error = OT_ERROR_DROP); mMeshSource = Get<Mac::Mac>().GetShortAddress(); SuccessOrExit(error = mle.CheckReachability(mMeshDest, ip6Header)); aMessage.SetMeshDest(mMeshDest); mMacDest.SetShort(mle.GetNextHop(mMeshDest)); if (mMacDest.GetShort() != mMeshDest) { // destination is not neighbor mMacSource.SetShort(mMeshSource); mAddMeshHeader = true; } exit: return error; } otError MeshForwarder::GetIp6Header(const uint8_t * aFrame, uint16_t aFrameLength, const Mac::Address &aMacSource, const Mac::Address &aMacDest, Ip6::Header & aIp6Header) { uint8_t headerLength; bool nextHeaderCompressed; return DecompressIp6Header(aFrame, aFrameLength, aMacSource, aMacDest, aIp6Header, headerLength, nextHeaderCompressed); } void MeshForwarder::SendIcmpErrorIfDstUnreach(const Message & aMessage, const Mac::Address &aMacSource, const Mac::Address &aMacDest) { otError error; Ip6::Header ip6header; Child * child; VerifyOrExit(aMacSource.IsShort() && aMacDest.IsShort(), OT_NOOP); child = Get<ChildTable>().FindChild(aMacSource.GetShort(), Child::kInStateAnyExceptInvalid); VerifyOrExit((child == nullptr) || child->IsFullThreadDevice(), OT_NOOP); aMessage.Read(0, sizeof(ip6header), &ip6header); 
VerifyOrExit(!ip6header.GetDestination().IsMulticast() && Get<NetworkData::Leader>().IsOnMesh(ip6header.GetDestination()), OT_NOOP); error = Get<Mle::MleRouter>().CheckReachability(aMacDest.GetShort(), ip6header); if (error == OT_ERROR_NO_ROUTE) { SendDestinationUnreachable(aMacSource.GetShort(), aMessage); } exit: return; } otError MeshForwarder::CheckReachability(const uint8_t * aFrame, uint16_t aFrameLength, const Mac::Address &aMeshSource, const Mac::Address &aMeshDest) { otError error = OT_ERROR_NONE; Ip6::Header ip6Header; Message * message = nullptr; Lowpan::FragmentHeader fragmentHeader; uint16_t fragmentHeaderLength; uint16_t datagramSize = 0; if (fragmentHeader.ParseFrom(aFrame, aFrameLength, fragmentHeaderLength) == OT_ERROR_NONE) { // Only the first fragment header is followed by a LOWPAN_IPHC header VerifyOrExit(fragmentHeader.GetDatagramOffset() == 0, error = OT_ERROR_NOT_FOUND); aFrame += fragmentHeaderLength; aFrameLength -= fragmentHeaderLength; datagramSize = fragmentHeader.GetDatagramSize(); } VerifyOrExit(aFrameLength >= 1 && Lowpan::Lowpan::IsLowpanHc(aFrame), error = OT_ERROR_NOT_FOUND); error = FrameToMessage(aFrame, aFrameLength, datagramSize, aMeshSource, aMeshDest, message); SuccessOrExit(error); message->Read(0, sizeof(ip6Header), &ip6Header); error = Get<Mle::MleRouter>().CheckReachability(aMeshDest.GetShort(), ip6Header); exit: if (error == OT_ERROR_NOT_FOUND) { // the message may not contain an IPv6 header error = OT_ERROR_NONE; } else if (error == OT_ERROR_NO_ROUTE) { SendDestinationUnreachable(aMeshSource.GetShort(), *message); } if (message != nullptr) { message->Free(); } return error; } void MeshForwarder::SendDestinationUnreachable(uint16_t aMeshSource, const Message &aMessage) { Ip6::MessageInfo messageInfo; messageInfo.GetPeerAddr() = Get<Mle::MleRouter>().GetMeshLocal16(); messageInfo.GetPeerAddr().GetIid().SetLocator(aMeshSource); IgnoreError(Get<Ip6::Icmp>().SendError(Ip6::Icmp::Header::kTypeDstUnreach, 
Ip6::Icmp::Header::kCodeDstUnreachNoRoute, messageInfo, aMessage)); } void MeshForwarder::HandleMesh(uint8_t * aFrame, uint16_t aFrameLength, const Mac::Address & aMacSource, const otThreadLinkInfo &aLinkInfo) { otError error = OT_ERROR_NONE; Message * message = nullptr; Mac::Address meshDest; Mac::Address meshSource; Lowpan::MeshHeader meshHeader; uint16_t headerLength; // Security Check: only process Mesh Header frames that had security enabled. VerifyOrExit(aLinkInfo.mLinkSecurity, error = OT_ERROR_SECURITY); SuccessOrExit(error = meshHeader.ParseFrom(aFrame, aFrameLength, headerLength)); meshSource.SetShort(meshHeader.GetSource()); meshDest.SetShort(meshHeader.GetDestination()); aFrame += headerLength; aFrameLength -= headerLength; UpdateRoutes(aFrame, aFrameLength, meshSource, meshDest); if (meshDest.GetShort() == Get<Mac::Mac>().GetShortAddress() || Get<Mle::MleRouter>().IsMinimalChild(meshDest.GetShort())) { if (Lowpan::FragmentHeader::IsFragmentHeader(aFrame, aFrameLength)) { HandleFragment(aFrame, aFrameLength, meshSource, meshDest, aLinkInfo); } else if (Lowpan::Lowpan::IsLowpanHc(aFrame)) { HandleLowpanHC(aFrame, aFrameLength, meshSource, meshDest, aLinkInfo); } else { ExitNow(error = OT_ERROR_PARSE); } } else if (meshHeader.GetHopsLeft() > 0) { Message::Priority priority = Message::kPriorityNormal; uint16_t offset = 0; Get<Mle::MleRouter>().ResolveRoutingLoops(aMacSource.GetShort(), meshDest.GetShort()); SuccessOrExit(error = CheckReachability(aFrame, aFrameLength, meshSource, meshDest)); meshHeader.DecrementHopsLeft(); GetForwardFramePriority(aFrame, aFrameLength, meshSource, meshDest, priority); message = Get<MessagePool>().New(Message::kType6lowpan, priority); VerifyOrExit(message != nullptr, error = OT_ERROR_NO_BUFS); SuccessOrExit(error = message->SetLength(meshHeader.GetHeaderLength() + aFrameLength)); offset += meshHeader.WriteTo(*message, offset); message->Write(offset, aFrameLength, aFrame); 
message->SetLinkSecurityEnabled(aLinkInfo.mLinkSecurity); message->SetPanId(aLinkInfo.mPanId); message->AddRss(aLinkInfo.mRss); LogMessage(kMessageReceive, *message, &aMacSource, OT_ERROR_NONE); IgnoreError(SendMessage(*message)); } exit: if (error != OT_ERROR_NONE) { otLogInfoMac("Dropping rx mesh frame, error:%s, len:%d, src:%s, sec:%s", otThreadErrorToString(error), aFrameLength, aMacSource.ToString().AsCString(), aLinkInfo.mLinkSecurity ? "yes" : "no"); if (message != nullptr) { message->Free(); } } } void MeshForwarder::UpdateRoutes(const uint8_t * aFrame, uint16_t aFrameLength, const Mac::Address &aMeshSource, const Mac::Address &aMeshDest) { Ip6::Header ip6Header; Neighbor * neighbor; VerifyOrExit(!aMeshDest.IsBroadcast() && aMeshSource.IsShort(), OT_NOOP); SuccessOrExit(GetIp6Header(aFrame, aFrameLength, aMeshSource, aMeshDest, ip6Header)); if (!ip6Header.GetSource().GetIid().IsLocator() && Get<NetworkData::Leader>().IsOnMesh(ip6Header.GetSource()) /* only for on mesh address which may require AQ */) { if (Get<AddressResolver>().UpdateCacheEntry(ip6Header.GetSource(), aMeshSource.GetShort()) == OT_ERROR_NOT_FOUND) { // Thread 1.1 Specification 5.5.2.2: FTDs MAY add/update // EID-to-RLOC Map Cache entries by inspecting packets // being received. We exclude frames from an MTD child // source and verify that the destination is the device // itself or an MTD child of the device. 
if (Get<Mle::MleRouter>().IsFullThreadDevice() && !Get<Mle::MleRouter>().IsMinimalChild(aMeshSource.GetShort()) && (aMeshDest.GetShort() == Get<Mac::Mac>().GetShortAddress() || Get<Mle::MleRouter>().IsMinimalChild(aMeshDest.GetShort()))) { Get<AddressResolver>().AddSnoopedCacheEntry(ip6Header.GetSource(), aMeshSource.GetShort()); } } } neighbor = Get<Mle::MleRouter>().GetNeighbor(ip6Header.GetSource()); VerifyOrExit(neighbor != nullptr && !neighbor->IsFullThreadDevice(), OT_NOOP); if (!Mle::Mle::RouterIdMatch(aMeshSource.GetShort(), Get<Mac::Mac>().GetShortAddress())) { Get<Mle::MleRouter>().RemoveNeighbor(*neighbor); } exit: return; } bool MeshForwarder::UpdateFragmentLifetime(void) { bool shouldRun = false; for (FragmentPriorityEntry &entry : mFragmentEntries) { if (entry.GetLifetime() != 0) { entry.DecrementLifetime(); if (entry.GetLifetime() != 0) { shouldRun = true; } } } return shouldRun; } void MeshForwarder::UpdateFragmentPriority(Lowpan::FragmentHeader &aFragmentHeader, uint16_t aFragmentLength, uint16_t aSrcRloc16, Message::Priority aPriority) { FragmentPriorityEntry *entry; if (aFragmentHeader.GetDatagramOffset() == 0) { VerifyOrExit((entry = GetUnusedFragmentPriorityEntry()) != nullptr, OT_NOOP); entry->SetDatagramTag(aFragmentHeader.GetDatagramTag()); entry->SetSrcRloc16(aSrcRloc16); entry->SetPriority(aPriority); entry->SetLifetime(kReassemblyTimeout); if (!mUpdateTimer.IsRunning()) { mUpdateTimer.Start(kStateUpdatePeriod); } } else { VerifyOrExit((entry = FindFragmentPriorityEntry(aFragmentHeader.GetDatagramTag(), aSrcRloc16)) != nullptr, OT_NOOP); entry->SetLifetime(kReassemblyTimeout); if (aFragmentHeader.GetDatagramOffset() + aFragmentLength >= aFragmentHeader.GetDatagramSize()) { entry->SetLifetime(0); } } exit: return; } FragmentPriorityEntry *MeshForwarder::FindFragmentPriorityEntry(uint16_t aTag, uint16_t aSrcRloc16) { FragmentPriorityEntry *rval = nullptr; for (FragmentPriorityEntry &entry : mFragmentEntries) { if ((entry.GetLifetime() != 0) 
&& (entry.GetDatagramTag() == aTag) && (entry.GetSrcRloc16() == aSrcRloc16)) { rval = &entry; break; } } return rval; } FragmentPriorityEntry *MeshForwarder::GetUnusedFragmentPriorityEntry(void) { FragmentPriorityEntry *rval = nullptr; for (FragmentPriorityEntry &entry : mFragmentEntries) { if (entry.GetLifetime() == 0) { rval = &entry; break; } } return rval; } otError MeshForwarder::GetFragmentPriority(Lowpan::FragmentHeader &aFragmentHeader, uint16_t aSrcRloc16, Message::Priority & aPriority) { otError error = OT_ERROR_NONE; FragmentPriorityEntry *entry; entry = FindFragmentPriorityEntry(aFragmentHeader.GetDatagramTag(), aSrcRloc16); VerifyOrExit(entry != nullptr, error = OT_ERROR_NOT_FOUND); aPriority = entry->GetPriority(); exit: return error; } void MeshForwarder::GetForwardFramePriority(const uint8_t * aFrame, uint16_t aFrameLength, const Mac::Address &aMeshSource, const Mac::Address &aMeshDest, Message::Priority & aPriority) { otError error = OT_ERROR_NONE; bool isFragment = false; Lowpan::FragmentHeader fragmentHeader; uint16_t fragmentHeaderLength; if (fragmentHeader.ParseFrom(aFrame, aFrameLength, fragmentHeaderLength) == OT_ERROR_NONE) { isFragment = true; aFrame += fragmentHeaderLength; aFrameLength -= fragmentHeaderLength; if (fragmentHeader.GetDatagramOffset() > 0) { // Get priority from the pre-buffered info ExitNow(error = GetFragmentPriority(fragmentHeader, aMeshSource.GetShort(), aPriority)); } } // Get priority from IPv6 header or UDP destination port directly error = GetFramePriority(aFrame, aFrameLength, aMeshSource, aMeshDest, aPriority); exit: if (error != OT_ERROR_NONE) { otLogNoteMac("Failed to get forwarded frame priority, error:%s, len:%d, src:%d, dst:%s", otThreadErrorToString(error), aFrameLength, aMeshSource.ToString().AsCString(), aMeshDest.ToString().AsCString()); } else if (isFragment) { UpdateFragmentPriority(fragmentHeader, aFrameLength, aMeshSource.GetShort(), aPriority); } return; } otError 
MeshForwarder::GetDestinationRlocByServiceAloc(uint16_t aServiceAloc, uint16_t &aMeshDest) { otError error = OT_ERROR_NONE; uint8_t serviceId = Mle::Mle::ServiceIdFromAloc(aServiceAloc); const NetworkData::ServiceTlv *serviceTlv = Get<NetworkData::Leader>().FindServiceById(serviceId); if (serviceTlv != nullptr) { const NetworkData::NetworkDataTlv *cur = serviceTlv->GetSubTlvs(); const NetworkData::NetworkDataTlv *end = serviceTlv->GetNext(); Neighbor * neighbor; uint16_t server16; uint8_t bestCost = Mle::kMaxRouteCost; uint8_t curCost = 0x00; uint16_t bestDest = Mac::kShortAddrInvalid; while (cur < end) { switch (cur->GetType()) { case NetworkData::NetworkDataTlv::kTypeServer: server16 = static_cast<const NetworkData::ServerTlv *>(cur)->GetServer16(); // Path cost curCost = Get<Mle::MleRouter>().GetCost(server16); if (!Get<Mle::MleRouter>().IsActiveRouter(server16)) { // Assume best link between remote child server and its parent. curCost += 1; } // Cost if the server is direct neighbor. neighbor = Get<Mle::MleRouter>().GetNeighbor(server16); if (neighbor != nullptr && neighbor->IsStateValid()) { uint8_t cost; if (!Get<Mle::MleRouter>().IsActiveRouter(server16)) { // Cost calculated only from Link Quality In as the parent only maintains // one-direction link info. cost = Mle::MleRouter::LinkQualityToCost(neighbor->GetLinkInfo().GetLinkQuality()); } else { cost = Get<Mle::MleRouter>().GetLinkCost(Mle::Mle::RouterIdFromRloc16(server16)); } // Choose the minimum cost if (cost < curCost) { curCost = cost; } } if ((bestDest == Mac::kShortAddrInvalid) || (curCost < bestCost)) { bestDest = server16; bestCost = curCost; } break; default: break; } cur = cur->GetNext(); } if (bestDest != Mac::kShortAddrInvalid) { aMeshDest = bestDest; } else { // ServiceTLV without ServerTLV? Can't forward packet anywhere. 
ExitNow(error = OT_ERROR_NO_ROUTE); } } else { // Unknown service, can't forward ExitNow(error = OT_ERROR_NO_ROUTE); } exit: return error; } // LCOV_EXCL_START #if (OPENTHREAD_CONFIG_LOG_LEVEL >= OT_LOG_LEVEL_NOTE) && (OPENTHREAD_CONFIG_LOG_MAC == 1) otError MeshForwarder::LogMeshFragmentHeader(MessageAction aAction, const Message & aMessage, const Mac::Address *aMacAddress, otError aError, uint16_t & aOffset, Mac::Address & aMeshSource, Mac::Address & aMeshDest, otLogLevel aLogLevel) { otError error = OT_ERROR_FAILED; bool hasFragmentHeader = false; bool shouldLogRss; Lowpan::MeshHeader meshHeader; Lowpan::FragmentHeader fragmentHeader; uint16_t headerLength; SuccessOrExit(meshHeader.ParseFrom(aMessage, headerLength)); aMeshSource.SetShort(meshHeader.GetSource()); aMeshDest.SetShort(meshHeader.GetDestination()); aOffset = headerLength; if (fragmentHeader.ParseFrom(aMessage, aOffset, headerLength) == OT_ERROR_NONE) { hasFragmentHeader = true; aOffset += headerLength; } shouldLogRss = (aAction == kMessageReceive) || (aAction == kMessageReassemblyDrop); otLogMac( aLogLevel, "%s mesh frame, len:%d%s%s, msrc:%s, mdst:%s, hops:%d, frag:%s, sec:%s%s%s%s%s", MessageActionToString(aAction, aError), aMessage.GetLength(), (aMacAddress == nullptr) ? "" : ((aAction == kMessageReceive) ? ", from:" : ", to:"), (aMacAddress == nullptr) ? "" : aMacAddress->ToString().AsCString(), aMeshSource.ToString().AsCString(), aMeshDest.ToString().AsCString(), meshHeader.GetHopsLeft() + ((aAction == kMessageReceive) ? 1 : 0), hasFragmentHeader ? "yes" : "no", aMessage.IsLinkSecurityEnabled() ? "yes" : "no", (aError == OT_ERROR_NONE) ? "" : ", error:", (aError == OT_ERROR_NONE) ? "" : otThreadErrorToString(aError), shouldLogRss ? ", rss:" : "", shouldLogRss ? 
aMessage.GetRssAverager().ToString().AsCString() : ""); if (hasFragmentHeader) { otLogMac(aLogLevel, " Frag tag:%04x, offset:%d, size:%d", fragmentHeader.GetDatagramTag(), fragmentHeader.GetDatagramOffset(), fragmentHeader.GetDatagramSize()); VerifyOrExit(fragmentHeader.GetDatagramOffset() == 0, OT_NOOP); } error = OT_ERROR_NONE; exit: return error; } otError MeshForwarder::DecompressIp6UdpTcpHeader(const Message & aMessage, uint16_t aOffset, const Mac::Address &aMeshSource, const Mac::Address &aMeshDest, Ip6::Header & aIp6Header, uint16_t & aChecksum, uint16_t & aSourcePort, uint16_t & aDestPort) { otError error = OT_ERROR_PARSE; int headerLength; bool nextHeaderCompressed; uint8_t frameBuffer[sizeof(Ip6::Header)]; uint16_t frameLength; union { Ip6::Udp::Header udp; Ip6::Tcp::Header tcp; } header; aChecksum = 0; aSourcePort = 0; aDestPort = 0; // Read and decompress the IPv6 header frameLength = aMessage.Read(aOffset, sizeof(frameBuffer), frameBuffer); headerLength = Get<Lowpan::Lowpan>().DecompressBaseHeader(aIp6Header, nextHeaderCompressed, aMeshSource, aMeshDest, frameBuffer, frameLength); VerifyOrExit(headerLength >= 0, OT_NOOP); aOffset += headerLength; // Read and decompress UDP or TCP header switch (aIp6Header.GetNextHeader()) { case Ip6::kProtoUdp: if (nextHeaderCompressed) { frameLength = aMessage.Read(aOffset, sizeof(Ip6::Udp::Header), frameBuffer); headerLength = Get<Lowpan::Lowpan>().DecompressUdpHeader(header.udp, frameBuffer, frameLength); VerifyOrExit(headerLength >= 0, OT_NOOP); } else { VerifyOrExit(sizeof(Ip6::Udp::Header) == aMessage.Read(aOffset, sizeof(Ip6::Udp::Header), &header.udp), OT_NOOP); } aChecksum = header.udp.GetChecksum(); aSourcePort = header.udp.GetSourcePort(); aDestPort = header.udp.GetDestinationPort(); break; case Ip6::kProtoTcp: VerifyOrExit(sizeof(Ip6::Tcp::Header) == aMessage.Read(aOffset, sizeof(Ip6::Tcp::Header), &header.tcp), OT_NOOP); aChecksum = header.tcp.GetChecksum(); aSourcePort = header.tcp.GetSourcePort(); 
aDestPort = header.tcp.GetDestinationPort(); break; default: break; } error = OT_ERROR_NONE; exit: return error; } void MeshForwarder::LogMeshIpHeader(const Message & aMessage, uint16_t aOffset, const Mac::Address &aMeshSource, const Mac::Address &aMeshDest, otLogLevel aLogLevel) { uint16_t checksum; uint16_t sourcePort; uint16_t destPort; Ip6::Header ip6Header; SuccessOrExit(DecompressIp6UdpTcpHeader(aMessage, aOffset, aMeshSource, aMeshDest, ip6Header, checksum, sourcePort, destPort)); otLogMac(aLogLevel, " IPv6 %s msg, chksum:%04x, prio:%s", Ip6::Ip6::IpProtoToString(ip6Header.GetNextHeader()), checksum, MessagePriorityToString(aMessage)); LogIp6SourceDestAddresses(ip6Header, sourcePort, destPort, aLogLevel); exit: return; } void MeshForwarder::LogMeshMessage(MessageAction aAction, const Message & aMessage, const Mac::Address *aMacAddress, otError aError, otLogLevel aLogLevel) { uint16_t offset; Mac::Address meshSource; Mac::Address meshDest; SuccessOrExit( LogMeshFragmentHeader(aAction, aMessage, aMacAddress, aError, offset, meshSource, meshDest, aLogLevel)); // When log action is `kMessageTransmit` we do not include // the IPv6 header info in the logs, as the same info is // logged when the same Mesh Header message was received // and info about it was logged. VerifyOrExit(aAction != kMessageTransmit, OT_NOOP); LogMeshIpHeader(aMessage, offset, meshSource, meshDest, aLogLevel); exit: return; } #endif // #if (OPENTHREAD_CONFIG_LOG_LEVEL >= OT_LOG_LEVEL_NOTE) && (OPENTHREAD_CONFIG_LOG_MAC == 1) // LCOV_EXCL_STOP } // namespace ot #endif // OPENTHREAD_FTD
C++
BSD-3-Clause
kbogucki/openthread/src/core/thread/mesh_forwarder_ftd.cpp
fad54a62-766f-464d-8fef-27636157206d
[]
[]
from .base import * # noqa from .base import env # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key SECRET_KEY = env('DJANGO_SECRET_KEY') # https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['127.0.0.1']) # DATABASES # ------------------------------------------------------------------------------ DATABASES['default'] = env.db('DATABASE_URL') # noqa F405 DATABASES['default']['ATOMIC_REQUESTS'] = True # noqa F405 DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60) # noqa F405 # CACHES # ------------------------------------------------------------------------------ CACHES = { 'default': { 'BACKEND': 'django_redis.cache.RedisCache', 'LOCATION': env('REDIS_URL'), 'OPTIONS': { 'CLIENT_CLASS': 'django_redis.client.DefaultClient', # Mimicing memcache behavior. # http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior 'IGNORE_EXCEPTIONS': True, } } } # SECURITY # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') # https://docs.djangoproject.com/en/dev/ref/settings/#secure-ssl-redirect SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True) # https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure SESSION_COOKIE_SECURE = True # https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure CSRF_COOKIE_SECURE = True # https://docs.djangoproject.com/en/dev/topics/security/#ssl-https # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds # TODO: set this to 60 seconds first and then to 518400 once you prove the former works SECURE_HSTS_SECONDS = 60 # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains 
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool('DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True) # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload SECURE_HSTS_PRELOAD = env.bool('DJANGO_SECURE_HSTS_PRELOAD', default=True) # https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff SECURE_CONTENT_TYPE_NOSNIFF = env.bool('DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True) # STORAGES # ------------------------------------------------------------------------------ # https://django-storages.readthedocs.io/en/latest/#installation INSTALLED_APPS += ['storages'] # noqa F405 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID') # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY') # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME') # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_QUERYSTRING_AUTH = False # DO NOT change these unless you know what you're doing. 
_AWS_EXPIRY = 60 * 60 * 24 * 7 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_S3_OBJECT_PARAMETERS = { 'CacheControl': f'max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate', } # STATIC # ------------------------ STATICFILES_STORAGE = 'config.settings.production.StaticRootS3Boto3Storage' STATIC_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/' # MEDIA # ------------------------------------------------------------------------------ # region http://stackoverflow.com/questions/10390244/ # Full-fledge class: https://stackoverflow.com/a/18046120/104731 from storages.backends.s3boto3 import S3Boto3Storage # noqa E402 class StaticRootS3Boto3Storage(S3Boto3Storage): location = 'static' class MediaRootS3Boto3Storage(S3Boto3Storage): location = 'media' file_overwrite = False # endregion DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3Boto3Storage' MEDIA_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/' # TEMPLATES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#templates TEMPLATES[0]['OPTIONS']['loaders'] = [ # noqa F405 ( 'django.template.loaders.cached.Loader', [ 'django.template.loaders.filesystem.Loader', 'django.template.loaders.app_directories.Loader', ] ), ] # EMAIL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email DEFAULT_FROM_EMAIL = env( 'DJANGO_DEFAULT_FROM_EMAIL', default='The Bureau <noreply@127.0.0.1>' ) # https://docs.djangoproject.com/en/dev/ref/settings/#server-email SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL) # https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[The Bureau]') # ADMIN # 
------------------------------------------------------------------------------ # Django Admin URL regex. ADMIN_URL = env('DJANGO_ADMIN_URL') # Anymail (Mailgun) # ------------------------------------------------------------------------------ # https://anymail.readthedocs.io/en/stable/installation/#installing-anymail INSTALLED_APPS += ['anymail'] # noqa F405 EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' # https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference ANYMAIL = { 'MAILGUN_API_KEY': env('MAILGUN_API_KEY'), 'MAILGUN_SENDER_DOMAIN': env('MAILGUN_DOMAIN') } # Gunicorn # ------------------------------------------------------------------------------ INSTALLED_APPS += ['gunicorn'] # noqa F405 # Collectfast # ------------------------------------------------------------------------------ # https://github.com/antonagestam/collectfast#installation INSTALLED_APPS = ['collectfast'] + INSTALLED_APPS # noqa F405 AWS_PRELOAD_METADATA = True # LOGGING # ------------------------------------------------------------------------------ # See: https://docs.djangoproject.com/en/dev/ref/settings/#logging # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. # See https://docs.djangoproject.com/en/dev/topics/logging for # more details on how to customize your logging configuration. 
LOGGING = { 'version': 1, 'disable_existing_loggers': False, 'filters': { 'require_debug_false': { '()': 'django.utils.log.RequireDebugFalse' } }, 'formatters': { 'verbose': { 'format': '%(levelname)s %(asctime)s %(module)s ' '%(process)d %(thread)d %(message)s' }, }, 'handlers': { 'mail_admins': { 'level': 'ERROR', 'filters': ['require_debug_false'], 'class': 'django.utils.log.AdminEmailHandler' }, 'console': { 'level': 'DEBUG', 'class': 'logging.StreamHandler', 'formatter': 'verbose', }, }, 'loggers': { 'django.request': { 'handlers': ['mail_admins'], 'level': 'ERROR', 'propagate': True }, 'django.security.DisallowedHost': { 'level': 'ERROR', 'handlers': ['console', 'mail_admins'], 'propagate': True } } } # Your stuff... # ------------------------------------------------------------------------------
Python
Apache-2.0
clairempr/bureau/config/settings/production.py
ff6108c2-47e9-4b86-8c20-0cfba2624d4d
[{"tag": "USERNAME", "value": "antonagestam", "start": 6208, "end": 6220, "context": "----------------------------\n# https://github.com/antonagestam/collectfast#installation\nINSTALLED_APPS = ['colle"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 372, "end": 381, "context": "OSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['127.0.0.1'])\n\n# DATABASES\n# -------------------------------"}]
[{"tag": "USERNAME", "value": "antonagestam", "start": 6208, "end": 6220, "context": "----------------------------\n# https://github.com/antonagestam/collectfast#installation\nINSTALLED_APPS = ['colle"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 372, "end": 381, "context": "OSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['127.0.0.1'])\n\n# DATABASES\n# -------------------------------"}]
// example.go package main import ( // currently go 1.4.2 std lib "fmt" "io/ioutil" "log" "os" // local "github.com/hotei/dcompress" "github.com/hotei/mdr" ) func main() { var filePair1 = [2]string{"kermit.Z", "d61611d13775c1f3a83675e81afcadfc4352b11e0f39f7c928bad62d25675b66"} var filePairs = [][2]string{filePair1 /*, filePair2, filePair3*/} dcompress.Verbose = true for i := 0; i < len(filePairs); i++ { infname := filePairs[i][0] outsig := filePairs[i][1] fmt.Printf("\n working to dcompress %s\n", infname) r, err := os.Open(infname) if err != nil { log.Panicf("open file failed for", infname) } rdr, err := dcompress.NewReader(r) if err != nil { fmt.Printf("FAILED - err from NewReader\n") return } dBuf, err := ioutil.ReadAll(rdr) if err != nil { fmt.Printf("FAILED - err from rdr.ReadAll()\n") return } fmt.Printf("dcompress would create %d bytes in new file\n", len(dBuf)) bufSig := mdr.BufSHA256(dBuf) fmt.Printf("dcompress buffer has sha256 sig of %s\n", bufSig) fmt.Printf("expected sha256 of %s\n", outsig) if bufSig != outsig { fmt.Printf("FAILED - output files sha256 not correct") } else { fmt.Printf("PASS \n") } } }
GO
BSD-2-Clause
hotei/dcompress/example/example.go
f5f6f707-347d-4204-bc6c-82dfba44e2b4
[]
[]
# Get twilio-ruby from twilio.com/docs/ruby/install require 'twilio-ruby' # Get your Account Sid and Auth Token from twilio.com/user/account account_sid = 'ACXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXXX' auth_token = 'your_auth_token' client = Twilio::REST::Client.new(account_sid, auth_token) # Get an object from its sid. If you do not have a sid, # check out the list resource examples on this page participant = client.account .conferences.get('CFbbe4632a3c49700934481addd5ce1659') .participants.get('CA386025c9bf5d6052a1d1ea42b4d16662') .update(muted: 'True') puts participant.muted
Ruby
MIT
PatNeedham/api-snippets/rest/participant/instance-post-example-1/instance-post-example-1.4.x.rb
17a2fea2-743e-4105-911f-a776e277e6da
[{"tag": "API_KEY", "value": "CFbbe4632a3c49700934481addd5ce1659", "start": 459, "end": 493, "context": "ent.account\n .conferences.get('CFbbe4632a3c49700934481addd5ce1659')\n .participants.get('CA386025"}, {"tag": "API_KEY", "value": "CA386025c9bf5d6052a1d1ea42b4d16662", "start": 535, "end": 569, "context": "d5ce1659')\n .participants.get('CA386025c9bf5d6052a1d1ea42b4d16662')\n .update(muted: 'True')\nputs"}]
[{"tag": "KEY", "value": "CFbbe4632a3c49700934481addd5ce1659", "start": 459, "end": 493, "context": "ent.account\n .conferences.get('CFbbe4632a3c49700934481addd5ce1659')\n .participants.get('CA386025"}, {"tag": "KEY", "value": "CA386025c9bf5d6052a1d1ea42b4d16662", "start": 535, "end": 569, "context": "d5ce1659')\n .participants.get('CA386025c9bf5d6052a1d1ea42b4d16662')\n .update(muted: 'True')\nputs"}]
require "webrat/core_extensions/detect_mapped" require "webrat/core/locators/locator" module Webrat module Locators class FieldLabeledLocator < Locator # :nodoc: def locate matching_labels.any? && matching_labels.detect_mapped { |label| label.field } end def matching_labels matching_label_elements.sort_by do |label_element| text(label_element).length end.map do |label_element| Label.load(@session, label_element) end end def matching_label_elements label_elements.select do |label_element| text(label_element) =~ /^\W*#{Regexp.escape(@value.to_s)}(\b|\Z)/i end end def label_elements @dom.xpath(*Label.xpath_search) end def error_message "Could not find field labeled #{@value.inspect}" end def text(element) str = element.inner_text str.gsub!("\n","") str.strip! str.squeeze!(" ") str end end # Locates a form field based on a <tt>label</tt> element in the HTML source. # This can be useful in order to verify that a field is pre-filled with the # correct value. # # Example: # field_labeled("First name").value.should == "Bryan" def field_labeled(label, *field_types) FieldLabeledLocator.new(@session, dom, label, *field_types).locate! end end end
Ruby
MIT
aureliosaraiva/webrat/lib/webrat/core/locators/field_labeled_locator.rb
c37ab859-37a6-4d26-9e5e-64ece3d03e8e
[]
[]
#include "googletest/googletest/include/gtest/gtest.h" #include "environment.h" #include "json/json_spirit_reader_template.h" #include "json/json_spirit_utils.h" #include "json/json_spirit_writer_template.h" #include "base58.h" #include "util.h" using namespace json_spirit; extern Array read_json(const std::string& filename); // Goal: test low-level base58 encoding functionality TEST(base58_tests, base58_EncodeBase58) { Array tests = read_json("base58_encode_decode.json"); for (Value& tv : tests) { Array test = tv.get_array(); std::string strTest = write_string(tv, false); if (test.size() < 2) // Allow for extra stuff (useful for comments) { ADD_FAILURE() << "Bad test: " << strTest; continue; } std::vector<unsigned char> sourcedata = ParseHex(test[0].get_str()); std::string base58string = test[1].get_str(); EXPECT_EQ(EncodeBase58(&sourcedata[0], &sourcedata[sourcedata.size()]), base58string) << strTest; } } // Goal: test low-level base58 decoding functionality TEST(base58_tests, base58_DecodeBase58) { Array tests = read_json("base58_encode_decode.json"); std::vector<unsigned char> result; BOOST_FOREACH (Value& tv, tests) { Array test = tv.get_array(); std::string strTest = write_string(tv, false); if (test.size() < 2) // Allow for extra stuff (useful for comments) { ADD_FAILURE() << "Bad test: " << strTest; continue; } std::vector<unsigned char> expected = ParseHex(test[0].get_str()); std::string base58string = test[1].get_str(); EXPECT_TRUE(DecodeBase58(base58string, result)) << strTest; EXPECT_TRUE(result.size() == expected.size() && std::equal(result.begin(), result.end(), expected.begin())) << strTest; } EXPECT_TRUE(!DecodeBase58("invalid", result)); } // Visitor to check address type class TestAddrTypeVisitor : public boost::static_visitor<bool> { private: std::string exp_addrType; public: TestAddrTypeVisitor(const std::string& exp_addrTypeIn) : exp_addrType(exp_addrTypeIn) {} bool operator()(const CKeyID& /*id*/) const { return (exp_addrType == "pubkey"); } 
bool operator()(const CScriptID& /*id*/) const { return (exp_addrType == "script"); } bool operator()(const CNoDestination& /*no*/) const { return (exp_addrType == "none"); } }; // Visitor to check address payload class TestPayloadVisitor : public boost::static_visitor<bool> { private: std::vector<unsigned char> exp_payload; public: TestPayloadVisitor(std::vector<unsigned char>& exp_payloadIn) : exp_payload(exp_payloadIn) {} bool operator()(const CKeyID& id) const { uint160 exp_key(exp_payload); return exp_key == id; } bool operator()(const CScriptID& id) const { uint160 exp_key(exp_payload); return exp_key == id; } bool operator()(const CNoDestination& /*no*/) const { return exp_payload.size() == 0; } }; // Goal: check that parsed keys match test payload TEST(base58_tests, base58_keys_valid_parse) { Array tests = read_json("base58_keys_valid.json"); std::vector<unsigned char> result; CBitcoinSecret secret; CBitcoinAddress addr; BOOST_FOREACH (Value& tv, tests) { Array test = tv.get_array(); std::string strTest = write_string(tv, false); if (test.size() < 3) // Allow for extra stuff (useful for comments) { ADD_FAILURE() << "Bad test: " << strTest; continue; } std::string exp_base58string = test[0].get_str(); std::vector<unsigned char> privkey_bin_from_hex = ParseHex(test[1].get_str()); const Object& metadata = test[2].get_obj(); bool isPrivkey = find_value(metadata, "isPrivkey").get_bool(); bool isTestnet = find_value(metadata, "isTestnet").get_bool(); SwitchNetworkTypeTemporarily state_holder(isTestnet ? 
NetworkType::Testnet : NetworkType::Mainnet); if (isPrivkey) { bool isCompressed = find_value(metadata, "isCompressed").get_bool(); // Must be valid private key EXPECT_TRUE(secret.SetString(exp_base58string)) << "!SetString:" + strTest; EXPECT_TRUE(secret.IsValid()) << "!IsValid:" + strTest; bool fCompressedOut = false; CSecret privkey = secret.GetSecret(fCompressedOut); EXPECT_TRUE(fCompressedOut == isCompressed) << "compressed mismatch:" + strTest; EXPECT_TRUE(privkey.size() == privkey_bin_from_hex.size() && std::equal(privkey.begin(), privkey.end(), privkey_bin_from_hex.begin())) << "key mismatch:" + strTest; // Private key must be invalid public key addr.SetString(exp_base58string); EXPECT_TRUE(!addr.IsValid()) << "IsValid privkey as pubkey:" + strTest; } else { std::string exp_addrType = find_value(metadata, "addrType").get_str(); // "script" or "pubkey" // Must be valid public key EXPECT_TRUE(addr.SetString(exp_base58string)) << "SetString:" + strTest; EXPECT_TRUE(addr.IsValid()) << "!IsValid:" + strTest; EXPECT_TRUE(addr.IsScript() == (exp_addrType == "script")) << "isScript mismatch" + strTest; CTxDestination dest = addr.Get(); EXPECT_TRUE(boost::apply_visitor(TestAddrTypeVisitor(exp_addrType), dest)) << "addrType mismatch" + strTest; // Public key must be invalid private key secret.SetString(exp_base58string); EXPECT_TRUE(!secret.IsValid()) << "IsValid pubkey as privkey:" + strTest; } } } // Goal: check that base58 parsing code is robust against a variety of corrupted data TEST(base58_tests, base58_keys_invalid) { Array tests = read_json("base58_keys_invalid.json"); // Negative testcases std::vector<unsigned char> result; CBitcoinSecret secret; CBitcoinAddress addr; BOOST_FOREACH (Value& tv, tests) { Array test = tv.get_array(); std::string strTest = write_string(tv, false); if (test.size() < 1) // Allow for extra stuff (useful for comments) { ADD_FAILURE() << "Bad test: " << strTest; continue; } std::string exp_base58string = test[0].get_str(); // must be 
invalid as public and as private key addr.SetString(exp_base58string); EXPECT_TRUE(!addr.IsValid()) << "IsValid pubkey:" + strTest; secret.SetString(exp_base58string); EXPECT_TRUE(!secret.IsValid()) << "IsValid privkey:" + strTest; } } template <typename TInputIter> std::string make_hex_string(TInputIter first, TInputIter last, bool use_uppercase = true, bool insert_spaces = false) { std::ostringstream ss; ss << std::hex << std::setfill('0'); if (use_uppercase) ss << std::uppercase; while (first != last) { ss << std::setw(2) << static_cast<int>(*first++); if (insert_spaces && first != last) ss << " "; } return ss.str(); } void test_priv_key_vs_address(const std::string& privkey, std::string pubkey, const std::string& address) { CBitcoinSecret vchSecret; EXPECT_TRUE(vchSecret.SetString(privkey)); bool fCompressed; CKey key; CSecret secret = vchSecret.GetSecret(fCompressed); EXPECT_TRUE(fCompressed); key.SetSecret(secret, fCompressed); EXPECT_TRUE(key.GetPubKey().IsValid()); std::vector<unsigned char> rawPubKey = key.GetPubKey().Raw(); std::transform(pubkey.begin(), pubkey.end(), pubkey.begin(), ::tolower); // make pubkey lower-case EXPECT_EQ(make_hex_string(rawPubKey.begin(), rawPubKey.end(), false), pubkey); CKeyID keyid = key.GetPubKey().GetID(); EXPECT_EQ(CBitcoinAddress(keyid).ToString(), address); } void test_random_key_generation() { // create private key from scratch CKey key; key.MakeNewKey(true); // create private key string from raw CBitcoinSecret vchSecret; bool compressed = false; vchSecret.SetSecret(key.GetSecret(compressed), true); EXPECT_TRUE(compressed); // currently keys are only compressed // validate public key EXPECT_TRUE(key.GetPubKey().IsValid()); // get raw public key std::vector<unsigned char> rawPubKey = key.GetPubKey().Raw(); // get address CKeyID keyid = key.GetPubKey().GetID(); CBitcoinAddress address(keyid); // test test_priv_key_vs_address(vchSecret.ToString(), make_hex_string(rawPubKey.begin(), rawPubKey.end(), false), 
address.ToString()); } TEST(base58_tests, base58_keys_generation) { { SwitchNetworkTypeTemporarily state_holder(NetworkType::Mainnet); // real-net test_priv_key_vs_address("TtnutkcnaPcu3zmjWcrJazf42fp1YAKRpm8grKRRuYjtiykmGuM7", "037f41ae8b46979087562e65494eb3a3b9d8addde9b9568ef5cbb8197fd26c0ff2", "NVFdK9ik6mBCG6syVw2gD1gBwJzKF5me5i"); test_priv_key_vs_address("TnNwg92Wpw8iuBRwaeJydzw2c6MMqTe2c6cA5hn3NBBdqFWvpViF", "03c7f8863df49735b1a1906a5f5939beb8622074b3ddf8fccc5462362271145f09", "NSTdV7BgFeYXR61ywiMyAoof2ihwUPDPpj"); test_priv_key_vs_address("TpWmAWxNCGN7tj218djRJjegAVy34K2eEx8Zbt7xbf2H9GNUBgci", "029cbf0da830b83a457877fcf009160e9de9f0383fe0c97769ce9a4c2d52949f4b", "NdLGazEn51ofFuztenM7bNfquNBV1FWMGG"); for (int i = 0; i < 10; i++) { test_random_key_generation(); } } { // test-net SwitchNetworkTypeTemporarily state_holder(NetworkType::Testnet); test_priv_key_vs_address("Vgg5VL2TW1NMNKt4wkazRkUygpnPiQXnztA2h3ALQxGUhk1tQUag", "0243bab8b87abbd42493ca577dee9befc15cc5565f2792e18c52eb90cdfa13dc2a", "TVPDsVw4vSbNkRPkfnwCJmbCygEuJEVpwW"); test_priv_key_vs_address("VfCCG4Ew6XAtxpEEnuWTTfUgX92fvyq2kPh7cghHtPDQs7PTmwSn", "02f20e5d83d939edc1169296250503b71cc37438ab608fbffb0ea11539d9341c7f", "TPiGYtUnB3qCjYBuXAj6QX7CiW5sJQ7Sdk"); test_priv_key_vs_address("VfAy2E8BhcFat6dLGaZotaZJReEU2jWHzZi6a5XQqD9q5qFAWzuK", "023ecc7eee129e8b009461b67b9f55120675c61957a7bb7f02726f73215051cb76", "THs3Lec52yQPfErz7Z32Yi3KJnBTzicEiz"); for (int i = 0; i < 10; i++) { test_random_key_generation(); } } }
C++
MIT
NeblioTeam/neblio/wallet/test/base58_tests.cpp
1a1f617a-f3a1-4d9e-a635-b98b94df2e3b
[]
[]
"use strict"; var _interopRequireDefault = require("@babel/runtime/helpers/builtin/interopRequireDefault"); Object.defineProperty(exports, "__esModule", { value: true }); exports.default = void 0; var _react = _interopRequireDefault(require("react")); var _createSvgIcon = _interopRequireDefault(require("./utils/createSvgIcon")); var _default = (0, _createSvgIcon.default)(_react.default.createElement(_react.default.Fragment, null, _react.default.createElement("path", { d: "M9 17H7v-7h2v7zm4 0h-2V7h2v10zm4 0h-2v-4h2v4zm2 2H5V5h14v14zm2-16H3v18h18V3z" })), 'InsertChartOutlinedSharp'); exports.default = _default;
JavaScript
MIT
GJCHOWDARY/chowdary_react/node_modules/@material-ui/icons/InsertChartOutlinedSharp.js
5157a4f9-7197-402f-807d-f944390b0334
[]
[]
# -*- coding: ISO-8859-15 -*- # ============================================================================= # Copyright (c) 2004, 2006 Sean C. Gillies # Copyright (c) 2007 STFC <http://www.stfc.ac.uk> # # Authors : # Dominic Lowe <d.lowe@rl.ac.uk> # # Contact email: d.lowe@rl.ac.uk # ============================================================================= from owslib.coverage.wcsBase import WCSBase, WCSCapabilitiesReader, ServiceException from urllib.parse import urlencode from owslib.util import openURL, testXMLValue from owslib.etree import etree from owslib.crs import Crs import os import errno import logging from owslib.util import log, makeString # function to save writing out WCS namespace in full each time def ns(tag): return '{http://www.opengis.net/wcs}' + tag class WebCoverageService_1_0_0(WCSBase): """Abstraction for OGC Web Coverage Service (WCS), version 1.0.0 Implements IWebCoverageService. """ def __getitem__(self, name): ''' check contents dictionary to allow dict like access to service layers''' if name in list(self.__getattribute__('contents').keys()): return self.__getattribute__('contents')[name] else: raise KeyError("No content named %s" % name) def __init__(self, url, xml, cookies, auth=None, timeout=30): super(WebCoverageService_1_0_0, self).__init__(auth) self.version = '1.0.0' self.url = url self.cookies = cookies self.timeout = timeout # initialize from saved capability document or access the server reader = WCSCapabilitiesReader(self.version, self.cookies, self.auth) if xml: self._capabilities = reader.readString(xml) else: self._capabilities = reader.read(self.url, self.timeout) # check for exceptions se = self._capabilities.find('ServiceException') if se is not None: err_message = str(se.text).strip() raise ServiceException(err_message, xml) self.updateSequence = self._capabilities.attrib.get('updateSequence') # serviceIdentification metadata subelem = self._capabilities.find(ns('Service')) self.identification = 
ServiceIdentification(subelem) # serviceProvider metadata subelem = self._capabilities.find(ns('Service/') + ns('responsibleParty')) self.provider = ServiceProvider(subelem) # serviceOperations metadata self.operations = [] for elem in self._capabilities.find(ns('Capability/') + ns('Request'))[:]: self.operations.append(OperationMetadata(elem)) # serviceContents metadata self.contents = {} for elem in self._capabilities.findall(ns('ContentMetadata/') + ns('CoverageOfferingBrief')): cm = ContentMetadata(elem, self) self.contents[cm.id] = cm # Some WCS servers (wrongly) advertise 'Content' OfferingBrief instead. if self.contents == {}: for elem in self._capabilities.findall(ns('ContentMetadata/') + ns('ContentOfferingBrief')): cm = ContentMetadata(elem, self) self.contents[cm.id] = cm # exceptions self.exceptions = [f.text for f in self._capabilities.findall('Capability/Exception/Format')] def items(self): '''supports dict-like items() access''' items = [] for item in self.contents: items.append((item, self.contents[item])) return items def getCoverage(self, identifier=None, bbox=None, time=None, format=None, crs=None, width=None, height=None, resx=None, resy=None, resz=None, parameter=None, method='Get', timeout=30, **kwargs): """Request and return a coverage from the WCS as a file-like object note: additional **kwargs helps with multi-version implementation core keyword arguments should be supported cross version example: cvg=wcs.getCoverage(identifier=['TuMYrRQ4'], timeSequence=['2792-06-01T00:00:00.0'], bbox=(-112,36,-106,41), format='cf-netcdf') is equivalent to: http://myhost/mywcs?SERVICE=WCS&REQUEST=GetCoverage&IDENTIFIER=TuMYrRQ4&VERSION=1.1.0&BOUNDINGBOX=-180,-90,180,90&TIME=2792-06-01T00:00:00.0&FORMAT=cf-netcdf """ if log.isEnabledFor(logging.DEBUG): msg = 'WCS 1.0.0 DEBUG: Parameters passed to GetCoverage: identifier={}, bbox={}, time={}, format={}, crs={}, width={}, height={}, resx={}, resy={}, resz={}, parameter={}, method={}, other_arguments={}' # 
noqa log.debug(msg.format( identifier, bbox, time, format, crs, width, height, resx, resy, resz, parameter, method, str(kwargs))) try: base_url = next((m.get('url') for m in self.getOperationByName('GetCoverage').methods if m.get('type').lower() == method.lower())) except StopIteration: base_url = self.url log.debug('WCS 1.0.0 DEBUG: base url of server: %s' % base_url) # process kwargs request = {'version': self.version, 'request': 'GetCoverage', 'service': 'WCS'} assert len(identifier) > 0 request['Coverage'] = identifier # request['identifier'] = ','.join(identifier) if bbox: request['BBox'] = ','.join([makeString(x) for x in bbox]) else: request['BBox'] = None if time: request['time'] = ','.join(time) if crs: request['crs'] = crs request['format'] = format if width: request['width'] = width if height: request['height'] = height if resx: request['resx'] = resx if resy: request['resy'] = resy if resz: request['resz'] = resz # anything else e.g. vendor specific parameters must go through kwargs if kwargs: for kw in kwargs: request[kw] = kwargs[kw] # encode and request data = urlencode(request) log.debug('WCS 1.0.0 DEBUG: Second part of URL: %s' % data) u = openURL(base_url, data, method, self.cookies, auth=self.auth, timeout=timeout) return u def getOperationByName(self, name): """Return a named operation item.""" for item in self.operations: if item.name == name: return item raise KeyError("No operation named %s" % name) class OperationMetadata(object): """Abstraction for WCS metadata. Implements IMetadata. 
""" def __init__(self, elem): """.""" self.name = elem.tag.split('}')[1] # self.formatOptions = [f.text for f in elem.findall('{http://www.opengis.net/wcs/1.1/ows}Parameter/{http://www.opengis.net/wcs/1.1/ows}AllowedValues/{http://www.opengis.net/wcs/1.1/ows}Value')] # noqa self.methods = [] for resource in elem.findall(ns('DCPType/') + ns('HTTP/') + ns('Get/') + ns('OnlineResource')): url = resource.attrib['{http://www.w3.org/1999/xlink}href'] self.methods.append({'type': 'Get', 'url': url}) for resource in elem.findall(ns('DCPType/') + ns('HTTP/') + ns('Post/') + ns('OnlineResource')): url = resource.attrib['{http://www.w3.org/1999/xlink}href'] self.methods.append({'type': 'Post', 'url': url}) class ServiceIdentification(object): """ Abstraction for ServiceIdentification metadata """ def __init__(self, elem): # properties self.type = 'OGC:WCS' self.version = '1.0.0' self.service = testXMLValue(elem.find(ns('name'))) self.abstract = testXMLValue(elem.find(ns('description'))) self.title = testXMLValue(elem.find(ns('label'))) self.keywords = [f.text for f in elem.findall(ns('keywords') + '/' + ns('keyword'))] # note: differs from 'rights' in interface self.fees = elem.find(ns('fees')).text self.accessConstraints = elem.find(ns('accessConstraints')).text class ServiceProvider(object): """ Abstraction for WCS ResponsibleParty Implements IServiceProvider""" def __init__(self, elem): # it's not uncommon for the service provider info to be missing # so handle case where None is passed in if elem is None: self.name = None self.url = None self.contact = None else: self.name = testXMLValue(elem.find(ns('organisationName'))) self.url = self.name # there is no definitive place for url WCS, repeat organisationName self.contact = ContactMetadata(elem) class ContactMetadata(object): ''' implements IContactMetadata''' def __init__(self, elem): try: self.name = elem.find(ns('individualName')).text except AttributeError: self.name = None try: self.organization = 
elem.find(ns('organisationName')).text except AttributeError: self.organization = None try: self.address = elem.find(ns('contactInfo') + '/' + ns('address') + '/' + ns('deliveryPoint')).text except AttributeError: self.address = None try: self.city = elem.find(ns('contactInfo') + '/' + ns('address') + '/' + ns('city')).text except AttributeError: self.city = None try: self.region = elem.find(ns('contactInfo') + '/' + ns('address') + '/' + ns('administrativeArea')).text except AttributeError: self.region = None try: self.postcode = elem.find(ns('contactInfo') + '/' + ns('address') + '/' + ns('postalCode')).text except AttributeError: self.postcode = None try: self.country = elem.find(ns('contactInfo') + '/' + ns('address') + '/' + ns('country')).text except AttributeError: self.country = None try: self.email = elem.find(ns('contactInfo') + '/' + ns('address') + '/' + ns('electronicMailAddress')).text except AttributeError: self.email = None class ContentMetadata(object): """ Implements IContentMetadata """ def __init__(self, elem, service): """Initialize. service is required so that describeCoverage requests may be made""" # TODO - examine the parent for bounding box info. 
# self._parent=parent self._elem = elem self._service = service self.id = elem.find(ns('name')).text self.title = testXMLValue(elem.find(ns('label'))) self.abstract = testXMLValue(elem.find(ns('description'))) self.keywords = [f.text for f in elem.findall(ns('keywords') + '/' + ns('keyword'))] self.boundingBox = None # needed for iContentMetadata harmonisation self.boundingBoxWGS84 = None b = elem.find(ns('lonLatEnvelope')) if b is not None: gmlpositions = b.findall('{http://www.opengis.net/gml}pos') lc = gmlpositions[0].text uc = gmlpositions[1].text self.boundingBoxWGS84 = ( float(lc.split()[0]), float(lc.split()[1]), float(uc.split()[0]), float(uc.split()[1]), ) # others not used but needed for iContentMetadata harmonisation self.styles = None self.crsOptions = None self.defaulttimeposition = None # grid is either a gml:Grid or a gml:RectifiedGrid if supplied as part of the DescribeCoverage response. def _getGrid(self): if not hasattr(self, 'descCov'): self.descCov = self._service.getDescribeCoverage(self.id) gridelem = self.descCov.find( ns('CoverageOffering/') + ns('domainSet/') + ns('spatialDomain/') + '{http://www.opengis.net/gml}RectifiedGrid') # noqa if gridelem is not None: grid = RectifiedGrid(gridelem) else: gridelem = self.descCov.find( ns('CoverageOffering/') + ns('domainSet/') + ns('spatialDomain/') + '{http://www.opengis.net/gml}Grid') # noqa grid = Grid(gridelem) return grid grid = property(_getGrid, None) # timelimits are the start/end times, timepositions are all timepoints. # WCS servers can declare one or both or neither of these. def _getTimeLimits(self): timepoints, timelimits = [], [] b = self._elem.find(ns('lonLatEnvelope')) if b is not None: timepoints = b.findall('{http://www.opengis.net/gml}timePosition') else: # have to make a describeCoverage request... 
if not hasattr(self, 'descCov'): self.descCov = self._service.getDescribeCoverage(self.id) for pos in self.descCov.findall( ns('CoverageOffering/') + ns('domainSet/') + ns('temporalDomain/') + '{http://www.opengis.net/gml}timePosition'): # noqa timepoints.append(pos) if timepoints: timelimits = [timepoints[0].text, timepoints[1].text] return timelimits timelimits = property(_getTimeLimits, None) def _getTimePositions(self): timepositions = [] if not hasattr(self, 'descCov'): self.descCov = self._service.getDescribeCoverage(self.id) for pos in self.descCov.findall( ns('CoverageOffering/') + ns('domainSet/') + ns('temporalDomain/') + '{http://www.opengis.net/gml}timePosition'): # noqa timepositions.append(pos.text) return timepositions timepositions = property(_getTimePositions, None) def _getOtherBoundingBoxes(self): ''' incomplete, should return other bounding boxes not in WGS84 #TODO: find any other bounding boxes. Need to check for gml:EnvelopeWithTimePeriod.''' bboxes = [] if not hasattr(self, 'descCov'): self.descCov = self._service.getDescribeCoverage(self.id) for envelope in self.descCov.findall( ns('CoverageOffering/') + ns('domainSet/') + ns('spatialDomain/') + '{http://www.opengis.net/gml}Envelope'): # noqa bbox = {} bbox['nativeSrs'] = envelope.attrib['srsName'] gmlpositions = envelope.findall('{http://www.opengis.net/gml}pos') lc = gmlpositions[0].text.split() uc = gmlpositions[1].text.split() bbox['bbox'] = ( float(lc[0]), float(lc[1]), float(uc[0]), float(uc[1]) ) bboxes.append(bbox) return bboxes boundingboxes = property(_getOtherBoundingBoxes, None) def _getSupportedCRSProperty(self): # gets supported crs info crss = [] for elem in self._service.getDescribeCoverage(self.id).findall( ns('CoverageOffering/') + ns('supportedCRSs/') + ns('responseCRSs')): for crs in elem.text.split(' '): crss.append(Crs(crs)) for elem in self._service.getDescribeCoverage(self.id).findall( ns('CoverageOffering/') + ns('supportedCRSs/') + ns('requestResponseCRSs')): for 
crs in elem.text.split(' '): crss.append(Crs(crs)) for elem in self._service.getDescribeCoverage(self.id).findall( ns('CoverageOffering/') + ns('supportedCRSs/') + ns('nativeCRSs')): for crs in elem.text.split(' '): crss.append(Crs(crs)) return crss supportedCRS = property(_getSupportedCRSProperty, None) def _getSupportedFormatsProperty(self): # gets supported formats info frmts = [] for elem in self._service.getDescribeCoverage(self.id).findall( ns('CoverageOffering/') + ns('supportedFormats/') + ns('formats')): frmts.append(elem.text) return frmts supportedFormats = property(_getSupportedFormatsProperty, None) def _getAxisDescriptionsProperty(self): # gets any axis descriptions contained in the rangeset (requires a DescribeCoverage call to server). axisDescs = [] for elem in self._service.getDescribeCoverage(self.id).findall( ns('CoverageOffering/') + ns('rangeSet/') + ns('RangeSet/') + ns('axisDescription/') + ns('AxisDescription')): # noqa axisDescs.append(AxisDescription(elem)) # create a 'AxisDescription' object. return axisDescs axisDescriptions = property(_getAxisDescriptionsProperty, None) # Adding classes to represent gml:grid and gml:rectifiedgrid. One of these is used for the cvg.grid property # (where cvg is a member of the contents dictionary) # There is no simple way to convert the offset values in a rectifiedgrid grid to real values without CRS understanding, # therefore this is beyond the current scope of owslib, so the representation here is purely to provide access # to the information in the GML. 
class Grid(object): ''' Simple grid class to provide axis and value information for a gml grid ''' def __init__(self, grid): self.axislabels = [] self.dimension = None self.lowlimits = [] self.highlimits = [] if grid is not None: self.dimension = int(grid.get('dimension')) self.lowlimits = grid.find( '{http://www.opengis.net/gml}limits/{http://www.opengis.net/gml}GridEnvelope/{http://www.opengis.net/gml}low').text.split(' ') # noqa self.highlimits = grid.find( '{http://www.opengis.net/gml}limits/{http://www.opengis.net/gml}GridEnvelope/{http://www.opengis.net/gml}high').text.split(' ') # noqa for axis in grid.findall('{http://www.opengis.net/gml}axisName'): self.axislabels.append(axis.text) class RectifiedGrid(Grid): ''' RectifiedGrid class, extends Grid with additional offset vector information ''' def __init__(self, rectifiedgrid): super(RectifiedGrid, self).__init__(rectifiedgrid) self.origin = rectifiedgrid.find( '{http://www.opengis.net/gml}origin/{http://www.opengis.net/gml}pos').text.split() self.offsetvectors = [] for offset in rectifiedgrid.findall('{http://www.opengis.net/gml}offsetVector'): self.offsetvectors.append(offset.text.split()) class AxisDescription(object): ''' Class to represent the AxisDescription element optionally found as part of the RangeSet and used to define ordinates of additional dimensions such as wavelength bands or pressure levels''' def __init__(self, axisdescElem): self.name = self.label = None self.values = [] for elem in axisdescElem.getchildren(): if elem.tag == ns('name'): self.name = elem.text elif elem.tag == ns('label'): self.label = elem.text elif elem.tag == ns('values'): for child in elem.getchildren(): self.values.append(child.text)
Python
BSD-3-Clause
ferreteleco/OWSLib/owslib/coverage/wcs100.py
0139420d-b94c-4483-a7a1-f132676747b1
[{"tag": "NAME", "value": "Sean C. Gillies", "start": 137, "end": 152, "context": "======================\n# Copyright (c) 2004, 2006 Sean C. Gillies\n# Copyright (c) 2007 STFC <http://www.stfc.ac.uk>"}, {"tag": "NAME", "value": "Dominic Lowe", "start": 228, "end": 240, "context": " <http://www.stfc.ac.uk>\n#\n# Authors :\n# Dominic Lowe <d.lowe@rl.ac.uk>\n#\n# Contact email: d.lowe@rl.ac"}, {"tag": "EMAIL", "value": "d.lowe@rl.ac.uk", "start": 242, "end": 257, "context": "tfc.ac.uk>\n#\n# Authors :\n# Dominic Lowe <d.lowe@rl.ac.uk>\n#\n# Contact email: d.lowe@rl.ac.uk\n# ==========="}, {"tag": "EMAIL", "value": "d.lowe@rl.ac.uk", "start": 278, "end": 293, "context": "Dominic Lowe <d.lowe@rl.ac.uk>\n#\n# Contact email: d.lowe@rl.ac.uk\n# ==============================================="}]
[{"tag": "NAME", "value": "Sean C. Gillies", "start": 137, "end": 152, "context": "======================\n# Copyright (c) 2004, 2006 Sean C. Gillies\n# Copyright (c) 2007 STFC <http://www.stfc.ac.uk>"}, {"tag": "NAME", "value": "Dominic Lowe", "start": 228, "end": 240, "context": " <http://www.stfc.ac.uk>\n#\n# Authors :\n# Dominic Lowe <d.lowe@rl.ac.uk>\n#\n# Contact email: d.lowe@rl.ac"}, {"tag": "EMAIL", "value": "d.lowe@rl.ac.uk", "start": 242, "end": 257, "context": "tfc.ac.uk>\n#\n# Authors :\n# Dominic Lowe <d.lowe@rl.ac.uk>\n#\n# Contact email: d.lowe@rl.ac.uk\n# ==========="}, {"tag": "EMAIL", "value": "d.lowe@rl.ac.uk", "start": 278, "end": 293, "context": "Dominic Lowe <d.lowe@rl.ac.uk>\n#\n# Contact email: d.lowe@rl.ac.uk\n# ==============================================="}]
// // System.CodeDom CodeDirectiveCollection class // // Authors: // Marek Safar (marek.safar@seznam.cz) // Sebastien Pouliot <sebastien@ximian.com> // // (C) 2004 Ximian, Inc. // Copyright (C) 2005 Novell, Inc (http://www.novell.com) // // Permission is hereby granted, free of charge, to any person obtaining // a copy of this software and associated documentation files (the // "Software"), to deal in the Software without restriction, including // without limitation the rights to use, copy, modify, merge, publish, // distribute, sublicense, and/or sell copies of the Software, and to // permit persons to whom the Software is furnished to do so, subject to // the following conditions: // // The above copyright notice and this permission notice shall be // included in all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, // EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF // MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND // NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE // LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION // OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION // WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
// #if NET_2_0 using System.Runtime.InteropServices; namespace System.CodeDom { [Serializable] [ComVisible (true), ClassInterface (ClassInterfaceType.AutoDispatch)] public class CodeDirectiveCollection: System.Collections.CollectionBase { public CodeDirectiveCollection () { } public CodeDirectiveCollection (CodeDirective[] value) { AddRange (value); } public CodeDirectiveCollection (CodeDirectiveCollection value) { AddRange (value); } public CodeDirective this [int index] { get { return (CodeDirective) List [index]; } set { List [index] = value; } } public int Add (CodeDirective value) { return List.Add (value); } public void AddRange (CodeDirective[] value) { if (value == null) { throw new ArgumentNullException ("value"); } for (int i = 0; i < value.Length; i++) { Add (value[i]); } } public void AddRange (CodeDirectiveCollection value) { if (value == null) { throw new ArgumentNullException ("value"); } int count = value.Count; for (int i = 0; i < count; i++) { Add (value[i]); } } public bool Contains (CodeDirective value) { return List.Contains (value); } public void CopyTo (CodeDirective[] array, int index) { List.CopyTo (array, index); } public int IndexOf (CodeDirective value) { return List.IndexOf (value); } public void Insert (int index, CodeDirective value) { List.Insert (index, value); } public void Remove (CodeDirective value) { List.Remove (value); } } } #endif
C#
MIT
GrapeCity/pagefx/mono/mcs/class/System/System.CodeDom/CodeDirectiveCollection.cs
747eeb24-6cf2-4502-a466-71b8d1134813
[{"tag": "EMAIL", "value": "sebastien@ximian.com", "start": 133, "end": 153, "context": "r (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <sebastien@ximian.com>\r\n//\r\n// (C) 2004 Ximian, Inc.\r\n// Copyright (C) "}, {"tag": "NAME", "value": "Marek Safar", "start": 73, "end": 84, "context": "odeDirectiveCollection class\r\n//\r\n// Authors:\r\n//\tMarek Safar (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <s"}, {"tag": "EMAIL", "value": "marek.safar@seznam.cz", "start": 86, "end": 107, "context": "ollection class\r\n//\r\n// Authors:\r\n//\tMarek Safar (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <sebastien@ximian.com>\r\n/"}, {"tag": "NAME", "value": "Sebastien Pouliot", "start": 113, "end": 130, "context": "hors:\r\n//\tMarek Safar (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <sebastien@ximian.com>\r\n//\r\n// (C) 2004 Ximian, "}]
[{"tag": "EMAIL", "value": "sebastien@ximian.com", "start": 133, "end": 153, "context": "r (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <sebastien@ximian.com>\r\n//\r\n// (C) 2004 Ximian, Inc.\r\n// Copyright (C) "}, {"tag": "NAME", "value": "Marek Safar", "start": 73, "end": 84, "context": "odeDirectiveCollection class\r\n//\r\n// Authors:\r\n//\tMarek Safar (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <s"}, {"tag": "EMAIL", "value": "marek.safar@seznam.cz", "start": 86, "end": 107, "context": "ollection class\r\n//\r\n// Authors:\r\n//\tMarek Safar (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <sebastien@ximian.com>\r\n/"}, {"tag": "NAME", "value": "Sebastien Pouliot", "start": 113, "end": 130, "context": "hors:\r\n//\tMarek Safar (marek.safar@seznam.cz)\r\n//\tSebastien Pouliot <sebastien@ximian.com>\r\n//\r\n// (C) 2004 Ximian, "}]
<?php namespace App\Fuzzy\Memberships; /** * Class RMembershipFunction (extends TrapezoidalMembershipFunction) * @package App\Fuzzy\Memberships * @copyright (c) 2020 * @author Tomas Bodnar <bodnarto@gmail.com> */ class RMembershipFunction extends TrapezoidalMembershipFunction implements MembershipFunction { /** * RMembershipFunction constructor. * @param float $a * @param float $b */ public function __construct(float $a, float $b) { parent::__construct($a, $b, INF, INF); } }
PHP
MIT
bodny/fususapp/app/Fuzzy/Memberships/RMembershipFunction.php
a169456b-de06-4c71-b051-ef49fc2167b7
[{"tag": "NAME", "value": "Tomas Bodnar", "start": 182, "end": 194, "context": "zzy\\Memberships\n * @copyright (c) 2020\n * @author Tomas Bodnar <bodnarto@gmail.com>\n */\nclass RMembershipFunctio"}, {"tag": "EMAIL", "value": "bodnarto@gmail.com", "start": 196, "end": 214, "context": "s\n * @copyright (c) 2020\n * @author Tomas Bodnar <bodnarto@gmail.com>\n */\nclass RMembershipFunction extends Trapezoida"}]
[{"tag": "NAME", "value": "Tomas Bodnar", "start": 182, "end": 194, "context": "zzy\\Memberships\n * @copyright (c) 2020\n * @author Tomas Bodnar <bodnarto@gmail.com>\n */\nclass RMembershipFunctio"}, {"tag": "EMAIL", "value": "bodnarto@gmail.com", "start": 196, "end": 214, "context": "s\n * @copyright (c) 2020\n * @author Tomas Bodnar <bodnarto@gmail.com>\n */\nclass RMembershipFunction extends Trapezoida"}]
#!/usr/bin/env python # ------------------------------------------------------------------------------------------------------% # Created by "Thieu Nguyen" at 15:39, 20/04/2020 % # % # Email: nguyenthieu2102@gmail.com % # Homepage: https://www.researchgate.net/profile/Thieu_Nguyen6 % # Github: https://github.com/thieu1995 % #-------------------------------------------------------------------------------------------------------% from opfunu.cec.cec2005.root import Root from numpy import sum, dot, cos, exp, pi, e, sqrt class Model(Root): def __init__(self, f_name="Shifted Rotated Ackley's Function with Global Optimum on Bounds", f_shift_data_file="data_ackley", f_ext='.txt', f_bias=-140, f_matrix=None): Root.__init__(self, f_name, f_shift_data_file, f_ext, f_bias) self.f_matrix = f_matrix def _main__(self, solution=None): problem_size = len(solution) if problem_size > 100: print("CEC 2005 not support for problem size > 100") return 1 if problem_size == 10 or problem_size == 30 or problem_size == 50: self.f_matrix = "ackley_M_D" + str(problem_size) else: print("CEC 2005 F8 function only support problem size 10, 30, 50") return 1 shift_data = self.load_shift_data()[:problem_size] t1 = int(problem_size/2) for j in range(0, t1-1): shift_data[2*(j+1)-1] = -32 * shift_data[2*(j+1)] matrix = self.load_matrix_data(self.f_matrix) z = dot((solution - shift_data), matrix) result = -20 * exp(-0.2 * sum(z ** 2) / problem_size) - exp(sum(cos(2 * pi * z))) + 20 + e return result + self.f_bias
Python
MIT
ElliottP-13/opfunu/opfunu/cec/cec2005/F8.py
e5e72e70-8f43-41a3-866b-8b092a8d01f0
[{"tag": "NAME", "value": "Thieu Nguyen", "start": 142, "end": 154, "context": "----------------------------------%\n# Created by \"Thieu Nguyen\" at 15:39, 20/04/2020 "}, {"tag": "EMAIL", "value": "nguyenthieu2102@gmail.com", "start": 360, "end": 385, "context": " %\n# Email: nguyenthieu2102@gmail.com "}, {"tag": "USERNAME", "value": "thieu1995", "start": 591, "end": 600, "context": " %\n# Github: https://github.com/thieu1995 "}]
[{"tag": "NAME", "value": "Thieu Nguyen", "start": 142, "end": 154, "context": "----------------------------------%\n# Created by \"Thieu Nguyen\" at 15:39, 20/04/2020 "}, {"tag": "EMAIL", "value": "nguyenthieu2102@gmail.com", "start": 360, "end": 385, "context": " %\n# Email: nguyenthieu2102@gmail.com "}, {"tag": "USERNAME", "value": "thieu1995", "start": 591, "end": 600, "context": " %\n# Github: https://github.com/thieu1995 "}]
#if defined(WIN32) //Winows #error Windows is not Supported #else //Unix //Usado para fechamento de sockets #include <unistd.h> #endif //Usando operadores ms, ns, etc.. #include <chrono> using namespace std::chrono_literals; #include <thread> #include <random> #include "socket.hpp" #include "client_tui.hpp" #include "tui.hpp" using namespace tui::text_literals; int main(int argc, char const *argv[]) { std::srand(time(NULL)); tui::clear(); tui::printl("Inicializando Zaplan v0.2 - Cliente"_fgre); //Caso nenhum ip de servidor seja fornecido const std::string defaultServerIP("127.0.0.1"); const std::string *serverIp = &defaultServerIP; tui::print("Digite o IP do servidor com o qual deseja se conectar (Enter para localhost): "); std::string input = tui::readline(); if (!input.empty()) serverIp = &input; //Configura o endereço do servidor para a porta 4545 IPADDR4 serverAddress{*serverIp, 4545}; tui::print("Digite seu " + "nick"_fwhi + ": "); std::string nick = tui::readline(); Client client; try { client.ConnectAndLogin(serverAddress, nick); } catch(ConnectionFailedException &e) { return -1; } client.Start(); tui::printl("Zaplan (Cliente) v0.2 encerrado com sucesso."_fyel); return 0; }
C++
MIT
marcuscastelo/socket-messenger/src/client/main.cpp
38b1f688-01dd-47f0-83db-28e34051331b
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 609, "end": 618, "context": " fornecido\n const std::string defaultServerIP(\"127.0.0.1\");\n const std::string *serverIp = &defaultServ"}]
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 609, "end": 618, "context": " fornecido\n const std::string defaultServerIP(\"127.0.0.1\");\n const std::string *serverIp = &defaultServ"}]
'use strict'; const Controller = require('egg').Controller; class HomeController extends Controller { async putLog() { const sls = this.ctx.app.sls; const topic = String(Date.now()); const body = this.ctx.request.body; const logGroup = sls.createLogGroup({ topic, source: '127.0.0.1' }); logGroup.setLog({ time: new Date(), contents: body, }); await sls.postLogstoreLogs('egg-sls-post-log', 'egg-sls-post-log', logGroup); this.ctx.body = { topic }; } async getLogs() { const sls = this.ctx.app.sls; const topic = this.ctx.params.topic; const res = await sls.getLogs('egg-sls-post-log', 'egg-sls-post-log', { topic }); this.ctx.body = res.logs; } } module.exports = HomeController;
JavaScript
MIT
eggjs/egg-sls/test/fixtures/apps/sls-client/app/controller/home.js
3a0bb8a4-59f7-4b92-8147-b61b0e58f7b5
[]
[]
/** * UGENE - Integrated Bioinformatics Tools. * Copyright (C) 2008-2012 UniPro <ugene@unipro.ru> * http://ugene.unipro.ru * * This program is free software; you can redistribute it and/or * modify it under the terms of the GNU General Public License * as published by the Free Software Foundation; either version 2 * of the License, or (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, * MA 02110-1301, USA. */ #include "PhyTreeObject.h" namespace U2 { GObject* PhyTreeObject::clone(const U2DbiRef&, U2OpStatus&) const { PhyTreeObject* cln = new PhyTreeObject(tree, getGObjectName(), getGHintsMap()); cln->setIndexInfo(getIndexInfo()); return cln; } bool PhyTreeObject::treesAreAlike( const PhyTree& tree1, const PhyTree& tree2 ) { QList<const PhyNode*> track1 = tree1->collectNodes(); QList<const PhyNode*> track2 = tree2->collectNodes(); if (track1.count() != track2.count()) { return false; } foreach (const PhyNode* n1, track1) { if (n1->name.isEmpty()) { continue; } foreach (const PhyNode* n2, track2) { if (n2->name != n1->name) { continue; } if (n1->branches.count() != n2->branches.count()) { return false; } } } return true; } const PhyNode* PhyTreeObject::findPhyNodeByName( const QString& name ) { QList<const PhyNode*> nodes = tree.constData()->collectNodes(); foreach (const PhyNode* node, nodes) { if (node->name == name) { return node; } } return NULL; } }//namespace
C++
MIT
iganna/lspec/ugene/src/corelibs/U2Core/src/gobjects/PhyTreeObject.cpp
ebb96d35-60c0-4cea-945e-8915dc37efc0
[{"tag": "EMAIL", "value": "ugene@unipro.ru", "start": 83, "end": 98, "context": "rmatics Tools.\n * Copyright (C) 2008-2012 UniPro <ugene@unipro.ru>\n * http://ugene.unipro.ru\n *\n * This program is "}]
[{"tag": "EMAIL", "value": "ugene@unipro.ru", "start": 83, "end": 98, "context": "rmatics Tools.\n * Copyright (C) 2008-2012 UniPro <ugene@unipro.ru>\n * http://ugene.unipro.ru\n *\n * This program is "}]
require_dependency "aca_rails/application_controller" module AcaRails class PasswordResetsController < ApplicationController before_action :use_forgotten_pwd, only: [:new, :create, :edit, :update] def index @q = User.ransack(params[:q]) @users = @q.result(distinct: true).page params[:page] end def new end def create p = params[:email] || params[:user][:email] user = User.find_by_email(p) if user user.send_password_reset redirect_to password_resets_path, :notice => "Email sent with password reset instructions." else redirect_to login_path, :alert => "Email not registered. Password reset email not sent." end end def edit begin @user = User.find_by_password_reset_token!(params[:id]) rescue redirect_to login_path, alert: "Password reset has expired, or wrong url." end end def update @user = User.find_by_password_reset_token!(params[:id]) #@user.updating_password = true if @user.password_reset_sent_at < 2.hours.ago redirect_to new_password_reset_path, :alert => "Password reset has expired." elsif @user.update_attributes(user_params) redirect_to login_url, :notice => "Password has been reset." else render :edit end end private def user_params params.require(:user).permit(:password, :password_confirmation) end end end
Ruby
Apache-2.0
MarceloFossRJ/aca-rails/app/controllers/aca_rails/password_resets_controller.rb
0e32ab47-1f8e-44ed-9863-f06c046d51fd
[]
[]
/* ** header.c for in /home/trambe_m/CPE_2014_corewar/ASM/src ** ** Made by Manuel Trambert ** Login <trambe_m@epitech.net> ** ** Started on Sat Apr 11 03:50:12 2015 Manuel Trambert ** Last update Sun Oct 16 16:04:28 2016 */ #include "my.h" void get_name(char *str, t_tmp *ptr) { int i; int e; i = -1; e = 0; while (e < PROG_NAME_LENGTH + 1 && e < 128) { ptr->head->prog_name[e] = '\0'; e += 1; } e = 0; while (str[++i] != '"'); while (str[++i] != '"') { ptr->head->prog_name[e] = str[i]; e += 1; } ptr->head->prog_name[e] = '\0'; } int get_comment(char *str, t_tmp *ptr) { int i; static int e = 0; i = -1; if (e == 0) { while (e < COMMENT_LENGTH + 1) { ptr->head->comment[e] = '\0'; e += 1; } e = 0; } if (my_strncmp(str, ".comment", 7) == 0) while (str[++i] != '"'); while (str[++i] != '"' && str[i] != '\0') { ptr->head->comment[e] = str[i]; e += 1; } ptr->head->comment[e] = '\0'; if (str[i] == '"') return (1); return (0); }
C
MIT
KASOGIT/corewar/ASM/src/header.c
844f2f97-106b-4035-9145-5a7b6b65be8c
[{"tag": "EMAIL", "value": "trambe_m@epitech.net", "start": 106, "end": 126, "context": "SM/src\n** \n** Made by Manuel Trambert\n** Login <trambe_m@epitech.net>\n** \n** Started on Sat Apr 11 03:50:12 2015 Manu"}, {"tag": "NAME", "value": "Manuel Trambert", "start": 172, "end": 187, "context": ".net>\n** \n** Started on Sat Apr 11 03:50:12 2015 Manuel Trambert\n** Last update Sun Oct 16 16:04:28 2016 \n*/\n\n#inc"}, {"tag": "NAME", "value": "Manuel Trambert", "start": 78, "end": 93, "context": "/trambe_m/CPE_2014_corewar/ASM/src\n** \n** Made by Manuel Trambert\n** Login <trambe_m@epitech.net>\n** \n** Started "}]
[{"tag": "EMAIL", "value": "trambe_m@epitech.net", "start": 106, "end": 126, "context": "SM/src\n** \n** Made by Manuel Trambert\n** Login <trambe_m@epitech.net>\n** \n** Started on Sat Apr 11 03:50:12 2015 Manu"}, {"tag": "NAME", "value": "Manuel Trambert", "start": 172, "end": 187, "context": ".net>\n** \n** Started on Sat Apr 11 03:50:12 2015 Manuel Trambert\n** Last update Sun Oct 16 16:04:28 2016 \n*/\n\n#inc"}, {"tag": "NAME", "value": "Manuel Trambert", "start": 78, "end": 93, "context": "/trambe_m/CPE_2014_corewar/ASM/src\n** \n** Made by Manuel Trambert\n** Login <trambe_m@epitech.net>\n** \n** Started "}]
--- layout: post rel_link: "11_1_0_pre6" title: "CMSSW_11_1_0_pre6" date: 2020-04-15 21:39:23 categories: cmssw relmajor: 11 relminor: 1 relsubminor: 0 relpre: _pre6 --- # CMSSW_11_1_0_pre6 #### Changes since CMSSW_11_1_0_pre5: [compare to previous](https://github.com/cms-sw/cmssw/compare/CMSSW_11_1_0_pre5...CMSSW_11_1_0_pre6) 1. [29478](http://github.com/cms-sw/cmssw/pull/29478){:target="_blank"} from **@tschuh**: L1Trigger/TrackerDTC fix for SLHCUpgradeSimulations/Geometry unit test failure (#29474) `l1` `upgrade` created: 2020-04-15 10:05:43 merged: 2020-04-15 18:32:42 2. [29477](http://github.com/cms-sw/cmssw/pull/29477){:target="_blank"} from **@smuzaffar**: [PY3] Fix duplicate dictionary checker for python3 `core` created: 2020-04-15 09:48:02 merged: 2020-04-15 18:29:58 3. [29475](http://github.com/cms-sw/cmssw/pull/29475){:target="_blank"} from **@smuzaffar**: updated lost dictionaries mapping for L1T Phase2 `core` created: 2020-04-15 07:29:59 merged: 2020-04-15 08:40:22 4. [29473](http://github.com/cms-sw/cmssw/pull/29473){:target="_blank"} from **@Dr15Jones**: PoolSource's firstLuminosityBlockForEachRun properly skips Lumis `core` created: 2020-04-14 21:50:21 merged: 2020-04-15 08:11:44 5. [29469](http://github.com/cms-sw/cmssw/pull/29469){:target="_blank"} from **@bsunanda**: Run3-hcx253 Make additional tests for DetId setting in SIM/RECO `geometry` `simulation` created: 2020-04-13 19:24:13 merged: 2020-04-14 07:38:17 6. [29467](http://github.com/cms-sw/cmssw/pull/29467){:target="_blank"} from **@swozniewski**: Update PR #28417, add era modifier to TauRefProducer to read old data format from existing files `dqm` `operations` `pdmv` `upgrade` created: 2020-04-13 15:27:30 merged: 2020-04-14 16:34:36 7. [29466](http://github.com/cms-sw/cmssw/pull/29466){:target="_blank"} from **@jfernan2**: [DT] DQM Validation extension `dqm` created: 2020-04-13 15:25:13 merged: 2020-04-14 07:36:14 8. 
[29465](http://github.com/cms-sw/cmssw/pull/29465){:target="_blank"} from **@swozniewski**: Update PR #28417, TauSkimPFTausSelectedForMuTau reading anti-muon ID from new data format `pdmv` `reconstruction` created: 2020-04-13 14:31:46 merged: 2020-04-14 08:33:42 9. [29461](http://github.com/cms-sw/cmssw/pull/29461){:target="_blank"} from **@mmusich**: Re-add SiPixelPhase1TrackResidualsAnalyzer to HI scenario `dqm` created: 2020-04-12 19:00:34 merged: 2020-04-13 18:11:31 10. [29459](http://github.com/cms-sw/cmssw/pull/29459){:target="_blank"} from **@Dr15Jones**: Use LogTrace in IsolatedPixelTrackCandidateL1TProducer `alca` created: 2020-04-11 00:30:01 merged: 2020-04-11 08:42:09 11. [29457](http://github.com/cms-sw/cmssw/pull/29457){:target="_blank"} from **@srimanob**: Clean up Run-Dependent workflows, MultiRun harvesting for Run-Dependent MC, and update/bug fix for runTheMatrix `pdmv` `upgrade` created: 2020-04-10 21:25:25 merged: 2020-04-14 19:35:30 12. [29453](http://github.com/cms-sw/cmssw/pull/29453){:target="_blank"} from **@slava77**: improve exception reporting in HLTTauRefProducer `dqm` created: 2020-04-10 14:32:26 merged: 2020-04-11 18:09:27 13. [29451](http://github.com/cms-sw/cmssw/pull/29451){:target="_blank"} from **@silviodonato**: Remove PhysicsTools/RooStatsCms `analysis` `core` created: 2020-04-10 09:20:50 merged: 2020-04-14 14:45:28 14. [29450](http://github.com/cms-sw/cmssw/pull/29450){:target="_blank"} from **@perrotta**: Remove TICL dedicated wfs, and adjust the name for the pfTICLProducer automatic config `pdmv` `reconstruction` `upgrade` created: 2020-04-10 08:22:28 merged: 2020-04-14 18:17:52 15. [29442](http://github.com/cms-sw/cmssw/pull/29442){:target="_blank"} from **@civanch**: Migration to Geant4 10.6p01 `simulation` created: 2020-04-09 14:58:00 merged: 2020-04-15 08:53:19 16. 
[29440](http://github.com/cms-sw/cmssw/pull/29440){:target="_blank"} from **@bsunanda**: Run3-gem34 Add a GE21 version with 16 eta partitions `geometry` `simulation` created: 2020-04-09 14:01:05 merged: 2020-04-13 08:37:57 17. [29438](http://github.com/cms-sw/cmssw/pull/29438){:target="_blank"} from **@schneiml**: DQM: Add a local vs. global section to the README `dqm` created: 2020-04-09 12:39:36 merged: 2020-04-09 20:09:17 18. [29437](http://github.com/cms-sw/cmssw/pull/29437){:target="_blank"} from **@mandrenguyen**: Reenable relval wfs 310, 311 and 312 `pdmv` `upgrade` created: 2020-04-09 10:35:57 merged: 2020-04-11 08:41:27 19. [29433](http://github.com/cms-sw/cmssw/pull/29433){:target="_blank"} from **@gkrintir**: fixed bug in UL16 PU profile `simulation` created: 2020-04-08 21:59:44 merged: 2020-04-09 18:03:30 20. [29432](http://github.com/cms-sw/cmssw/pull/29432){:target="_blank"} from **@slava77**: add missing initializations in pat::Muon (minimal catch up to #28212 and #29324) `analysis` `reconstruction` created: 2020-04-08 21:27:12 merged: 2020-04-09 20:10:56 21. [29429](http://github.com/cms-sw/cmssw/pull/29429){:target="_blank"} from **@fabiocos**: Add a DDFilteredView test in DetectorDescription/DDCMS `geometry` created: 2020-04-08 14:40:50 merged: 2020-04-11 08:49:33 22. [29428](http://github.com/cms-sw/cmssw/pull/29428){:target="_blank"} from **@cms-sw**: Running code-format for reconstructio `reconstruction` created: 2020-04-08 14:09:51 merged: 2020-04-09 08:06:01 23. [29427](http://github.com/cms-sw/cmssw/pull/29427){:target="_blank"} from **@cms-sw**: Running code-format for simulation-upgrade `simulation` `upgrade` created: 2020-04-08 14:09:07 merged: 2020-04-09 08:09:08 24. [29426](http://github.com/cms-sw/cmssw/pull/29426){:target="_blank"} from **@cms-sw**: Running code-format for simulation `simulation` created: 2020-04-08 14:08:23 merged: 2020-04-09 08:06:10 25. 
[29425](http://github.com/cms-sw/cmssw/pull/29425){:target="_blank"} from **@cms-sw**: Running code-format for reconstruction-upgrade `reconstruction` `upgrade` created: 2020-04-08 14:07:21 merged: 2020-04-09 07:52:38 26. [29424](http://github.com/cms-sw/cmssw/pull/29424){:target="_blank"} from **@cms-sw**: Running code-format for dqm `dqm` created: 2020-04-08 14:06:20 merged: 2020-04-08 18:50:32 27. [29423](http://github.com/cms-sw/cmssw/pull/29423){:target="_blank"} from **@cms-sw**: Running code-format for geometry-upgrade `geometry` `upgrade` created: 2020-04-08 14:05:55 merged: 2020-04-09 07:53:04 28. [29422](http://github.com/cms-sw/cmssw/pull/29422){:target="_blank"} from **@cms-sw**: Running code-format for geometry `geometry` created: 2020-04-08 14:04:56 merged: 2020-04-09 07:53:25 29. [29418](http://github.com/cms-sw/cmssw/pull/29418){:target="_blank"} from **@cms-sw**: Running code-format for analysis `analysis` created: 2020-04-08 14:03:18 merged: 2020-04-10 18:38:28 30. [29417](http://github.com/cms-sw/cmssw/pull/29417){:target="_blank"} from **@cms-sw**: Running code-format for alca `alca` created: 2020-04-08 13:54:03 merged: 2020-04-13 08:31:58 31. [29413](http://github.com/cms-sw/cmssw/pull/29413){:target="_blank"} from **@rekovic**: pr11x L1T Configure EMTF using stage2L1Trigger modifiers `l1` created: 2020-04-08 10:11:57 merged: 2020-04-13 18:09:21 32. [29409](http://github.com/cms-sw/cmssw/pull/29409){:target="_blank"} from **@bsunanda**: Run3-sim65 Try to fix 0 TrackID for saved tracks `simulation` created: 2020-04-07 21:30:22 merged: 2020-04-09 19:55:33 33. [29407](http://github.com/cms-sw/cmssw/pull/29407){:target="_blank"} from **@fabiocos**: MTD Geometry: DD4hep migration, preliminary update and cleaning of ideal geometry test `geometry` `simulation` `upgrade` created: 2020-04-07 16:26:46 merged: 2020-04-10 08:35:15 34. 
[29404](http://github.com/cms-sw/cmssw/pull/29404){:target="_blank"} from **@guitargeek**: Clean BuildFiles in Validation Subsystem `core` `dqm` `generators` `geometry` `l1` `reconstruction` created: 2020-04-07 11:22:47 merged: 2020-04-14 13:30:03 35. [29403](http://github.com/cms-sw/cmssw/pull/29403){:target="_blank"} from **@smuzaffar**: [Heppy] drop unnecessary dependency on rootpy `analysis` created: 2020-04-07 11:01:33 merged: 2020-04-08 07:11:53 36. [29401](http://github.com/cms-sw/cmssw/pull/29401){:target="_blank"} from **@srimanob**: Define proper BS for 2018 premixing workflow if starting from scratch `pdmv` `upgrade` created: 2020-04-07 09:29:57 merged: 2020-04-09 20:07:31 37. [29400](http://github.com/cms-sw/cmssw/pull/29400){:target="_blank"} from **@PFCal-dev**: [HGCAL trigger] V11 geometry compatibility `l1` `upgrade` created: 2020-04-07 08:39:43 merged: 2020-04-14 07:32:54 38. [29396](http://github.com/cms-sw/cmssw/pull/29396){:target="_blank"} from **@smuzaffar**: Drop rootpy related unit tests `analysis` created: 2020-04-06 10:16:02 merged: 2020-04-08 07:12:36 39. [29395](http://github.com/cms-sw/cmssw/pull/29395){:target="_blank"} from **@smuzaffar**: added unit test to check for clang-tidy changes `core` created: 2020-04-04 22:54:01 merged: 2020-04-06 08:55:28 40. [29394](http://github.com/cms-sw/cmssw/pull/29394){:target="_blank"} from **@smuzaffar**: Run cuda tests only if cuda supports gcc version `simulation` created: 2020-04-04 21:32:52 merged: 2020-04-06 08:29:34 41. [29393](http://github.com/cms-sw/cmssw/pull/29393){:target="_blank"} from **@silviodonato**: Replace validationHarvestingNoHLT with validationHarvesting in HI workflows `pdmv` `upgrade` created: 2020-04-04 09:19:40 merged: 2020-04-06 17:00:09 42. [29388](http://github.com/cms-sw/cmssw/pull/29388){:target="_blank"} from **@bsunanda**: Run3-sim64 Allow CherenkovAnalysis to work with DDD as well as DD4Hep `simulation` created: 2020-04-03 11:38:15 merged: 2020-04-06 08:28:51 43. 
[29387](http://github.com/cms-sw/cmssw/pull/29387){:target="_blank"} from **@ianna**: [DD4hep] Expose Full Geo History on Request `geometry` created: 2020-04-03 10:15:29 merged: 2020-04-09 20:05:25 44. [29384](http://github.com/cms-sw/cmssw/pull/29384){:target="_blank"} from **@Dr15Jones**: Test inter process random state transfers `core` created: 2020-04-02 18:19:05 merged: 2020-04-04 08:32:49 45. [29383](http://github.com/cms-sw/cmssw/pull/29383){:target="_blank"} from **@rekovic**: Era modifiers L1T for Phase2, Run3, and Run2_2018. `l1` `operations` created: 2020-04-02 17:55:20 merged: 2020-04-03 08:30:56 46. [29380](http://github.com/cms-sw/cmssw/pull/29380){:target="_blank"} from **@bsunanda**: Run3-TB55 Remove direct access to DDCompactView in the SIM code for HCAL TB `geometry` `simulation` created: 2020-04-02 11:45:33 merged: 2020-04-03 08:32:56 47. [29379](http://github.com/cms-sw/cmssw/pull/29379){:target="_blank"} from **@slomeo**: Fixed all the "hidden" overlaps inside the CSC subsystem `geometry` `upgrade` created: 2020-04-02 06:54:44 merged: 2020-04-04 08:27:12 48. [29378](http://github.com/cms-sw/cmssw/pull/29378){:target="_blank"} from **@cms-tsg-storm**: Remove use of TSG internal GTs `pdmv` `upgrade` created: 2020-04-02 06:21:51 merged: 2020-04-02 08:40:20 49. [29375](http://github.com/cms-sw/cmssw/pull/29375){:target="_blank"} from **@guitargeek**: PFEGammaProducer to global producer `reconstruction` created: 2020-04-01 21:28:03 merged: 2020-04-06 08:18:59 50. [29374](http://github.com/cms-sw/cmssw/pull/29374){:target="_blank"} from **@bendavid**: Restrict timestamps in PFCandidates to reliable cases `reconstruction` `upgrade` created: 2020-04-01 21:21:07 merged: 2020-04-10 08:14:44 51. [29371](http://github.com/cms-sw/cmssw/pull/29371){:target="_blank"} from **@bsunanda**: Run3-sim63 Remove reference to DetectorDescription in some packages `simulation` created: 2020-04-01 17:59:37 merged: 2020-04-02 08:19:58 52. 
[29370](http://github.com/cms-sw/cmssw/pull/29370){:target="_blank"} from **@andrius-k**: DQM: Syncing MEData with DQMNet MEData before asserting `dqm` created: 2020-04-01 17:28:12 merged: 2020-04-07 08:13:06 53. [29369](http://github.com/cms-sw/cmssw/pull/29369){:target="_blank"} from **@dildick**: Protect L1T producers against ME0 hits `l1` created: 2020-04-01 16:13:58 merged: 2020-04-13 20:54:11 54. [29368](http://github.com/cms-sw/cmssw/pull/29368){:target="_blank"} from **@ianna**: [DD4hep] Children by Name and CopyNo on a Path `geometry` `simulation` created: 2020-04-01 15:44:13 merged: 2020-04-04 08:25:08 55. [29366](http://github.com/cms-sw/cmssw/pull/29366){:target="_blank"} from **@ghugo83**: Solve all overlaps detected in Phase I Tracker `geometry` created: 2020-04-01 15:14:08 merged: 2020-04-04 08:20:51 56. [29364](http://github.com/cms-sw/cmssw/pull/29364){:target="_blank"} from **@bsunanda**: Phase2-hgx234 Transfer the constants for HGCal and also HGCalTB for DDD/DD4Hep `geometry` `upgrade` created: 2020-04-01 12:51:14 merged: 2020-04-05 08:21:20 57. [29363](http://github.com/cms-sw/cmssw/pull/29363){:target="_blank"} from **@ggovi**: Support of condition updates per-lumisection `alca` `db` created: 2020-04-01 09:45:46 merged: 2020-04-07 08:11:55 58. [29361](http://github.com/cms-sw/cmssw/pull/29361){:target="_blank"} from **@jpata**: Particle Flow generator truth information `dqm` `simulation` created: 2020-03-31 18:16:57 merged: 2020-04-02 08:25:38 59. [29360](http://github.com/cms-sw/cmssw/pull/29360){:target="_blank"} from **@bsunanda**: Run3-TB54 First step of transition to dd4hep for HCal TB simulation `geometry` created: 2020-03-31 17:03:22 merged: 2020-04-01 08:36:47 60. [29357](http://github.com/cms-sw/cmssw/pull/29357){:target="_blank"} from **@cms-tsg-storm**: HLT migration to 11_1_0_pre5 and update of TSG tests `hlt` created: 2020-03-31 15:31:19 merged: 2020-04-01 17:06:27 61. 
[29355](http://github.com/cms-sw/cmssw/pull/29355){:target="_blank"} from **@dmeuser**: Additional Tracker DQM residual plots `dqm` created: 2020-03-31 13:10:03 merged: 2020-04-10 08:50:56 62. [29351](http://github.com/cms-sw/cmssw/pull/29351){:target="_blank"} from **@schneiml**: DQM: Fix fallback in DQMRootSource `dqm` created: 2020-03-31 10:17:31 merged: 2020-04-01 08:34:20 63. [29348](http://github.com/cms-sw/cmssw/pull/29348){:target="_blank"} from **@rekovic**: New package DataFormats/L1TCalorimeterPhase2 for Phase2 L1T `l1` `upgrade` created: 2020-03-31 00:00:22 merged: 2020-04-14 18:02:30 64. [29347](http://github.com/cms-sw/cmssw/pull/29347){:target="_blank"} from **@rekovic**: New package DataFormats/L1TParticleFlow for Phase2 L1Trigger `l1` `upgrade` created: 2020-03-30 23:52:19 merged: 2020-04-14 18:05:33 65. [29346](http://github.com/cms-sw/cmssw/pull/29346){:target="_blank"} from **@rekovic**: New package DataFormats/L1TCorrelator for Phase2 L1Trigger `l1` `upgrade` created: 2020-03-30 23:46:26 merged: 2020-04-14 18:02:20 66. [29343](http://github.com/cms-sw/cmssw/pull/29343){:target="_blank"} from **@schneiml**: DQM: fix some autoDQM.py harvesting sequences `dqm` `operations` created: 2020-03-30 18:24:56 merged: 2020-04-02 08:37:35 67. [29342](http://github.com/cms-sw/cmssw/pull/29342){:target="_blank"} from **@bsunanda**: Run3-TB53 Update some of the scripts for H2 TB simularion `simulation` created: 2020-03-30 17:56:13 merged: 2020-03-31 08:46:51 68. [29339](http://github.com/cms-sw/cmssw/pull/29339){:target="_blank"} from **@mmusich**: Fix Tracker Alignment all-in-one tool for py3 (again) `alca` created: 2020-03-30 17:18:53 merged: 2020-04-07 07:49:41 69. [29335](http://github.com/cms-sw/cmssw/pull/29335){:target="_blank"} from **@ggovi**: New Service for Updating online Conditions for HLT `alca` `db` `l1` created: 2020-03-30 09:29:47 merged: 2020-04-02 19:24:43 70. 
[29331](http://github.com/cms-sw/cmssw/pull/29331){:target="_blank"} from **@ahinzmann**: Fix MET unclustered energy computation `analysis` `reconstruction` created: 2020-03-28 14:57:17 merged: 2020-03-31 16:49:23 71. [29329](http://github.com/cms-sw/cmssw/pull/29329){:target="_blank"} from **@rovere**: Fix HGCal_disableNoise customization function `simulation` `upgrade` created: 2020-03-27 21:48:43 merged: 2020-03-31 08:44:21 72. [29328](http://github.com/cms-sw/cmssw/pull/29328){:target="_blank"} from **@Dr15Jones**: Use global::EDAnalyzers in GeneratorInterface/Core `generators` created: 2020-03-27 21:20:02 merged: 2020-03-28 15:38:29 73. [29327](http://github.com/cms-sw/cmssw/pull/29327){:target="_blank"} from **@rovere**: Fix TaskPlaceholder dumpPython method `core` created: 2020-03-27 20:28:11 merged: 2020-03-30 19:11:36 74. [29321](http://github.com/cms-sw/cmssw/pull/29321){:target="_blank"} from **@schneiml**: DQM: Allow per-lumi MEs in DQMOneEDAnalyzer `dqm` created: 2020-03-27 11:28:45 merged: 2020-04-14 17:52:16 75. [29318](http://github.com/cms-sw/cmssw/pull/29318){:target="_blank"} from **@srimanob**: Add EventsPerLumi for relvals workflows and prod-like concurrentLS relvals `pdmv` `upgrade` created: 2020-03-27 07:16:09 merged: 2020-04-06 17:54:04 76. [29317](http://github.com/cms-sw/cmssw/pull/29317){:target="_blank"} from **@civanch**: Updated Geant4 overlap checks `simulation` created: 2020-03-27 06:53:55 merged: 2020-03-27 16:29:12 77. [29312](http://github.com/cms-sw/cmssw/pull/29312){:target="_blank"} from **@bsunanda**: Phase2-hgx233 Correct the HGCal code for working with dd4hep `geometry` `upgrade` created: 2020-03-26 13:19:32 merged: 2020-04-01 08:17:30 78. [29309](http://github.com/cms-sw/cmssw/pull/29309){:target="_blank"} from **@kuyoun**: TOP DQM: Fixed MuonRelIso `dqm` created: 2020-03-26 08:50:33 merged: 2020-04-09 19:59:44 79. 
[29308](http://github.com/cms-sw/cmssw/pull/29308){:target="_blank"} from **@christopheralanwest**: Extend maximum pileup in SiPixelStatusScenarioProbabilityRcd tag from 100 to 200 `alca` created: 2020-03-26 02:22:42 merged: 2020-03-27 09:45:17 80. [29307](http://github.com/cms-sw/cmssw/pull/29307){:target="_blank"} from **@hatakeyamak**: PF track propagator fix for pixel tracks `reconstruction` created: 2020-03-26 01:05:55 merged: 2020-03-27 09:47:27 81. [29305](http://github.com/cms-sw/cmssw/pull/29305){:target="_blank"} from **@Dr15Jones**: Use Pythia8ConcurrentHadronizerFilter when possible `generators` created: 2020-03-25 19:34:16 merged: 2020-03-27 09:49:17 82. [29302](http://github.com/cms-sw/cmssw/pull/29302){:target="_blank"} from **@Dr15Jones**: Protect ProductResolvers from unlucky thread pause `core` created: 2020-03-25 16:12:11 merged: 2020-03-27 09:50:48 83. [29301](http://github.com/cms-sw/cmssw/pull/29301){:target="_blank"} from **@Dr15Jones**: Make some HLT modules into global modules `analysis` `hlt` `reconstruction` created: 2020-03-25 14:16:02 merged: 2020-04-01 15:10:20 84. [29300](http://github.com/cms-sw/cmssw/pull/29300){:target="_blank"} from **@ahinzmann**: Apply puppi weight in BoostedDoubleSV btaginfo for packedPFCandidates `analysis` `reconstruction` created: 2020-03-25 13:27:38 merged: 2020-04-13 18:14:52 85. [29296](http://github.com/cms-sw/cmssw/pull/29296){:target="_blank"} from **@ahinzmann**: Remove puppi multipliticies from JME nano `analysis` `xpog` created: 2020-03-25 11:45:07 merged: 2020-03-26 20:23:04 86. [29295](http://github.com/cms-sw/cmssw/pull/29295){:target="_blank"} from **@guitargeek**: Remove unneeded dependencies from plugins, test and bin in Reco subsystems `alca` `analysis` `db` `dqm` `geometry` `hlt` `reconstruction` `simulation` `upgrade` created: 2020-03-25 09:39:53 merged: 2020-04-01 12:51:46 87. 
[29292](http://github.com/cms-sw/cmssw/pull/29292){:target="_blank"} from **@camolezi**: Changed boost::unordered_map for std::unordered_map `visualization` created: 2020-03-24 21:24:40 merged: 2020-03-26 20:34:10 88. [29291](http://github.com/cms-sw/cmssw/pull/29291){:target="_blank"} from **@Dr15Jones**: Module on multiple Path threading fix `core` created: 2020-03-24 19:26:12 merged: 2020-03-26 20:28:44 89. [29286](http://github.com/cms-sw/cmssw/pull/29286){:target="_blank"} from **@mmusich**: protect HGCDigitizer from crashing when simHitAccumulator is empty `simulation` `upgrade` created: 2020-03-24 13:53:41 merged: 2020-03-26 20:26:29 90. [29283](http://github.com/cms-sw/cmssw/pull/29283){:target="_blank"} from **@fabiocos**: Fix non-compiling code triggered by EDM_ML_DEBUG flag `alca` `db` `dqm` `geometry` `reconstruction` `simulation` `upgrade` created: 2020-03-24 12:13:10 merged: 2020-03-31 08:30:03 91. [29281](http://github.com/cms-sw/cmssw/pull/29281){:target="_blank"} from **@bsunanda**: Run3-sim62 Add hits for the new Totem T2 detector `geometry` `simulation` created: 2020-03-24 03:35:53 merged: 2020-03-26 20:25:06 92. [29279](http://github.com/cms-sw/cmssw/pull/29279){:target="_blank"} from **@srimanob**: Update relvals of JME customised Nano for 2016,17,18 `analysis` `pdmv` `upgrade` `xpog` created: 2020-03-23 21:06:00 merged: 2020-04-06 16:59:32 93. [29271](http://github.com/cms-sw/cmssw/pull/29271){:target="_blank"} from **@plujan**: Luminosity producer using files from brilcalc `reconstruction` created: 2020-03-23 11:28:48 merged: 2020-03-27 09:52:22 94. [29270](http://github.com/cms-sw/cmssw/pull/29270){:target="_blank"} from **@fabiocos**: MTD geometry: add new topology mode, adapt code to it `dqm` `geometry` `reconstruction` `simulation` `upgrade` created: 2020-03-23 10:15:51 merged: 2020-03-27 17:01:24 95. 
[29259](http://github.com/cms-sw/cmssw/pull/29259){:target="_blank"} from **@camolezi**: Changed boost::scoped_ptr for std::unique_ptr `visualization` created: 2020-03-20 18:44:55 merged: 2020-04-02 08:17:57 96. [29253](http://github.com/cms-sw/cmssw/pull/29253){:target="_blank"} from **@ronchese**: Bph reco update `analysis` created: 2020-03-20 15:14:47 merged: 2020-03-30 19:09:18 97. [29247](http://github.com/cms-sw/cmssw/pull/29247){:target="_blank"} from **@dildick**: Interface for high multiplicity triggers in CSC `l1` created: 2020-03-20 00:15:45 merged: 2020-04-14 07:09:56 98. [29242](http://github.com/cms-sw/cmssw/pull/29242){:target="_blank"} from **@bsunanda**: Run3-TB52 Update the geometry of the October TB and correct rotation of beam line `generators` `geometry` `simulation` `upgrade` created: 2020-03-19 13:38:12 merged: 2020-03-31 08:35:15 99. [29233](http://github.com/cms-sw/cmssw/pull/29233){:target="_blank"} from **@dildick**: [L1Trigger/CSCTriggerPrimitives] Store comparator hits in the CLCT (CCLUT-4) `l1` created: 2020-03-18 21:10:02 merged: 2020-04-14 19:43:33 100. [29230](http://github.com/cms-sw/cmssw/pull/29230){:target="_blank"} from **@grasph**: added module used to insert ECAL laser correction sets into the condi `alca` created: 2020-03-18 18:24:47 merged: 2020-04-07 07:45:51 101. [29229](http://github.com/cms-sw/cmssw/pull/29229){:target="_blank"} from **@bsunanda**: Run3-alca162 Simplify the studies of energy response of isolated charged hadrons `alca` created: 2020-03-18 18:12:59 merged: 2020-03-30 17:25:01 102. [29198](http://github.com/cms-sw/cmssw/pull/29198){:target="_blank"} from **@mmusich**: Allow to take DCS Status from software FED, when SCAL is not available `alca` `pdmv` created: 2020-03-13 14:54:24 merged: 2020-03-30 19:06:30 103. [29169](http://github.com/cms-sw/cmssw/pull/29169){:target="_blank"} from **@alja**: Fireworks: Auto detect geometry version from global tag. 
`visualization` created: 2020-03-10 18:08:03 merged: 2020-04-06 16:55:31 104. [29151](http://github.com/cms-sw/cmssw/pull/29151){:target="_blank"} from **@tschuh**: New EDProducer emulating the Stub processing of the Phase 2 Outer Tracker DTC board `alca` `l1` `operations` `upgrade` created: 2020-03-09 10:44:57 merged: 2020-04-14 13:20:24 105. [29110](http://github.com/cms-sw/cmssw/pull/29110){:target="_blank"} from **@cms-patatrack**: Patatrack integration - common tools (2/N) `core` `heterogeneous` `reconstruction` `simulation` created: 2020-03-05 11:44:44 merged: 2020-03-30 08:44:48 106. [29081](http://github.com/cms-sw/cmssw/pull/29081){:target="_blank"} from **@rovere**: [HGCAL] TICL in reconstruction `dqm` `operations` `reconstruction` `upgrade` created: 2020-03-03 15:07:14 merged: 2020-04-09 07:32:40 107. [28978](http://github.com/cms-sw/cmssw/pull/28978){:target="_blank"} from **@quark2**: GEM online DQM bug fix - full GE1/1 `dqm` created: 2020-02-17 18:14:43 merged: 2020-04-10 14:23:20 108. [28417](http://github.com/cms-sw/cmssw/pull/28417){:target="_blank"} from **@cms-tau-pog**: New Tau ID data format `analysis` `dqm` `l1` `pdmv` `reconstruction` `xpog` created: 2019-11-18 16:19:46 merged: 2020-04-09 08:04:51 109. [28313](http://github.com/cms-sw/cmssw/pull/28313){:target="_blank"} from **@bsunanda**: Run3-sim47 Remove reference to trackermaterial in materials.xml `geometry` `upgrade` created: 2019-10-30 16:14:58 merged: 2020-04-07 13:08:59 #### CMSDIST Changes between Tags REL/CMSSW_11_1_0_pre5/slc7_amd64_gcc820 and REL/CMSSW_11_1_0_pre6/slc7_amd64_gcc820: [compare to previous](https://github.com/cms-sw/cmsdist/compare/REL/CMSSW_11_1_0_pre5/slc7_amd64_gcc820...REL/CMSSW_11_1_0_pre6/slc7_amd64_gcc820) 1. [5746](http://github.com/cms-sw/cmsdist/pull/5746){:target="_blank"} from **@cms-sw**: updated vecgeom patch for arm64: remove mm_free from test created: 2020-04-15 11:05:53 merged: 2020-04-15 11:39:52 2. 
[5741](http://github.com/cms-sw/cmsdist/pull/5741){:target="_blank"} from **@cms-sw**: Update tag for L1Trigger-L1THGCal to V01-01-00 created: 2020-04-14 07:28:42 merged: 2020-04-14 07:39:50 3. [5739](http://github.com/cms-sw/cmsdist/pull/5739){:target="_blank"} from **@belforte**: CRAB Client Apr 2020 version in dev and pre created: 2020-04-13 19:52:54 merged: 2020-04-14 18:12:43 4. [5737](http://github.com/cms-sw/cmsdist/pull/5737){:target="_blank"} from **@belforte**: crab-dev to CRAB Client 3.3.2004.rc3 created: 2020-04-10 22:09:28 merged: 2020-04-11 08:48:51 5. [5736](http://github.com/cms-sw/cmsdist/pull/5736){:target="_blank"} from **@vkuznet**: New dasgoclient version created: 2020-04-10 01:04:53 merged: 2020-04-10 09:52:52 6. [5734](http://github.com/cms-sw/cmsdist/pull/5734){:target="_blank"} from **@cms-sw**: Include Eigne include directory in ROOT_INCLUDE_PATH created: 2020-04-09 19:42:10 merged: 2020-04-09 21:00:03 7. [5732](http://github.com/cms-sw/cmsdist/pull/5732){:target="_blank"} from **@cms-sw**: [Geant4]Update geant4 10.6.1 created: 2020-04-09 16:55:48 merged: 2020-04-15 08:52:38 8. [5729](http://github.com/cms-sw/cmsdist/pull/5729){:target="_blank"} from **@cms-sw**: Update bleach pip packge 3.1.4 (github security alert) created: 2020-04-09 08:08:18 merged: 2020-04-09 11:47:24 9. [5727](http://github.com/cms-sw/cmsdist/pull/5727){:target="_blank"} from **@cms-sw**: Do not allow to link agaisnt PyROOT library created: 2020-04-07 22:44:19 merged: 2020-04-08 06:29:30 10. [5722](http://github.com/cms-sw/cmsdist/pull/5722){:target="_blank"} from **@cms-sw**: Drop rootpy external from CMS software stack created: 2020-04-06 09:51:18 merged: 2020-04-09 07:02:24 11. [5721](http://github.com/cms-sw/cmsdist/pull/5721){:target="_blank"} from **@cms-sw**: [BuildRules] Fix clang-tidy script to work with LLVM 9.0.1 too created: 2020-04-04 22:09:10 merged: 2020-04-07 21:32:04 12. 
[5713](http://github.com/cms-sw/cmsdist/pull/5713){:target="_blank"} from **@cms-sw**: Fireworks-Geometry: download cmsGeom2026 from cmsrep created: 2020-04-01 23:02:14 merged: 2020-04-06 09:32:04 13. [5701](http://github.com/cms-sw/cmsdist/pull/5701){:target="_blank"} from **@cms-sw**: [CRAB] Created symlink crab pointing to crab-prod created: 2020-03-30 09:20:31 merged: 2020-03-30 17:23:12 14. [5630](http://github.com/cms-sw/cmsdist/pull/5630){:target="_blank"} from **@fwyzard**: Remove the CUDA API wrappers external created: 2020-03-07 08:50:49 merged: 2020-03-30 21:29:51
Markdown
MIT
cms-sw/ReleaseNotes/_releases/CMSSW_11/CMSSW_11_1_0_pre6.md
434c3b1f-6f7a-44e0-b41c-4b495257f61d
[{"tag": "USERNAME", "value": "@ggovi", "start": 13004, "end": 13010, "context": "ms-sw/cmssw/pull/29363){:target=\"_blank\"} from **@ggovi**: Support of condition updates per-lumisection `"}, {"tag": "USERNAME", "value": "@quark2", "start": 24483, "end": 24490, "context": "ms-sw/cmssw/pull/28978){:target=\"_blank\"} from **@quark2**: GEM online DQM bug fix - full GE1/1 `dqm` cre"}, {"tag": "USERNAME", "value": "@schneiml", "start": 14079, "end": 14088, "context": "ms-sw/cmssw/pull/29351){:target=\"_blank\"} from **@schneiml**: DQM: Fix fallback in DQMRootSource `dqm` crea"}, {"tag": "USERNAME", "value": "@rekovic", "start": 10292, "end": 10300, "context": "ms-sw/cmssw/pull/29383){:target=\"_blank\"} from **@rekovic**: Era modifiers L1T for Phase2, Run3, and Run2_2"}, {"tag": "USERNAME", "value": "@ahinzmann", "start": 15850, "end": 15860, "context": "ms-sw/cmssw/pull/29331){:target=\"_blank\"} from **@ahinzmann**: Fix MET unclustered energy computation `analys"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 9171, "end": 9181, "context": "ms-sw/cmssw/pull/29394){:target=\"_blank\"} from **@smuzaffar**: Run cuda tests only if cuda supports gcc versi"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 1324, "end": 1333, "context": "ms-sw/cmssw/pull/29469){:target=\"_blank\"} from **@bsunanda**: Run3-hcx253 Make additional tests for DetId se"}, {"tag": "USERNAME", "value": "@plujan", "start": 21156, "end": 21163, "context": "ms-sw/cmssw/pull/29271){:target=\"_blank\"} from **@plujan**: Luminosity producer using files from brilcalc "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 20680, "end": 20689, "context": "ms-sw/cmssw/pull/29281){:target=\"_blank\"} from **@bsunanda**: Run3-sim62 Add hits for the new Totem T2 detec"}, {"tag": "USERNAME", "value": "@mmusich", "start": 20151, "end": 20159, "context": "ms-sw/cmssw/pull/29286){:target=\"_blank\"} from **@mmusich**: protect HGCDigitizer from crashing when simHit"}, {"tag": "USERNAME", "value": 
"@Dr15Jones", "start": 18695, "end": 18705, "context": "ms-sw/cmssw/pull/29301){:target=\"_blank\"} from **@Dr15Jones**: Make some HLT modules into global modules `ana"}, {"tag": "USERNAME", "value": "@rovere", "start": 16074, "end": 16081, "context": "ms-sw/cmssw/pull/29329){:target=\"_blank\"} from **@rovere**: Fix HGCal_disableNoise customization function "}, {"tag": "USERNAME", "value": "@rekovic", "start": 14275, "end": 14283, "context": "ms-sw/cmssw/pull/29348){:target=\"_blank\"} from **@rekovic**: New package DataFormats/L1TCalorimeterPhase2 "}, {"tag": "USERNAME", "value": "@dildick", "start": 12114, "end": 12122, "context": "ms-sw/cmssw/pull/29369){:target=\"_blank\"} from **@dildick**: Protect L1T producers against ME0 hits `l1` c"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 16297, "end": 16307, "context": "ms-sw/cmssw/pull/29328){:target=\"_blank\"} from **@Dr15Jones**: Use global::EDAnalyzers in GeneratorInterface/"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 21372, "end": 21381, "context": "ms-sw/cmssw/pull/29270){:target=\"_blank\"} from **@fabiocos**: MTD geometry: add new topology mode, adapt cod"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 5054, "end": 5063, "context": "ms-sw/cmssw/pull/29429){:target=\"_blank\"} from **@fabiocos**: Add a DDFilteredView test in DetectorDescripti"}, {"tag": "USERNAME", "value": "@vkuznet", "start": 26214, "end": 26222, "context": "s-sw/cmsdist/pull/5736){:target=\"_blank\"} from **@vkuznet**: New dasgoclient version created: 2020-04-10 01"}, {"tag": "USERNAME", "value": "@ggovi", "start": 15628, "end": 15634, "context": "ms-sw/cmssw/pull/29335){:target=\"_blank\"} from **@ggovi**: New Service for Updating online Conditions for"}, {"tag": "USERNAME", "value": "@jfernan2", "start": 1860, "end": 1869, "context": "ms-sw/cmssw/pull/29466){:target=\"_blank\"} from **@jfernan2**: [DT] DQM Validation extension `dqm` created: "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 7334, "end": 
7343, "context": "ms-sw/cmssw/pull/29409){:target=\"_blank\"} from **@bsunanda**: Run3-sim65 Try to fix 0 TrackID for saved trac"}, {"tag": "USERNAME", "value": "@civanch", "start": 3740, "end": 3748, "context": "ms-sw/cmssw/pull/29442){:target=\"_blank\"} from **@civanch**: Migration to Geant4 10.6p01 `simulation` crea"}, {"tag": "USERNAME", "value": "@camolezi", "start": 19727, "end": 19736, "context": "ms-sw/cmssw/pull/29292){:target=\"_blank\"} from **@camolezi**: Changed boost::unordered_map for std::unordere"}, {"tag": "USERNAME", "value": "@camolezi", "start": 21642, "end": 21651, "context": "ms-sw/cmssw/pull/29259){:target=\"_blank\"} from **@camolezi**: Changed boost::scoped_ptr for std::unique_ptr "}, {"tag": "USERNAME", "value": "@srimanob", "start": 16920, "end": 16929, "context": "ms-sw/cmssw/pull/29318){:target=\"_blank\"} from **@srimanob**: Add EventsPerLumi for relvals workflows and pr"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 20396, "end": 20405, "context": "ms-sw/cmssw/pull/29283){:target=\"_blank\"} from **@fabiocos**: Fix non-compiling code triggered by EDM_ML_DEB"}, {"tag": "USERNAME", "value": "@dildick", "start": 22535, "end": 22543, "context": "ms-sw/cmssw/pull/29233){:target=\"_blank\"} from **@dildick**: [L1Trigger/CSCTriggerPrimitives] Store compara"}, {"tag": "USERNAME", "value": "@guitargeek", "start": 11213, "end": 11224, "context": "ms-sw/cmssw/pull/29375){:target=\"_blank\"} from **@guitargeek**: PFEGammaProducer to global producer `reconstru"}, {"tag": "USERNAME", "value": "@srimanob", "start": 2758, "end": 2767, "context": "ms-sw/cmssw/pull/29457){:target=\"_blank\"} from **@srimanob**: Clean up Run-Dependent workflows, MultiRun har"}, {"tag": "USERNAME", "value": "@civanch", "start": 17168, "end": 17176, "context": "ms-sw/cmssw/pull/29317){:target=\"_blank\"} from **@civanch**: Updated Geant4 overlap checks `simulation` c"}, {"tag": "USERNAME", "value": "@dmeuser", "start": 13881, "end": 13889, "context": 
"ms-sw/cmssw/pull/29355){:target=\"_blank\"} from **@dmeuser**: Additional Tracker DQM residual plots `dqm` c"}, {"tag": "USERNAME", "value": "@slava77", "start": 4790, "end": 4798, "context": "ms-sw/cmssw/pull/29432){:target=\"_blank\"} from **@slava77**: add missing initializations in pat::Muon (mini"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 10516, "end": 10525, "context": "ms-sw/cmssw/pull/29380){:target=\"_blank\"} from **@bsunanda**: Run3-TB55 Remove direct access to DDCompactVie"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 23003, "end": 23012, "context": "ms-sw/cmssw/pull/29229){:target=\"_blank\"} from **@bsunanda**: Run3-alca162 Simplify the studies of energy re"}, {"tag": "USERNAME", "value": "@schneiml", "start": 14966, "end": 14975, "context": "ms-sw/cmssw/pull/29343){:target=\"_blank\"} from **@schneiml**: DQM: fix some autoDQM.py harvesting sequences "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 3935, "end": 3944, "context": "ms-sw/cmssw/pull/29440){:target=\"_blank\"} from **@bsunanda**: Run3-gem34 Add a GE21 version with 16 eta part"}, {"tag": "USERNAME", "value": "@srimanob", "start": 20910, "end": 20919, "context": "ms-sw/cmssw/pull/29279){:target=\"_blank\"} from **@srimanob**: Update relvals of JME customised Nano for 2016"}, {"tag": "USERNAME", "value": "@rekovic", "start": 7118, "end": 7126, "context": "ms-sw/cmssw/pull/29413){:target=\"_blank\"} from **@rekovic**: pr11x L1T Configure EMTF using stage2L1Trigger"}, {"tag": "USERNAME", "value": "@ahinzmann", "start": 18929, "end": 18939, "context": "ms-sw/cmssw/pull/29300){:target=\"_blank\"} from **@ahinzmann**: Apply puppi weight in BoostedDoubleSV btaginfo"}, {"tag": "USERNAME", "value": "@gkrintir", "start": 4593, "end": 4602, "context": "ms-sw/cmssw/pull/29433){:target=\"_blank\"} from **@gkrintir**: fixed bug in UL16 PU profile `simulation` cre"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 7551, "end": 7560, "context": 
"ms-sw/cmssw/pull/29407){:target=\"_blank\"} from **@fabiocos**: MTD Geometry: DD4hep migration, preliminary up"}, {"tag": "USERNAME", "value": "@mmusich", "start": 15413, "end": 15421, "context": "ms-sw/cmssw/pull/29339){:target=\"_blank\"} from **@mmusich**: Fix Tracker Alignment all-in-one tool for py3 "}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 1097, "end": 1107, "context": "ms-sw/cmssw/pull/29473){:target=\"_blank\"} from **@Dr15Jones**: PoolSource's firstLuminosityBlockForEachRun pr"}, {"tag": "USERNAME", "value": "@rekovic", "start": 14506, "end": 14514, "context": "ms-sw/cmssw/pull/29347){:target=\"_blank\"} from **@rekovic**: New package DataFormats/L1TParticleFlow for Ph"}, {"tag": "USERNAME", "value": "@ahinzmann", "start": 19184, "end": 19194, "context": "ms-sw/cmssw/pull/29296){:target=\"_blank\"} from **@ahinzmann**: Remove puppi multipliticies from JME nano `ana"}, {"tag": "USERNAME", "value": "@hatakeyamak", "start": 18044, "end": 18056, "context": "ms-sw/cmssw/pull/29307){:target=\"_blank\"} from **@hatakeyamak**: PF track propagator fix for pixel tracks `reco"}, {"tag": "USERNAME", "value": "@perrotta", "start": 3462, "end": 3471, "context": "ms-sw/cmssw/pull/29450){:target=\"_blank\"} from **@perrotta**: Remove TICL dedicated wfs, and adjust the name"}, {"tag": "USERNAME", "value": "@mmusich", "start": 2322, "end": 2330, "context": "ms-sw/cmssw/pull/29461){:target=\"_blank\"} from **@mmusich**: Re-add SiPixelPhase1TrackResidualsAnalyzer to "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 22248, "end": 22257, "context": "ms-sw/cmssw/pull/29242){:target=\"_blank\"} from **@bsunanda**: Run3-TB52 Update the geometry of the October T"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 18260, "end": 18270, "context": "ms-sw/cmssw/pull/29305){:target=\"_blank\"} from **@Dr15Jones**: Use Pythia8ConcurrentHadronizerFilter when pos"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 8762, "end": 8772, "context": 
"ms-sw/cmssw/pull/29396){:target=\"_blank\"} from **@smuzaffar**: Drop rootpy related unit tests `analysis` cre"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 19950, "end": 19960, "context": "ms-sw/cmssw/pull/29291){:target=\"_blank\"} from **@Dr15Jones**: Module on multiple Path threading fix `core` "}, {"tag": "USERNAME", "value": "@srimanob", "start": 8304, "end": 8313, "context": "ms-sw/cmssw/pull/29401){:target=\"_blank\"} from **@srimanob**: Define proper BS for 2018 premixing workflow i"}, {"tag": "USERNAME", "value": "@belforte", "start": 25831, "end": 25840, "context": "s-sw/cmsdist/pull/5739){:target=\"_blank\"} from **@belforte**: CRAB Client Apr 2020 version in dev and pre cr"}, {"tag": "USERNAME", "value": "@silviodonato", "start": 3252, "end": 3265, "context": "ms-sw/cmssw/pull/29451){:target=\"_blank\"} from **@silviodonato**: Remove PhysicsTools/RooStatsCms `analysis` `c"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 11660, "end": 11669, "context": "ms-sw/cmssw/pull/29371){:target=\"_blank\"} from **@bsunanda**: Run3-sim63 Remove reference to DetectorDescrip"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 8960, "end": 8970, "context": "ms-sw/cmssw/pull/29395){:target=\"_blank\"} from **@smuzaffar**: added unit test to check for clang-tidy change"}, {"tag": "USERNAME", "value": "@christopheralanwest", "start": 17790, "end": 17810, "context": "ms-sw/cmssw/pull/29308){:target=\"_blank\"} from **@christopheralanwest**: Extend maximum pileup in SiPixelStatusScenario"}, {"tag": "USERNAME", "value": "@tschuh", "start": 23713, "end": 23720, "context": "ms-sw/cmssw/pull/29151){:target=\"_blank\"} from **@tschuh**: New EDProducer emulating the Stub processing o"}, {"tag": "USERNAME", "value": "@swozniewski", "start": 1567, "end": 1579, "context": "ms-sw/cmssw/pull/29467){:target=\"_blank\"} from **@swozniewski**: Update PR #28417, add era modifier to TauRefPr"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 886, "end": 896, 
"context": "ms-sw/cmssw/pull/29475){:target=\"_blank\"} from **@smuzaffar**: updated lost dictionaries mapping for L1T Phas"}, {"tag": "USERNAME", "value": "@andrius-k", "start": 11896, "end": 11906, "context": "ms-sw/cmssw/pull/29370){:target=\"_blank\"} from **@andrius-k**: DQM: Syncing MEData with DQMNet MEData before "}, {"tag": "USERNAME", "value": "@mmusich", "start": 23247, "end": 23255, "context": "ms-sw/cmssw/pull/29198){:target=\"_blank\"} from **@mmusich**: Allow to take DCS Status from software FED, wh"}, {"tag": "USERNAME", "value": "@schneiml", "start": 4168, "end": 4177, "context": "ms-sw/cmssw/pull/29438){:target=\"_blank\"} from **@schneiml**: DQM: Add a local vs. global section to the REA"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 13428, "end": 13437, "context": "ms-sw/cmssw/pull/29360){:target=\"_blank\"} from **@bsunanda**: Run3-TB54 First step of transition to dd4hep f"}, {"tag": "USERNAME", "value": "@swozniewski", "start": 2050, "end": 2062, "context": "ms-sw/cmssw/pull/29465){:target=\"_blank\"} from **@swozniewski**: Update PR #28417, TauSkimPFTausSelectedForMuTa"}, {"tag": "USERNAME", "value": "@schneiml", "start": 16715, "end": 16724, "context": "ms-sw/cmssw/pull/29321){:target=\"_blank\"} from **@schneiml**: DQM: Allow per-lumi MEs in DQMOneEDAnalyzer `d"}, {"tag": "USERNAME", "value": "@mandrenguyen", "start": 4379, "end": 4392, "context": "ms-sw/cmssw/pull/29437){:target=\"_blank\"} from **@mandrenguyen**: Reenable relval wfs 310, 311 and 312 `pdmv` `"}, {"tag": "USERNAME", "value": "@guitargeek", "start": 19401, "end": 19412, "context": "ms-sw/cmssw/pull/29295){:target=\"_blank\"} from **@guitargeek**: Remove unneeded dependencies from plugins, tes"}, {"tag": "USERNAME", "value": "@rekovic", "start": 14737, "end": 14745, "context": "ms-sw/cmssw/pull/29346){:target=\"_blank\"} from **@rekovic**: New package DataFormats/L1TCorrelator for Phas"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 9642, "end": 9651, "context": 
"ms-sw/cmssw/pull/29388){:target=\"_blank\"} from **@bsunanda**: Run3-sim64 Allow CherenkovAnalysis to work wit"}, {"tag": "USERNAME", "value": "@slomeo", "start": 10772, "end": 10779, "context": "ms-sw/cmssw/pull/29379){:target=\"_blank\"} from **@slomeo**: Fixed all the \"hidden\" overlaps inside the CSC"}, {"tag": "USERNAME", "value": "@kuyoun", "start": 17604, "end": 17611, "context": "ms-sw/cmssw/pull/29309){:target=\"_blank\"} from **@kuyoun**: TOP DQM: Fixed MuonRelIso `dqm` created: 202"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 8091, "end": 8101, "context": "ms-sw/cmssw/pull/29403){:target=\"_blank\"} from **@smuzaffar**: [Heppy] drop unnecessary dependency on rootpy "}, {"tag": "USERNAME", "value": "@ianna", "start": 12312, "end": 12318, "context": "ms-sw/cmssw/pull/29368){:target=\"_blank\"} from **@ianna**: [DD4hep] Children by Name and CopyNo on a Path"}, {"tag": "USERNAME", "value": "@ghugo83", "start": 12536, "end": 12544, "context": "ms-sw/cmssw/pull/29366){:target=\"_blank\"} from **@ghugo83**: Solve all overlaps detected in Phase I Tracker"}, {"tag": "USERNAME", "value": "@ronchese", "start": 21859, "end": 21868, "context": "ms-sw/cmssw/pull/29253){:target=\"_blank\"} from **@ronchese**: Bph reco update `analysis` created: 2020-03-2"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 673, "end": 683, "context": "ms-sw/cmssw/pull/29477){:target=\"_blank\"} from **@smuzaffar**: [PY3] Fix duplicate dictionary checker for pyt"}, {"tag": "USERNAME", "value": "@silviodonato", "start": 9389, "end": 9402, "context": "ms-sw/cmssw/pull/29393){:target=\"_blank\"} from **@silviodonato**: Replace validationHarvestingNoHLT with validat"}, {"tag": "USERNAME", "value": "@grasph", "start": 22772, "end": 22779, "context": "ms-sw/cmssw/pull/29230){:target=\"_blank\"} from **@grasph**: added module used to insert ECAL laser correct"}, {"tag": "USERNAME", "value": "@alja", "start": 23488, "end": 23493, "context": 
"ms-sw/cmssw/pull/29169){:target=\"_blank\"} from **@alja**: Fireworks: Auto detect geometry version from g"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 18481, "end": 18491, "context": "ms-sw/cmssw/pull/29302){:target=\"_blank\"} from **@Dr15Jones**: Protect ProductResolvers from unlucky thread p"}, {"tag": "USERNAME", "value": "@guitargeek", "start": 7829, "end": 7840, "context": "ms-sw/cmssw/pull/29404){:target=\"_blank\"} from **@guitargeek**: Clean BuildFiles in Validation Subsystem `core"}, {"tag": "USERNAME", "value": "@dildick", "start": 22041, "end": 22049, "context": "ms-sw/cmssw/pull/29247){:target=\"_blank\"} from **@dildick**: Interface for high multiplicity triggers in CS"}, {"tag": "USERNAME", "value": "@jpata", "start": 13214, "end": 13220, "context": "ms-sw/cmssw/pull/29361){:target=\"_blank\"} from **@jpata**: Particle Flow generator truth information `dqm"}, {"tag": "USERNAME", "value": "@belforte", "start": 26026, "end": 26035, "context": "s-sw/cmsdist/pull/5737){:target=\"_blank\"} from **@belforte**: crab-dev to CRAB Client 3.3.2004.rc3 created: "}, {"tag": "USERNAME", "value": "@rovere", "start": 16517, "end": 16524, "context": "ms-sw/cmssw/pull/29327){:target=\"_blank\"} from **@rovere**: Fix TaskPlaceholder dumpPython method `core` "}, {"tag": "USERNAME", "value": "@rovere", "start": 24249, "end": 24256, "context": "ms-sw/cmssw/pull/29081){:target=\"_blank\"} from **@rovere**: [HGCAL] TICL in reconstruction `dqm` `operati"}, {"tag": "USERNAME", "value": "@bendavid", "start": 11423, "end": 11432, "context": "ms-sw/cmssw/pull/29374){:target=\"_blank\"} from **@bendavid**: Restrict timestamps in PFCandidates to reliabl"}, {"tag": "USERNAME", "value": "@PFCal-dev", "start": 8547, "end": 8557, "context": "ms-sw/cmssw/pull/29400){:target=\"_blank\"} from **@PFCal-dev**: [HGCAL trigger] V11 geometry compatibility `l1"}, {"tag": "USERNAME", "value": "@tschuh", "start": 417, "end": 424, "context": 
"ms-sw/cmssw/pull/29478){:target=\"_blank\"} from **@tschuh**: L1Trigger/TrackerDTC fix for SLHCUpgradeSimula"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 17366, "end": 17375, "context": "ms-sw/cmssw/pull/29312){:target=\"_blank\"} from **@bsunanda**: Phase2-hgx233 Correct the HGCal code for worki"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 12748, "end": 12757, "context": "ms-sw/cmssw/pull/29364){:target=\"_blank\"} from **@bsunanda**: Phase2-hgx234 Transfer the constants for HGCal"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 2540, "end": 2550, "context": "ms-sw/cmssw/pull/29459){:target=\"_blank\"} from **@Dr15Jones**: Use LogTrace in IsolatedPixelTrackCandidateL1T"}, {"tag": "USERNAME", "value": "@fwyzard", "start": 27977, "end": 27985, "context": "s-sw/cmsdist/pull/5630){:target=\"_blank\"} from **@fwyzard**: Remove the CUDA API wrappers external created:"}, {"tag": "USERNAME", "value": "@ianna", "start": 9880, "end": 9886, "context": "ms-sw/cmssw/pull/29387){:target=\"_blank\"} from **@ianna**: [DD4hep] Expose Full Geo History on Request `g"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 15187, "end": 15196, "context": "ms-sw/cmssw/pull/29342){:target=\"_blank\"} from **@bsunanda**: Run3-TB53 Update some of the scripts for H2 TB"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 10087, "end": 10097, "context": "ms-sw/cmssw/pull/29384){:target=\"_blank\"} from **@Dr15Jones**: Test inter process random state transfers `cor"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 24919, "end": 24928, "context": "ms-sw/cmssw/pull/28313){:target=\"_blank\"} from **@bsunanda**: Run3-sim47 Remove reference to trackermaterial"}]
[{"tag": "USERNAME", "value": "@ggovi", "start": 13004, "end": 13010, "context": "ms-sw/cmssw/pull/29363){:target=\"_blank\"} from **@ggovi**: Support of condition updates per-lumisection `"}, {"tag": "USERNAME", "value": "@quark2", "start": 24483, "end": 24490, "context": "ms-sw/cmssw/pull/28978){:target=\"_blank\"} from **@quark2**: GEM online DQM bug fix - full GE1/1 `dqm` cre"}, {"tag": "USERNAME", "value": "@schneiml", "start": 14079, "end": 14088, "context": "ms-sw/cmssw/pull/29351){:target=\"_blank\"} from **@schneiml**: DQM: Fix fallback in DQMRootSource `dqm` crea"}, {"tag": "USERNAME", "value": "@rekovic", "start": 10292, "end": 10300, "context": "ms-sw/cmssw/pull/29383){:target=\"_blank\"} from **@rekovic**: Era modifiers L1T for Phase2, Run3, and Run2_2"}, {"tag": "USERNAME", "value": "@ahinzmann", "start": 15850, "end": 15860, "context": "ms-sw/cmssw/pull/29331){:target=\"_blank\"} from **@ahinzmann**: Fix MET unclustered energy computation `analys"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 9171, "end": 9181, "context": "ms-sw/cmssw/pull/29394){:target=\"_blank\"} from **@smuzaffar**: Run cuda tests only if cuda supports gcc versi"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 1324, "end": 1333, "context": "ms-sw/cmssw/pull/29469){:target=\"_blank\"} from **@bsunanda**: Run3-hcx253 Make additional tests for DetId se"}, {"tag": "USERNAME", "value": "@plujan", "start": 21156, "end": 21163, "context": "ms-sw/cmssw/pull/29271){:target=\"_blank\"} from **@plujan**: Luminosity producer using files from brilcalc "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 20680, "end": 20689, "context": "ms-sw/cmssw/pull/29281){:target=\"_blank\"} from **@bsunanda**: Run3-sim62 Add hits for the new Totem T2 detec"}, {"tag": "USERNAME", "value": "@mmusich", "start": 20151, "end": 20159, "context": "ms-sw/cmssw/pull/29286){:target=\"_blank\"} from **@mmusich**: protect HGCDigitizer from crashing when simHit"}, {"tag": "USERNAME", "value": 
"@Dr15Jones", "start": 18695, "end": 18705, "context": "ms-sw/cmssw/pull/29301){:target=\"_blank\"} from **@Dr15Jones**: Make some HLT modules into global modules `ana"}, {"tag": "USERNAME", "value": "@rovere", "start": 16074, "end": 16081, "context": "ms-sw/cmssw/pull/29329){:target=\"_blank\"} from **@rovere**: Fix HGCal_disableNoise customization function "}, {"tag": "USERNAME", "value": "@rekovic", "start": 14275, "end": 14283, "context": "ms-sw/cmssw/pull/29348){:target=\"_blank\"} from **@rekovic**: New package DataFormats/L1TCalorimeterPhase2 "}, {"tag": "USERNAME", "value": "@dildick", "start": 12114, "end": 12122, "context": "ms-sw/cmssw/pull/29369){:target=\"_blank\"} from **@dildick**: Protect L1T producers against ME0 hits `l1` c"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 16297, "end": 16307, "context": "ms-sw/cmssw/pull/29328){:target=\"_blank\"} from **@Dr15Jones**: Use global::EDAnalyzers in GeneratorInterface/"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 21372, "end": 21381, "context": "ms-sw/cmssw/pull/29270){:target=\"_blank\"} from **@fabiocos**: MTD geometry: add new topology mode, adapt cod"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 5054, "end": 5063, "context": "ms-sw/cmssw/pull/29429){:target=\"_blank\"} from **@fabiocos**: Add a DDFilteredView test in DetectorDescripti"}, {"tag": "USERNAME", "value": "@vkuznet", "start": 26214, "end": 26222, "context": "s-sw/cmsdist/pull/5736){:target=\"_blank\"} from **@vkuznet**: New dasgoclient version created: 2020-04-10 01"}, {"tag": "USERNAME", "value": "@ggovi", "start": 15628, "end": 15634, "context": "ms-sw/cmssw/pull/29335){:target=\"_blank\"} from **@ggovi**: New Service for Updating online Conditions for"}, {"tag": "USERNAME", "value": "@jfernan2", "start": 1860, "end": 1869, "context": "ms-sw/cmssw/pull/29466){:target=\"_blank\"} from **@jfernan2**: [DT] DQM Validation extension `dqm` created: "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 7334, "end": 
7343, "context": "ms-sw/cmssw/pull/29409){:target=\"_blank\"} from **@bsunanda**: Run3-sim65 Try to fix 0 TrackID for saved trac"}, {"tag": "USERNAME", "value": "@civanch", "start": 3740, "end": 3748, "context": "ms-sw/cmssw/pull/29442){:target=\"_blank\"} from **@civanch**: Migration to Geant4 10.6p01 `simulation` crea"}, {"tag": "USERNAME", "value": "@camolezi", "start": 19727, "end": 19736, "context": "ms-sw/cmssw/pull/29292){:target=\"_blank\"} from **@camolezi**: Changed boost::unordered_map for std::unordere"}, {"tag": "USERNAME", "value": "@camolezi", "start": 21642, "end": 21651, "context": "ms-sw/cmssw/pull/29259){:target=\"_blank\"} from **@camolezi**: Changed boost::scoped_ptr for std::unique_ptr "}, {"tag": "USERNAME", "value": "@srimanob", "start": 16920, "end": 16929, "context": "ms-sw/cmssw/pull/29318){:target=\"_blank\"} from **@srimanob**: Add EventsPerLumi for relvals workflows and pr"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 20396, "end": 20405, "context": "ms-sw/cmssw/pull/29283){:target=\"_blank\"} from **@fabiocos**: Fix non-compiling code triggered by EDM_ML_DEB"}, {"tag": "USERNAME", "value": "@dildick", "start": 22535, "end": 22543, "context": "ms-sw/cmssw/pull/29233){:target=\"_blank\"} from **@dildick**: [L1Trigger/CSCTriggerPrimitives] Store compara"}, {"tag": "USERNAME", "value": "@guitargeek", "start": 11213, "end": 11224, "context": "ms-sw/cmssw/pull/29375){:target=\"_blank\"} from **@guitargeek**: PFEGammaProducer to global producer `reconstru"}, {"tag": "USERNAME", "value": "@srimanob", "start": 2758, "end": 2767, "context": "ms-sw/cmssw/pull/29457){:target=\"_blank\"} from **@srimanob**: Clean up Run-Dependent workflows, MultiRun har"}, {"tag": "USERNAME", "value": "@civanch", "start": 17168, "end": 17176, "context": "ms-sw/cmssw/pull/29317){:target=\"_blank\"} from **@civanch**: Updated Geant4 overlap checks `simulation` c"}, {"tag": "USERNAME", "value": "@dmeuser", "start": 13881, "end": 13889, "context": 
"ms-sw/cmssw/pull/29355){:target=\"_blank\"} from **@dmeuser**: Additional Tracker DQM residual plots `dqm` c"}, {"tag": "USERNAME", "value": "@slava77", "start": 4790, "end": 4798, "context": "ms-sw/cmssw/pull/29432){:target=\"_blank\"} from **@slava77**: add missing initializations in pat::Muon (mini"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 10516, "end": 10525, "context": "ms-sw/cmssw/pull/29380){:target=\"_blank\"} from **@bsunanda**: Run3-TB55 Remove direct access to DDCompactVie"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 23003, "end": 23012, "context": "ms-sw/cmssw/pull/29229){:target=\"_blank\"} from **@bsunanda**: Run3-alca162 Simplify the studies of energy re"}, {"tag": "USERNAME", "value": "@schneiml", "start": 14966, "end": 14975, "context": "ms-sw/cmssw/pull/29343){:target=\"_blank\"} from **@schneiml**: DQM: fix some autoDQM.py harvesting sequences "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 3935, "end": 3944, "context": "ms-sw/cmssw/pull/29440){:target=\"_blank\"} from **@bsunanda**: Run3-gem34 Add a GE21 version with 16 eta part"}, {"tag": "USERNAME", "value": "@srimanob", "start": 20910, "end": 20919, "context": "ms-sw/cmssw/pull/29279){:target=\"_blank\"} from **@srimanob**: Update relvals of JME customised Nano for 2016"}, {"tag": "USERNAME", "value": "@rekovic", "start": 7118, "end": 7126, "context": "ms-sw/cmssw/pull/29413){:target=\"_blank\"} from **@rekovic**: pr11x L1T Configure EMTF using stage2L1Trigger"}, {"tag": "USERNAME", "value": "@ahinzmann", "start": 18929, "end": 18939, "context": "ms-sw/cmssw/pull/29300){:target=\"_blank\"} from **@ahinzmann**: Apply puppi weight in BoostedDoubleSV btaginfo"}, {"tag": "USERNAME", "value": "@gkrintir", "start": 4593, "end": 4602, "context": "ms-sw/cmssw/pull/29433){:target=\"_blank\"} from **@gkrintir**: fixed bug in UL16 PU profile `simulation` cre"}, {"tag": "USERNAME", "value": "@fabiocos", "start": 7551, "end": 7560, "context": 
"ms-sw/cmssw/pull/29407){:target=\"_blank\"} from **@fabiocos**: MTD Geometry: DD4hep migration, preliminary up"}, {"tag": "USERNAME", "value": "@mmusich", "start": 15413, "end": 15421, "context": "ms-sw/cmssw/pull/29339){:target=\"_blank\"} from **@mmusich**: Fix Tracker Alignment all-in-one tool for py3 "}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 1097, "end": 1107, "context": "ms-sw/cmssw/pull/29473){:target=\"_blank\"} from **@Dr15Jones**: PoolSource's firstLuminosityBlockForEachRun pr"}, {"tag": "USERNAME", "value": "@rekovic", "start": 14506, "end": 14514, "context": "ms-sw/cmssw/pull/29347){:target=\"_blank\"} from **@rekovic**: New package DataFormats/L1TParticleFlow for Ph"}, {"tag": "USERNAME", "value": "@ahinzmann", "start": 19184, "end": 19194, "context": "ms-sw/cmssw/pull/29296){:target=\"_blank\"} from **@ahinzmann**: Remove puppi multipliticies from JME nano `ana"}, {"tag": "USERNAME", "value": "@hatakeyamak", "start": 18044, "end": 18056, "context": "ms-sw/cmssw/pull/29307){:target=\"_blank\"} from **@hatakeyamak**: PF track propagator fix for pixel tracks `reco"}, {"tag": "USERNAME", "value": "@perrotta", "start": 3462, "end": 3471, "context": "ms-sw/cmssw/pull/29450){:target=\"_blank\"} from **@perrotta**: Remove TICL dedicated wfs, and adjust the name"}, {"tag": "USERNAME", "value": "@mmusich", "start": 2322, "end": 2330, "context": "ms-sw/cmssw/pull/29461){:target=\"_blank\"} from **@mmusich**: Re-add SiPixelPhase1TrackResidualsAnalyzer to "}, {"tag": "USERNAME", "value": "@bsunanda", "start": 22248, "end": 22257, "context": "ms-sw/cmssw/pull/29242){:target=\"_blank\"} from **@bsunanda**: Run3-TB52 Update the geometry of the October T"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 18260, "end": 18270, "context": "ms-sw/cmssw/pull/29305){:target=\"_blank\"} from **@Dr15Jones**: Use Pythia8ConcurrentHadronizerFilter when pos"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 8762, "end": 8772, "context": 
"ms-sw/cmssw/pull/29396){:target=\"_blank\"} from **@smuzaffar**: Drop rootpy related unit tests `analysis` cre"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 19950, "end": 19960, "context": "ms-sw/cmssw/pull/29291){:target=\"_blank\"} from **@Dr15Jones**: Module on multiple Path threading fix `core` "}, {"tag": "USERNAME", "value": "@srimanob", "start": 8304, "end": 8313, "context": "ms-sw/cmssw/pull/29401){:target=\"_blank\"} from **@srimanob**: Define proper BS for 2018 premixing workflow i"}, {"tag": "USERNAME", "value": "@belforte", "start": 25831, "end": 25840, "context": "s-sw/cmsdist/pull/5739){:target=\"_blank\"} from **@belforte**: CRAB Client Apr 2020 version in dev and pre cr"}, {"tag": "USERNAME", "value": "@silviodonato", "start": 3252, "end": 3265, "context": "ms-sw/cmssw/pull/29451){:target=\"_blank\"} from **@silviodonato**: Remove PhysicsTools/RooStatsCms `analysis` `c"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 11660, "end": 11669, "context": "ms-sw/cmssw/pull/29371){:target=\"_blank\"} from **@bsunanda**: Run3-sim63 Remove reference to DetectorDescrip"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 8960, "end": 8970, "context": "ms-sw/cmssw/pull/29395){:target=\"_blank\"} from **@smuzaffar**: added unit test to check for clang-tidy change"}, {"tag": "USERNAME", "value": "@christopheralanwest", "start": 17790, "end": 17810, "context": "ms-sw/cmssw/pull/29308){:target=\"_blank\"} from **@christopheralanwest**: Extend maximum pileup in SiPixelStatusScenario"}, {"tag": "USERNAME", "value": "@tschuh", "start": 23713, "end": 23720, "context": "ms-sw/cmssw/pull/29151){:target=\"_blank\"} from **@tschuh**: New EDProducer emulating the Stub processing o"}, {"tag": "USERNAME", "value": "@swozniewski", "start": 1567, "end": 1579, "context": "ms-sw/cmssw/pull/29467){:target=\"_blank\"} from **@swozniewski**: Update PR #28417, add era modifier to TauRefPr"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 886, "end": 896, 
"context": "ms-sw/cmssw/pull/29475){:target=\"_blank\"} from **@smuzaffar**: updated lost dictionaries mapping for L1T Phas"}, {"tag": "USERNAME", "value": "@andrius-k", "start": 11896, "end": 11906, "context": "ms-sw/cmssw/pull/29370){:target=\"_blank\"} from **@andrius-k**: DQM: Syncing MEData with DQMNet MEData before "}, {"tag": "USERNAME", "value": "@mmusich", "start": 23247, "end": 23255, "context": "ms-sw/cmssw/pull/29198){:target=\"_blank\"} from **@mmusich**: Allow to take DCS Status from software FED, wh"}, {"tag": "USERNAME", "value": "@schneiml", "start": 4168, "end": 4177, "context": "ms-sw/cmssw/pull/29438){:target=\"_blank\"} from **@schneiml**: DQM: Add a local vs. global section to the REA"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 13428, "end": 13437, "context": "ms-sw/cmssw/pull/29360){:target=\"_blank\"} from **@bsunanda**: Run3-TB54 First step of transition to dd4hep f"}, {"tag": "USERNAME", "value": "@swozniewski", "start": 2050, "end": 2062, "context": "ms-sw/cmssw/pull/29465){:target=\"_blank\"} from **@swozniewski**: Update PR #28417, TauSkimPFTausSelectedForMuTa"}, {"tag": "USERNAME", "value": "@schneiml", "start": 16715, "end": 16724, "context": "ms-sw/cmssw/pull/29321){:target=\"_blank\"} from **@schneiml**: DQM: Allow per-lumi MEs in DQMOneEDAnalyzer `d"}, {"tag": "USERNAME", "value": "@mandrenguyen", "start": 4379, "end": 4392, "context": "ms-sw/cmssw/pull/29437){:target=\"_blank\"} from **@mandrenguyen**: Reenable relval wfs 310, 311 and 312 `pdmv` `"}, {"tag": "USERNAME", "value": "@guitargeek", "start": 19401, "end": 19412, "context": "ms-sw/cmssw/pull/29295){:target=\"_blank\"} from **@guitargeek**: Remove unneeded dependencies from plugins, tes"}, {"tag": "USERNAME", "value": "@rekovic", "start": 14737, "end": 14745, "context": "ms-sw/cmssw/pull/29346){:target=\"_blank\"} from **@rekovic**: New package DataFormats/L1TCorrelator for Phas"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 9642, "end": 9651, "context": 
"ms-sw/cmssw/pull/29388){:target=\"_blank\"} from **@bsunanda**: Run3-sim64 Allow CherenkovAnalysis to work wit"}, {"tag": "USERNAME", "value": "@slomeo", "start": 10772, "end": 10779, "context": "ms-sw/cmssw/pull/29379){:target=\"_blank\"} from **@slomeo**: Fixed all the \"hidden\" overlaps inside the CSC"}, {"tag": "USERNAME", "value": "@kuyoun", "start": 17604, "end": 17611, "context": "ms-sw/cmssw/pull/29309){:target=\"_blank\"} from **@kuyoun**: TOP DQM: Fixed MuonRelIso `dqm` created: 202"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 8091, "end": 8101, "context": "ms-sw/cmssw/pull/29403){:target=\"_blank\"} from **@smuzaffar**: [Heppy] drop unnecessary dependency on rootpy "}, {"tag": "USERNAME", "value": "@ianna", "start": 12312, "end": 12318, "context": "ms-sw/cmssw/pull/29368){:target=\"_blank\"} from **@ianna**: [DD4hep] Children by Name and CopyNo on a Path"}, {"tag": "USERNAME", "value": "@ghugo83", "start": 12536, "end": 12544, "context": "ms-sw/cmssw/pull/29366){:target=\"_blank\"} from **@ghugo83**: Solve all overlaps detected in Phase I Tracker"}, {"tag": "USERNAME", "value": "@ronchese", "start": 21859, "end": 21868, "context": "ms-sw/cmssw/pull/29253){:target=\"_blank\"} from **@ronchese**: Bph reco update `analysis` created: 2020-03-2"}, {"tag": "USERNAME", "value": "@smuzaffar", "start": 673, "end": 683, "context": "ms-sw/cmssw/pull/29477){:target=\"_blank\"} from **@smuzaffar**: [PY3] Fix duplicate dictionary checker for pyt"}, {"tag": "USERNAME", "value": "@silviodonato", "start": 9389, "end": 9402, "context": "ms-sw/cmssw/pull/29393){:target=\"_blank\"} from **@silviodonato**: Replace validationHarvestingNoHLT with validat"}, {"tag": "USERNAME", "value": "@grasph", "start": 22772, "end": 22779, "context": "ms-sw/cmssw/pull/29230){:target=\"_blank\"} from **@grasph**: added module used to insert ECAL laser correct"}, {"tag": "USERNAME", "value": "@alja", "start": 23488, "end": 23493, "context": 
"ms-sw/cmssw/pull/29169){:target=\"_blank\"} from **@alja**: Fireworks: Auto detect geometry version from g"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 18481, "end": 18491, "context": "ms-sw/cmssw/pull/29302){:target=\"_blank\"} from **@Dr15Jones**: Protect ProductResolvers from unlucky thread p"}, {"tag": "USERNAME", "value": "@guitargeek", "start": 7829, "end": 7840, "context": "ms-sw/cmssw/pull/29404){:target=\"_blank\"} from **@guitargeek**: Clean BuildFiles in Validation Subsystem `core"}, {"tag": "USERNAME", "value": "@dildick", "start": 22041, "end": 22049, "context": "ms-sw/cmssw/pull/29247){:target=\"_blank\"} from **@dildick**: Interface for high multiplicity triggers in CS"}, {"tag": "USERNAME", "value": "@jpata", "start": 13214, "end": 13220, "context": "ms-sw/cmssw/pull/29361){:target=\"_blank\"} from **@jpata**: Particle Flow generator truth information `dqm"}, {"tag": "USERNAME", "value": "@belforte", "start": 26026, "end": 26035, "context": "s-sw/cmsdist/pull/5737){:target=\"_blank\"} from **@belforte**: crab-dev to CRAB Client 3.3.2004.rc3 created: "}, {"tag": "USERNAME", "value": "@rovere", "start": 16517, "end": 16524, "context": "ms-sw/cmssw/pull/29327){:target=\"_blank\"} from **@rovere**: Fix TaskPlaceholder dumpPython method `core` "}, {"tag": "USERNAME", "value": "@rovere", "start": 24249, "end": 24256, "context": "ms-sw/cmssw/pull/29081){:target=\"_blank\"} from **@rovere**: [HGCAL] TICL in reconstruction `dqm` `operati"}, {"tag": "USERNAME", "value": "@bendavid", "start": 11423, "end": 11432, "context": "ms-sw/cmssw/pull/29374){:target=\"_blank\"} from **@bendavid**: Restrict timestamps in PFCandidates to reliabl"}, {"tag": "USERNAME", "value": "@PFCal-dev", "start": 8547, "end": 8557, "context": "ms-sw/cmssw/pull/29400){:target=\"_blank\"} from **@PFCal-dev**: [HGCAL trigger] V11 geometry compatibility `l1"}, {"tag": "USERNAME", "value": "@tschuh", "start": 417, "end": 424, "context": 
"ms-sw/cmssw/pull/29478){:target=\"_blank\"} from **@tschuh**: L1Trigger/TrackerDTC fix for SLHCUpgradeSimula"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 17366, "end": 17375, "context": "ms-sw/cmssw/pull/29312){:target=\"_blank\"} from **@bsunanda**: Phase2-hgx233 Correct the HGCal code for worki"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 12748, "end": 12757, "context": "ms-sw/cmssw/pull/29364){:target=\"_blank\"} from **@bsunanda**: Phase2-hgx234 Transfer the constants for HGCal"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 2540, "end": 2550, "context": "ms-sw/cmssw/pull/29459){:target=\"_blank\"} from **@Dr15Jones**: Use LogTrace in IsolatedPixelTrackCandidateL1T"}, {"tag": "USERNAME", "value": "@fwyzard", "start": 27977, "end": 27985, "context": "s-sw/cmsdist/pull/5630){:target=\"_blank\"} from **@fwyzard**: Remove the CUDA API wrappers external created:"}, {"tag": "USERNAME", "value": "@ianna", "start": 9880, "end": 9886, "context": "ms-sw/cmssw/pull/29387){:target=\"_blank\"} from **@ianna**: [DD4hep] Expose Full Geo History on Request `g"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 15187, "end": 15196, "context": "ms-sw/cmssw/pull/29342){:target=\"_blank\"} from **@bsunanda**: Run3-TB53 Update some of the scripts for H2 TB"}, {"tag": "USERNAME", "value": "@Dr15Jones", "start": 10087, "end": 10097, "context": "ms-sw/cmssw/pull/29384){:target=\"_blank\"} from **@Dr15Jones**: Test inter process random state transfers `cor"}, {"tag": "USERNAME", "value": "@bsunanda", "start": 24919, "end": 24928, "context": "ms-sw/cmssw/pull/28313){:target=\"_blank\"} from **@bsunanda**: Run3-sim47 Remove reference to trackermaterial"}]
#!/usr/bin/env python # -*- coding: utf-8 -*- # @Date : 2014-10-26 13:02:58 # @Author : yml_bright@163.com from sqlalchemy import Column, String, Integer from sqlalchemy.ext.declarative import declarative_base from db import dbengine, Base class User(Base): __tablename__ = 'user' cardnum = Column(String(10), primary_key=True) number = Column(String(50), nullable=True) password = Column(String(50), nullable=False) pe_password = Column(String(50), nullable=True) lib_username = Column(String(50), nullable=True) lib_password = Column(String(50), nullable=True) card_query_pwd = Column(String(50), nullable=True) card_consume_pwd = Column(String(50), nullable=True) state = Column(Integer, nullable=False)
Python
MIT
HeraldStudio/herald_auth/mod/models/user.py
76134604-bd6d-4aac-b389-afed7f70ab14
[{"tag": "EMAIL", "value": "yml_bright@163.com", "start": 92, "end": 110, "context": "-*-\n# @Date : 2014-10-26 13:02:58\n# @Author : yml_bright@163.com\n\nfrom sqlalchemy import Column, String, Integer\nf"}]
[{"tag": "EMAIL", "value": "yml_bright@163.com", "start": 92, "end": 110, "context": "-*-\n# @Date : 2014-10-26 13:02:58\n# @Author : yml_bright@163.com\n\nfrom sqlalchemy import Column, String, Integer\nf"}]
/* * Copyright (c) 2018. paascloud.net All Rights Reserved. * 项目名称:附近有好吃的 * 类名称:PublicUtil.java * 创建人:兰龙斌 * 联系方式:llb7891@163.com * 开源地址: https://github.com/NewlyLan/nearbyEat.git * * */ package com.paascloud; import lombok.AccessLevel; import lombok.NoArgsConstructor; import java.util.Collection; import java.util.Map; /** * The class Public util. * * @author paascloud.net@gmail.com */ @NoArgsConstructor(access = AccessLevel.PRIVATE) public class PublicUtil { /** * 判断对象是否Empty(null或元素为0) * 实用于对如下对象做判断:String Collection及其子类 Map及其子类 * * @param pObj 待检查对象 * * @return boolean 返回的布尔值 */ public static boolean isEmpty(Object pObj) { if (pObj == null) { return true; } if (pObj == "") { return true; } if (pObj instanceof String) { return ((String) pObj).length() == 0; } else if (pObj instanceof Collection) { return ((Collection) pObj).isEmpty(); } else if (pObj instanceof Map) { return ((Map) pObj).size() == 0; } return false; } /** * 判断对象是否为NotEmpty(!null或元素大于0) * 实用于对如下对象做判断:String Collection及其子类 Map及其子类 * * @param pObj 待检查对象 * * @return boolean 返回的布尔值 */ public static boolean isNotEmpty(Object pObj) { if (pObj == null) { return false; } if (pObj == "") { return false; } if (pObj instanceof String) { return ((String) pObj).length() != 0; } else if (pObj instanceof Collection) { return !((Collection) pObj).isEmpty(); } else if (pObj instanceof Map) { return ((Map) pObj).size() != 0; } return true; } }
Java
Apache-2.0
NewlyLan/nearEat/paascloud-common/paascloud-common-util/src/main/java/com/paascloud/PublicUtil.java
49de4c6a-196b-4b8a-a184-86d73d276688
[{"tag": "EMAIL", "value": "llb7891@163.com", "start": 118, "end": 133, "context": "\uff1a\u9644\u8fd1\u6709\u597d\u5403\u7684\n * \u7c7b\u540d\u79f0\uff1aPublicUtil.java\n * \u521b\u5efa\u4eba\uff1a\u5170\u9f99\u658c\n * \u8054\u7cfb\u65b9\u5f0f\uff1allb7891@163.com\n * \u5f00\u6e90\u5730\u5740: https://github.com/NewlyLan/nearbyEat.gi"}, {"tag": "NAME", "value": "\u5170\u9f99\u658c", "start": 106, "end": 109, "context": "ved.\n * \u9879\u76ee\u540d\u79f0\uff1a\u9644\u8fd1\u6709\u597d\u5403\u7684\n * \u7c7b\u540d\u79f0\uff1aPublicUtil.java\n * \u521b\u5efa\u4eba\uff1a\u5170\u9f99\u658c\n * \u8054\u7cfb\u65b9\u5f0f\uff1allb7891@163.com\n * \u5f00\u6e90\u5730\u5740: https://github.c"}, {"tag": "EMAIL", "value": "paascloud.net@gmail.com", "start": 377, "end": 400, "context": "Map;\n\n/**\n * The class Public util.\n *\n * @author paascloud.net@gmail.com\n */\n@NoArgsConstructor(access = AccessLevel.PRIVA"}, {"tag": "USERNAME", "value": "NewlyLan", "start": 162, "end": 170, "context": " \u8054\u7cfb\u65b9\u5f0f\uff1allb7891@163.com\n * \u5f00\u6e90\u5730\u5740: https://github.com/NewlyLan/nearbyEat.git\n *\n *\n */\n\npackage com.paascloud;\n\n"}]
[{"tag": "EMAIL", "value": "llb7891@163.com", "start": 118, "end": 133, "context": "\uff1a\u9644\u8fd1\u6709\u597d\u5403\u7684\n * \u7c7b\u540d\u79f0\uff1aPublicUtil.java\n * \u521b\u5efa\u4eba\uff1a\u5170\u9f99\u658c\n * \u8054\u7cfb\u65b9\u5f0f\uff1allb7891@163.com\n * \u5f00\u6e90\u5730\u5740: https://github.com/NewlyLan/nearbyEat.gi"}, {"tag": "NAME", "value": "\u5170\u9f99\u658c", "start": 106, "end": 109, "context": "ved.\n * \u9879\u76ee\u540d\u79f0\uff1a\u9644\u8fd1\u6709\u597d\u5403\u7684\n * \u7c7b\u540d\u79f0\uff1aPublicUtil.java\n * \u521b\u5efa\u4eba\uff1a\u5170\u9f99\u658c\n * \u8054\u7cfb\u65b9\u5f0f\uff1allb7891@163.com\n * \u5f00\u6e90\u5730\u5740: https://github.c"}, {"tag": "EMAIL", "value": "paascloud.net@gmail.com", "start": 377, "end": 400, "context": "Map;\n\n/**\n * The class Public util.\n *\n * @author paascloud.net@gmail.com\n */\n@NoArgsConstructor(access = AccessLevel.PRIVA"}, {"tag": "USERNAME", "value": "NewlyLan", "start": 162, "end": 170, "context": " \u8054\u7cfb\u65b9\u5f0f\uff1allb7891@163.com\n * \u5f00\u6e90\u5730\u5740: https://github.com/NewlyLan/nearbyEat.git\n *\n *\n */\n\npackage com.paascloud;\n\n"}]
version https://git-lfs.github.com/spec/v1 oid sha256:76d8ae334545bbdf2db49414c25d2cfd8685e7b6187f119b28e93ad9c5118e9d size 4727
C
MPL-2.0
Autorule-Technology/anyline-ocr-xamarin-module/BindingSource/AnylineXamarinSDK.iOS/Anyline.framework/Headers/ALAbstractScanPlugin.h
a80403eb-bd9a-4a51-8349-bfa65f8852db
[{"tag": "SSH_KEY", "value": "76d8ae334545bbdf2db49414c25d2cfd8685e7b6187f119b28e93ad9c5118e9d", "start": 54, "end": 118, "context": "ion https://git-lfs.github.com/spec/v1\noid sha256:76d8ae334545bbdf2db49414c25d2cfd8685e7b6187f119b28e93ad9c5118e9d\nsize 4727\n"}]
[{"tag": "KEY", "value": "76d8ae334545bbdf2db49414c25d2cfd8685e7b6187f119b28e93ad9c5118e9d", "start": 54, "end": 118, "context": "ion https://git-lfs.github.com/spec/v1\noid sha256:76d8ae334545bbdf2db49414c25d2cfd8685e7b6187f119b28e93ad9c5118e9d\nsize 4727\n"}]
class VulkanHeaders < Formula desc "Vulkan Header files and API registry" homepage "https://github.com/KhronosGroup/Vulkan-Headers" url "https://github.com/KhronosGroup/Vulkan-Headers/archive/v1.2.155.tar.gz" sha256 "46226dd0a8023114acfe2ba3e4fab8af8595781a4b5b5f3371b21f90f507814d" license "Apache-2.0" bottle do cellar :any_skip_relocation sha256 "350afdd580434b9c5220e908ede864e88b67c5c502f138bfec7d47f6dc0cf736" => :catalina sha256 "7c368e9a0d4cb2ee30b26a15f4e6c6627431c35eac29359fd0f5f31ba2c04af4" => :mojave sha256 "d5d10312e4fceb39c1cadb1e7e7c54d7371e9418f02cde7db816800a7bfa076d" => :high_sierra end depends_on "cmake" => :build def install system "cmake", ".", *std_cmake_args system "make", "install" end test do (testpath/"test.c").write <<~EOS #include <stdio.h> #include <vulkan/vulkan_core.h> int main() { printf("vulkan version %d", VK_VERSION_1_0); return 0; } EOS system ENV.cc, "test.c", "-o", "test" system "./test" end end
Ruby
BSD-2-Clause
RustScan/homebrew-core/Formula/vulkan-headers.rb
c5e4e946-059b-4e5f-9cf8-1878b7c99e54
[]
[]
/** * @file methods/approx_kfn/approx_kfn_main.cpp * @author Ryan Curtin * * Command-line program for various furthest neighbor search algorithms. * * mlpack is free software; you may redistribute it and/or modify it under the * terms of the 3-clause BSD license. You should have received a copy of the * 3-clause BSD license along with mlpack. If not, see * http://www.opensource.org/licenses/BSD-3-Clause for more information. */ #include <mlpack/prereqs.hpp> #include <mlpack/core/util/io.hpp> #include <mlpack/methods/neighbor_search/neighbor_search.hpp> #include <mlpack/core/util/mlpack_main.hpp> #include "drusilla_select.hpp" #include "qdafn.hpp" using namespace mlpack; using namespace mlpack::neighbor; using namespace mlpack::util; using namespace std; // Program Name. BINDING_NAME("Approximate furthest neighbor search"); // Short description. BINDING_SHORT_DESC( "An implementation of two strategies for furthest neighbor search. This " "can be used to compute the furthest neighbor of query point(s) from a set " "of points; furthest neighbor models can be saved and reused with future " "query point(s)."); // Long description. BINDING_LONG_DESC( "This program implements two strategies for furthest neighbor search. " "These strategies are:" "\n\n" " - The 'qdafn' algorithm from \"Approximate Furthest Neighbor in High " "Dimensions\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in " "Similarity Search and Applications 2015 (SISAP)." "\n" " - The 'DrusillaSelect' algorithm from \"Fast approximate furthest " "neighbors with data-dependent candidate selection\", by R.R. Curtin and " "A.B. Gardner, in Similarity Search and Applications 2016 (SISAP)." "\n\n" "These two strategies give approximate results for the furthest neighbor " "search problem and can be used as fast replacements for other furthest " "neighbor techniques such as those found in the mlpack_kfn program. 
Note " "that typically, the 'ds' algorithm requires far fewer tables and " "projections than the 'qdafn' algorithm." "\n\n" "Specify a reference set (set to search in) with " + PRINT_PARAM_STRING("reference") + ", specify a query set with " + PRINT_PARAM_STRING("query") + ", and specify algorithm parameters with " + PRINT_PARAM_STRING("num_tables") + " and " + PRINT_PARAM_STRING("num_projections") + " (or don't and defaults will be " "used). The algorithm to be used (either 'ds'---the default---or 'qdafn') " " may be specified with " + PRINT_PARAM_STRING("algorithm") + ". Also " "specify the number of neighbors to search for with " + PRINT_PARAM_STRING("k") + "." "\n\n" "Note that for 'qdafn' in lower dimensions, " + PRINT_PARAM_STRING("num_projections") + " may need to be set to a high " "value in order to return results for each query point." "\n\n" "If no query set is specified, the reference set will be used as the " "query set. The " + PRINT_PARAM_STRING("output_model") + " output " "parameter may be used to store the built model, and an input model may be " "loaded instead of specifying a reference set with the " + PRINT_PARAM_STRING("input_model") + " option." "\n\n" "Results for each query point can be stored with the " + PRINT_PARAM_STRING("neighbors") + " and " + PRINT_PARAM_STRING("distances") + " output parameters. Each row of these " "output matrices holds the k distances or neighbor indices for each query " "point."); // Example. 
BINDING_EXAMPLE( "For example, to find the 5 approximate furthest neighbors with " + PRINT_DATASET("reference_set") + " as the reference set and " + PRINT_DATASET("query_set") + " as the query set using DrusillaSelect, " "storing the furthest neighbor indices to " + PRINT_DATASET("neighbors") + " and the furthest neighbor distances to " + PRINT_DATASET("distances") + ", one could call" "\n\n" + PRINT_CALL("approx_kfn", "query", "query_set", "reference", "reference_set", "k", 5, "algorithm", "ds", "neighbors", "neighbors", "distances", "distances") + "\n\n" "and to perform approximate all-furthest-neighbors search with k=1 on the " "set " + PRINT_DATASET("data") + " storing only the furthest neighbor " "distances to " + PRINT_DATASET("distances") + ", one could call" "\n\n" + PRINT_CALL("approx_kfn", "reference", "reference_set", "k", 1, "distances", "distances") + "\n\n" "A trained model can be re-used. If a model has been previously saved to " + PRINT_MODEL("model") + ", then we may find 3 approximate furthest " "neighbors on a query set " + PRINT_DATASET("new_query_set") + " using " "that model and store the furthest neighbor indices into " + PRINT_DATASET("neighbors") + " by calling" "\n\n" + PRINT_CALL("approx_kfn", "input_model", "model", "query", "new_query_set", "k", 3, "neighbors", "neighbors")); // See also... 
BINDING_SEE_ALSO("k-furthest-neighbor search", "#kfn"); BINDING_SEE_ALSO("k-nearest-neighbor search", "#knn"); BINDING_SEE_ALSO("Fast approximate furthest neighbors with data-dependent" " candidate selection (pdf)", "http://ratml.org/pub/pdf/2016fast.pdf"); BINDING_SEE_ALSO("Approximate furthest neighbor in high dimensions (pdf)", "https://pdfs.semanticscholar.org/a4b5/7b9cbf37201fb1d9a56c0f4eefad0466" "9c20.pdf"); BINDING_SEE_ALSO("mlpack::neighbor::QDAFN class documentation", "@doxygen/classmlpack_1_1neighbor_1_1QDAFN.html"); BINDING_SEE_ALSO("mlpack::neighbor::DrusillaSelect class documentation", "@doxygen/classmlpack_1_1neighbor_1_1DrusillaSelect.html"); PARAM_MATRIX_IN("reference", "Matrix containing the reference dataset.", "r"); PARAM_MATRIX_IN("query", "Matrix containing query points.", "q"); PARAM_INT_IN("k", "Number of furthest neighbors to search for.", "k", 0); PARAM_INT_IN("num_tables", "Number of hash tables to use.", "t", 5); PARAM_INT_IN("num_projections", "Number of projections to use in each hash " "table.", "p", 5); PARAM_STRING_IN("algorithm", "Algorithm to use: 'ds' or 'qdafn'.", "a", "ds"); PARAM_UMATRIX_OUT("neighbors", "Matrix to save neighbor indices to.", "n"); PARAM_MATRIX_OUT("distances", "Matrix to save furthest neighbor distances to.", "d"); PARAM_FLAG("calculate_error", "If set, calculate the average distance error for" " the first furthest neighbor only.", "e"); PARAM_MATRIX_IN("exact_distances", "Matrix containing exact distances to " "furthest neighbors; this can be used to avoid explicit calculation when " "--calculate_error is set.", "x"); // If we save a model we must also save what type it is. class ApproxKFNModel { public: int type; DrusillaSelect<> ds; QDAFN<> qdafn; //! Constructor, which does nothing. ApproxKFNModel() : type(0), ds(1, 1), qdafn(1, 1) { } //! Serialize the model. 
template<typename Archive> void serialize(Archive& ar, const unsigned int /* version */) { ar & BOOST_SERIALIZATION_NVP(type); if (type == 0) { ar & BOOST_SERIALIZATION_NVP(ds); } else { ar & BOOST_SERIALIZATION_NVP(qdafn); } } }; // Model loading and saving. PARAM_MODEL_IN(ApproxKFNModel, "input_model", "File containing input model.", "m"); PARAM_MODEL_OUT(ApproxKFNModel, "output_model", "File to save output model to.", "M"); static void mlpackMain() { // We have to pass either a reference set or an input model. RequireOnlyOnePassed({ "reference", "input_model" }); // Warn if no task will be performed. RequireAtLeastOnePassed({ "reference", "k" }, false, "no task will be performed"); // Warn if no output is going to be saved. RequireAtLeastOnePassed({ "neighbors", "distances", "output_model" }, false, "no output will be saved"); // Check that the user specified a valid algorithm. RequireParamInSet<string>("algorithm", { "ds", "qdafn" }, true, "unknown algorithm"); // If we are searching, we need a set to search in. if (IO::HasParam("k")) { RequireAtLeastOnePassed({ "reference", "query" }, true, "if search is being performed, at least one set must be specified"); } // Validate parameters. 
if (IO::HasParam("k")) { RequireParamValue<int>("k", [](int x) { return x > 0; }, true, "number of neighbors to search for must be positive"); } RequireParamValue<int>("num_tables", [](int x) { return x > 0; }, true, "number of tables must be positive"); RequireParamValue<int>("num_projections", [](int x) { return x > 0; }, true, "number of projections must be positive"); ReportIgnoredParam({{ "input_model", true }}, "algorithm"); ReportIgnoredParam({{ "input_model", true }}, "num_tables"); ReportIgnoredParam({{ "input_model", true }}, "num_projections"); ReportIgnoredParam({{ "k", false }}, "calculate_error"); ReportIgnoredParam({{ "calculate_error", false }}, "exact_distances"); if (IO::HasParam("calculate_error")) { RequireAtLeastOnePassed({ "exact_distances", "reference" }, true, "if error is to be calculated, either precalculated exact distances or " "the reference set must be passed"); } if (IO::HasParam("k") && IO::HasParam("reference") && ((size_t) IO::GetParam<int>("k")) > IO::GetParam<arma::mat>("reference").n_cols) { Log::Fatal << "Number of neighbors to search for (" << IO::GetParam<int>("k") << ") must be less than the number of " << "reference points (" << IO::GetParam<arma::mat>("reference").n_cols << ")." << std::endl; } // Do the building of a model, if necessary. ApproxKFNModel* m; arma::mat referenceSet; // This may be used at query time. if (IO::HasParam("reference")) { referenceSet = std::move(IO::GetParam<arma::mat>("reference")); m = new ApproxKFNModel(); const size_t numTables = (size_t) IO::GetParam<int>("num_tables"); const size_t numProjections = (size_t) IO::GetParam<int>("num_projections"); const string algorithm = IO::GetParam<string>("algorithm"); if (algorithm == "ds") { Timer::Start("drusilla_select_construct"); Log::Info << "Building DrusillaSelect model..." 
<< endl; m->type = 0; m->ds = DrusillaSelect<>(referenceSet, numTables, numProjections); Timer::Stop("drusilla_select_construct"); } else { Timer::Start("qdafn_construct"); Log::Info << "Building QDAFN model..." << endl; m->type = 1; m->qdafn = QDAFN<>(referenceSet, numTables, numProjections); Timer::Stop("qdafn_construct"); } Log::Info << "Model built." << endl; } else { // We must load the model from what was passed. m = IO::GetParam<ApproxKFNModel*>("input_model"); } // Now, do we need to do any queries? if (IO::HasParam("k")) { arma::mat querySet; // This may or may not be used. const size_t k = (size_t) IO::GetParam<int>("k"); arma::Mat<size_t> neighbors; arma::mat distances; arma::mat& set = IO::HasParam("query") ? querySet : referenceSet; if (IO::HasParam("query")) querySet = std::move(IO::GetParam<arma::mat>("query")); if (m->type == 0) { Timer::Start("drusilla_select_search"); Log::Info << "Searching for " << k << " furthest neighbors with " << "DrusillaSelect..." << endl; m->ds.Search(set, k, neighbors, distances); Timer::Stop("drusilla_select_search"); } else { Timer::Start("qdafn_search"); Log::Info << "Searching for " << k << " furthest neighbors with " << "QDAFN..." << endl; m->qdafn.Search(set, k, neighbors, distances); Timer::Stop("qdafn_search"); } Log::Info << "Search complete." << endl; // Should we calculate error? if (IO::HasParam("calculate_error")) { arma::mat exactDistances; if (IO::HasParam("exact_distances")) { // Check the exact distances matrix has the right dimensions. exactDistances = std::move(IO::GetParam<arma::mat>("exact_distances")); if (exactDistances.n_rows != k) { delete m; Log::Fatal << "The number of rows in the exact distances matrix (" << exactDistances.n_rows << " must be equal to k (" << k << ")." 
<< std::endl; } else if (exactDistances.n_cols != referenceSet.n_cols) { delete m; Log::Fatal << "The number of columns in the exact distances matrix (" << exactDistances.n_cols << ") must be equal to the number of " << "columns in the reference set (" << referenceSet.n_cols << ")." << std::endl; } } else { // Calculate exact distances. We are guaranteed the reference set is // available. Log::Info << "Calculating exact distances..." << endl; KFN kfn(referenceSet); arma::Mat<size_t> exactNeighbors; kfn.Search(set, 1, exactNeighbors, exactDistances); Log::Info << "Calculation complete." << endl; } const double averageError = arma::sum(exactDistances.row(0) / distances.row(0)) / distances.n_cols; const double minError = arma::min(exactDistances.row(0) / distances.row(0)); const double maxError = arma::max(exactDistances.row(0) / distances.row(0)); Log::Info << "Average error: " << averageError << "." << endl; Log::Info << "Maximum error: " << maxError << "." << endl; Log::Info << "Minimum error: " << minError << "." << endl; } // Save results, if desired. IO::GetParam<arma::Mat<size_t>>("neighbors") = std::move(neighbors); IO::GetParam<arma::mat>("distances") = std::move(distances); } IO::GetParam<ApproxKFNModel*>("output_model") = m; }
C++
BSD-3-Clause-No-Nuclear-License-2014
AnimeshMaheshwari22/mlpack/src/mlpack/methods/approx_kfn/approx_kfn_main.cpp
4e30945c-9f9b-4be4-80f5-5b6131dda268
[{"tag": "NAME", "value": "F. Silvestri", "start": 1418, "end": 1430, "context": " Neighbor in High \"\n \"Dimensions\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n \"Similarity"}, {"tag": "NAME", "value": "A.B. Gardner", "start": 1687, "end": 1699, "context": " candidate selection\\\", by R.R. Curtin and \"\n \"A.B. Gardner, in Similarity Search and Applications 2016 (SISA"}, {"tag": "NAME", "value": "R.R. Curtin", "start": 1664, "end": 1675, "context": "ors with data-dependent candidate selection\\\", by R.R. Curtin and \"\n \"A.B. Gardner, in Similarity Search and"}, {"tag": "NAME", "value": "J. Sivertsen", "start": 1432, "end": 1444, "context": "igh \"\n \"Dimensions\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n \"Similarity Search and Ap"}, {"tag": "NAME", "value": "R. Pagh", "start": 1409, "end": 1416, "context": " Furthest Neighbor in High \"\n \"Dimensions\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n "}, {"tag": "NAME", "value": "Ryan Curtin", "start": 63, "end": 74, "context": "methods/approx_kfn/approx_kfn_main.cpp\n * @author Ryan Curtin\n *\n * Command-line program for various furthest n"}, {"tag": "NAME", "value": "M. Skala", "start": 1450, "end": 1458, "context": "ons\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n \"Similarity Search and Applications 201"}]
[{"tag": "NAME", "value": "F. Silvestri", "start": 1418, "end": 1430, "context": " Neighbor in High \"\n \"Dimensions\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n \"Similarity"}, {"tag": "NAME", "value": "A.B. Gardner", "start": 1687, "end": 1699, "context": " candidate selection\\\", by R.R. Curtin and \"\n \"A.B. Gardner, in Similarity Search and Applications 2016 (SISA"}, {"tag": "NAME", "value": "R.R. Curtin", "start": 1664, "end": 1675, "context": "ors with data-dependent candidate selection\\\", by R.R. Curtin and \"\n \"A.B. Gardner, in Similarity Search and"}, {"tag": "NAME", "value": "J. Sivertsen", "start": 1432, "end": 1444, "context": "igh \"\n \"Dimensions\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n \"Similarity Search and Ap"}, {"tag": "NAME", "value": "R. Pagh", "start": 1409, "end": 1416, "context": " Furthest Neighbor in High \"\n \"Dimensions\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n "}, {"tag": "NAME", "value": "Ryan Curtin", "start": 63, "end": 74, "context": "methods/approx_kfn/approx_kfn_main.cpp\n * @author Ryan Curtin\n *\n * Command-line program for various furthest n"}, {"tag": "NAME", "value": "M. Skala", "start": 1450, "end": 1458, "context": "ons\\\" by R. Pagh, F. Silvestri, J. Sivertsen, and M. Skala, in \"\n \"Similarity Search and Applications 201"}]
#!/usr/bin/env python # -*- coding:utf-8 _*- # @author : Lin Luo / Bruce Liu # @time : 2020/1/3 21:35 # @contact : 15869300264@163.com / bruce.w.y.liu@gmail.com import argparse parser = argparse.ArgumentParser() parser.add_argument_group() parser.add_argument('-c', '--config', help='config file for run and operation', required=False) group = parser.add_mutually_exclusive_group() group.add_argument('-a', '--add', help='add sk with ip', required=False) group.add_argument('-d', '--delete', help='delete sk by sk or ip', required=False) # group.add_argument('-e', '-examine', help='examine the status of ip', required=False) group.add_argument('-r', '--run', help='run the main project', action='store_true', required=False) group.add_argument('-t', '--test', help='test the config file, default path is conf/odyn.conf', action='store_true', required=False) group.add_argument('-s', '--stop', help='stop the main project', action='store_true', required=False) args = parser.parse_args()
Python
Apache-2.0
BruceWW/odyn/utils/args.py
f2887fe5-078d-4c0b-bb49-aa71800f8fb2
[{"tag": "NAME", "value": "Lin Luo", "start": 58, "end": 65, "context": "in/env python\n# -*- coding:utf-8 _*-\n# @author : Lin Luo / Bruce Liu\n# @time : 2020/1/3 21:35\n# @contac"}, {"tag": "EMAIL", "value": "bruce.w.y.liu@gmail.com", "start": 141, "end": 164, "context": "2020/1/3 21:35\n# @contact : 15869300264@163.com / bruce.w.y.liu@gmail.com\nimport argparse\n\nparser = argparse.ArgumentParser"}, {"tag": "EMAIL", "value": "15869300264@163.com", "start": 119, "end": 138, "context": "ruce Liu\n# @time : 2020/1/3 21:35\n# @contact : 15869300264@163.com / bruce.w.y.liu@gmail.com\nimport argparse\n\nparser"}, {"tag": "NAME", "value": "Bruce Liu", "start": 68, "end": 77, "context": "hon\n# -*- coding:utf-8 _*-\n# @author : Lin Luo / Bruce Liu\n# @time : 2020/1/3 21:35\n# @contact : 15869300"}]
[{"tag": "NAME", "value": "Lin Luo", "start": 58, "end": 65, "context": "in/env python\n# -*- coding:utf-8 _*-\n# @author : Lin Luo / Bruce Liu\n# @time : 2020/1/3 21:35\n# @contac"}, {"tag": "EMAIL", "value": "bruce.w.y.liu@gmail.com", "start": 141, "end": 164, "context": "2020/1/3 21:35\n# @contact : 15869300264@163.com / bruce.w.y.liu@gmail.com\nimport argparse\n\nparser = argparse.ArgumentParser"}, {"tag": "EMAIL", "value": "15869300264@163.com", "start": 119, "end": 138, "context": "ruce Liu\n# @time : 2020/1/3 21:35\n# @contact : 15869300264@163.com / bruce.w.y.liu@gmail.com\nimport argparse\n\nparser"}, {"tag": "NAME", "value": "Bruce Liu", "start": 68, "end": 77, "context": "hon\n# -*- coding:utf-8 _*-\n# @author : Lin Luo / Bruce Liu\n# @time : 2020/1/3 21:35\n# @contact : 15869300"}]
<?php /* * This file is part of the Symfony package. * * (c) Fabien Potencier <fabien@symfony.com> * * For the full copyright and license information, please view the LICENSE * file that was distributed with this source code. */ namespace Symfony\Component\Mailer\Transport; use Psr\Log\LoggerInterface; use Psr\Log\NullLogger; use Symfony\Component\Mailer\Envelope; use Symfony\Component\Mailer\Event\MessageEvent; use Symfony\Component\Mailer\SentMessage; use Symfony\Component\Mime\Address; use Symfony\Component\Mime\RawMessage; use Symfony\Contracts\EventDispatcher\EventDispatcherInterface; /** * @author Fabien Potencier <fabien@symfony.com> */ abstract class AbstractTransport implements TransportInterface { private $dispatcher; private $logger; private $rate = 0; private $lastSent = 0; public function __construct(EventDispatcherInterface $dispatcher = null, LoggerInterface $logger = null) { $this->dispatcher = $dispatcher; $this->logger = $logger ?? new NullLogger(); } /** * Sets the maximum number of messages to send per second (0 to disable). */ public function setMaxPerSecond(float $rate): self { if (0 >= $rate) { $rate = 0; } $this->rate = $rate; $this->lastSent = 0; return $this; } public function send(RawMessage $message, Envelope $envelope = null): ?SentMessage { $message = clone $message; $envelope = null !== $envelope ? 
clone $envelope : Envelope::create($message); if (null !== $this->dispatcher) { $event = new MessageEvent($message, $envelope, (string) $this); $this->dispatcher->dispatch($event); $envelope = $event->getEnvelope(); } $message = new SentMessage($message, $envelope); $this->doSend($message); $this->checkThrottling(); return $message; } abstract protected function doSend(SentMessage $message): void; /** * @param Address[] $addresses * * @return string[] */ protected function stringifyAddresses(array $addresses): array { return array_map(function (Address $a) { return $a->toString(); }, $addresses); } protected function getLogger(): LoggerInterface { return $this->logger; } private function checkThrottling() { if (0 == $this->rate) { return; } $sleep = (1 / $this->rate) - (microtime(true) - $this->lastSent); if (0 < $sleep) { $this->logger->debug(sprintf('Email transport "%s" sleeps for %.2f seconds', __CLASS__, $sleep)); usleep($sleep * 1000000); } $this->lastSent = microtime(true); } }
PHP
MIT
Taluu/symfony/src/Symfony/Component/Mailer/Transport/AbstractTransport.php
1a0dae61-2abd-4bfd-9c86-0cceaa91b9a1
[{"tag": "NAME", "value": "Fabien Potencier", "start": 65, "end": 81, "context": "is file is part of the Symfony package.\n *\n * (c) Fabien Potencier <fabien@symfony.com>\n *\n * For the full copyright"}]
[{"tag": "NAME", "value": "Fabien Potencier", "start": 65, "end": 81, "context": "is file is part of the Symfony package.\n *\n * (c) Fabien Potencier <fabien@symfony.com>\n *\n * For the full copyright"}]
#!/usr/bin/env ruby require 'json' require 'scbi_fastq' if ARGV.count != 2 end # >Cluster 0 # 0 216aa, >E9LAHD006DQKVK... * # >Cluster 1 # 0 203aa, >E9LAHD006DODWR... * # >Cluster 2 # 0 198aa, >E9LAHD006DQCDS... * # >Cluster 3 # 0 195aa, >E9LAHD006DQURO... * # 1 172aa, >E9LAHD006DOSHR... at 93.02% # 2 172aa, >E9LAHD006DSV4P... at 93.02% # 3 172aa, >E9LAHD006DI00Q... at 93.02% # 4 172aa, >E9LAHD006DR7MR... at 93.02% # 5 175aa, >E9LAHD006DTDA7... at 90.86% # 6 172aa, >E9LAHD006DVCR3... at 93.02% # 7 172aa, >E9LAHD006DHY3H... at 93.02% # 8 177aa, >E9LAHD006DI52X... at 90.96% def load_repeated_seqs(file_path,min_repetitions) clusters=[] # count=0 current_cluster=[] if File.exists?(file_path) # File.open(ARGV[0]).each_line do |line| # $LOG.debug("Repeated file path:"+file_path) File.open(file_path).each_line do |line| if line =~ /^>Cluster/ if !current_cluster.empty? && (current_cluster.count <= min_repetitions) clusters += current_cluster end # count=0 current_cluster=[] elsif line =~ />([^\.]+)\.\.\.\s/ current_cluster << $1 end end if !current_cluster.empty? && (current_cluster.count <= min_repetitions) clusters += current_cluster end # $LOG.info("Repeated sequence count: #{@clusters.count}") else # $LOG.error("Clustering file's doesn't exists: #{@clusters.count}") end return clusters end def remove_singletons_from_file(input_file_path,singletons) fqr=FastqFile.new(input_file_path) out=FastqFile.new(input_file_path+'_without_singletons','w+') fqr.each do |n,f,q,c| if !singletons.include?(n) out.write_seq(n,f,q,c) end end out.close fqr.close end input_file_path=ARGV.shift min_repetitions = ARGV.shift.to_i `cd-hit -i #{input_file_path} -o clusters` singletons = load_repeated_seqs('clusters.clrs',min_repetitions) remove_singletons_from_file(input_file_path,singletons) # puts singletons.to_json
Ruby
MIT
dariogf/SeqtrimNext/bin/parse_amplicons.rb
84f60a25-9b7e-4222-bdef-784f755412c6
[]
[]
/* * pthread_win32_attach_detach_np.c * * Description: * This translation unit implements non-portable thread functions. * * -------------------------------------------------------------------------- * * Pthreads-win32 - POSIX Threads Library for Win32 * Copyright(C) 1998 John E. Bossom * Copyright(C) 1999,2005 Pthreads-win32 contributors * * Contact Email: rpj@callisto.canberra.edu.au * * The current list of contributors is contained * in the file CONTRIBUTORS included with the source * code distribution. The list can also be seen at the * following World Wide Web location: * http://sources.redhat.com/pthreads-win32/contributors.html * * This library is free software; you can redistribute it and/or * modify it under the terms of the GNU Lesser General Public * License as published by the Free Software Foundation; either * version 2 of the License, or (at your option) any later version. * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU * Lesser General Public License for more details. 
* * You should have received a copy of the GNU Lesser General Public * License along with this library in the file COPYING.LIB; * if not, write to the Free Software Foundation, Inc., * 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA */ #include "pthread.h" #include "implement.h" /* * Handle to kernel32.dll */ static HINSTANCE ptw32_h_kernel32; /* * Handle to quserex.dll */ static HINSTANCE ptw32_h_quserex; BOOL pthread_win32_process_attach_np () { BOOL result = TRUE; DWORD_PTR vProcessCPUs; DWORD_PTR vSystemCPUs; result = ptw32_processInitialize (); #ifdef _UWIN pthread_count++; #endif ptw32_features = 0; #if defined(NEED_PROCESS_AFFINITY_MASK) ptw32_smp_system = PTW32_FALSE; #else if (GetProcessAffinityMask (GetCurrentProcess (), &vProcessCPUs, &vSystemCPUs)) { int CPUs = 0; DWORD_PTR bit; for (bit = 1; bit != 0; bit <<= 1) { if (vSystemCPUs & bit) { CPUs++; } } ptw32_smp_system = (CPUs > 1); } else { ptw32_smp_system = PTW32_FALSE; } #endif #ifdef _WIN64 /* * InterlockedCompareExchange routine in WIN64 is an intrinsic function. 
* See PTW32_INTERLOCKED_COMPARE_EXCHANGE implement.h */ #else #ifdef WINCE /* * Load COREDLL and try to get address of InterlockedCompareExchange */ ptw32_h_kernel32 = LoadLibrary (TEXT ("COREDLL.DLL")); #else /* * Load KERNEL32 and try to get address of InterlockedCompareExchange */ ptw32_h_kernel32 = LoadLibrary (TEXT ("KERNEL32.DLL")); #endif ptw32_interlocked_compare_exchange = (PTW32_INTERLOCKED_LONG (WINAPI *) (PTW32_INTERLOCKED_LPLONG, PTW32_INTERLOCKED_LONG, PTW32_INTERLOCKED_LONG)) #if defined(NEED_UNICODE_CONSTS) GetProcAddress (ptw32_h_kernel32, (const TCHAR *) TEXT ("InterlockedCompareExchange")); #else GetProcAddress (ptw32_h_kernel32, (LPCSTR) "InterlockedCompareExchange"); #endif if (ptw32_interlocked_compare_exchange == NULL) { ptw32_interlocked_compare_exchange = ptw32_InterlockedCompareExchange; /* * If InterlockedCompareExchange is not being used, then free * the kernel32.dll handle now, rather than leaving it until * DLL_PROCESS_DETACH. * * Note: this is not a pedantic exercise in freeing unused * resources! It is a work-around for a bug in Windows 95 * (see microsoft knowledge base article, Q187684) which * does Bad Things when FreeLibrary is called within * the DLL_PROCESS_DETACH code, in certain situations. * Since w95 just happens to be a platform which does not * provide InterlockedCompareExchange, the bug will be * effortlessly avoided. 
*/ (void) FreeLibrary (ptw32_h_kernel32); ptw32_h_kernel32 = 0; } else { ptw32_features |= PTW32_SYSTEM_INTERLOCKED_COMPARE_EXCHANGE; } #endif /* * Load QUSEREX.DLL and try to get address of QueueUserAPCEx */ ptw32_h_quserex = LoadLibrary (TEXT ("QUSEREX.DLL")); if (ptw32_h_quserex != NULL) { ptw32_register_cancelation = (DWORD (*)(PAPCFUNC, HANDLE, DWORD)) #if defined(NEED_UNICODE_CONSTS) GetProcAddress (ptw32_h_quserex, (const TCHAR *) TEXT ("QueueUserAPCEx")); #else GetProcAddress (ptw32_h_quserex, (LPCSTR) "QueueUserAPCEx"); #endif } if (NULL == ptw32_register_cancelation) { ptw32_register_cancelation = ptw32_RegisterCancelation; if (ptw32_h_quserex != NULL) { (void) FreeLibrary (ptw32_h_quserex); } ptw32_h_quserex = 0; } else { /* Initialise QueueUserAPCEx */ BOOL (*queue_user_apc_ex_init) (VOID); queue_user_apc_ex_init = (BOOL (*)(VOID)) #if defined(NEED_UNICODE_CONSTS) GetProcAddress (ptw32_h_quserex, (const TCHAR *) TEXT ("QueueUserAPCEx_Init")); #else GetProcAddress (ptw32_h_quserex, (LPCSTR) "QueueUserAPCEx_Init"); #endif if (queue_user_apc_ex_init == NULL || !queue_user_apc_ex_init ()) { ptw32_register_cancelation = ptw32_RegisterCancelation; (void) FreeLibrary (ptw32_h_quserex); ptw32_h_quserex = 0; } } if (ptw32_h_quserex) { ptw32_features |= PTW32_ALERTABLE_ASYNC_CANCEL; } return result; } BOOL pthread_win32_process_detach_np () { if (ptw32_processInitialized) { ptw32_thread_t * sp = (ptw32_thread_t *) pthread_getspecific (ptw32_selfThreadKey); if (sp != NULL) { /* * Detached threads have their resources automatically * cleaned up upon exit (others must be 'joined'). 
*/ if (sp->detachState == PTHREAD_CREATE_DETACHED) { ptw32_threadDestroy (sp->ptHandle); TlsSetValue (ptw32_selfThreadKey->key, NULL); } } /* * The DLL is being unmapped from the process's address space */ ptw32_processTerminate (); if (ptw32_h_quserex) { /* Close QueueUserAPCEx */ BOOL (*queue_user_apc_ex_fini) (VOID); queue_user_apc_ex_fini = (BOOL (*)(VOID)) #if defined(NEED_UNICODE_CONSTS) GetProcAddress (ptw32_h_quserex, (const TCHAR *) TEXT ("QueueUserAPCEx_Fini")); #else GetProcAddress (ptw32_h_quserex, (LPCSTR) "QueueUserAPCEx_Fini"); #endif if (queue_user_apc_ex_fini != NULL) { (void) queue_user_apc_ex_fini (); } (void) FreeLibrary (ptw32_h_quserex); } if (ptw32_h_kernel32) { (void) FreeLibrary (ptw32_h_kernel32); } } return TRUE; } BOOL pthread_win32_thread_attach_np () { return TRUE; } BOOL pthread_win32_thread_detach_np () { if (ptw32_processInitialized) { /* * Don't use pthread_self() - to avoid creating an implicit POSIX thread handle * unnecessarily. */ ptw32_thread_t * sp = (ptw32_thread_t *) pthread_getspecific (ptw32_selfThreadKey); if (sp != NULL) // otherwise Win32 thread with no implicit POSIX handle. { ptw32_callUserDestroyRoutines (sp->ptHandle); (void) pthread_mutex_lock (&sp->cancelLock); sp->state = PThreadStateLast; /* * If the thread is joinable at this point then it MUST be joined * or detached explicitly by the application. */ (void) pthread_mutex_unlock (&sp->cancelLock); if (sp->detachState == PTHREAD_CREATE_DETACHED) { ptw32_threadDestroy (sp->ptHandle); TlsSetValue (ptw32_selfThreadKey->key, NULL); } } } return TRUE; } BOOL pthread_win32_test_features_np (int feature_mask) { return ((ptw32_features & feature_mask) == feature_mask); }
C
Apache-2.0
Acidburn0zzz/sagetv/third_party/mingw/pthreads/pthread_win32_attach_detach_np.c
e93ed89d-0d6d-410e-a562-b72aec8c87fb
[{"tag": "NAME", "value": "John", "start": 292, "end": 296, "context": "reads Library for Win32\n * Copyright(C) 1998 John E. Bossom\n * Copyright(C) 1999,2005 Pthreads"}, {"tag": "EMAIL", "value": "rpj@callisto.canberra.edu.au", "start": 393, "end": 421, "context": "ads-win32 contributors\n * \n * Contact Email: rpj@callisto.canberra.edu.au\n * \n * The current list of contributors is c"}]
[{"tag": "NAME", "value": "John", "start": 292, "end": 296, "context": "reads Library for Win32\n * Copyright(C) 1998 John E. Bossom\n * Copyright(C) 1999,2005 Pthreads"}, {"tag": "EMAIL", "value": "rpj@callisto.canberra.edu.au", "start": 393, "end": 421, "context": "ads-win32 contributors\n * \n * Contact Email: rpj@callisto.canberra.edu.au\n * \n * The current list of contributors is c"}]
/* MIT License Copyright (c) 2021 Parallel Applications Modelling Group - GMAP GMAP website: https://gmap.pucrs.br Pontifical Catholic University of Rio Grande do Sul (PUCRS) Av. Ipiranga, 6681, Porto Alegre - Brazil, 90619-900 Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
------------------------------------------------------------------------------ The original NPB 3.4.1 version was written in Fortran and belongs to: http://www.nas.nasa.gov/Software/NPB/ ------------------------------------------------------------------------------ The serial C++ version is a translation of the original NPB 3.4.1 Serial C++ version: https://github.com/GMAP/NPB-CPP/tree/master/NPB-SER Authors of the C++ code: Dalvan Griebler <dalvangriebler@gmail.com> Gabriell Araujo <hexenoften@gmail.com> Júnior Löff <loffjh@gmail.com> */ #if defined(USE_POW) #define r23 pow(0.5, 23.0) #define r46 (r23*r23) #define t23 pow(2.0, 23.0) #define t46 (t23*t23) #else #define r23 (0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5*0.5) #define r46 (r23*r23) #define t23 (2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0*2.0) #define t46 (t23*t23) #endif /* * --------------------------------------------------------------------- * * this routine returns a uniform pseudorandom double precision number in the * range (0, 1) by using the linear congruential generator * * x_{k+1} = a x_k (mod 2^46) * * where 0 < x_k < 2^46 and 0 < a < 2^46. this scheme generates 2^44 numbers * before repeating. the argument A is the same as 'a' in the above formula, * and X is the same as x_0. A and X must be odd double precision integers * in the range (1, 2^46). the returned value RANDLC is normalized to be * between 0 and 1, i.e. RANDLC = 2^(-46) * x_1. X is updated to contain * the new seed x_1, so that subsequent calls to RANDLC using the same * arguments will generate a continuous sequence. * * this routine should produce the same results on any computer with at least * 48 mantissa bits in double precision floating point data. On 64 bit * systems, double precision should be disabled. * * David H. 
Bailey, October 26, 1990 * * --------------------------------------------------------------------- */ double randlc(double *x, double a){ double t1,t2,t3,t4,a1,a2,x1,x2,z; /* * --------------------------------------------------------------------- * break A into two parts such that A = 2^23 * A1 + A2. * --------------------------------------------------------------------- */ t1 = r23 * a; a1 = (int)t1; a2 = a - t23 * a1; /* * --------------------------------------------------------------------- * break X into two parts such that X = 2^23 * X1 + X2, compute * Z = A1 * X2 + A2 * X1 (mod 2^23), and then * X = 2^23 * Z + A2 * X2 (mod 2^46). * --------------------------------------------------------------------- */ t1 = r23 * (*x); x1 = (int)t1; x2 = (*x) - t23 * x1; t1 = a1 * x2 + a2 * x1; t2 = (int)(r23 * t1); z = t1 - t23 * t2; t3 = t23 * z + a2 * x2; t4 = (int)(r46 * t3); (*x) = t3 - t46 * t4; return (r46 * (*x)); } /* * --------------------------------------------------------------------- * * this routine generates N uniform pseudorandom double precision numbers in * the range (0, 1) by using the linear congruential generator * * x_{k+1} = a x_k (mod 2^46) * * where 0 < x_k < 2^46 and 0 < a < 2^46. this scheme generates 2^44 numbers * before repeating. the argument A is the same as 'a' in the above formula, * and X is the same as x_0. A and X must be odd double precision integers * in the range (1, 2^46). the N results are placed in Y and are normalized * to be between 0 and 1. X is updated to contain the new seed, so that * subsequent calls to VRANLC using the same arguments will generate a * continuous sequence. if N is zero, only initialization is performed, and * the variables X, A and Y are ignored. * * this routine is the standard version designed for scalar or RISC systems. * however, it should produce the same results on any single processor * computer with at least 48 mantissa bits in double precision floating point * data. 
on 64 bit systems, double precision should be disabled. * * --------------------------------------------------------------------- */ void vranlc(int n, double *x_seed, double a, double y[]){ int i; double x,t1,t2,t3,t4,a1,a2,x1,x2,z; /* * --------------------------------------------------------------------- * break A into two parts such that A = 2^23 * A1 + A2. * --------------------------------------------------------------------- */ t1 = r23 * a; a1 = (int)t1; a2 = a - t23 * a1; x = *x_seed; /* * --------------------------------------------------------------------- * generate N results. this loop is not vectorizable. * --------------------------------------------------------------------- */ for(i=0; i<n; i++){ /* * --------------------------------------------------------------------- * break X into two parts such that X = 2^23 * X1 + X2, compute * Z = A1 * X2 + A2 * X1 (mod 2^23), and then * X = 2^23 * Z + A2 * X2 (mod 2^46). * --------------------------------------------------------------------- */ t1 = r23 * x; x1 = (int)t1; x2 = x - t23 * x1; t1 = a1 * x2 + a2 * x1; t2 = (int)(r23 * t1); z = t1 - t23 * t2; t3 = t23 * z + a2 * x2; t4 = (int)(r46 * t3); x = t3 - t46 * t4; y[i] = r46 * x; } *x_seed = x; }
C++
MIT
GMAP/NPB-CPP/NPB-FF/common/c_randdp.cpp
23480346-23cb-45c0-bcc8-63463b97acfe
[{"tag": "NAME", "value": "David H. Bailey", "start": 3168, "end": 3183, "context": "stems, double precision should be disabled.\n *\n * David H. Bailey, October 26, 1990\n * \n * ------------------------"}, {"tag": "EMAIL", "value": "dalvangriebler@gmail.com", "start": 1714, "end": 1738, "context": "-SER\n\nAuthors of the C++ code: \n\tDalvan Griebler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior"}, {"tag": "EMAIL", "value": "loffjh@gmail.com", "start": 1795, "end": 1811, "context": "iell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmail.com>\n*/ \n\n#if defined(USE_POW)\n#define r23 pow(0.5, 2"}, {"tag": "EMAIL", "value": "hexenoften@gmail.com", "start": 1758, "end": 1778, "context": "bler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmail.com>\n*/ \n\n#if define"}, {"tag": "NAME", "value": "Gabriell Araujo", "start": 1741, "end": 1756, "context": "de: \n\tDalvan Griebler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmai"}, {"tag": "NAME", "value": "Dalvan Griebler", "start": 1697, "end": 1712, "context": "P/tree/master/NPB-SER\n\nAuthors of the C++ code: \n\tDalvan Griebler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexe"}, {"tag": "NAME", "value": "J\u00fanior L\u00f6ff", "start": 1782, "end": 1793, "context": "il.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmail.com>\n*/ \n\n#if defined(USE_POW)\n#def"}]
[{"tag": "NAME", "value": "David H. Bailey", "start": 3168, "end": 3183, "context": "stems, double precision should be disabled.\n *\n * David H. Bailey, October 26, 1990\n * \n * ------------------------"}, {"tag": "EMAIL", "value": "dalvangriebler@gmail.com", "start": 1714, "end": 1738, "context": "-SER\n\nAuthors of the C++ code: \n\tDalvan Griebler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior"}, {"tag": "EMAIL", "value": "loffjh@gmail.com", "start": 1795, "end": 1811, "context": "iell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmail.com>\n*/ \n\n#if defined(USE_POW)\n#define r23 pow(0.5, 2"}, {"tag": "EMAIL", "value": "hexenoften@gmail.com", "start": 1758, "end": 1778, "context": "bler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmail.com>\n*/ \n\n#if define"}, {"tag": "NAME", "value": "Gabriell Araujo", "start": 1741, "end": 1756, "context": "de: \n\tDalvan Griebler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmai"}, {"tag": "NAME", "value": "Dalvan Griebler", "start": 1697, "end": 1712, "context": "P/tree/master/NPB-SER\n\nAuthors of the C++ code: \n\tDalvan Griebler <dalvangriebler@gmail.com>\n\tGabriell Araujo <hexe"}, {"tag": "NAME", "value": "J\u00fanior L\u00f6ff", "start": 1782, "end": 1793, "context": "il.com>\n\tGabriell Araujo <hexenoften@gmail.com>\n \tJ\u00fanior L\u00f6ff <loffjh@gmail.com>\n*/ \n\n#if defined(USE_POW)\n#def"}]
/* * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org> * All rights reserved. * * Redistribution and use in source and binary forms, with or without * modification, are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, this * list of conditions and the following disclaimer. * * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * * THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" * AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE * IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE * DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE * FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL * DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR * SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER * CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, * OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE * OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
*/ #include <AK/NonnullRefPtrVector.h> #include <AK/Singleton.h> #include <AK/StringBuilder.h> #include <AK/StringView.h> #include <Kernel/API/InodeWatcherEvent.h> #include <Kernel/FileSystem/Custody.h> #include <Kernel/FileSystem/Inode.h> #include <Kernel/FileSystem/InodeWatcher.h> #include <Kernel/FileSystem/VirtualFileSystem.h> #include <Kernel/KBufferBuilder.h> #include <Kernel/Net/LocalSocket.h> #include <Kernel/VM/SharedInodeVMObject.h> namespace Kernel { static SpinLock s_all_inodes_lock; static AK::Singleton<InlineLinkedList<Inode>> s_list; SpinLock<u32>& Inode::all_inodes_lock() { return s_all_inodes_lock; } InlineLinkedList<Inode>& Inode::all_with_lock() { ASSERT(s_all_inodes_lock.is_locked()); return *s_list; } void Inode::sync() { NonnullRefPtrVector<Inode, 32> inodes; { ScopedSpinLock all_inodes_lock(s_all_inodes_lock); for (auto& inode : all_with_lock()) { if (inode.is_metadata_dirty()) inodes.append(inode); } } for (auto& inode : inodes) { ASSERT(inode.is_metadata_dirty()); inode.flush_metadata(); } } KResultOr<NonnullOwnPtr<KBuffer>> Inode::read_entire(FileDescription* description) const { KBufferBuilder builder; ssize_t nread; u8 buffer[4096]; off_t offset = 0; for (;;) { auto buf = UserOrKernelBuffer::for_kernel_buffer(buffer); nread = read_bytes(offset, sizeof(buffer), buf, description); if (nread < 0) return KResult(nread); ASSERT(nread <= (ssize_t)sizeof(buffer)); if (nread <= 0) break; builder.append((const char*)buffer, nread); offset += nread; if (nread < (ssize_t)sizeof(buffer)) break; } if (nread < 0) { klog() << "Inode::read_entire: ERROR: " << nread; return KResult(nread); } auto entire_file = builder.build(); if (!entire_file) return KResult(-ENOMEM); return entire_file.release_nonnull(); } KResultOr<NonnullRefPtr<Custody>> Inode::resolve_as_link(Custody& base, RefPtr<Custody>* out_parent, int options, int symlink_recursion_level) const { // The default implementation simply treats the stored // contents as a path and resolves that. 
That is, it // behaves exactly how you would expect a symlink to work. auto contents_or = read_entire(); if (contents_or.is_error()) return contents_or.error(); auto& contents = contents_or.value(); auto path = StringView(contents->data(), contents->size()); return VFS::the().resolve_path(path, base, out_parent, options, symlink_recursion_level); } Inode::Inode(FS& fs, unsigned index) : m_fs(fs) , m_index(index) { ScopedSpinLock all_inodes_lock(s_all_inodes_lock); all_with_lock().append(this); } Inode::~Inode() { ScopedSpinLock all_inodes_lock(s_all_inodes_lock); all_with_lock().remove(this); } void Inode::will_be_destroyed() { LOCKER(m_lock); if (m_metadata_dirty) flush_metadata(); } void Inode::inode_contents_changed(off_t offset, ssize_t size, const UserOrKernelBuffer& data) { LOCKER(m_lock); if (auto shared_vmobject = this->shared_vmobject()) shared_vmobject->inode_contents_changed({}, offset, size, data); } void Inode::inode_size_changed(size_t old_size, size_t new_size) { LOCKER(m_lock); if (auto shared_vmobject = this->shared_vmobject()) shared_vmobject->inode_size_changed({}, old_size, new_size); } int Inode::set_atime(time_t) { return -ENOTIMPL; } int Inode::set_ctime(time_t) { return -ENOTIMPL; } int Inode::set_mtime(time_t) { return -ENOTIMPL; } KResult Inode::increment_link_count() { return KResult(-ENOTIMPL); } KResult Inode::decrement_link_count() { return KResult(-ENOTIMPL); } void Inode::set_shared_vmobject(SharedInodeVMObject& vmobject) { LOCKER(m_lock); m_shared_vmobject = vmobject; } bool Inode::bind_socket(LocalSocket& socket) { LOCKER(m_lock); if (m_socket) return false; m_socket = socket; return true; } bool Inode::unbind_socket() { LOCKER(m_lock); if (!m_socket) return false; m_socket = nullptr; return true; } void Inode::register_watcher(Badge<InodeWatcher>, InodeWatcher& watcher) { LOCKER(m_lock); ASSERT(!m_watchers.contains(&watcher)); m_watchers.set(&watcher); } void Inode::unregister_watcher(Badge<InodeWatcher>, InodeWatcher& watcher) { 
LOCKER(m_lock); ASSERT(m_watchers.contains(&watcher)); m_watchers.remove(&watcher); } NonnullRefPtr<FIFO> Inode::fifo() { LOCKER(m_lock); ASSERT(metadata().is_fifo()); // FIXME: Release m_fifo when it is closed by all readers and writers if (!m_fifo) m_fifo = FIFO::create(metadata().uid); ASSERT(m_fifo); return *m_fifo; } void Inode::set_metadata_dirty(bool metadata_dirty) { LOCKER(m_lock); if (metadata_dirty) { // Sanity check. ASSERT(!fs().is_readonly()); } if (m_metadata_dirty == metadata_dirty) return; m_metadata_dirty = metadata_dirty; if (m_metadata_dirty) { // FIXME: Maybe we should hook into modification events somewhere else, I'm not sure where. // We don't always end up on this particular code path, for instance when writing to an ext2fs file. for (auto& watcher : m_watchers) { watcher->notify_inode_event({}, InodeWatcherEvent::Type::Modified); } } } void Inode::did_add_child(const InodeIdentifier& child_id) { LOCKER(m_lock); for (auto& watcher : m_watchers) { watcher->notify_child_added({}, child_id); } } void Inode::did_remove_child(const InodeIdentifier& child_id) { LOCKER(m_lock); for (auto& watcher : m_watchers) { watcher->notify_child_removed({}, child_id); } } KResult Inode::prepare_to_write_data() { // FIXME: It's a poor design that filesystems are expected to call this before writing out data. // We should funnel everything through an interface at the VFS layer so this can happen from a single place. 
LOCKER(m_lock); if (fs().is_readonly()) return KResult(-EROFS); auto metadata = this->metadata(); if (metadata.is_setuid() || metadata.is_setgid()) { dbgln("Inode::prepare_to_write_data(): Stripping SUID/SGID bits from {}", identifier()); return chmod(metadata.mode & ~(04000 | 02000)); } return KSuccess; } RefPtr<SharedInodeVMObject> Inode::shared_vmobject() const { LOCKER(m_lock); return m_shared_vmobject.strong_ref(); } bool Inode::is_shared_vmobject(const SharedInodeVMObject& other) const { LOCKER(m_lock); return m_shared_vmobject.unsafe_ptr() == &other; } }
C++
BSD-2-Clause
ChillerDragon/serenity/Kernel/FileSystem/Inode.cpp
999f816d-bc26-4e14-b46d-d14db2ec4391
[{"tag": "EMAIL", "value": "kling@serenityos.org", "start": 46, "end": 66, "context": "/*\n * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>\n * All rights reserved.\n *\n * Redistribution and"}, {"tag": "NAME", "value": "Andreas Kling", "start": 31, "end": 44, "context": "/*\n * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>\n * All rights reserved.\n *"}]
[{"tag": "EMAIL", "value": "kling@serenityos.org", "start": 46, "end": 66, "context": "/*\n * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>\n * All rights reserved.\n *\n * Redistribution and"}, {"tag": "NAME", "value": "Andreas Kling", "start": 31, "end": 44, "context": "/*\n * Copyright (c) 2018-2020, Andreas Kling <kling@serenityos.org>\n * All rights reserved.\n *"}]
/* Open Asset Import Library (assimp) ---------------------------------------------------------------------- Copyright (c) 2006-2019, assimp team All rights reserved. Redistribution and use of this software in source and binary forms, with or without modification, are permitted provided that the following conditions are met: * Redistributions of source code must retain the above copyright notice, this list of conditions and the following disclaimer. * Redistributions in binary form must reproduce the above copyright notice, this list of conditions and the following disclaimer in the documentation and/or other materials provided with the distribution. * Neither the name of the assimp team, nor the names of its contributors may be used to endorse or promote products derived from this software without specific prior written permission of the assimp team. THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
---------------------------------------------------------------------- */ /** @file FBXParser.cpp * @brief Implementation of the FBX parser and the rudimentary DOM that we use */ #ifndef ASSIMP_BUILD_NO_FBX_IMPORTER #ifdef ASSIMP_BUILD_NO_OWN_ZLIB # include <zlib.h> #else # include "../contrib/zlib/zlib.h" #endif #include "FBXTokenizer.h" #include "FBXParser.h" #include "FBXUtil.h" #include <Assimp/ParsingUtils.h> #include <Assimp/fast_atof.h> #include <Assimp/ByteSwapper.h> #include <iostream> using namespace Assimp; using namespace Assimp::FBX; namespace { // ------------------------------------------------------------------------------------------------ // signal parse error, this is always unrecoverable. Throws DeadlyImportError. AI_WONT_RETURN void ParseError(const std::string& message, const Token& token) AI_WONT_RETURN_SUFFIX; AI_WONT_RETURN void ParseError(const std::string& message, const Token& token) { throw DeadlyImportError(Util::AddTokenText("FBX-Parser",message,&token)); } // ------------------------------------------------------------------------------------------------ AI_WONT_RETURN void ParseError(const std::string& message, const Element* element = NULL) AI_WONT_RETURN_SUFFIX; AI_WONT_RETURN void ParseError(const std::string& message, const Element* element) { if(element) { ParseError(message,element->KeyToken()); } throw DeadlyImportError("FBX-Parser " + message); } // ------------------------------------------------------------------------------------------------ void ParseError(const std::string& message, TokenPtr token) { if(token) { ParseError(message, *token); } ParseError(message); } // Initially, we did reinterpret_cast, breaking strict aliasing rules. // This actually caused trouble on Android, so let's be safe this time. // https://github.com/assimp/assimp/issues/24 template <typename T> T SafeParse(const char* data, const char* end) { // Actual size validation happens during Tokenization so // this is valid as an assertion. 
(void)(end); ai_assert(static_cast<size_t>(end - data) >= sizeof(T)); T result = static_cast<T>(0); ::memcpy(&result, data, sizeof(T)); return result; } } namespace Assimp { namespace FBX { // ------------------------------------------------------------------------------------------------ Element::Element(const Token& key_token, Parser& parser) : key_token(key_token) { TokenPtr n = nullptr; do { n = parser.AdvanceToNextToken(); if(!n) { ParseError("unexpected end of file, expected closing bracket",parser.LastToken()); } if (n->Type() == TokenType_DATA) { tokens.push_back(n); TokenPtr prev = n; n = parser.AdvanceToNextToken(); if(!n) { ParseError("unexpected end of file, expected bracket, comma or key",parser.LastToken()); } const TokenType ty = n->Type(); // some exporters are missing a comma on the next line if (ty == TokenType_DATA && prev->Type() == TokenType_DATA && (n->Line() == prev->Line() + 1)) { tokens.push_back(n); continue; } if (ty != TokenType_OPEN_BRACKET && ty != TokenType_CLOSE_BRACKET && ty != TokenType_COMMA && ty != TokenType_KEY) { ParseError("unexpected token; expected bracket, comma or key",n); } } if (n->Type() == TokenType_OPEN_BRACKET) { compound.reset(new Scope(parser)); // current token should be a TOK_CLOSE_BRACKET n = parser.CurrentToken(); ai_assert(n); if (n->Type() != TokenType_CLOSE_BRACKET) { ParseError("expected closing bracket",n); } parser.AdvanceToNextToken(); return; } } while(n->Type() != TokenType_KEY && n->Type() != TokenType_CLOSE_BRACKET); } // ------------------------------------------------------------------------------------------------ Element::~Element() { // no need to delete tokens, they are owned by the parser } // ------------------------------------------------------------------------------------------------ Scope::Scope(Parser& parser,bool topLevel) { if(!topLevel) { TokenPtr t = parser.CurrentToken(); if (t->Type() != TokenType_OPEN_BRACKET) { ParseError("expected open bracket",t); } } TokenPtr n = 
parser.AdvanceToNextToken(); if(n == NULL) { ParseError("unexpected end of file"); } // note: empty scopes are allowed while(n->Type() != TokenType_CLOSE_BRACKET) { if (n->Type() != TokenType_KEY) { ParseError("unexpected token, expected TOK_KEY",n); } const std::string& str = n->StringContents(); elements.insert(ElementMap::value_type(str,new_Element(*n,parser))); // Element() should stop at the next Key token (or right after a Close token) n = parser.CurrentToken(); if(n == NULL) { if (topLevel) { return; } ParseError("unexpected end of file",parser.LastToken()); } } } // ------------------------------------------------------------------------------------------------ Scope::~Scope() { for(ElementMap::value_type& v : elements) { delete v.second; } } // ------------------------------------------------------------------------------------------------ Parser::Parser (const TokenList& tokens, bool is_binary) : tokens(tokens) , last() , current() , cursor(tokens.begin()) , is_binary(is_binary) { root.reset(new Scope(*this,true)); } // ------------------------------------------------------------------------------------------------ Parser::~Parser() { // empty } // ------------------------------------------------------------------------------------------------ TokenPtr Parser::AdvanceToNextToken() { last = current; if (cursor == tokens.end()) { current = NULL; } else { current = *cursor++; } return current; } // ------------------------------------------------------------------------------------------------ TokenPtr Parser::CurrentToken() const { return current; } // ------------------------------------------------------------------------------------------------ TokenPtr Parser::LastToken() const { return last; } // ------------------------------------------------------------------------------------------------ uint64_t ParseTokenAsID(const Token& t, const char*& err_out) { err_out = NULL; if (t.Type() != TokenType_DATA) { err_out = "expected TOK_DATA token"; return 0L; } 
if(t.IsBinary()) { const char* data = t.begin(); if (data[0] != 'L') { err_out = "failed to parse ID, unexpected data type, expected L(ong) (binary)"; return 0L; } BE_NCONST uint64_t id = SafeParse<uint64_t>(data+1, t.end()); AI_SWAP8(id); return id; } // XXX: should use size_t here unsigned int length = static_cast<unsigned int>(t.end() - t.begin()); ai_assert(length > 0); const char* out = nullptr; const uint64_t id = strtoul10_64(t.begin(),&out,&length); if (out > t.end()) { err_out = "failed to parse ID (text)"; return 0L; } return id; } // ------------------------------------------------------------------------------------------------ size_t ParseTokenAsDim(const Token& t, const char*& err_out) { // same as ID parsing, except there is a trailing asterisk err_out = NULL; if (t.Type() != TokenType_DATA) { err_out = "expected TOK_DATA token"; return 0; } if(t.IsBinary()) { const char* data = t.begin(); if (data[0] != 'L') { err_out = "failed to parse ID, unexpected data type, expected L(ong) (binary)"; return 0; } BE_NCONST uint64_t id = SafeParse<uint64_t>(data+1, t.end()); AI_SWAP8(id); return static_cast<size_t>(id); } if(*t.begin() != '*') { err_out = "expected asterisk before array dimension"; return 0; } // XXX: should use size_t here unsigned int length = static_cast<unsigned int>(t.end() - t.begin()); if(length == 0) { err_out = "expected valid integer number after asterisk"; return 0; } const char* out = nullptr; const size_t id = static_cast<size_t>(strtoul10_64(t.begin() + 1,&out,&length)); if (out > t.end()) { err_out = "failed to parse ID"; return 0; } return id; } // ------------------------------------------------------------------------------------------------ float ParseTokenAsFloat(const Token& t, const char*& err_out) { err_out = NULL; if (t.Type() != TokenType_DATA) { err_out = "expected TOK_DATA token"; return 0.0f; } if(t.IsBinary()) { const char* data = t.begin(); if (data[0] != 'F' && data[0] != 'D') { err_out = "failed to parse F(loat) or 
D(ouble), unexpected data type (binary)"; return 0.0f; } if (data[0] == 'F') { return SafeParse<float>(data+1, t.end()); } else { return static_cast<float>( SafeParse<double>(data+1, t.end()) ); } } // need to copy the input string to a temporary buffer // first - next in the fbx token stream comes ',', // which fast_atof could interpret as decimal point. #define MAX_FLOAT_LENGTH 31 char temp[MAX_FLOAT_LENGTH + 1]; const size_t length = static_cast<size_t>(t.end()-t.begin()); std::copy(t.begin(),t.end(),temp); temp[std::min(static_cast<size_t>(MAX_FLOAT_LENGTH),length)] = '\0'; return fast_atof(temp); } // ------------------------------------------------------------------------------------------------ int ParseTokenAsInt(const Token& t, const char*& err_out) { err_out = NULL; if (t.Type() != TokenType_DATA) { err_out = "expected TOK_DATA token"; return 0; } if(t.IsBinary()) { const char* data = t.begin(); if (data[0] != 'I') { err_out = "failed to parse I(nt), unexpected data type (binary)"; return 0; } BE_NCONST int32_t ival = SafeParse<int32_t>(data+1, t.end()); AI_SWAP4(ival); return static_cast<int>(ival); } ai_assert(static_cast<size_t>(t.end() - t.begin()) > 0); const char* out; const int intval = strtol10(t.begin(),&out); if (out != t.end()) { err_out = "failed to parse ID"; return 0; } return intval; } // ------------------------------------------------------------------------------------------------ int64_t ParseTokenAsInt64(const Token& t, const char*& err_out) { err_out = NULL; if (t.Type() != TokenType_DATA) { err_out = "expected TOK_DATA token"; return 0L; } if (t.IsBinary()) { const char* data = t.begin(); if (data[0] != 'L') { err_out = "failed to parse Int64, unexpected data type"; return 0L; } BE_NCONST int64_t id = SafeParse<int64_t>(data + 1, t.end()); AI_SWAP8(id); return id; } // XXX: should use size_t here unsigned int length = static_cast<unsigned int>(t.end() - t.begin()); ai_assert(length > 0); const char* out = nullptr; const int64_t id = 
strtol10_64(t.begin(), &out, &length); if (out > t.end()) { err_out = "failed to parse Int64 (text)"; return 0L; } return id; } // ------------------------------------------------------------------------------------------------ std::string ParseTokenAsString(const Token& t, const char*& err_out) { err_out = NULL; if (t.Type() != TokenType_DATA) { err_out = "expected TOK_DATA token"; return ""; } if(t.IsBinary()) { const char* data = t.begin(); if (data[0] != 'S') { err_out = "failed to parse S(tring), unexpected data type (binary)"; return ""; } // read string length BE_NCONST int32_t len = SafeParse<int32_t>(data+1, t.end()); AI_SWAP4(len); ai_assert(t.end() - data == 5 + len); return std::string(data + 5, len); } const size_t length = static_cast<size_t>(t.end() - t.begin()); if(length < 2) { err_out = "token is too short to hold a string"; return ""; } const char* s = t.begin(), *e = t.end() - 1; if (*s != '\"' || *e != '\"') { err_out = "expected double quoted string"; return ""; } return std::string(s+1,length-2); } namespace { // ------------------------------------------------------------------------------------------------ // read the type code and element count of a binary data array and stop there void ReadBinaryDataArrayHead(const char*& data, const char* end, char& type, uint32_t& count, const Element& el) { if (static_cast<size_t>(end-data) < 5) { ParseError("binary data array is too short, need five (5) bytes for type signature and element count",&el); } // data type type = *data; // read number of elements BE_NCONST uint32_t len = SafeParse<uint32_t>(data+1, end); AI_SWAP4(len); count = len; data += 5; } // ------------------------------------------------------------------------------------------------ // read binary data array, assume cursor points to the 'compression mode' field (i.e. 
behind the header) void ReadBinaryDataArray(char type, uint32_t count, const char*& data, const char* end, std::vector<char>& buff, const Element& /*el*/) { BE_NCONST uint32_t encmode = SafeParse<uint32_t>(data, end); AI_SWAP4(encmode); data += 4; // next comes the compressed length BE_NCONST uint32_t comp_len = SafeParse<uint32_t>(data, end); AI_SWAP4(comp_len); data += 4; ai_assert(data + comp_len == end); // determine the length of the uncompressed data by looking at the type signature uint32_t stride = 0; switch(type) { case 'f': case 'i': stride = 4; break; case 'd': case 'l': stride = 8; break; default: ai_assert(false); }; const uint32_t full_length = stride * count; buff.resize(full_length); if(encmode == 0) { ai_assert(full_length == comp_len); // plain data, no compression std::copy(data, end, buff.begin()); } else if(encmode == 1) { // zlib/deflate, next comes ZIP head (0x78 0x01) // see http://www.ietf.org/rfc/rfc1950.txt z_stream zstream; zstream.opaque = Z_NULL; zstream.zalloc = Z_NULL; zstream.zfree = Z_NULL; zstream.data_type = Z_BINARY; // http://hewgill.com/journal/entries/349-how-to-decompress-gzip-stream-with-zlib if(Z_OK != inflateInit(&zstream)) { ParseError("failure initializing zlib"); } zstream.next_in = reinterpret_cast<Bytef*>( const_cast<char*>(data) ); zstream.avail_in = comp_len; zstream.avail_out = static_cast<uInt>(buff.size()); zstream.next_out = reinterpret_cast<Bytef*>(&*buff.begin()); const int ret = inflate(&zstream, Z_FINISH); if (ret != Z_STREAM_END && ret != Z_OK) { ParseError("failure decompressing compressed data section"); } // terminate zlib inflateEnd(&zstream); } #ifdef ASSIMP_BUILD_DEBUG else { // runtime check for this happens at tokenization stage ai_assert(false); } #endif data += comp_len; ai_assert(data == end); } } // !anon // ------------------------------------------------------------------------------------------------ // read an array of float3 tuples void ParseVectorDataArray(std::vector<aiVector3D>& out, 
const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(count % 3 != 0) { ParseError("number of floats is not a multiple of three (3) (binary)",&el); } if(!count) { return; } if (type != 'd' && type != 'f') { ParseError("expected float or double array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * (type == 'd' ? 8 : 4)); const uint32_t count3 = count / 3; out.reserve(count3); if (type == 'd') { const double* d = reinterpret_cast<const double*>(&buff[0]); for (unsigned int i = 0; i < count3; ++i, d += 3) { out.push_back(aiVector3D(static_cast<ai_real>(d[0]), static_cast<ai_real>(d[1]), static_cast<ai_real>(d[2]))); } // for debugging /*for ( size_t i = 0; i < out.size(); i++ ) { aiVector3D vec3( out[ i ] ); std::stringstream stream; stream << " vec3.x = " << vec3.x << " vec3.y = " << vec3.y << " vec3.z = " << vec3.z << std::endl; DefaultLogger::get()->info( stream.str() ); }*/ } else if (type == 'f') { const float* f = reinterpret_cast<const float*>(&buff[0]); for (unsigned int i = 0; i < count3; ++i, f += 3) { out.push_back(aiVector3D(f[0],f[1],f[2])); } } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // may throw bad_alloc if the input is rubbish, but this need // not to be prevented - importing would fail but we wouldn't // crash since assimp handles this case properly. 
out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); if (a.Tokens().size() % 3 != 0) { ParseError("number of floats is not a multiple of three (3)",&el); } for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { aiVector3D v; v.x = ParseTokenAsFloat(**it++); v.y = ParseTokenAsFloat(**it++); v.z = ParseTokenAsFloat(**it++); out.push_back(v); } } // ------------------------------------------------------------------------------------------------ // read an array of color4 tuples void ParseVectorDataArray(std::vector<aiColor4D>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(count % 4 != 0) { ParseError("number of floats is not a multiple of four (4) (binary)",&el); } if(!count) { return; } if (type != 'd' && type != 'f') { ParseError("expected float or double array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * (type == 'd' ? 
8 : 4)); const uint32_t count4 = count / 4; out.reserve(count4); if (type == 'd') { const double* d = reinterpret_cast<const double*>(&buff[0]); for (unsigned int i = 0; i < count4; ++i, d += 4) { out.push_back(aiColor4D(static_cast<float>(d[0]), static_cast<float>(d[1]), static_cast<float>(d[2]), static_cast<float>(d[3]))); } } else if (type == 'f') { const float* f = reinterpret_cast<const float*>(&buff[0]); for (unsigned int i = 0; i < count4; ++i, f += 4) { out.push_back(aiColor4D(f[0],f[1],f[2],f[3])); } } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() above out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); if (a.Tokens().size() % 4 != 0) { ParseError("number of floats is not a multiple of four (4)",&el); } for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { aiColor4D v; v.r = ParseTokenAsFloat(**it++); v.g = ParseTokenAsFloat(**it++); v.b = ParseTokenAsFloat(**it++); v.a = ParseTokenAsFloat(**it++); out.push_back(v); } } // ------------------------------------------------------------------------------------------------ // read an array of float2 tuples void ParseVectorDataArray(std::vector<aiVector2D>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(count % 2 != 0) { ParseError("number of floats is not a multiple of two (2) (binary)",&el); } if(!count) { return; } if (type != 'd' && type != 'f') { ParseError("expected float or double array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * (type == 'd' ? 
8 : 4)); const uint32_t count2 = count / 2; out.reserve(count2); if (type == 'd') { const double* d = reinterpret_cast<const double*>(&buff[0]); for (unsigned int i = 0; i < count2; ++i, d += 2) { out.push_back(aiVector2D(static_cast<float>(d[0]), static_cast<float>(d[1]))); } } else if (type == 'f') { const float* f = reinterpret_cast<const float*>(&buff[0]); for (unsigned int i = 0; i < count2; ++i, f += 2) { out.push_back(aiVector2D(f[0],f[1])); } } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() above out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); if (a.Tokens().size() % 2 != 0) { ParseError("number of floats is not a multiple of two (2)",&el); } for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { aiVector2D v; v.x = ParseTokenAsFloat(**it++); v.y = ParseTokenAsFloat(**it++); out.push_back(v); } } // ------------------------------------------------------------------------------------------------ // read an array of ints void ParseVectorDataArray(std::vector<int>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(!count) { return; } if (type != 'i') { ParseError("expected int array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * 4); out.reserve(count); const int32_t* ip = reinterpret_cast<const int32_t*>(&buff[0]); for (unsigned int i = 0; i < count; ++i, ++ip) { BE_NCONST int32_t val = *ip; AI_SWAP4(val); out.push_back(val); } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() out.reserve(dim); const Scope& 
scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { const int ival = ParseTokenAsInt(**it++); out.push_back(ival); } } // ------------------------------------------------------------------------------------------------ // read an array of floats void ParseVectorDataArray(std::vector<float>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(!count) { return; } if (type != 'd' && type != 'f') { ParseError("expected float or double array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * (type == 'd' ? 8 : 4)); if (type == 'd') { const double* d = reinterpret_cast<const double*>(&buff[0]); for (unsigned int i = 0; i < count; ++i, ++d) { out.push_back(static_cast<float>(*d)); } } else if (type == 'f') { const float* f = reinterpret_cast<const float*>(&buff[0]); for (unsigned int i = 0; i < count; ++i, ++f) { out.push_back(*f); } } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { const float ival = ParseTokenAsFloat(**it++); out.push_back(ival); } } // ------------------------------------------------------------------------------------------------ // read an array of uints void ParseVectorDataArray(std::vector<unsigned int>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { 
ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(!count) { return; } if (type != 'i') { ParseError("expected (u)int array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * 4); out.reserve(count); const int32_t* ip = reinterpret_cast<const int32_t*>(&buff[0]); for (unsigned int i = 0; i < count; ++i, ++ip) { BE_NCONST int32_t val = *ip; if(val < 0) { ParseError("encountered negative integer index (binary)"); } AI_SWAP4(val); out.push_back(val); } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { const int ival = ParseTokenAsInt(**it++); if(ival < 0) { ParseError("encountered negative integer index"); } out.push_back(static_cast<unsigned int>(ival)); } } // ------------------------------------------------------------------------------------------------ // read an array of uint64_ts void ParseVectorDataArray(std::vector<uint64_t>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if(tok.empty()) { ParseError("unexpected empty element",&el); } if(tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if(!count) { return; } if (type != 'l') { ParseError("expected long array (binary)",&el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * 8); out.reserve(count); const uint64_t* ip = reinterpret_cast<const uint64_t*>(&buff[0]); for (unsigned 
int i = 0; i < count; ++i, ++ip) { BE_NCONST uint64_t val = *ip; AI_SWAP8(val); out.push_back(val); } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope,"a",&el); for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end; ) { const uint64_t ival = ParseTokenAsID(**it++); out.push_back(ival); } } // ------------------------------------------------------------------------------------------------ // read an array of int64_ts void ParseVectorDataArray(std::vector<int64_t>& out, const Element& el) { out.resize( 0 ); const TokenList& tok = el.Tokens(); if (tok.empty()) { ParseError("unexpected empty element", &el); } if (tok[0]->IsBinary()) { const char* data = tok[0]->begin(), *end = tok[0]->end(); char type; uint32_t count; ReadBinaryDataArrayHead(data, end, type, count, el); if (!count) { return; } if (type != 'l') { ParseError("expected long array (binary)", &el); } std::vector<char> buff; ReadBinaryDataArray(type, count, data, end, buff, el); ai_assert(data == end); ai_assert(buff.size() == count * 8); out.reserve(count); const int64_t* ip = reinterpret_cast<const int64_t*>(&buff[0]); for (unsigned int i = 0; i < count; ++i, ++ip) { BE_NCONST int64_t val = *ip; AI_SWAP8(val); out.push_back(val); } return; } const size_t dim = ParseTokenAsDim(*tok[0]); // see notes in ParseVectorDataArray() out.reserve(dim); const Scope& scope = GetRequiredScope(el); const Element& a = GetRequiredElement(scope, "a", &el); for (TokenList::const_iterator it = a.Tokens().begin(), end = a.Tokens().end(); it != end;) { const int64_t ival = ParseTokenAsInt64(**it++); out.push_back(ival); } } // ------------------------------------------------------------------------------------------------ aiMatrix4x4 ReadMatrix(const Element& element) { std::vector<float> values; ParseVectorDataArray(values,element); 
if(values.size() != 16) { ParseError("expected 16 matrix elements"); } aiMatrix4x4 result; result.a1 = values[0]; result.a2 = values[1]; result.a3 = values[2]; result.a4 = values[3]; result.b1 = values[4]; result.b2 = values[5]; result.b3 = values[6]; result.b4 = values[7]; result.c1 = values[8]; result.c2 = values[9]; result.c3 = values[10]; result.c4 = values[11]; result.d1 = values[12]; result.d2 = values[13]; result.d3 = values[14]; result.d4 = values[15]; result.Transpose(); return result; } // ------------------------------------------------------------------------------------------------ // wrapper around ParseTokenAsString() with ParseError handling std::string ParseTokenAsString(const Token& t) { const char* err; const std::string& i = ParseTokenAsString(t,err); if(err) { ParseError(err,t); } return i; } bool HasElement( const Scope& sc, const std::string& index ) { const Element* el = sc[ index ]; if ( nullptr == el ) { return false; } return true; } // ------------------------------------------------------------------------------------------------ // extract a required element from a scope, abort if the element cannot be found const Element& GetRequiredElement(const Scope& sc, const std::string& index, const Element* element /*= NULL*/) { const Element* el = sc[index]; if(!el) { ParseError("did not find required element \"" + index + "\"",element); } return *el; } // ------------------------------------------------------------------------------------------------ // extract required compound scope const Scope& GetRequiredScope(const Element& el) { const Scope* const s = el.Compound(); if(!s) { ParseError("expected compound scope",&el); } return *s; } // ------------------------------------------------------------------------------------------------ // get token at a particular index const Token& GetRequiredToken(const Element& el, unsigned int index) { const TokenList& t = el.Tokens(); if(index >= t.size()) { ParseError(Formatter::format( "missing token 
at index " ) << index,&el); } return *t[index]; } // ------------------------------------------------------------------------------------------------ // wrapper around ParseTokenAsID() with ParseError handling uint64_t ParseTokenAsID(const Token& t) { const char* err; const uint64_t i = ParseTokenAsID(t,err); if(err) { ParseError(err,t); } return i; } // ------------------------------------------------------------------------------------------------ // wrapper around ParseTokenAsDim() with ParseError handling size_t ParseTokenAsDim(const Token& t) { const char* err; const size_t i = ParseTokenAsDim(t,err); if(err) { ParseError(err,t); } return i; } // ------------------------------------------------------------------------------------------------ // wrapper around ParseTokenAsFloat() with ParseError handling float ParseTokenAsFloat(const Token& t) { const char* err; const float i = ParseTokenAsFloat(t,err); if(err) { ParseError(err,t); } return i; } // ------------------------------------------------------------------------------------------------ // wrapper around ParseTokenAsInt() with ParseError handling int ParseTokenAsInt(const Token& t) { const char* err; const int i = ParseTokenAsInt(t,err); if(err) { ParseError(err,t); } return i; } // ------------------------------------------------------------------------------------------------ // wrapper around ParseTokenAsInt64() with ParseError handling int64_t ParseTokenAsInt64(const Token& t) { const char* err; const int64_t i = ParseTokenAsInt64(t, err); if (err) { ParseError(err, t); } return i; } } // !FBX } // !Assimp #endif
C++
BSD-3-Clause
staminajim/Assimp/Sources/assimp/code/FBX/FBXParser.cpp
e230f21a-7875-4009-97ad-22380baaaac2
[]
[]
cask 'webstorm' do version '11.0.2' sha256 '43e0352fee07fa5b92dd22e557cb1d050ccde0cf97273e02f694930695b15134' url "https://download.jetbrains.com/webstorm/WebStorm-#{version}-custom-jdk-bundled.dmg" name 'WebStorm' homepage 'http://www.jetbrains.com/webstorm/' license :commercial app 'WebStorm.app' zap :delete => [ '~/.WebStorm11', '~/Library/Preferences/com.jetbrains.webstorm.plist', '~/Library/Preferences/WebStorm11', '~/Library/Application Support/WebStorm11', '~/Library/Caches/WebStorm11', '~/Library/Logs/WebStorm11', ] end
Ruby
BSD-2-Clause
epsimatic/homebrew-cask/Casks/webstorm.rb
f45b8751-e8c6-4d0a-970d-72da567b4c04
[{"tag": "USERNAME", "value": "WebStorm", "start": 214, "end": 222, "context": "bStorm-#{version}-custom-jdk-bundled.dmg\"\n name 'WebStorm'\n homepage 'http://www.jetbrains.com/webstorm/'\n"}, {"tag": "SSH_KEY", "value": "43e0352fee07fa5b92dd22e557cb1d050ccde0cf97273e02f694930695b15134", "start": 48, "end": 112, "context": "cask 'webstorm' do\n version '11.0.2'\n sha256 '43e0352fee07fa5b92dd22e557cb1d050ccde0cf97273e02f694930695b15134'\n\n url \"https://download.jetbrains.com/webstorm/"}]
[{"tag": "USERNAME", "value": "WebStorm", "start": 214, "end": 222, "context": "bStorm-#{version}-custom-jdk-bundled.dmg\"\n name 'WebStorm'\n homepage 'http://www.jetbrains.com/webstorm/'\n"}, {"tag": "KEY", "value": "43e0352fee07fa5b92dd22e557cb1d050ccde0cf97273e02f694930695b15134", "start": 48, "end": 112, "context": "cask 'webstorm' do\n version '11.0.2'\n sha256 '43e0352fee07fa5b92dd22e557cb1d050ccde0cf97273e02f694930695b15134'\n\n url \"https://download.jetbrains.com/webstorm/"}]
#pragma checksum "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" "{ff1816ec-aa5e-4d10-87f7-6f4963833460}" "6c93321e6e9b0f148e0449c5902bf5cb7ad221b8" // <auto-generated/> #pragma warning disable 1591 [assembly: global::Microsoft.AspNetCore.Razor.Hosting.RazorCompiledItemAttribute(typeof(AspNetCore.Views_Shared__LoginPartial), @"mvc.1.0.view", @"/Views/Shared/_LoginPartial.cshtml")] namespace AspNetCore { #line hidden using System; using System.Collections.Generic; using System.Linq; using System.Threading.Tasks; using Microsoft.AspNetCore.Mvc; using Microsoft.AspNetCore.Mvc.Rendering; using Microsoft.AspNetCore.Mvc.ViewFeatures; #nullable restore #line 1 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\_ViewImports.cshtml" using KombniyApp; #line default #line hidden #nullable disable #nullable restore #line 2 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\_ViewImports.cshtml" using KombniyApp.Models; #line default #line hidden #nullable disable #nullable restore #line 1 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" using Asp.NetCore.Identity; #line default #line hidden #nullable disable [global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"6c93321e6e9b0f148e0449c5902bf5cb7ad221b8", @"/Views/Shared/_LoginPartial.cshtml")] [global::Microsoft.AspNetCore.Razor.Hosting.RazorSourceChecksumAttribute(@"SHA1", @"cde0cb099000a1d3912655c1fefd364ef5f3e561", @"/Views/_ViewImports.cshtml")] public class Views_Shared__LoginPartial : global::Microsoft.AspNetCore.Mvc.Razor.RazorPage<dynamic> { #pragma warning disable 1998 public async override global::System.Threading.Tasks.Task ExecuteAsync() { WriteLiteral("\r\n"); #nullable restore #line 3 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" if (Request.IsAuthenticated) { using (Html.BeginForm("LogOff", "Account", 
FormMethod.Post, new { id = "logoutForm", @class = "form-inline" })) { #line default #line hidden #nullable disable #nullable restore #line 7 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" Write(Html.AntiForgeryToken()); #line default #line hidden #nullable disable WriteLiteral(" <li class=\"nav-item\">\r\n "); #nullable restore #line 9 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" Write(Html.ActionLink("Hi " + User.Identity.GetUserName() + "!", "Index", "Manage", routeValues: null, htmlAttributes: new { title = "Manage", @class = "nav-link waves-effect waves-light" })); #line default #line hidden #nullable disable WriteLiteral("\r\n </li>\r\n <li class=\"nav-item\"><a class=\"nav-link waves-effect waves-light\" href=\"javascript:document.getElementById(\'logoutForm\').submit()\">Cerrar sesión</a></li>\r\n"); #nullable restore #line 12 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" } } else { #line default #line hidden #nullable disable WriteLiteral(" <li class=\"nav-item\">"); #nullable restore #line 16 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" Write(Html.ActionLink("Register", "Register", "Account", routeValues: null, htmlAttributes: new { id = "registerLink", @class = "nav-link waves-effect waves-light" })); #line default #line hidden #nullable disable WriteLiteral("</li>\r\n <li class=\"nav-item\">"); #nullable restore #line 17 "C:\Users\merve bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" Write(Html.ActionLink("Login", "Login", "Account", routeValues: null, htmlAttributes: new { id = "loginLink", @class = "nav-link waves-effect waves-light" })); #line default #line hidden #nullable disable WriteLiteral("</li>\r\n"); #nullable restore #line 18 "C:\Users\merve 
bilgiç\Documents\GitHub\KombniyApp\KombniyApp\Views\Shared\_LoginPartial.cshtml" } #line default #line hidden #nullable disable } #pragma warning restore 1998 [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute] public global::Microsoft.AspNetCore.Mvc.ViewFeatures.IModelExpressionProvider ModelExpressionProvider { get; private set; } [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute] public global::Microsoft.AspNetCore.Mvc.IUrlHelper Url { get; private set; } [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute] public global::Microsoft.AspNetCore.Mvc.IViewComponentHelper Component { get; private set; } [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute] public global::Microsoft.AspNetCore.Mvc.Rendering.IJsonHelper Json { get; private set; } [global::Microsoft.AspNetCore.Mvc.Razor.Internal.RazorInjectAttribute] public global::Microsoft.AspNetCore.Mvc.Rendering.IHtmlHelper<dynamic> Html { get; private set; } } } #pragma warning restore 1591
C#
MIT
merilinali/KombniyApp/KombniyApp/obj/Debug/netcoreapp3.1/Razor/Views/Shared/_LoginPartial.cshtml.g.cs
4d07e6ec-5747-4b8b-b717-8e49880b47e7
[{"tag": "NAME", "value": "merve bilgi\u00e7", "start": 1118, "end": 1130, "context": "lable disable\n#nullable restore\n#line 1 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 3795, "end": 3807, "context": "v-item\\\">\");\n#nullable restore\n#line 17 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 1921, "end": 1933, "context": "eral(\"\\r\\n\");\n#nullable restore\n#line 3 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 4184, "end": 4196, "context": "</li>\\r\\n\");\n#nullable restore\n#line 18 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 3128, "end": 3140, "context": "</li>\\r\\n\");\n#nullable restore\n#line 12 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 27, "end": 39, "context": "#pragma checksum \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 753, "end": 765, "context": "ViewFeatures;\n#nullable restore\n#line 1 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\_Vie"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 2534, "end": 2546, "context": " \");\n#nullable restore\n#line 9 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 2253, "end": 2265, "context": "lable disable\n#nullable restore\n#line 7 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 3370, 
"end": 3382, "context": "v-item\\\">\");\n#nullable restore\n#line 16 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 932, "end": 944, "context": "lable disable\n#nullable restore\n#line 2 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\_Vie"}]
[{"tag": "NAME", "value": "merve bilgi\u00e7", "start": 1118, "end": 1130, "context": "lable disable\n#nullable restore\n#line 1 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 3795, "end": 3807, "context": "v-item\\\">\");\n#nullable restore\n#line 17 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 1921, "end": 1933, "context": "eral(\"\\r\\n\");\n#nullable restore\n#line 3 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 4184, "end": 4196, "context": "</li>\\r\\n\");\n#nullable restore\n#line 18 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 3128, "end": 3140, "context": "</li>\\r\\n\");\n#nullable restore\n#line 12 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 27, "end": 39, "context": "#pragma checksum \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 753, "end": 765, "context": "ViewFeatures;\n#nullable restore\n#line 1 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\_Vie"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 2534, "end": 2546, "context": " \");\n#nullable restore\n#line 9 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 2253, "end": 2265, "context": "lable disable\n#nullable restore\n#line 7 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 3370, 
"end": 3382, "context": "v-item\\\">\");\n#nullable restore\n#line 16 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\Shar"}, {"tag": "NAME", "value": "merve bilgi\u00e7", "start": 932, "end": 944, "context": "lable disable\n#nullable restore\n#line 2 \"C:\\Users\\merve bilgi\u00e7\\Documents\\GitHub\\KombniyApp\\KombniyApp\\Views\\_Vie"}]
describe 'Feature Test: Store', :type => :feature do describe "Category List" do it "displays all of the categories as links" do visit store_path Category.all.each do |category| expect(page).to have_link(category.title, href: category_path(category)) end end end describe "Item List" do it 'displays all items that have inventory' do second_item = Item.second second_item.inventory = 0 second_item.save visit store_path Item.all.each do |item| if item == second_item expect(page).to_not have_content item.title else expect(page).to have_content item.title expect(page).to have_content "$#{item.price.to_f}" end end end context "not logged in" do it 'does not display "Add To Cart" button' do visit store_path expect(page).to_not have_content "Add To Cart" end end context "logged in" do before(:each) do @user = User.first login_as(@user, scope: :user) end it 'does display "Add To Cart" button' do visit store_path expect(page).to have_selector("input[type=submit][value='Add to Cart']") end end end describe 'Headers' do context "not logged in" do it 'has a sign in link' do visit store_path expect(page).to have_link("Sign In") end it 'has a sign up link' do visit store_path expect(page).to have_link("Sign Up") end end context "logged in" do before(:each) do @user = User.first login_as(@user, scope: :user) end it "tells the user who they are signed in as" do visit store_path expect(page).to have_content("Signed in as #{@user.email}") end it "has a sign out link" do visit store_path expect(page).to have_link("Sign Out") end it "lets users sign out" do visit store_path click_link("Sign Out") expect(page.current_path).to eq(store_path) expect(page).to have_link("Sing In") end end it 'has a Store Home Link' do visit store_path expect(page).to have_link("Store Home") end it 'does not have a Cart link' do visit store_path expect(page).to_not have_link("Cart") end end end
Ruby
MIT
Donnadieu/flatiron-store-project-v-000/spec/features/store_home_feature_spec.rb
dda74dd0-7596-4956-a339-b7cf95fec9ad
[]
[]
//------------------------------------------------------------------------------ // <auto-generated> // This code was generated by a tool. // Runtime Version:4.0.30319.42000 // // Changes to this file may cause incorrect behavior and will be lost if // the code is regenerated. // </auto-generated> //------------------------------------------------------------------------------ using System; using System.Reflection; [assembly: System.Reflection.AssemblyCompanyAttribute("KcsWriteLog")] [assembly: System.Reflection.AssemblyConfigurationAttribute("Release")] [assembly: System.Reflection.AssemblyFileVersionAttribute("1.0.0.0")] [assembly: System.Reflection.AssemblyInformationalVersionAttribute("1.0.0")] [assembly: System.Reflection.AssemblyProductAttribute("KcsWriteLog")] [assembly: System.Reflection.AssemblyTitleAttribute("KcsWriteLog")] [assembly: System.Reflection.AssemblyVersionAttribute("1.0.0.0")] // Generated by the MSBuild WriteCodeFragment class.
C#
Apache-2.0
NguyenNghia412/kcsg/ApiWriteLog/KcsWriteLog/obj/Release/netcoreapp3.1/KcsWriteLog.AssemblyInfo.cs
c8deb2ba-0a05-47c7-8916-be248ba3214b
[]
[]
/* * @Author: Jpeng * @Date: 2018-03-30 21:30:48 * @Last Modified by: Jpeng * @Last Modified time: 2018-04-20 13:55:17 * @Email: peng8350@gmail.com */ //@flow import * as Types from "../actions/ActionType"; const initStyles = { enterSearch: false, searching: false, showMore: false, searchList: [] }; export default function GankReducer(state = initStyles, action) { switch (action.type) { case Types.SEARCH_GANK_REQUESTING: return { ...state, searching: true }; case Types.SEARCH_GANK_SCCUESS: return { ...state, searching: false, searchList: [].concat(action.data) }; case Types.SEARCH_GANK_FAILED: return { ...state, searching: false }; case Types.ENTER_SEARCH: return { ...state, enterSearch: true }; case Types.EXIT_SEARCH: return { ...state, enterSearch: false, searchList: [] }; case Types.GANK_SHOWITEMMORE: return { ...state, showMore: action.showMore }; default: return state; } }
JavaScript
MIT
peng8350/react-native-Gank/jscore/reducers/GankReducer.js
cb644923-098b-4a64-a0aa-4286558381bc
[{"tag": "EMAIL", "value": "peng8350@gmail.com", "start": 136, "end": 154, "context": "ast Modified time: 2018-04-20 13:55:17\n * @Email: peng8350@gmail.com \n */\n\n//@flow\nimport * as Types from \"../actions/"}, {"tag": "NAME", "value": "Jpeng", "start": 75, "end": 80, "context": "@Date: 2018-03-30 21:30:48 \n * @Last Modified by: Jpeng\n * @Last Modified time: 2018-04-20 13:55:17\n * @E"}, {"tag": "NAME", "value": "Jpeng", "start": 15, "end": 20, "context": "/*\n * @Author: Jpeng \n * @Date: 2018-03-30 21:30:48 \n * @Last Modified"}]
[{"tag": "EMAIL", "value": "peng8350@gmail.com", "start": 136, "end": 154, "context": "ast Modified time: 2018-04-20 13:55:17\n * @Email: peng8350@gmail.com \n */\n\n//@flow\nimport * as Types from \"../actions/"}, {"tag": "NAME", "value": "Jpeng", "start": 75, "end": 80, "context": "@Date: 2018-03-30 21:30:48 \n * @Last Modified by: Jpeng\n * @Last Modified time: 2018-04-20 13:55:17\n * @E"}, {"tag": "NAME", "value": "Jpeng", "start": 15, "end": 20, "context": "/*\n * @Author: Jpeng \n * @Date: 2018-03-30 21:30:48 \n * @Last Modified"}]
import json import socket def is_jsonable(obj): try: json.dumps(obj) return True except (TypeError, OverflowError, ValueError): return False def sanitize_meta(meta): keys_to_sanitize = [] for key, value in meta.items(): if not is_jsonable(value): keys_to_sanitize.append(key) if keys_to_sanitize: for key in keys_to_sanitize: del meta[key] meta['__errors'] = 'These keys have been sanitized: ' + ', '.join( keys_to_sanitize) return meta def get_ip(): s = socket.socket(socket.AF_INET, socket.SOCK_DGRAM) try: # doesn't even have to be reachable s.connect(('10.255.255.255', 1)) ip = s.getsockname()[0] except Exception: ip = '127.0.0.1' finally: s.close() return ip
Python
MIT
markcurtis1970/python/logdna/utils.py
b32d7897-2db8-498a-8a7a-f911a66280c2
[{"tag": "IP_ADDRESS", "value": "10.255.255.255", "start": 695, "end": 709, "context": "n't even have to be reachable\n s.connect(('10.255.255.255', 1))\n ip = s.getsockname()[0]\n except "}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 784, "end": 793, "context": "sockname()[0]\n except Exception:\n ip = '127.0.0.1'\n finally:\n s.close()\n return ip\n"}]
[{"tag": "IP_ADDRESS", "value": "10.255.255.255", "start": 695, "end": 709, "context": "n't even have to be reachable\n s.connect(('10.255.255.255', 1))\n ip = s.getsockname()[0]\n except "}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 784, "end": 793, "context": "sockname()[0]\n except Exception:\n ip = '127.0.0.1'\n finally:\n s.close()\n return ip\n"}]
// Copyright (c) Microsoft Corporation. All rights reserved. // Licensed under the MIT license. var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; Object.defineProperty(exports, "__esModule", { value: true }); const childProcess = require("child_process"); const crypto = require("crypto"); const net = require("net"); const office_addin_usage_data_1 = require("office-addin-usage-data"); /** * Determines whether a port is in use. * @param port port number (0 - 65535) * @returns true if port is in use; false otherwise. */ function isPortInUse(port) { validatePort(port); return new Promise((resolve) => { const server = net .createServer() .once("error", () => { resolve(true); }) .once("listening", () => { server.close(); resolve(false); }) .listen(port); }); } exports.isPortInUse = isPortInUse; /** * Parse the port from a string which ends with colon and a number. * @param text string to parse * @example "127.0.0.1:3000" returns 3000 * @example "[::1]:1900" returns 1900 * @example "Local Address" returns undefined */ function parsePort(text) { const result = text.match(/:(\d+)$/); return result ? parseInt(result[1], 10) : undefined; } /** * Return the process ids using the port. * @param port port number (0 - 65535) * @returns Promise to array containing process ids, or empty if none. 
*/ function getProcessIdsForPort(port) { validatePort(port); return new Promise((resolve, reject) => { const isWin32 = process.platform === "win32"; const command = isWin32 ? `netstat -ano` : `lsof -n -i:${port}`; childProcess.exec(command, (error, stdout) => { if (error) { if (error.code === 1) { // no processes are using the port resolve([]); } else { reject(error); } } else { const processIds = new Set(); const lines = stdout.trim().split("\n"); if (isWin32) { lines.forEach((line) => { const [protocol, localAddress, foreignAddress, status, processId] = line.split(" ").filter((text) => text); if (processId !== undefined) { const localAddressPort = parsePort(localAddress); if (localAddressPort === port) { processIds.add(parseInt(processId, 10)); } } }); } else { lines.forEach((line) => { const [process, processId, user, fd, type, device, size, node, name] = line.split(" ").filter((text) => text); if ((processId !== undefined) && (processId !== "PID")) { processIds.add(parseInt(processId, 10)); } }); } resolve(Array.from(processIds)); } }); }); } exports.getProcessIdsForPort = getProcessIdsForPort; /** * Returns a random port number which is not in use. * @returns Promise to number from 0 to 65535 */ function randomPortNotInUse() { return __awaiter(this, void 0, void 0, function* () { let port; do { port = randomPortNumber(); } while (yield isPortInUse(port)); return port; }); } exports.randomPortNotInUse = randomPortNotInUse; /** * Returns a random number between 0 and 65535 */ function randomPortNumber() { return crypto.randomBytes(2).readUInt16LE(0); } /** * Throw an error if the port is not a valid number. * @param port port number * @throws Error if port is not a number from 0 to 65535. */ function validatePort(port) { if ((typeof (port) !== "number") || (port < 0) || (port > 65535)) { throw new office_addin_usage_data_1.ExpectedError("Port should be a number from 0 to 65535."); } } //# sourceMappingURL=port.js.map
JavaScript
MIT
lathroplabs/VizLaboratoryAddin/node_modules/office-addin-debugging/lib/port.js
49efb80b-22de-4cbe-8bb9-e306b4680aa7
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1712, "end": 1721, "context": "er.\r\n * @param text string to parse\r\n * @example \"127.0.0.1:3000\" returns 3000\r\n * @example \"[::1]:1900\" retu"}]
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1712, "end": 1721, "context": "er.\r\n * @param text string to parse\r\n * @example \"127.0.0.1:3000\" returns 3000\r\n * @example \"[::1]:1900\" retu"}]
# # Copyright 2014 Google Inc. All rights reserved. # # # Licensed under the Apache License, Version 2.0 (the "License"); you may not # use this file except in compliance with the License. You may obtain a copy of # the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, WITHOUT # WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the # License for the specific language governing permissions and limitations under # the License. # """Tests for client module.""" import responses import time import googlemaps from googlemaps import client as _client import test as _test import requests class ClientTest(_test.TestCase): def test_no_api_key(self): with self.assertRaises(Exception): client = googlemaps.Client() client.directions("Sydney", "Melbourne") def test_invalid_api_key(self): with self.assertRaises(Exception): client = googlemaps.Client(key="Invalid key.") client.directions("Sydney", "Melbourne") def test_urlencode(self): # See GH #72. encoded_params = _client.urlencode_params([("address", "=Sydney ~")]) self.assertEqual("address=%3DSydney+~", encoded_params) @responses.activate def test_queries_per_second(self): # This test assumes that the time to run a mocked query is # relatively small, eg a few milliseconds. We define a rate of # 3 queries per second, and run double that, which should take at # least 1 second but no more than 2. 
queries_per_second = 3 query_range = range(queries_per_second * 2) for _ in query_range: responses.add(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", body='{"status":"OK","results":[]}', status=200, content_type="application/json") client = googlemaps.Client(key="AIzaasdf", queries_per_second=queries_per_second) start = time.time() for _ in query_range: client.geocode("Sesame St.") end = time.time() self.assertTrue(start + 1 < end < start + 2) @responses.activate def test_key_sent(self): responses.add(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", body='{"status":"OK","results":[]}', status=200, content_type="application/json") client = googlemaps.Client(key="AIzaasdf") client.geocode("Sesame St.") self.assertEqual(1, len(responses.calls)) self.assertURLEqual("https://maps.googleapis.com/maps/api/geocode/json?" "key=AIzaasdf&address=Sesame+St.", responses.calls[0].request.url) @responses.activate def test_extra_params(self): responses.add(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", body='{"status":"OK","results":[]}', status=200, content_type="application/json") client = googlemaps.Client(key="AIzaasdf") client.geocode("Sesame St.", extra_params={"foo": "bar"}) self.assertEqual(1, len(responses.calls)) self.assertURLEqual("https://maps.googleapis.com/maps/api/geocode/json?" 
"key=AIzaasdf&address=Sesame+St.&foo=bar", responses.calls[0].request.url) def test_hmac(self): """ From http://en.wikipedia.org/wiki/Hash-based_message_authentication_code HMAC_SHA1("key", "The quick brown fox jumps over the lazy dog") = 0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9 """ message = "The quick brown fox jumps over the lazy dog" key = "a2V5" # "key" -> base64 signature = "3nybhbi3iqa8ino29wqQcBydtNk=" self.assertEqual(signature, _client.sign_hmac(key, message)) @responses.activate def test_url_signed(self): responses.add(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", body='{"status":"OK","results":[]}', status=200, content_type="application/json") client = googlemaps.Client(client_id="foo", client_secret="a2V5") client.geocode("Sesame St.") self.assertEqual(1, len(responses.calls)) # Check ordering of parameters. self.assertIn("address=Sesame+St.&client=foo&signature", responses.calls[0].request.url) self.assertURLEqual("https://maps.googleapis.com/maps/api/geocode/json?" 
"address=Sesame+St.&client=foo&" "signature=fxbWUIcNPZSekVOhp2ul9LW5TpY=", responses.calls[0].request.url) @responses.activate def test_ua_sent(self): responses.add(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", body='{"status":"OK","results":[]}', status=200, content_type="application/json") client = googlemaps.Client(key="AIzaasdf") client.geocode("Sesame St.") self.assertEqual(1, len(responses.calls)) user_agent = responses.calls[0].request.headers["User-Agent"] self.assertTrue(user_agent.startswith("GoogleGeoApiClientPython")) @responses.activate def test_retry(self): class request_callback: def __init__(self): self.first_req = True def __call__(self, req): if self.first_req: self.first_req = False return (200, {}, '{"status":"OVER_QUERY_LIMIT"}') return (200, {}, '{"status":"OK","results":[]}') responses.add_callback(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", content_type='application/json', callback=request_callback()) client = googlemaps.Client(key="AIzaasdf") client.geocode("Sesame St.") self.assertEqual(2, len(responses.calls)) self.assertEqual(responses.calls[0].request.url, responses.calls[1].request.url) @responses.activate def test_transport_error(self): responses.add(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", status=404, content_type='application/json') client = googlemaps.Client(key="AIzaasdf") with self.assertRaises(googlemaps.exceptions.HTTPError) as e: client.geocode("Foo") self.assertEqual(e.exception.status_code, 404) @responses.activate def test_host_override(self): responses.add(responses.GET, "https://foo.com/bar", body='{"status":"OK","results":[]}', status=200, content_type="application/json") client = googlemaps.Client(key="AIzaasdf") client._get("/bar", {}, base_url="https://foo.com") self.assertEqual(1, len(responses.calls)) @responses.activate def test_custom_extract(self): def custom_extract(resp): return resp.json() responses.add(responses.GET, 
"https://maps.googleapis.com/bar", body='{"error":"errormessage"}', status=403, content_type="application/json") client = googlemaps.Client(key="AIzaasdf") b = client._get("/bar", {}, extract_body=custom_extract) self.assertEqual(1, len(responses.calls)) self.assertEqual("errormessage", b["error"]) @responses.activate def test_retry_intermittent(self): class request_callback: def __init__(self): self.first_req = True def __call__(self, req): if self.first_req: self.first_req = False return (500, {}, 'Internal Server Error.') return (200, {}, '{"status":"OK","results":[]}') responses.add_callback(responses.GET, "https://maps.googleapis.com/maps/api/geocode/json", content_type="application/json", callback=request_callback()) client = googlemaps.Client(key="AIzaasdf") client.geocode("Sesame St.") self.assertEqual(2, len(responses.calls)) def test_channel_without_client_id(self): with self.assertRaises(ValueError): client = googlemaps.Client(key="AIzaasdf", channel="mychannel") def test_invalid_channel(self): # Cf. limitations here: # https://developers.google.com/maps/premium/reports # /usage-reports#channels with self.assertRaises(ValueError): client = googlemaps.Client(client_id="foo", client_secret="a2V5", channel="auieauie$? ") def test_auth_url_with_channel(self): client = googlemaps.Client(key="AIzaasdf", client_id="foo", client_secret="a2V5", channel="MyChannel_1") # Check ordering of parameters + signature. 
auth_url = client._generate_auth_url("/test", {"param": "param"}, accepts_clientid=True) self.assertEqual(auth_url, "/test?param=param" "&channel=MyChannel_1" "&client=foo" "&signature=OH18GuQto_mEpxj99UimKskvo4k=") # Check if added to requests to API with accepts_clientid=False auth_url = client._generate_auth_url("/test", {"param": "param"}, accepts_clientid=False) self.assertEqual(auth_url, "/test?param=param&key=AIzaasdf") def test_requests_version(self): client_args_timeout = { "key": "AIzaasdf", "client_id": "foo", "client_secret": "a2V5", "channel": "MyChannel_1", "connect_timeout": 5, "read_timeout": 5 } client_args = client_args_timeout.copy() del client_args["connect_timeout"] del client_args["read_timeout"] requests.__version__ = '2.3.0' with self.assertRaises(NotImplementedError): googlemaps.Client(**client_args_timeout) googlemaps.Client(**client_args) requests.__version__ = '2.4.0' googlemaps.Client(**client_args_timeout) googlemaps.Client(**client_args) @responses.activate def test_no_retry_over_query_limit(self): responses.add(responses.GET, "https://maps.googleapis.com/foo", body='{"status":"OVER_QUERY_LIMIT"}', status=200, content_type="application/json") client = googlemaps.Client(key="AIzaasdf", retry_over_query_limit=False) with self.assertRaises(googlemaps.exceptions.ApiError): client._request("/foo", {}) self.assertEqual(1, len(responses.calls))
Python
Apache-2.0
Harkishen-Singh/google-maps-services-python/test/test_client.py
fe1c18bf-3616-4782-80f4-47776a05bb75
[{"tag": "PASSWORD", "value": "AIzaasdf", "start": 7889, "end": 7897, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n b = client._get(\"/bar\", {}, extract_bod"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 6861, "end": 6869, "context": "n/json')\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n with self.assertRaises(googlemaps.excep"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 2938, "end": 2946, "context": "i/geocode/json?\"\n \"key=AIzaasdf&address=Sesame+St.\",\n "}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 2107, "end": 2115, "context": "on/json\")\n client = googlemaps.Client(key=\"AIzaasdf\",\n queries_per_"}, {"tag": "API_KEY", "value": "0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9", "start": 3932, "end": 3974, "context": " brown fox jumps over the lazy dog\")\n = 0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9\n \"\"\"\n\n message = \"The quick brown f"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 10382, "end": 10390, "context": "self.assertEqual(auth_url, \"/test?param=param&key=AIzaasdf\")\n\n def test_requests_version(self):\n c"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 7362, "end": 7370, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client._get(\"/bar\", {}, base_url=\"https"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 2725, "end": 2733, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client.geocode(\"Sesame St.\")\n\n s"}, {"tag": "AMBIGUOUS", "value": "a2V5", "start": 9274, "end": 9278, "context": "googlemaps.Client(client_id=\"foo\", client_secret=\"a2V5\",\n channel="}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 10483, "end": 10491, "context": " client_args_timeout = {\n \"key\": \"AIzaasdf\",\n \"client_id\": \"foo\",\n \"cl"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 11479, "end": 11487, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\",\n retry_over_q"}, {"tag": "PASSWORD", "value": 
"AIzaasdf", "start": 3387, "end": 3395, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client.geocode(\"Sesame St.\", extra_para"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 9426, "end": 9434, "context": "el(self):\n client = googlemaps.Client(key=\"AIzaasdf\",\n client_id=\"f"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 8963, "end": 8971, "context": "ror):\n client = googlemaps.Client(key=\"AIzaasdf\", channel=\"mychannel\")\n\n def test_invalid_chan"}, {"tag": "AMBIGUOUS", "value": "fxbWUIcNPZSekVOhp2ul9LW5TpY=", "start": 5026, "end": 5054, "context": "ient=foo&\"\n \"signature=fxbWUIcNPZSekVOhp2ul9LW5TpY=\",\n responses.calls[0]."}, {"tag": "AMBIGUOUS", "value": "OH18GuQto_mEpxj99UimKskvo4k=", "start": 10032, "end": 10060, "context": "ient=foo\"\n \"&signature=OH18GuQto_mEpxj99UimKskvo4k=\")\n\n # Check if added to requests to API wi"}, {"tag": "AMBIGUOUS", "value": "3nybhbi3iqa8ino29wqQcBydtNk=", "start": 4112, "end": 4140, "context": "y = \"a2V5\" # \"key\" -> base64\n signature = \"3nybhbi3iqa8ino29wqQcBydtNk=\"\n\n self.assertEqual(signature, _client.sig"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 8729, "end": 8737, "context": "lback())\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client.geocode(\"Sesame St.\")\n\n s"}]
[{"tag": "PASSWORD", "value": "AIzaasdf", "start": 7889, "end": 7897, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n b = client._get(\"/bar\", {}, extract_bod"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 6861, "end": 6869, "context": "n/json')\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n with self.assertRaises(googlemaps.excep"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 2938, "end": 2946, "context": "i/geocode/json?\"\n \"key=AIzaasdf&address=Sesame+St.\",\n "}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 2107, "end": 2115, "context": "on/json\")\n client = googlemaps.Client(key=\"AIzaasdf\",\n queries_per_"}, {"tag": "API_KEY", "value": "0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9", "start": 3932, "end": 3974, "context": " brown fox jumps over the lazy dog\")\n = 0xde7c9b85b8b78aa6bc8a7a36f70a90701c9db4d9\n \"\"\"\n\n message = \"The quick brown f"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 10382, "end": 10390, "context": "self.assertEqual(auth_url, \"/test?param=param&key=AIzaasdf\")\n\n def test_requests_version(self):\n c"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 7362, "end": 7370, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client._get(\"/bar\", {}, base_url=\"https"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 2725, "end": 2733, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client.geocode(\"Sesame St.\")\n\n s"}, {"tag": "AMBIGUOUS", "value": "a2V5", "start": 9274, "end": 9278, "context": "googlemaps.Client(client_id=\"foo\", client_secret=\"a2V5\",\n channel="}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 10483, "end": 10491, "context": " client_args_timeout = {\n \"key\": \"AIzaasdf\",\n \"client_id\": \"foo\",\n \"cl"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 11479, "end": 11487, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\",\n retry_over_q"}, {"tag": "PASSWORD", "value": 
"AIzaasdf", "start": 3387, "end": 3395, "context": "n/json\")\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client.geocode(\"Sesame St.\", extra_para"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 9426, "end": 9434, "context": "el(self):\n client = googlemaps.Client(key=\"AIzaasdf\",\n client_id=\"f"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 8963, "end": 8971, "context": "ror):\n client = googlemaps.Client(key=\"AIzaasdf\", channel=\"mychannel\")\n\n def test_invalid_chan"}, {"tag": "AMBIGUOUS", "value": "fxbWUIcNPZSekVOhp2ul9LW5TpY=", "start": 5026, "end": 5054, "context": "ient=foo&\"\n \"signature=fxbWUIcNPZSekVOhp2ul9LW5TpY=\",\n responses.calls[0]."}, {"tag": "AMBIGUOUS", "value": "OH18GuQto_mEpxj99UimKskvo4k=", "start": 10032, "end": 10060, "context": "ient=foo\"\n \"&signature=OH18GuQto_mEpxj99UimKskvo4k=\")\n\n # Check if added to requests to API wi"}, {"tag": "AMBIGUOUS", "value": "3nybhbi3iqa8ino29wqQcBydtNk=", "start": 4112, "end": 4140, "context": "y = \"a2V5\" # \"key\" -> base64\n signature = \"3nybhbi3iqa8ino29wqQcBydtNk=\"\n\n self.assertEqual(signature, _client.sig"}, {"tag": "PASSWORD", "value": "AIzaasdf", "start": 8729, "end": 8737, "context": "lback())\n\n client = googlemaps.Client(key=\"AIzaasdf\")\n client.geocode(\"Sesame St.\")\n\n s"}]
<?php declare(strict_types=1); /** * This file is part of Hyperf. * * @link https://www.hyperf.io * @document https://doc.hyperf.io * @contact group@hyperf.io * @license https://github.com/hyperf/hyperf/blob/master/LICENSE */ namespace Tegic\HyperfWechat; use Hyperf\HttpMessage\Stream\SwooleStream; use Hyperf\Utils\Context; use Psr\Http\Message\ResponseInterface as PsrResponseInterface; use Symfony\Component\HttpFoundation\Response; class Helper { public static function Response(Response $response) { $psrResponse = Context::get(PsrResponseInterface::class); $psrResponse = $psrResponse->withBody(new SwooleStream($response->getContent()))->withStatus($response->getStatusCode()); foreach ($response->headers->all() as $key => $item) { $psrResponse = $psrResponse->withHeader($key, $item); } return $psrResponse; } }
PHP
MIT
teg1c/hyperf-wechat/src/Helper.php
b2b63981-9852-4672-b8e9-89b8545e1705
[{"tag": "EMAIL", "value": "group@hyperf.io", "start": 154, "end": 169, "context": "o\n * @document https://doc.hyperf.io\n * @contact group@hyperf.io\n * @license https://github.com/hyperf/hyperf/blo"}]
[{"tag": "EMAIL", "value": "group@hyperf.io", "start": 154, "end": 169, "context": "o\n * @document https://doc.hyperf.io\n * @contact group@hyperf.io\n * @license https://github.com/hyperf/hyperf/blo"}]
import os import re import sys import uuid import redis from cryptography.fernet import Fernet from flask import abort, Flask, render_template, request from redis.exceptions import ConnectionError from werkzeug.urls import url_quote_plus from werkzeug.urls import url_unquote_plus NO_SSL = os.environ.get('NO_SSL', False) TOKEN_SEPARATOR = '~' # Initialize Flask Application app = Flask(__name__) if os.environ.get('DEBUG'): app.debug = True app.secret_key = os.environ.get('SECRET_KEY', 'Secret Key') app.config.update( dict(STATIC_URL=os.environ.get('STATIC_URL', 'static'))) # Initialize Redis if os.environ.get('MOCK_REDIS'): from mockredis import mock_strict_redis_client redis_client = mock_strict_redis_client() elif os.environ.get('REDIS_URL'): redis_client = redis.StrictRedis.from_url(os.environ.get('REDIS_URL')) else: redis_host = os.environ.get('REDIS_HOST', 'localhost') redis_port = os.environ.get('REDIS_PORT', 6379) redis_db = os.environ.get('SNAPPASS_REDIS_DB', 0) redis_client = redis.StrictRedis( host=redis_host, port=redis_port, db=redis_db) REDIS_PREFIX = os.environ.get('REDIS_PREFIX', 'snappass') TIME_CONVERSION = {'week': 604800, 'day': 86400, 'hour': 3600} def check_redis_alive(fn): def inner(*args, **kwargs): try: if fn.__name__ == 'main': redis_client.ping() return fn(*args, **kwargs) except ConnectionError as e: print('Failed to connect to redis! %s' % e.message) if fn.__name__ == 'main': sys.exit(0) else: return abort(500) return inner def encrypt(password): """ Take a password string, encrypt it with Fernet symmetric encryption, and return the result (bytes), with the decryption key (bytes) """ encryption_key = Fernet.generate_key() fernet = Fernet(encryption_key) encrypted_password = fernet.encrypt(password.encode('utf-8')) return encrypted_password, encryption_key def decrypt(password, decryption_key): """ Decrypt a password (bytes) using the provided key (bytes), and return the plain-text password (bytes). 
""" fernet = Fernet(decryption_key) return fernet.decrypt(password) def parse_token(token): token_fragments = token.split(TOKEN_SEPARATOR, 1) # Split once, not more. storage_key = token_fragments[0] try: decryption_key = token_fragments[1].encode('utf-8') except IndexError: decryption_key = None return storage_key, decryption_key @check_redis_alive def set_password(password, ttl): """ Encrypt and store the password for the specified lifetime. Returns a token comprised of the key where the encrypted password is stored, and the decryption key. """ storage_key = REDIS_PREFIX + uuid.uuid4().hex encrypted_password, encryption_key = encrypt(password) redis_client.setex(storage_key, ttl, encrypted_password) encryption_key = encryption_key.decode('utf-8') token = TOKEN_SEPARATOR.join([storage_key, encryption_key]) return token @check_redis_alive def get_password(token): """ From a given token, return the initial password. If the token is tilde-separated, we decrypt the password fetched from Redis. If not, the password is simply returned as is. 
""" storage_key, decryption_key = parse_token(token) password = redis_client.get(storage_key) redis_client.delete(storage_key) if password is not None: if decryption_key is not None: password = decrypt(password, decryption_key) return password.decode('utf-8') @check_redis_alive def password_exists(token): storage_key, decryption_key = parse_token(token) return redis_client.exists(storage_key) def empty(value): if not value: return True def clean_input(): """ Make sure we're not getting bad data from the front end, format data to be machine readable """ if empty(request.form.get('password', '')): abort(400) if empty(request.form.get('ttl', '')): abort(400) time_period = request.form['ttl'].lower() if time_period not in TIME_CONVERSION: abort(400) return TIME_CONVERSION[time_period], request.form['password'] @app.route('/', methods=['GET']) def index(): return render_template('set_password.html') @app.route('/', methods=['POST']) def handle_password(): ttl, password = clean_input() token = set_password(password, ttl) if NO_SSL: base_url = request.url_root else: base_url = request.url_root.replace("http://", "https://") link = base_url + url_quote_plus(token) return render_template('confirm.html', password_link=link) @app.route('/<password_key>', methods=['GET']) def preview_password(password_key): password_key = url_unquote_plus(password_key) if not password_exists(password_key): abort(404) return render_template('preview.html') @app.route('/<password_key>', methods=['POST']) def show_password(password_key): password_key = url_unquote_plus(password_key) password = get_password(password_key) if not password: abort(404) return render_template('password.html', password=password) @check_redis_alive def main(): app.run(host='0.0.0.0') if __name__ == '__main__': main()
Python
MIT
47Billion/snappass/snappass/main.py
9599d2be-efe0-4f85-ab73-dfbba2ecfb70
[]
[]
from setuptools import setup, find_packages # declare these here since we use them in multiple places _tests_require = [ 'pytest', 'pytest-cov', 'flake8', ] setup( # package info name='cheapskate_bal', description='Cheapskate labs single/dual plane balancer', version='0.0.2', url='http://your/url/here', author='Kevin Powell', author_email='kevin@kevinpowell.guru', packages=find_packages(exclude=['tests', 'tests.*']), # scripts to install to usr/bin entry_points={ 'console_scripts': [ 'csbal=cheapskate_bal.cli:csbal_process', 'csbal_s=cheapskate_bal.cli:csbal_single', 'csbal_dinit=cheapskate_bal.cli:csbal_dual_init', 'csbal_d=cheapskate_bal.cli:csbal_dual_iter' ] }, # run time requirements # exact versions are in the requirements.txt file install_requires=[], # need this for setup.py test setup_requires=[ 'pytest-runner', ], # needs this if using setuptools_scm # use_scm_version=True, # test dependencies tests_require=_tests_require, extras_require={ # this allows us to pip install .[test] for all test dependencies 'test': _tests_require, } )
Python
Unlicense
kevinpowell/balancer/cheapskate_bal/setup.py
f25a7f10-e786-4b4a-b056-ad5d4562f6d0
[{"tag": "NAME", "value": "Kevin Powell", "start": 352, "end": 364, "context": "0.2',\n url='http://your/url/here',\n author='Kevin Powell',\n author_email='kevin@kevinpowell.guru',\n "}, {"tag": "EMAIL", "value": "kevin@kevinpowell.guru", "start": 385, "end": 407, "context": "re',\n author='Kevin Powell',\n author_email='kevin@kevinpowell.guru',\n packages=find_packages(exclude=['tests', 't"}]
[{"tag": "NAME", "value": "Kevin Powell", "start": 352, "end": 364, "context": "0.2',\n url='http://your/url/here',\n author='Kevin Powell',\n author_email='kevin@kevinpowell.guru',\n "}, {"tag": "EMAIL", "value": "kevin@kevinpowell.guru", "start": 385, "end": 407, "context": "re',\n author='Kevin Powell',\n author_email='kevin@kevinpowell.guru',\n packages=find_packages(exclude=['tests', 't"}]
#!/usr/bin/env python # Copyright 2015 The Kubernetes Authors. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import base64 import os import re import random import shutil import socket import string import json import ipaddress import charms.leadership from shlex import split from subprocess import check_call from subprocess import check_output from subprocess import CalledProcessError from charms import layer from charms.layer import snap from charms.reactive import hook from charms.reactive import remove_state from charms.reactive import set_state from charms.reactive import is_state from charms.reactive import when, when_any, when_not from charms.reactive.helpers import data_changed, any_file_changed from charms.kubernetes.common import get_version from charms.kubernetes.common import retry from charms.kubernetes.flagmanager import FlagManager from charmhelpers.core import hookenv from charmhelpers.core import host from charmhelpers.core import unitdata from charmhelpers.core.host import service_stop from charmhelpers.core.templating import render from charmhelpers.fetch import apt_install from charmhelpers.contrib.charmsupport import nrpe # Override the default nagios shortname regex to allow periods, which we # need because our bin names contain them (e.g. 'snap.foo.daemon'). The # default regex in charmhelpers doesn't allow periods, but nagios itself does. 
nrpe.Check.shortname_re = '[\.A-Za-z0-9-_]+$' os.environ['PATH'] += os.pathsep + os.path.join(os.sep, 'snap', 'bin') def service_cidr(): ''' Return the charm's service-cidr config ''' db = unitdata.kv() frozen_cidr = db.get('kubernetes-master.service-cidr') return frozen_cidr or hookenv.config('service-cidr') def freeze_service_cidr(): ''' Freeze the service CIDR. Once the apiserver has started, we can no longer safely change this value. ''' db = unitdata.kv() db.set('kubernetes-master.service-cidr', service_cidr()) @hook('upgrade-charm') def reset_states_for_delivery(): '''An upgrade charm event was triggered by Juju, react to that here.''' migrate_from_pre_snaps() install_snaps() set_state('reconfigure.authentication.setup') remove_state('authentication.setup') def rename_file_idempotent(source, destination): if os.path.isfile(source): os.rename(source, destination) def migrate_from_pre_snaps(): # remove old states remove_state('kubernetes.components.installed') remove_state('kubernetes.dashboard.available') remove_state('kube-dns.available') remove_state('kubernetes-master.app_version.set') # disable old services services = ['kube-apiserver', 'kube-controller-manager', 'kube-scheduler'] for service in services: hookenv.log('Stopping {0} service.'.format(service)) host.service_stop(service) # rename auth files os.makedirs('/root/cdk', exist_ok=True) rename_file_idempotent('/etc/kubernetes/serviceaccount.key', '/root/cdk/serviceaccount.key') rename_file_idempotent('/srv/kubernetes/basic_auth.csv', '/root/cdk/basic_auth.csv') rename_file_idempotent('/srv/kubernetes/known_tokens.csv', '/root/cdk/known_tokens.csv') # cleanup old files files = [ "/lib/systemd/system/kube-apiserver.service", "/lib/systemd/system/kube-controller-manager.service", "/lib/systemd/system/kube-scheduler.service", "/etc/default/kube-defaults", "/etc/default/kube-apiserver.defaults", "/etc/default/kube-controller-manager.defaults", "/etc/default/kube-scheduler.defaults", "/srv/kubernetes", 
"/home/ubuntu/kubectl", "/usr/local/bin/kubectl", "/usr/local/bin/kube-apiserver", "/usr/local/bin/kube-controller-manager", "/usr/local/bin/kube-scheduler", "/etc/kubernetes" ] for file in files: if os.path.isdir(file): hookenv.log("Removing directory: " + file) shutil.rmtree(file) elif os.path.isfile(file): hookenv.log("Removing file: " + file) os.remove(file) # clear the flag managers FlagManager('kube-apiserver').destroy_all() FlagManager('kube-controller-manager').destroy_all() FlagManager('kube-scheduler').destroy_all() def install_snaps(): channel = hookenv.config('channel') hookenv.status_set('maintenance', 'Installing kubectl snap') snap.install('kubectl', channel=channel, classic=True) hookenv.status_set('maintenance', 'Installing kube-apiserver snap') snap.install('kube-apiserver', channel=channel) hookenv.status_set('maintenance', 'Installing kube-controller-manager snap') snap.install('kube-controller-manager', channel=channel) hookenv.status_set('maintenance', 'Installing kube-scheduler snap') snap.install('kube-scheduler', channel=channel) hookenv.status_set('maintenance', 'Installing cdk-addons snap') snap.install('cdk-addons', channel=channel) set_state('kubernetes-master.snaps.installed') remove_state('kubernetes-master.components.started') @when('config.changed.channel') def channel_changed(): install_snaps() @when('config.changed.client_password', 'leadership.is_leader') def password_changed(): """Handle password change via the charms config.""" password = hookenv.config('client_password') if password == "" and is_state('client.password.initialised'): # password_changed is called during an upgrade. Nothing to do. 
return elif password == "": # Password not initialised password = token_generator() setup_basic_auth(password, "admin", "admin") set_state('reconfigure.authentication.setup') remove_state('authentication.setup') set_state('client.password.initialised') @when('cni.connected') @when_not('cni.configured') def configure_cni(cni): ''' Set master configuration on the CNI relation. This lets the CNI subordinate know that we're the master so it can respond accordingly. ''' cni.set_config(is_master=True, kubeconfig_path='') @when('leadership.is_leader') @when_not('authentication.setup') def setup_leader_authentication(): '''Setup basic authentication and token access for the cluster.''' api_opts = FlagManager('kube-apiserver') controller_opts = FlagManager('kube-controller-manager') service_key = '/root/cdk/serviceaccount.key' basic_auth = '/root/cdk/basic_auth.csv' known_tokens = '/root/cdk/known_tokens.csv' api_opts.add('basic-auth-file', basic_auth) api_opts.add('token-auth-file', known_tokens) hookenv.status_set('maintenance', 'Rendering authentication templates.') keys = [service_key, basic_auth, known_tokens] # Try first to fetch data from an old leadership broadcast. 
if not get_keys_from_leader(keys) \ or is_state('reconfigure.authentication.setup'): last_pass = get_password('basic_auth.csv', 'admin') setup_basic_auth(last_pass, 'admin', 'admin') if not os.path.isfile(known_tokens): setup_tokens(None, 'admin', 'admin') setup_tokens(None, 'kubelet', 'kubelet') setup_tokens(None, 'kube_proxy', 'kube_proxy') # Generate the default service account token key os.makedirs('/root/cdk', exist_ok=True) if not os.path.isfile(service_key): cmd = ['openssl', 'genrsa', '-out', service_key, '2048'] check_call(cmd) remove_state('reconfigure.authentication.setup') api_opts.add('service-account-key-file', service_key) controller_opts.add('service-account-private-key-file', service_key) # read service account key for syndication leader_data = {} for f in [known_tokens, basic_auth, service_key]: with open(f, 'r') as fp: leader_data[f] = fp.read() # this is slightly opaque, but we are sending file contents under its file # path as a key. # eg: # {'/root/cdk/serviceaccount.key': 'RSA:2471731...'} charms.leadership.leader_set(leader_data) remove_state('kubernetes-master.components.started') set_state('authentication.setup') @when_not('leadership.is_leader') def setup_non_leader_authentication(): service_key = '/root/cdk/serviceaccount.key' basic_auth = '/root/cdk/basic_auth.csv' known_tokens = '/root/cdk/known_tokens.csv' keys = [service_key, basic_auth, known_tokens] # The source of truth for non-leaders is the leader. # Therefore we overwrite_local with whatever the leader has. if not get_keys_from_leader(keys, overwrite_local=True): # the keys were not retrieved. Non-leaders have to retry. 
return if not any_file_changed(keys) and is_state('authentication.setup'): # No change detected and we have already setup the authentication return hookenv.status_set('maintenance', 'Rendering authentication templates.') api_opts = FlagManager('kube-apiserver') api_opts.add('basic-auth-file', basic_auth) api_opts.add('token-auth-file', known_tokens) api_opts.add('service-account-key-file', service_key) controller_opts = FlagManager('kube-controller-manager') controller_opts.add('service-account-private-key-file', service_key) remove_state('kubernetes-master.components.started') set_state('authentication.setup') def get_keys_from_leader(keys, overwrite_local=False): """ Gets the broadcasted keys from the leader and stores them in the corresponding files. Args: keys: list of keys. Keys are actually files on the FS. Returns: True if all key were fetched, False if not. """ # This races with other codepaths, and seems to require being created first # This block may be extracted later, but for now seems to work as intended os.makedirs('/root/cdk', exist_ok=True) for k in keys: # If the path does not exist, assume we need it if not os.path.exists(k) or overwrite_local: # Fetch data from leadership broadcast contents = charms.leadership.leader_get(k) # Default to logging the warning and wait for leader data to be set if contents is None: msg = "Waiting on leaders crypto keys." 
hookenv.status_set('waiting', msg) hookenv.log('Missing content for file {}'.format(k)) return False # Write out the file and move on to the next item with open(k, 'w+') as fp: fp.write(contents) return True @when('kubernetes-master.snaps.installed') def set_app_version(): ''' Declare the application version to juju ''' version = check_output(['kube-apiserver', '--version']) hookenv.application_version_set(version.split(b' v')[-1].rstrip()) @when('cdk-addons.configured', 'kube-api-endpoint.available', 'kube-control.connected') def idle_status(kube_api, kube_control): ''' Signal at the end of the run that we are running. ''' if not all_kube_system_pods_running(): hookenv.status_set('waiting', 'Waiting for kube-system pods to start') elif hookenv.config('service-cidr') != service_cidr(): msg = 'WARN: cannot change service-cidr, still using ' + service_cidr() hookenv.status_set('active', msg) else: # All services should be up and running at this point. Double-check... failing_services = master_services_down() if len(failing_services) == 0: hookenv.status_set('active', 'Kubernetes master running.') else: msg = 'Stopped services: {}'.format(','.join(failing_services)) hookenv.status_set('blocked', msg) def master_services_down(): """Ensure master services are up and running. Return: list of failing services""" services = ['kube-apiserver', 'kube-controller-manager', 'kube-scheduler'] failing_services = [] for service in services: daemon = 'snap.{}.daemon'.format(service) if not host.service_running(daemon): failing_services.append(service) return failing_services @when('etcd.available', 'tls_client.server.certificate.saved', 'authentication.setup') @when_not('kubernetes-master.components.started') def start_master(etcd): '''Run the Kubernetes master components.''' hookenv.status_set('maintenance', 'Configuring the Kubernetes master services.') freeze_service_cidr() if not etcd.get_connection_string(): # etcd is not returning a connection string. 
This hapens when # the master unit disconnects from etcd and is ready to terminate. # No point in trying to start master services and fail. Just return. return handle_etcd_relation(etcd) configure_master_services() hookenv.status_set('maintenance', 'Starting the Kubernetes master services.') services = ['kube-apiserver', 'kube-controller-manager', 'kube-scheduler'] for service in services: host.service_restart('snap.%s.daemon' % service) hookenv.open_port(6443) set_state('kubernetes-master.components.started') @when('etcd.available') def etcd_data_change(etcd): ''' Etcd scale events block master reconfiguration due to the kubernetes-master.components.started state. We need a way to handle these events consistenly only when the number of etcd units has actually changed ''' # key off of the connection string connection_string = etcd.get_connection_string() # If the connection string changes, remove the started state to trigger # handling of the master components if data_changed('etcd-connect', connection_string): remove_state('kubernetes-master.components.started') @when('kube-control.connected') @when('cdk-addons.configured') def send_cluster_dns_detail(kube_control): ''' Send cluster DNS info ''' # Note that the DNS server doesn't necessarily exist at this point. We know # where we're going to put it, though, so let's send the info anyway. 
dns_ip = get_dns_ip() kube_control.set_dns(53, hookenv.config('dns_domain'), dns_ip) @when('kube-control.auth.requested') @when('authentication.setup') @when('leadership.is_leader') def send_tokens(kube_control): """Send the tokens to the workers.""" kubelet_token = get_token('kubelet') proxy_token = get_token('kube_proxy') admin_token = get_token('admin') # Send the data requests = kube_control.auth_user() for request in requests: kube_control.sign_auth_request(request[0], kubelet_token, proxy_token, admin_token) @when_not('kube-control.connected') def missing_kube_control(): """Inform the operator master is waiting for a relation to workers. If deploying via bundle this won't happen, but if operator is upgrading a a charm in a deployment that pre-dates the kube-control relation, it'll be missing. """ hookenv.status_set('blocked', 'Waiting for workers.') @when('kube-api-endpoint.available') def push_service_data(kube_api): ''' Send configuration to the load balancer, and close access to the public interface ''' kube_api.configure(port=6443) @when('certificates.available') def send_data(tls): '''Send the data that is required to create a server certificate for this server.''' # Use the public ip of this unit as the Common Name for the certificate. common_name = hookenv.unit_public_ip() # Get the SDN gateway based on the cidr address. kubernetes_service_ip = get_kubernetes_service_ip() domain = hookenv.config('dns_domain') # Create SANs that the tls layer will add to the server cert. sans = [ hookenv.unit_public_ip(), hookenv.unit_private_ip(), socket.gethostname(), kubernetes_service_ip, 'kubernetes', 'kubernetes.{0}'.format(domain), 'kubernetes.default', 'kubernetes.default.svc', 'kubernetes.default.svc.{0}'.format(domain) ] # Create a path safe name by removing path characters from the unit name. certificate_name = hookenv.local_unit().replace('/', '_') # Request a server cert with this information. 
tls.request_server_cert(common_name, sans, certificate_name) @when('kubernetes-master.components.started') def configure_cdk_addons(): ''' Configure CDK addons ''' remove_state('cdk-addons.configured') dbEnabled = str(hookenv.config('enable-dashboard-addons')).lower() args = [ 'arch=' + arch(), 'dns-ip=' + get_dns_ip(), 'dns-domain=' + hookenv.config('dns_domain'), 'enable-dashboard=' + dbEnabled ] check_call(['snap', 'set', 'cdk-addons'] + args) if not addons_ready(): hookenv.status_set('waiting', 'Waiting to retry addon deployment') remove_state('cdk-addons.configured') return set_state('cdk-addons.configured') @retry(times=3, delay_secs=20) def addons_ready(): """ Test if the add ons got installed Returns: True is the addons got applied """ try: check_call(['cdk-addons.apply']) return True except CalledProcessError: hookenv.log("Addons are not ready yet.") return False @when('loadbalancer.available', 'certificates.ca.available', 'certificates.client.cert.available', 'authentication.setup') def loadbalancer_kubeconfig(loadbalancer, ca, client): # Get the potential list of loadbalancers from the relation object. hosts = loadbalancer.get_addresses_ports() # Get the public address of loadbalancers so users can access the cluster. address = hosts[0].get('public-address') # Get the port of the loadbalancer so users can access the cluster. 
port = hosts[0].get('port') server = 'https://{0}:{1}'.format(address, port) build_kubeconfig(server) @when('certificates.ca.available', 'certificates.client.cert.available', 'authentication.setup') @when_not('loadbalancer.available') def create_self_config(ca, client): '''Create a kubernetes configuration for the master unit.''' server = 'https://{0}:{1}'.format(hookenv.unit_get('public-address'), 6443) build_kubeconfig(server) @when('ceph-storage.available') def ceph_state_control(ceph_admin): ''' Determine if we should remove the state that controls the re-render and execution of the ceph-relation-changed event because there are changes in the relationship data, and we should re-render any configs, keys, and/or service pre-reqs ''' ceph_relation_data = { 'mon_hosts': ceph_admin.mon_hosts(), 'fsid': ceph_admin.fsid(), 'auth_supported': ceph_admin.auth(), 'hostname': socket.gethostname(), 'key': ceph_admin.key() } # Re-execute the rendering if the data has changed. if data_changed('ceph-config', ceph_relation_data): remove_state('ceph-storage.configured') @when('ceph-storage.available') @when_not('ceph-storage.configured') def ceph_storage(ceph_admin): '''Ceph on kubernetes will require a few things - namely a ceph configuration, and the ceph secret key file used for authentication. This method will install the client package, and render the requisit files in order to consume the ceph-storage relation.''' ceph_context = { 'mon_hosts': ceph_admin.mon_hosts(), 'fsid': ceph_admin.fsid(), 'auth_supported': ceph_admin.auth(), 'use_syslog': "true", 'ceph_public_network': '', 'ceph_cluster_network': '', 'loglevel': 1, 'hostname': socket.gethostname(), } # Install the ceph common utilities. 
apt_install(['ceph-common'], fatal=True) etc_ceph_directory = '/etc/ceph' if not os.path.isdir(etc_ceph_directory): os.makedirs(etc_ceph_directory) charm_ceph_conf = os.path.join(etc_ceph_directory, 'ceph.conf') # Render the ceph configuration from the ceph conf template render('ceph.conf', charm_ceph_conf, ceph_context) # The key can rotate independently of other ceph config, so validate it admin_key = os.path.join(etc_ceph_directory, 'ceph.client.admin.keyring') try: with open(admin_key, 'w') as key_file: key_file.write("[client.admin]\n\tkey = {}\n".format( ceph_admin.key())) except IOError as err: hookenv.log("IOError writing admin.keyring: {}".format(err)) # Enlist the ceph-admin key as a kubernetes secret if ceph_admin.key(): encoded_key = base64.b64encode(ceph_admin.key().encode('utf-8')) else: # We didn't have a key, and cannot proceed. Do not set state and # allow this method to re-execute return context = {'secret': encoded_key.decode('ascii')} render('ceph-secret.yaml', '/tmp/ceph-secret.yaml', context) try: # At first glance this is deceptive. The apply stanza will create if # it doesn't exist, otherwise it will update the entry, ensuring our # ceph-secret is always reflective of what we have in /etc/ceph # assuming we have invoked this anytime that file would change. cmd = ['kubectl', 'apply', '-f', '/tmp/ceph-secret.yaml'] check_call(cmd) os.remove('/tmp/ceph-secret.yaml') except: # the enlistment in kubernetes failed, return and prepare for re-exec return # when complete, set a state relating to configuration of the storage # backend that will allow other modules to hook into this and verify we # have performed the necessary pre-req steps to interface with a ceph # deployment. 
set_state('ceph-storage.configured') @when('nrpe-external-master.available') @when_not('nrpe-external-master.initial-config') def initial_nrpe_config(nagios=None): set_state('nrpe-external-master.initial-config') update_nrpe_config(nagios) @when('kubernetes-master.components.started') @when('nrpe-external-master.available') @when_any('config.changed.nagios_context', 'config.changed.nagios_servicegroups') def update_nrpe_config(unused=None): services = ( 'snap.kube-apiserver.daemon', 'snap.kube-controller-manager.daemon', 'snap.kube-scheduler.daemon' ) hostname = nrpe.get_nagios_hostname() current_unit = nrpe.get_nagios_unit_name() nrpe_setup = nrpe.NRPE(hostname=hostname) nrpe.add_init_service_checks(nrpe_setup, services, current_unit) nrpe_setup.write() @when_not('nrpe-external-master.available') @when('nrpe-external-master.initial-config') def remove_nrpe_config(nagios=None): remove_state('nrpe-external-master.initial-config') # List of systemd services for which the checks will be removed services = ( 'snap.kube-apiserver.daemon', 'snap.kube-controller-manager.daemon', 'snap.kube-scheduler.daemon' ) # The current nrpe-external-master interface doesn't handle a lot of logic, # use the charm-helpers code for now. hostname = nrpe.get_nagios_hostname() nrpe_setup = nrpe.NRPE(hostname=hostname) for service in services: nrpe_setup.remove_check(shortname=service) def is_privileged(): """Return boolean indicating whether or not to set allow-privileged=true. """ privileged = hookenv.config('allow-privileged') if privileged == 'auto': return is_state('kubernetes-master.gpu.enabled') else: return privileged == 'true' @when('config.changed.allow-privileged') @when('kubernetes-master.components.started') def on_config_allow_privileged_change(): """React to changed 'allow-privileged' config value. 
""" remove_state('kubernetes-master.components.started') remove_state('config.changed.allow-privileged') @when('kube-control.gpu.available') @when('kubernetes-master.components.started') @when_not('kubernetes-master.gpu.enabled') def on_gpu_available(kube_control): """The remote side (kubernetes-worker) is gpu-enabled. We need to run in privileged mode. """ config = hookenv.config() if config['allow-privileged'] == "false": hookenv.status_set( 'active', 'GPUs available. Set allow-privileged="auto" to enable.' ) return remove_state('kubernetes-master.components.started') set_state('kubernetes-master.gpu.enabled') @when('kubernetes-master.gpu.enabled') @when_not('kubernetes-master.privileged') def disable_gpu_mode(): """We were in gpu mode, but the operator has set allow-privileged="false", so we can't run in gpu mode anymore. """ remove_state('kubernetes-master.gpu.enabled') @hook('stop') def shutdown(): """ Stop the kubernetes master services """ service_stop('snap.kube-apiserver.daemon') service_stop('snap.kube-controller-manager.daemon') service_stop('snap.kube-scheduler.daemon') def arch(): '''Return the package architecture as a string. Raise an exception if the architecture is not supported by kubernetes.''' # Get the package architecture for this system. architecture = check_output(['dpkg', '--print-architecture']).rstrip() # Convert the binary result into a string. architecture = architecture.decode('utf-8') return architecture def build_kubeconfig(server): '''Gather the relevant data for Kubernetes configuration objects and create a config object with that information.''' # Get the options from the tls-client layer. layer_options = layer.options('tls-client') # Get all the paths to the tls information required for kubeconfig. ca = layer_options.get('ca_certificate_path') ca_exists = ca and os.path.isfile(ca) client_pass = get_password('basic_auth.csv', 'admin') # Do we have everything we need? 
if ca_exists and client_pass: # Create an absolute path for the kubeconfig file. kubeconfig_path = os.path.join(os.sep, 'home', 'ubuntu', 'config') # Create the kubeconfig on this system so users can access the cluster. create_kubeconfig(kubeconfig_path, server, ca, user='admin', password=client_pass) # Make the config file readable by the ubuntu users so juju scp works. cmd = ['chown', 'ubuntu:ubuntu', kubeconfig_path] check_call(cmd) def create_kubeconfig(kubeconfig, server, ca, key=None, certificate=None, user='ubuntu', context='juju-context', cluster='juju-cluster', password=None, token=None): '''Create a configuration for Kubernetes based on path using the supplied arguments for values of the Kubernetes server, CA, key, certificate, user context and cluster.''' if not key and not certificate and not password and not token: raise ValueError('Missing authentication mechanism.') # token and password are mutually exclusive. Error early if both are # present. The developer has requested an impossible situation. # see: kubectl config set-credentials --help if token and password: raise ValueError('Token and Password are mutually exclusive.') # Create the config file with the address of the master server. cmd = 'kubectl config --kubeconfig={0} set-cluster {1} ' \ '--server={2} --certificate-authority={3} --embed-certs=true' check_call(split(cmd.format(kubeconfig, cluster, server, ca))) # Delete old users cmd = 'kubectl config --kubeconfig={0} unset users' check_call(split(cmd.format(kubeconfig))) # Create the credentials using the client flags. cmd = 'kubectl config --kubeconfig={0} ' \ 'set-credentials {1} '.format(kubeconfig, user) if key and certificate: cmd = '{0} --client-key={1} --client-certificate={2} '\ '--embed-certs=true'.format(cmd, key, certificate) if password: cmd = "{0} --username={1} --password={2}".format(cmd, user, password) # This is mutually exclusive from password. They will not work together. 
if token: cmd = "{0} --token={1}".format(cmd, token) check_call(split(cmd)) # Create a default context with the cluster. cmd = 'kubectl config --kubeconfig={0} set-context {1} ' \ '--cluster={2} --user={3}' check_call(split(cmd.format(kubeconfig, context, cluster, user))) # Make the config use this new context. cmd = 'kubectl config --kubeconfig={0} use-context {1}' check_call(split(cmd.format(kubeconfig, context))) def get_dns_ip(): '''Get an IP address for the DNS server on the provided cidr.''' interface = ipaddress.IPv4Interface(service_cidr()) # Add .10 at the end of the network ip = interface.network.network_address + 10 return ip.exploded def get_kubernetes_service_ip(): '''Get the IP address for the kubernetes service based on the cidr.''' interface = ipaddress.IPv4Interface(service_cidr()) # Add .1 at the end of the network ip = interface.network.network_address + 1 return ip.exploded def handle_etcd_relation(reldata): ''' Save the client credentials and set appropriate daemon flags when etcd declares itself as available''' connection_string = reldata.get_connection_string() # Define where the etcd tls files will be kept. etcd_dir = '/root/cdk/etcd' # Create paths to the etcd client ca, key, and cert file locations. ca = os.path.join(etcd_dir, 'client-ca.pem') key = os.path.join(etcd_dir, 'client-key.pem') cert = os.path.join(etcd_dir, 'client-cert.pem') # Save the client credentials (in relation data) to the paths provided. reldata.save_client_credentials(key, cert, ca) api_opts = FlagManager('kube-apiserver') # Never use stale data, always prefer whats coming in during context # building. 
if its stale, its because whats in unitdata is stale data = api_opts.data if data.get('etcd-servers-strict') or data.get('etcd-servers'): api_opts.destroy('etcd-cafile') api_opts.destroy('etcd-keyfile') api_opts.destroy('etcd-certfile') api_opts.destroy('etcd-servers', strict=True) api_opts.destroy('etcd-servers') # Set the apiserver flags in the options manager api_opts.add('etcd-cafile', ca) api_opts.add('etcd-keyfile', key) api_opts.add('etcd-certfile', cert) api_opts.add('etcd-servers', connection_string, strict=True) def configure_master_services(): ''' Add remaining flags for the master services and configure snaps to use them ''' api_opts = FlagManager('kube-apiserver') controller_opts = FlagManager('kube-controller-manager') scheduler_opts = FlagManager('kube-scheduler') scheduler_opts.add('v', '2') # Get the tls paths from the layer data. layer_options = layer.options('tls-client') ca_cert_path = layer_options.get('ca_certificate_path') client_cert_path = layer_options.get('client_certificate_path') client_key_path = layer_options.get('client_key_path') server_cert_path = layer_options.get('server_certificate_path') server_key_path = layer_options.get('server_key_path') if is_privileged(): api_opts.add('allow-privileged', 'true', strict=True) set_state('kubernetes-master.privileged') else: api_opts.add('allow-privileged', 'false', strict=True) remove_state('kubernetes-master.privileged') # Handle static options for now api_opts.add('service-cluster-ip-range', service_cidr()) api_opts.add('min-request-timeout', '300') api_opts.add('v', '4') api_opts.add('tls-cert-file', server_cert_path) api_opts.add('tls-private-key-file', server_key_path) api_opts.add('kubelet-certificate-authority', ca_cert_path) api_opts.add('kubelet-client-certificate', client_cert_path) api_opts.add('kubelet-client-key', client_key_path) api_opts.add('logtostderr', 'true') api_opts.add('insecure-bind-address', '127.0.0.1') api_opts.add('insecure-port', '8080') 
api_opts.add('storage-backend', 'etcd2') # FIXME: add etcd3 support admission_control = [ 'Initializers', 'NamespaceLifecycle', 'LimitRanger', 'ServiceAccount', 'ResourceQuota', 'DefaultTolerationSeconds' ] if get_version('kube-apiserver') < (1, 6): hookenv.log('Removing DefaultTolerationSeconds from admission-control') admission_control.remove('DefaultTolerationSeconds') if get_version('kube-apiserver') < (1, 7): hookenv.log('Removing Initializers from admission-control') admission_control.remove('Initializers') api_opts.add('admission-control', ','.join(admission_control), strict=True) # Default to 3 minute resync. TODO: Make this configureable? controller_opts.add('min-resync-period', '3m') controller_opts.add('v', '2') controller_opts.add('root-ca-file', ca_cert_path) controller_opts.add('logtostderr', 'true') controller_opts.add('master', 'http://127.0.0.1:8080') scheduler_opts.add('v', '2') scheduler_opts.add('logtostderr', 'true') scheduler_opts.add('master', 'http://127.0.0.1:8080') cmd = ['snap', 'set', 'kube-apiserver'] + api_opts.to_s().split(' ') check_call(cmd) cmd = ( ['snap', 'set', 'kube-controller-manager'] + controller_opts.to_s().split(' ') ) check_call(cmd) cmd = ['snap', 'set', 'kube-scheduler'] + scheduler_opts.to_s().split(' ') check_call(cmd) def setup_basic_auth(password=None, username='admin', uid='admin'): '''Create the htacces file and the tokens.''' root_cdk = '/root/cdk' if not os.path.isdir(root_cdk): os.makedirs(root_cdk) htaccess = os.path.join(root_cdk, 'basic_auth.csv') if not password: password = token_generator() with open(htaccess, 'w') as stream: stream.write('{0},{1},{2}'.format(password, username, uid)) def setup_tokens(token, username, user): '''Create a token file for kubernetes authentication.''' root_cdk = '/root/cdk' if not os.path.isdir(root_cdk): os.makedirs(root_cdk) known_tokens = os.path.join(root_cdk, 'known_tokens.csv') if not token: token = token_generator() with open(known_tokens, 'a') as stream: 
stream.write('{0},{1},{2}\n'.format(token, username, user)) def get_password(csv_fname, user): '''Get the password of user within the csv file provided.''' root_cdk = '/root/cdk' tokens_fname = os.path.join(root_cdk, csv_fname) if not os.path.isfile(tokens_fname): return None with open(tokens_fname, 'r') as stream: for line in stream: record = line.split(',') if record[1] == user: return record[0] return None def get_token(username): """Grab a token from the static file if present. """ return get_password('known_tokens.csv', username) def set_token(password, save_salt): ''' Store a token so it can be recalled later by token_generator. param: password - the password to be stored param: save_salt - the key to store the value of the token.''' db = unitdata.kv() db.set(save_salt, password) return db.get(save_salt) def token_generator(length=32): ''' Generate a random token for use in passwords and account tokens. param: length - the length of the token to generate''' alpha = string.ascii_letters + string.digits token = ''.join(random.SystemRandom().choice(alpha) for _ in range(length)) return token @retry(times=3, delay_secs=10) def all_kube_system_pods_running(): ''' Check pod status in the kube-system namespace. Returns True if all pods are running, False otherwise. ''' cmd = ['kubectl', 'get', 'po', '-n', 'kube-system', '-o', 'json'] try: output = check_output(cmd).decode('utf-8') except CalledProcessError: hookenv.log('failed to get kube-system pod status') return False result = json.loads(output) for pod in result['items']: status = pod['status']['phase'] if status != 'Running': return False return True def apiserverVersion(): cmd = 'kube-apiserver --version'.split() version_string = check_output(cmd).decode('utf-8') return tuple(int(q) for q in re.findall("[0-9]+", version_string)[:3])
Python
Apache-2.0
BaiHuoYu/nbp/vendor/k8s.io/kubernetes/cluster/juju/layers/kubernetes-master/reactive/kubernetes_master.py
8076c215-43e2-4efd-a8b1-4930fb94a053
[{"tag": "AMBIGUOUS", "value": "RSA:2471731", "start": 8637, "end": 8648, "context": " # eg:\n # {'/root/cdk/serviceaccount.key': 'RSA:2471731...'}\n charms.leadership.leader_set(leader_data"}, {"tag": "USERNAME", "value": "admin", "start": 34323, "end": 34328, "context": ")\n\n\ndef setup_basic_auth(password=None, username='admin', uid='admin'):\n '''Create the htacces file an"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 32744, "end": 32753, "context": "true')\n api_opts.add('insecure-bind-address', '127.0.0.1')\n api_opts.add('insecure-port', '8080')\n a"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 33931, "end": 33940, "context": " 'true')\n scheduler_opts.add('master', 'http://127.0.0.1:8080')\n\n cmd = ['snap', 'set', 'kube-apiserver"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 33793, "end": 33802, "context": "'true')\n controller_opts.add('master', 'http://127.0.0.1:8080')\n\n scheduler_opts.add('v', '2')\n sche"}]
[{"tag": "AMBIGUOUS", "value": "RSA:2471731", "start": 8637, "end": 8648, "context": " # eg:\n # {'/root/cdk/serviceaccount.key': 'RSA:2471731...'}\n charms.leadership.leader_set(leader_data"}, {"tag": "USERNAME", "value": "admin", "start": 34323, "end": 34328, "context": ")\n\n\ndef setup_basic_auth(password=None, username='admin', uid='admin'):\n '''Create the htacces file an"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 32744, "end": 32753, "context": "true')\n api_opts.add('insecure-bind-address', '127.0.0.1')\n api_opts.add('insecure-port', '8080')\n a"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 33931, "end": 33940, "context": " 'true')\n scheduler_opts.add('master', 'http://127.0.0.1:8080')\n\n cmd = ['snap', 'set', 'kube-apiserver"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 33793, "end": 33802, "context": "'true')\n controller_opts.add('master', 'http://127.0.0.1:8080')\n\n scheduler_opts.add('v', '2')\n sche"}]
/* * machine_kexec.c - handle transition of Linux booting another kernel * Copyright (C) 2002-2003 Eric Biederman <ebiederm@xmission.com> * * GameCube/ppc32 port Copyright (C) 2004 Albert Herranz * LANDISK/sh4 supported by kogiidena * * This source code is licensed under the GNU General Public License, * Version 2. See the file COPYING for more details. */ #include <linux/mm.h> #include <linux/kexec.h> #include <linux/delay.h> #include <linux/reboot.h> #include <asm/pgtable.h> #include <asm/pgalloc.h> #include <asm/mmu_context.h> #include <asm/io.h> #include <asm/cacheflush.h> typedef NORET_TYPE void (*relocate_new_kernel_t)( unsigned long indirection_page, unsigned long reboot_code_buffer, unsigned long start_address, unsigned long vbr_reg) ATTRIB_NORET; extern const unsigned char relocate_new_kernel[]; extern const unsigned int relocate_new_kernel_size; extern void *gdb_vbr_vector; void machine_shutdown(void) { } void machine_crash_shutdown(struct pt_regs *regs) { } /* * Do what every setup is needed on image and the * reboot code buffer to allow us to avoid allocations * later. */ int machine_kexec_prepare(struct kimage *image) { return 0; } void machine_kexec_cleanup(struct kimage *image) { } static void kexec_info(struct kimage *image) { int i; printk("kexec information\n"); for (i = 0; i < image->nr_segments; i++) { printk(" segment[%d]: 0x%08x - 0x%08x (0x%08x)\n", i, (unsigned int)image->segment[i].mem, (unsigned int)image->segment[i].mem + image->segment[i].memsz, (unsigned int)image->segment[i].memsz); } printk(" start : 0x%08x\n\n", (unsigned int)image->start); } /* * Do not allocate memory (or fail in any way) in machine_kexec(). * We are past the point of no return, committed to rebooting now. 
*/ NORET_TYPE void machine_kexec(struct kimage *image) { unsigned long page_list; unsigned long reboot_code_buffer; unsigned long vbr_reg; relocate_new_kernel_t rnk; #if defined(CONFIG_SH_STANDARD_BIOS) vbr_reg = ((unsigned long )gdb_vbr_vector) - 0x100; #else vbr_reg = 0x80000000; // dummy #endif /* Interrupts aren't acceptable while we reboot */ local_irq_disable(); page_list = image->head; /* we need both effective and real address here */ reboot_code_buffer = (unsigned long)page_address(image->control_code_page); /* copy our kernel relocation code to the control code page */ memcpy((void *)reboot_code_buffer, relocate_new_kernel, relocate_new_kernel_size); kexec_info(image); flush_cache_all(); /* now call it */ rnk = (relocate_new_kernel_t) reboot_code_buffer; (*rnk)(page_list, reboot_code_buffer, image->start, vbr_reg); } /* crashkernel=size@addr specifies the location to reserve for * a crash kernel. By reserving this memory we guarantee * that linux never sets it up as a DMA target. * Useful for holding code to do something appropriate * after a kernel panic. */ static int __init parse_crashkernel(char *arg) { unsigned long size, base; size = memparse(arg, &arg); if (*arg == '@') { base = memparse(arg+1, &arg); /* FIXME: Do I want a sanity check * to validate the memory range? */ crashk_res.start = base; crashk_res.end = base + size - 1; } return 0; } early_param("crashkernel", parse_crashkernel);
C
Apache-2.0
ghsecuritylab/tomato-sabai/release/src-rt-6.x/linux/linux-2.6/arch/sh/kernel/machine_kexec.c
422dfcca-918f-4518-8b06-f1d1e4828d27
[{"tag": "NAME", "value": "Albert Herranz", "start": 186, "end": 200, "context": "com>\n *\n * GameCube/ppc32 port Copyright (C) 2004 Albert Herranz\n * LANDISK/sh4 supported by kogiidena\n *\n * This "}, {"tag": "EMAIL", "value": "ebiederm@xmission.com", "start": 118, "end": 139, "context": "ernel\n * Copyright (C) 2002-2003 Eric Biederman <ebiederm@xmission.com>\n *\n * GameCube/ppc32 port Copyright (C) 2004 Alb"}, {"tag": "NAME", "value": "Eric Biederman", "start": 101, "end": 115, "context": "booting another kernel\n * Copyright (C) 2002-2003 Eric Biederman <ebiederm@xmission.com>\n *\n * GameCube/ppc32 por"}]
[{"tag": "NAME", "value": "Albert Herranz", "start": 186, "end": 200, "context": "com>\n *\n * GameCube/ppc32 port Copyright (C) 2004 Albert Herranz\n * LANDISK/sh4 supported by kogiidena\n *\n * This "}, {"tag": "EMAIL", "value": "ebiederm@xmission.com", "start": 118, "end": 139, "context": "ernel\n * Copyright (C) 2002-2003 Eric Biederman <ebiederm@xmission.com>\n *\n * GameCube/ppc32 port Copyright (C) 2004 Alb"}, {"tag": "NAME", "value": "Eric Biederman", "start": 101, "end": 115, "context": "booting another kernel\n * Copyright (C) 2002-2003 Eric Biederman <ebiederm@xmission.com>\n *\n * GameCube/ppc32 por"}]
--- - TutorialInfo:/es Topic:Modeling Level:Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than an hour FCVersion:0.17 or higher Files:[https://github.com/FreeCAD/Examples/blob/master/Basic_Part_Design_Tutorial_Example_017_Files/Basic_Part_Design_Tutorial_017.fcstd Basic Part Design for v0.17] --- # Basic Part Design Tutorial/es <div class="mw-translate-fuzzy"> </div> <div class="mw-translate-fuzzy"> Este tutorial presenta al nuevo usuario algunas de las herramientas y técnicas utilizadas en [ Part Design Workbench](PartDesign_Workbench.md). Este tutorial no es una guía completa de Part Design Workbench y muchas de las herramientas y capacidades no están cubiertas. Este tutorial llevará al usuario a través de los pasos necesarios para modelar la parte que se muestra en la siguiente imagen usando \"sketches\". </div> ![](images/Tut17_final_refined.png ) Un video de todo el proceso esta aquí: <https://youtu.be/geIrH1cOCzc> <div class="mw-translate-fuzzy"> (cada sección tiene su propio video dividido a continuación) </div> ## Antes de comenzar ## The Task <div class="mw-translate-fuzzy"> ## La tarea En este tutorial, utilizará el Ambiente de trabajo Part Design para crear un modelo sólido 3D de la pieza que se muestra en [ Drawing](Drawing_Workbench.md) a continuación. Se dan todas las dimensiones necesarias para completar esta tarea. Comenzarás por crear una forma central a partir de un boceto base y luego construir sobre esa forma, agregando lo que se conoce como \"Características\". Estas características agregarán material o eliminarán material del sólido mediante el uso de sketches adicionales y las operaciones de funciones que lo acompañan. Este tutorial no usará todas las características y herramientas disponibles dentro del Ambiente de trabajo Part Design, pero debe usar lo suficiente para brindar al usuario de este tutorial una base mínima sobre la cual desarrollar sus conocimientos y habilidades. 
</div> ## La Pieza ![](images/Tutorial_Drawing_Sheet.png ) ## Constructing The Part ### Startup <div class="mw-translate-fuzzy"> ## Construyendo la pieza ### Puesta en marcha Primero, asegúrese de estar en el Anbiente de trabajo Part Design. Una vez allí, querrá crear un nuevo documento si aún no lo ha hecho. Es un buen hábito guardar su trabajo a menudo, así que antes de nada guarde el nuevo documento, dándole el nombre que guste. </div> <div class="mw-translate-fuzzy"> Todo el trabajo en Part Design comienza con un [Body](Glossary#Body.md). Luego construiremos el sólido dentro del cuerpo comenzando con un [sketch](Glossary#Sketch.md).   1. Haga clic en ![ 32px](images/_PartDesign_Body.png ) [Create new body](PartDesign_Body.md) para crear y activar un nuevo contenedor de cuerpo. \"Nota: este paso puede ser omitido. Al crear un boceto, si no se encuentra un cuerpo existente, se creará y activará automáticamente uno nuevo\". 2. Haga clic en <img alt="" src=images/PartDesign_NewSketch.png style="width:32px;"> [Create new sketch](PartDesign_NewSketch.md). Esto creará el boceto/sketch dentro del cuerpo recién creado. 3. Necesitamos definir dónde se adjuntará el boceto/sketch. Lo adjuntaremos a un plano desde el [Origin](Glossary#Origin.md) del Cuerpo. 4. En la pestaña Tareas de la vista Combo, seleccione **YZ\_Plane** en la lista y presione {{KEY | OK}}: </div> 1. Click on <img alt="" src=images/PartDesign_Body.svg style="width:32px;"> [Create new body](PartDesign_Body.md) to create and activate a new Body Container. *Note: this step can be omitted. When creating a sketch, if no existing Body is found, a new one will be automatically created and activated.* 2. Click on <img alt="" src=images/PartDesign_NewSketch.svg style="width:32px;"> [Create new sketch](PartDesign_NewSketch.md). This will create the sketch within the just created body. 3. We need to define where the sketch will be attached. We will attach it to a plane from the Body´s [Origin](Glossary#Origin.md). 
4. In the Tasks tab from the Combo view, select **YZ\_Plane** in the list and press **OK**: <img alt="" src=images/Tut17_sketchplanes.png style="width:250px;"> \"Nota: es posible que el botón OK no esté visible si el panel lateral no es lo suficientemente ancho. Puede hacerlo más ancho arrastrando su borde derecho. Coloque el puntero del mouse sobre el borde; cuando el puntero se convierta en una flecha bidireccional, mantenga presionado el botón izquierdo del mouse y arrastre\". Una vez que hace clic en Aceptar, FreeCAD cambia automáticamente a [ Sketcher workbench](Sketcher_Workbench.md) y abre el boceto/sketch en modo de edición: ![](images/Tut17_sketcherempty.png ) ### Create the sketch <div class="mw-translate-fuzzy"> ### Crear el Sketch A continuación, deberá usar la herramienta ![ 32px](images/_Sketcher_CreatePolyline.png ) [Polyline](Sketcher_CreatePolyline.md) y hacer una forma más o menos así en la imagen siguiente. No necesita ser perfecto ya que la forma final se hace con restricciones. Una vez que tenga la forma básica, comenzaremos a aplicar las restricciones. Si tenía restricciones automáticas activadas, algunas de estas restricciones se aplicarán automáticamente, de lo contrario, haga lo siguiente. </div> <div class="mw-translate-fuzzy"> 1. Seleccione las dos líneas horizontales con el mouse haciendo clic en ellas, y una vez seleccionada, haga clic en la restricción horizontal ![ 32px](images/_Constraint_Horizontal.png ). 2. Seleccione la línea vertical a la derecha y luego haga clic en la restricción vertical <img alt="" src=images/Constraint_Vertical.png style="width:32px;">. 3. Seleccione los puntos de inicio y fin de su polilínea y haga clic en la restricción de coincidencia ![ 32px](images/_Constraint_PointOnPoint.png ) para cerrar la polilínea. 4. Seleccione la línea horizontal inferior y la línea vertical derecha y aplique ![ 32px](images/_Constraint_EqualLength.png ) restricción de igualdad. 5. 
Seleccione la línea horizontal o vertical y aplique la correspondiente restricción de distancia vertical ![ 32px](images/_Constraint_HorizontalDistance.png ) horizontal o ![ 32px](images/_Constraint_VerticalDistance.png ) y asígnele un valor de 26 mm . 6. Seleccione la línea horizontal superior y aplique la restricción de distancia horizontal y asígnele un valor de 5 mm 7. Seleccione el punto inferior derecho, (vertice) de la línea horizontal Origen y luego el punto central de la cuadrícula y aplique la restricción de coincidencia ![ 32px](images/_Constraint_PointOnPoint.png ) para fijar la forma. </div> En este punto, debe tener un boceto totalmente restringido, tal como lo indica el cambio de color y el mensaje que se muestra en la vista combinada. Ahora debería verse exactamente como la imagen de abajo. ![](images/Tut17_profile.png ) Ahora en la vista combinada, haga clic en el botón Cerrar para salir del modo de edición de bocetos y seleccione ![ 32px](images/_PartDesign_Pad.png ) Pad desde la barra de herramientas o desde el menú Part Disign. Esto le dará un diálogo de Pad en la Combo View. Usando ese diálogo, primero usando el menú desplegable Tipo, seleccione dos dimensiones. El dibujo presentado al comienzo de este tutorial muestra que la pieza mide 53 mm de largo. Lo hacemos rellenando nuestro boceto en ambos sentidos desde el plano central para compensar esa distancia, es decir, hacer que el pad sea simétrico en relación con el plano de boceto. La razón de esto se ve más adelante al crear otros rasgos. Por ahora, dado que queremos que tenga 53 mm de longitud en total, ingresaremos 26.5 para Longitud y 26.5 nuevamente para la segunda longitud. Alternativamente, puede proporcionar una longitud única de 53 mm y hacer clic en la casilla de verificación simétrica al plano. Una vez hecho esto, ahora tenemos nuestra base sólida sobre la cual agregaremos características adicionales para construir nuestra pieza. 
Un video de los pasos utilizados en esta parte del tutorial está aquí: <https://youtu.be/cUyPnCMeTgg> ### Features with pocket and external geometry <div class="mw-translate-fuzzy"> ### Funciones con hueco y geometría externa Con el mouse o los iconos de la vista, gire el modelo para que pueda ver su parte posterior. Una vez que la parte posterior de la parte esté visible, seleccione la cara posterior haciendo clic en ella como se ve en la siguiente imagen. </div> ![](images/PD_WB_Tutorial003.png ) Después de seleccionar la cara, haga clic en el ícono Nuevo boceto en la barra de herramientas o en el menú Part Design y eso correlacionará nuestro siguiente boceto con la cara posterior de la pieza. Ahora seleccione la herramienta de rectángulo ![ 32px](images/_Sketcher_CreateRectangle.png ) y coloque un rectángulo en la cara posterior de la pieza de forma similar a la que se muestra a continuación. Ahora, siguiendo los pasos enumerados, restrinja el boceto. <div class="mw-translate-fuzzy"> 1. Seleccione una de las líneas horizontales y aplique una restricción de distancia horizontal y un valor de 5 mm. 2. Seleccione una de las líneas verticales y asígnele una restricción de distancia vertical y un valor de 11 mm. 3. Utilice ![ 32px](images/_Sketcher_External.png ) Herramienta de geometría externa y seleccione el vértice superior derecho de la cara y haga clic en él para que se le proporcione un punto de la geometría externa para vincular nuestro boceto. </div> ![](images/tut17_slot_unconstrained.png ) 1. Right click to end the External geometry mode 2. Select that point you just made available with the External geometry tool and then select the upper right vertex of the rectangle and click on the coincident constraint. At this point the sketch should be fully constrained and look like the next image. 
![](images/tut17_slote_constrained.png ) Una vez hecho esto, haga clic en el botón Cerrar en la parte superior de la pestaña Tareas en la ventana de Combo View, luego seleccione ![ 32px](images/_PartDesign_Pocket.png ) Herramienta Pocket/hueco de la barra de herramientas o del menú Part Design. Usar esta herramienta es lo opuesto a la herramienta Pad. A medida que la herramienta Pad agrega material a la pieza, la herramienta Pocket/hueco elimina el material de la pieza. Ambas operaciones se llaman \"features\". En esta operación de hueco, queremos seleccionar \"A través de todo\" en el menú desplegable y luego hacer clic en el botón Aceptar. Para la próxima operación, asegúrese de que esté seleccionado \"Pocket/hueco\" en la vista Model tree y, una vez hecho, haga clic en <img alt="" src=images/PartDesign_Mirrored.png style="width:32px;"> la función Espejo en la barra de herramientas o desde el menú Part Design. En el cuadro de diálogo Espejo en el Combo View, seleccione Eje de boceto horizontal en el Plane pulldown menu. Luego haz clic en OK. La función Mirror funciona de esta manera porque la característica base de nuestro modelo fue implementada en ambos sentidos desde el plano horizontal en la primera operación con el boceto base. Si todo ha ido bien, ahora deberías tener una parte que se parece a la imagen de abajo después de que orbites alrededor del frente. ![](images/tut17_profilewithslots.png ) Un video de los pasos utilizados en esta parte del tutorial está aquí: <https://youtu.be/wiGXV9G7mrM> ### Features with pad and external geometry <div class="mw-translate-fuzzy"> ### Funciones con pad y geometría externa Después de echar un vistazo, orbitar alrededor y una vez más seleccionar la cara posterior de la pieza y seleccionar esa cara para mapear el siguiente boceto. </div> ![](images/tut17_profilewithslotsrearplane.png ) Seleccione Nuevo boceto y haga un nuevo rectángulo de similar manera a la que se muestra a continuación en la siguiente imagen. 
Luego proceda a agregar restricciones dimensionales al rectángulo. 1. Seleccione una línea horizontal y aplique una restricción de distancia horizontal con un valor de 16.7. 2. Seleccione una línea vertical y aplique una restricción de distancia vertical de 7 mm 3. Utilizando la herramienta de geometría externa, seleccione el vértice superior izquierdo de la cara de la pieza. ![](images/tut17_sidblockunconstrained.png ) Ahora seleccionando el vértice superior izquierdo del rectángulo y el punto de geometría externa, haga clic en la restricción coincidente para restringir completamente el boceto. ![](images/tut17_sideblockconstraind.png ) Close the Sketcher. A continuación, haremos clic en la función Pad y en el cuadro de diálogo Pad, en la vista combinada, queremos una longitud de 26 mm, dejando el tipo como dimensión y luego marcando la casilla Invertir. El uso de la casilla de verificación Invertido hará que el Pad entre en la pieza en lugar de alejarse de ésta. Esta operación proporciona el siguiente resultado. ![](images/tut17_sideblock.png ) Una vez más, use la función Espejo para obtener la segunda pad. Primero asegúrese de que el Pad creado esté seleccionado en la vista en árbol, luego haga clic en Espejo en la barra de herramientas para seleccionarlo desde el menú Part Desgin. Repetiremos la operación que hicimos para Pocket/hueco arriba y seleccionaremos el eje de croquis horizontal en el menú desplegable del Plano. ![](images/tut17_profilewithsideblocks.png ) Un video de los pasos utilizados en esta parte del tutorial está aquí: <https://youtu.be/Ido1owp8ubc> ### Feature with pocket and external geometry <div class="mw-translate-fuzzy"> ### Detalles con pocket/hueco y geometría externa En este punto, orbitando la parte que está al frente, podemos ver que nuestra parte ahora está empezando a parecerse a la parte del dibujo acotado al comienzo de este tutorial. 
Una vez que tenga la vista del frente, haga clic en la cara inclinada con el mouse para seleccionar la cara que usaremos para el siguiente boceto. </div> ![](images/tut17_innerplane.png ) Aquí utilizaremos la herramienta rectangular y colocaremos un rectángulo en nuestro boceto y, una vez hecho esto, aplicaremos las siguientes restricciones. <div class="mw-translate-fuzzy"> 1. Seleccione una línea horizontal y una línea vertical, y después de que ambos estén seleccionados, haga clic en la restricción Igual. 2. Seleccione una línea horizontal o vertical y aplique la restricción de distancia horizontal o vertical correspondiente con un valor de 17 mm 3. Usando la herramienta de geometría externa, seleccione el vértice superior derecho como se muestra en la imagen a continuación. </div> ![](images/tut17_rechtangleholeunconstrained.png ) Ahora usando las dimensiones del dibujo, aplique las siguientes restricciones. 1. Seleccione el punto de geometría externa y el vértice superior derecho del ahora cuadrado boceto y aplique una restricción de distancia horizontal de 7 mm 2. Seleccione el punto de geometría externa y el vértice superior derecho del ahora cuadrado boceto y aplique una restricción de distancia vertical de 11 mm El resultado debería ser el siguiente. ![](images/tut17_rectangleholeconstrained.png ) En este punto, si tuviéramos que simplemente ahuecar este boceto, el orificio resultante sería perpendicular a la cara inclinada a la que está asignado, y esto no es lo que queremos. ![](images/tut17_wrongplaneforpocket.png ) Queremos que el orificio sea perpendicular a la cara posterior, pero las dimensiones proyectadas no son las dimensiones de 17 mm x 17 mm que se dan en el dibujo. Ahora, podemos hacer los cálculos y calcular las dimensiones necesarias, o podemos usar las herramientas provistas en FreeCAD para hacer esa proyección para nosotros. 
Un video de los pasos utilizados en esta parte del tutorial está aquí: <https://youtu.be/x4d5nZPWCLQ> <div class="mw-translate-fuzzy"> Para crear un hueco que tenga el rectángulo inclinado como salida, dibujamos un nuevo rectángulo en la parte posterior, usando la proyección del rectángulo inclinado como referencia externa. Orbita el sólido alrededor para ver la cara posterior de la parte una vez más y selecciona la cara posterior para mapear el boceto final. </div> ![](images/tut17_profilewithsideblocksrearplane.png ) Seleccione **Nuevo boceto** ![ 32px](images/_PartDesign_NewSketch.png ) desde la barra de herramientas o el menú Part Design. Ahora en el modo de edición de bosquejos, no vemos el rectángulo esbozado de la pendiente. Para que sea seleccionable, cambiamos la ventana Combo View a la pestaña Modelo y seleccionamos el último boceto realizado (Sketch003) en el plano inclinado. Luego, usando la barra espaciadora, hazla visible. Luego, seleccione la función de espejo arriba (mirrored001) y nuevamente usando la barra espaciadora, escóndalo. Entonces deberías ver el rectángulo dentro de la Vista 3D. Puede continuar trabajando con la pestaña del modelo visible o volver a la pestaña de tareas. Usando la herramienta de geometría externa ![ 32px](images/_Sketcher_External.png ), seleccione los bordes horizontales superior e inferior del rectángulo inclinado. ![](images/tut17_rectangleunconstrained.png ) 1. Seleccione el vértice superior izquierdo del rectángulo y el punto superior izquierdo de la geometría externa y haga clic en la restricción de coincidencia. 2. Haga clic en el vértice inferior derecho del rectángulo y en el punto inferior derecho de la geometría externa y haga clic en la restricción de coincidencia. Y deberíamos terminar con esto. 
![](images/tut17_rectangleconstrained.png ) Para el último paso de este tutorial, cierre la ventana del sketcher usando la edición close o fish editing desde el menú contextual de sketch004 y luego seleccione la función Pocket/hueco de ![ 32px](images/_PartDesign_Pocket.png ) desde la barra de herramientas o desde el menú Part Design. Desde el menú desplegable Type, seleccione **A través de todos** y haga clic en el botón Aceptar. ![](images/Tut17_final.png ) En este punto, verá algunas líneas que provienen de las características de intersección. En este caso, el \"bloque lateral\" se cruza con el \"perfil base\", lo que permite que aparezca como un bloque triangular sobre el perfil. Para eliminar estas líneas, puede activar \"refinar la forma\" en la configuración de diseño de la pieza o, para ahorrar algo de velocidad de procesamiento y aún tener estas líneas durante la construcción, enciéndala individualmente en cada detalle, lo que creará dichas líneas. El ajuste en el nivel de Feature se puede hacer en la pestaña \"datos\" de la Feature. Establezca la ***refine* property** en TRUE para invocar el refinado. ![](images/Tut17_refine.png ) ![](images/Tut17_final_refined.png ) Un video de estos pasos del tutorial está aquí: <https://youtu.be/UYI0gvxCYeI> Este tutorial y tu modelo están completos. ## Recursos Adicionales - Archivo de FreeCAD para comparación (hecho con 0.17) [Descarga](https://github.com/FreeCAD/Examples/blob/master/Basic_Part_Design_Tutorial_Example_017_Files/Basic_Part_Design_Tutorial_017.fcstd) {{PartDesign Tools navi }} {{Sketcher Tools navi}} --- ![](images/Right_arrow.png) [documentation index](../README.md) > [PartDesign](Category_PartDesign.md) > [Sketcher](Category_Sketcher.md) > Basic Part Design Tutorial/es
Markdown
CC0-1.0
dwhr-pi/FreeCAD-documentation/wiki/translations/es/Basic_Part_Design_Tutorial.md
dd7bc1f3-8527-4e64-8fcb-c23aaf927efc
[{"tag": "NAME", "value": "HarryGeier", "start": 66, "end": 76, "context": "fo:/es Topic:Modeling Level:Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than"}, {"tag": "USERNAME", "value": "HarryGeier", "start": 85, "end": 95, "context": "ling Level:Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than an hour FCVersio"}, {"tag": "NAME", "value": "HarryGeier", "start": 98, "end": 108, "context": "Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than an hour FCVersion:0.17 or hig"}]
[{"tag": "NAME", "value": "HarryGeier", "start": 66, "end": 76, "context": "fo:/es Topic:Modeling Level:Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than"}, {"tag": "USERNAME", "value": "HarryGeier", "start": 85, "end": 95, "context": "ling Level:Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than an hour FCVersio"}, {"tag": "NAME", "value": "HarryGeier", "start": 98, "end": 108, "context": "Beginner Author:HarryGeier ([[User:HarryGeier HarryGeier]])|Time:Less than an hour FCVersion:0.17 or hig"}]
/* -*- Mode: C++; tab-width: 4; indent-tabs-mode: nil; c-basic-offset: 4; fill-column: 100 -*- */ /* * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include <config.h> #include <assert.h> #include <unistd.h> #include "Ssl.hpp" #ifdef __FreeBSD__ #include <pthread_np.h> #endif #include <sys/syscall.h> #include <Util.hpp> extern "C" { // Multithreading support for OpenSSL. // Not needed in recent (1.x?) versions. struct CRYPTO_dynlock_value { public: void lock() { _mutex.lock(); } void unlock() { _mutex.unlock(); } private: std::mutex _mutex; }; } namespace ssl { // The locking API is removed from 1.1 onward. #if OPENSSL_VERSION_NUMBER < 0x10100000L /// Manages the SSL locks. class Lock { public: Lock() { for (int x = 0; x < CRYPTO_num_locks(); ++x) { _mutexes.emplace_back(new std::mutex); } } void lock(int mode, int n) { assert(n < CRYPTO_num_locks() && "Unexpected lock index"); if (mode & CRYPTO_LOCK) { _mutexes[n]->lock(); } else { _mutexes[n]->unlock(); } } private: std::vector<std::unique_ptr<std::mutex>> _mutexes; }; /// Locks are shared across SSL Contexts (by openssl design). 
static inline void lock(int mode, int n, const char* /*file*/, int /*line*/) { static ssl::Lock lock; lock.lock(mode, n); } #endif } // namespace ssl std::unique_ptr<SslContext> ssl::Manager::ServerInstance(nullptr); std::unique_ptr<SslContext> ssl::Manager::ClientInstance(nullptr); SslContext::SslContext(const std::string& certFilePath, const std::string& keyFilePath, const std::string& caFilePath, const std::string& cipherList, ssl::CertificateVerification verification) : _ctx(nullptr) , _verification(verification) { const std::vector<char> rand = Util::rng::getBytes(512); RAND_seed(&rand[0], rand.size()); #if OPENSSL_VERSION_NUMBER >= 0x0907000L && OPENSSL_VERSION_NUMBER < 0x10100003L OPENSSL_config(nullptr); #endif #if OPENSSL_VERSION_NUMBER >= 0x10100003L OPENSSL_init_ssl(OPENSSL_INIT_LOAD_CONFIG, nullptr); #else SSL_library_init(); SSL_load_error_strings(); OpenSSL_add_all_algorithms(); #endif CRYPTO_set_locking_callback(&ssl::lock); CRYPTO_set_id_callback(&SslContext::id); CRYPTO_set_dynlock_create_callback(&SslContext::dynlockCreate); CRYPTO_set_dynlock_lock_callback(&SslContext::dynlock); CRYPTO_set_dynlock_destroy_callback(&SslContext::dynlockDestroy); // Create the Context. We only have one, // as we don't expect/support different servers in same process. #if OPENSSL_VERSION_NUMBER >= 0x10100000L _ctx = SSL_CTX_new(TLS_method()); SSL_CTX_set_min_proto_version(_ctx, TLS1_2_VERSION); // TLS v1.2 is the minimum. 
#else _ctx = SSL_CTX_new(SSLv23_method()); SSL_CTX_set_options(_ctx, SSL_OP_NO_SSLv2); SSL_CTX_set_options(_ctx, SSL_OP_NO_SSLv3); SSL_CTX_set_options(_ctx, SSL_OP_NO_TLSv1); SSL_CTX_set_options(_ctx, SSL_OP_NO_TLSv1_1); #endif // SSL_CTX_set_default_passwd_cb(_ctx, &privateKeyPassphraseCallback); ERR_clear_error(); SSL_CTX_set_options(_ctx, SSL_OP_ALL); try { int errCode = 0; if (!caFilePath.empty()) { errCode = SSL_CTX_load_verify_locations(_ctx, caFilePath.c_str(), nullptr); if (errCode != 1) { std::string msg = getLastErrorMsg(); throw std::runtime_error(std::string("Cannot load CA file/directory at ") + caFilePath + " (" + msg + ')'); } } if (!keyFilePath.empty()) { errCode = SSL_CTX_use_PrivateKey_file(_ctx, keyFilePath.c_str(), SSL_FILETYPE_PEM); if (errCode != 1) { std::string msg = getLastErrorMsg(); throw std::runtime_error(std::string("Error loading private key from file ") + keyFilePath + " (" + msg + ')'); } } if (!certFilePath.empty()) { errCode = SSL_CTX_use_certificate_chain_file(_ctx, certFilePath.c_str()); if (errCode != 1) { std::string msg = getLastErrorMsg(); throw std::runtime_error(std::string("Error loading certificate from file ") + certFilePath + " (" + msg + ')'); } } SSL_CTX_set_verify(_ctx, SSL_VERIFY_NONE, nullptr /*&verifyServerCallback*/); SSL_CTX_set_cipher_list(_ctx, cipherList.c_str()); SSL_CTX_set_verify_depth(_ctx, 9); // The write buffer may re-allocate, and we don't mind partial writes. SSL_CTX_set_mode(_ctx, SSL_MODE_ENABLE_PARTIAL_WRITE | SSL_MODE_ACCEPT_MOVING_WRITE_BUFFER); SSL_CTX_set_session_cache_mode(_ctx, SSL_SESS_CACHE_OFF); initDH(); initECDH(); } catch (...) 
{ SSL_CTX_free(_ctx); _ctx = nullptr; throw; } } SslContext::~SslContext() { SSL_CTX_free(_ctx); EVP_cleanup(); ERR_free_strings(); CRYPTO_set_locking_callback(0); CRYPTO_set_id_callback(0); CONF_modules_free(); } unsigned long SslContext::id() { #ifdef __linux__ return syscall(SYS_gettid); #elif defined(__FreeBSD__) return pthread_getthreadid_np(); #else #error Implement for your platform #endif } CRYPTO_dynlock_value* SslContext::dynlockCreate(const char* /*file*/, int /*line*/) { return new CRYPTO_dynlock_value; } void SslContext::dynlock(int mode, struct CRYPTO_dynlock_value* lock, const char* /*file*/, int /*line*/) { if (mode & CRYPTO_LOCK) { lock->lock(); } else { lock->unlock(); } } void SslContext::dynlockDestroy(struct CRYPTO_dynlock_value* lock, const char* /*file*/, int /*line*/) { delete lock; } void SslContext::initDH() { #ifndef OPENSSL_NO_DH // On OpenSSL 1.1 and newer use the auto parameters. #if OPENSSL_VERSION_NUMBER >= 0x10100003L SSL_CTX_set_dh_auto(_ctx, 1); #else // 2048-bit MODP Group with 256-bit prime order subgroup (RFC5114) static const unsigned char dh2048_p[] = { 0x87,0xA8,0xE6,0x1D,0xB4,0xB6,0x66,0x3C,0xFF,0xBB,0xD1,0x9C, 0x65,0x19,0x59,0x99,0x8C,0xEE,0xF6,0x08,0x66,0x0D,0xD0,0xF2, 0x5D,0x2C,0xEE,0xD4,0x43,0x5E,0x3B,0x00,0xE0,0x0D,0xF8,0xF1, 0xD6,0x19,0x57,0xD4,0xFA,0xF7,0xDF,0x45,0x61,0xB2,0xAA,0x30, 0x16,0xC3,0xD9,0x11,0x34,0x09,0x6F,0xAA,0x3B,0xF4,0x29,0x6D, 0x83,0x0E,0x9A,0x7C,0x20,0x9E,0x0C,0x64,0x97,0x51,0x7A,0xBD, 0x5A,0x8A,0x9D,0x30,0x6B,0xCF,0x67,0xED,0x91,0xF9,0xE6,0x72, 0x5B,0x47,0x58,0xC0,0x22,0xE0,0xB1,0xEF,0x42,0x75,0xBF,0x7B, 0x6C,0x5B,0xFC,0x11,0xD4,0x5F,0x90,0x88,0xB9,0x41,0xF5,0x4E, 0xB1,0xE5,0x9B,0xB8,0xBC,0x39,0xA0,0xBF,0x12,0x30,0x7F,0x5C, 0x4F,0xDB,0x70,0xC5,0x81,0xB2,0x3F,0x76,0xB6,0x3A,0xCA,0xE1, 0xCA,0xA6,0xB7,0x90,0x2D,0x52,0x52,0x67,0x35,0x48,0x8A,0x0E, 0xF1,0x3C,0x6D,0x9A,0x51,0xBF,0xA4,0xAB,0x3A,0xD8,0x34,0x77, 0x96,0x52,0x4D,0x8E,0xF6,0xA1,0x67,0xB5,0xA4,0x18,0x25,0xD9, 
0x67,0xE1,0x44,0xE5,0x14,0x05,0x64,0x25,0x1C,0xCA,0xCB,0x83, 0xE6,0xB4,0x86,0xF6,0xB3,0xCA,0x3F,0x79,0x71,0x50,0x60,0x26, 0xC0,0xB8,0x57,0xF6,0x89,0x96,0x28,0x56,0xDE,0xD4,0x01,0x0A, 0xBD,0x0B,0xE6,0x21,0xC3,0xA3,0x96,0x0A,0x54,0xE7,0x10,0xC3, 0x75,0xF2,0x63,0x75,0xD7,0x01,0x41,0x03,0xA4,0xB5,0x43,0x30, 0xC1,0x98,0xAF,0x12,0x61,0x16,0xD2,0x27,0x6E,0x11,0x71,0x5F, 0x69,0x38,0x77,0xFA,0xD7,0xEF,0x09,0xCA,0xDB,0x09,0x4A,0xE9, 0x1E,0x1A,0x15,0x97, }; static const unsigned char dh2048_g[] = { 0x3F,0xB3,0x2C,0x9B,0x73,0x13,0x4D,0x0B,0x2E,0x77,0x50,0x66, 0x60,0xED,0xBD,0x48,0x4C,0xA7,0xB1,0x8F,0x21,0xEF,0x20,0x54, 0x07,0xF4,0x79,0x3A,0x1A,0x0B,0xA1,0x25,0x10,0xDB,0xC1,0x50, 0x77,0xBE,0x46,0x3F,0xFF,0x4F,0xED,0x4A,0xAC,0x0B,0xB5,0x55, 0xBE,0x3A,0x6C,0x1B,0x0C,0x6B,0x47,0xB1,0xBC,0x37,0x73,0xBF, 0x7E,0x8C,0x6F,0x62,0x90,0x12,0x28,0xF8,0xC2,0x8C,0xBB,0x18, 0xA5,0x5A,0xE3,0x13,0x41,0x00,0x0A,0x65,0x01,0x96,0xF9,0x31, 0xC7,0x7A,0x57,0xF2,0xDD,0xF4,0x63,0xE5,0xE9,0xEC,0x14,0x4B, 0x77,0x7D,0xE6,0x2A,0xAA,0xB8,0xA8,0x62,0x8A,0xC3,0x76,0xD2, 0x82,0xD6,0xED,0x38,0x64,0xE6,0x79,0x82,0x42,0x8E,0xBC,0x83, 0x1D,0x14,0x34,0x8F,0x6F,0x2F,0x91,0x93,0xB5,0x04,0x5A,0xF2, 0x76,0x71,0x64,0xE1,0xDF,0xC9,0x67,0xC1,0xFB,0x3F,0x2E,0x55, 0xA4,0xBD,0x1B,0xFF,0xE8,0x3B,0x9C,0x80,0xD0,0x52,0xB9,0x85, 0xD1,0x82,0xEA,0x0A,0xDB,0x2A,0x3B,0x73,0x13,0xD3,0xFE,0x14, 0xC8,0x48,0x4B,0x1E,0x05,0x25,0x88,0xB9,0xB7,0xD2,0xBB,0xD2, 0xDF,0x01,0x61,0x99,0xEC,0xD0,0x6E,0x15,0x57,0xCD,0x09,0x15, 0xB3,0x35,0x3B,0xBB,0x64,0xE0,0xEC,0x37,0x7F,0xD0,0x28,0x37, 0x0D,0xF9,0x2B,0x52,0xC7,0x89,0x14,0x28,0xCD,0xC6,0x7E,0xB6, 0x18,0x4B,0x52,0x3D,0x1D,0xB2,0x46,0xC3,0x2F,0x63,0x07,0x84, 0x90,0xF0,0x0E,0xF8,0xD6,0x47,0xD1,0x48,0xD4,0x79,0x54,0x51, 0x5E,0x23,0x27,0xCF,0xEF,0x98,0xC5,0x82,0x66,0x4B,0x4C,0x0F, 0x6C,0xC4,0x16,0x59, }; DH* dh = DH_new(); if (!dh) { std::string msg = getLastErrorMsg(); throw std::runtime_error("Error creating Diffie-Hellman parameters: " + msg); } #if OPENSSL_VERSION_NUMBER >= 0x10100003L // OpenSSL 
v1.1.0 has public API changes // p, g and length of the Diffie-Hellman param can't be set directly anymore, // instead DH_set0_pqg and DH_set_length are used BIGNUM* p = BN_bin2bn(dh2048_p, sizeof(dh2048_p), nullptr); BIGNUM* g = BN_bin2bn(dh2048_g, sizeof(dh2048_g), nullptr); if ((DH_set0_pqg(dh, p, nullptr, g) == 0) || (DH_set_length(dh, 256) == 0)) #else dh->p = BN_bin2bn(dh2048_p, sizeof(dh2048_p), 0); dh->g = BN_bin2bn(dh2048_g, sizeof(dh2048_g), 0); dh->length = 256; if ((!dh->p) || (!dh->g)) #endif { DH_free(dh); throw std::runtime_error("Error creating Diffie-Hellman parameters"); } SSL_CTX_set_tmp_dh(_ctx, dh); SSL_CTX_set_options(_ctx, SSL_OP_SINGLE_DH_USE); DH_free(dh); #endif #endif } void SslContext::initECDH() { #ifndef OPENSSL_NO_ECDH #if OPENSSL_VERSION_NUMBER >= 0x10100003L #if OPENSSL_VERSION_NUMBER >= 0x10200000L #define DEFAULT_TLS_GROUPS "X448:X25519:P-521:P-384:P-256:ffdhe2048:ffdhe3072:ffdhe4096:ffdhe6144:ffdhe8192" #elif OPENSSL_VERSION_NUMBER < 0x10101000L #define SSL_CTX_set1_groups_list SSL_CTX_set1_curves_list #define DEFAULT_TLS_GROUPS "P-521:P-384:P-256" #else #define DEFAULT_TLS_GROUPS "X448:X25519:P-521:P-384:P-256" #endif if (SSL_CTX_set1_groups_list(_ctx, DEFAULT_TLS_GROUPS) == 0) { throw std::runtime_error("Cannot set ECDH groups: " DEFAULT_TLS_GROUPS); } SSL_CTX_set_options(_ctx, SSL_OP_SINGLE_ECDH_USE); #elif OPENSSL_VERSION_NUMBER >= 0x0090800fL const int nid = OBJ_sn2nid("prime256v1"); if (nid == 0) { throw std::runtime_error("Unknown ECDH curve name: prime256v1"); } EC_KEY* ecdh = EC_KEY_new_by_curve_name(nid); if (!ecdh) { throw std::runtime_error("Cannot create ECDH curve"); } SSL_CTX_set_tmp_ecdh(_ctx, ecdh); SSL_CTX_set_options(_ctx, SSL_OP_SINGLE_ECDH_USE); EC_KEY_free(ecdh); #endif #endif } std::string SslContext::getLastErrorMsg() { const unsigned long errCode = ERR_get_error(); if (errCode != 0) { char buf[256]; ERR_error_string_n(errCode, buf, sizeof(buf)); return std::string(buf); } return "Success"; } /* vim:set 
shiftwidth=4 softtabstop=4 expandtab: */
C++
BSD-2-Clause
Gribesh/online/net/Ssl.cpp
e031ef8e-8b20-4413-b519-935a2024dece
[]
[]
<?php $title = 'Error 404'; include_once('header.php'); ?> <div class="container"> <div class="page-header"> <h1><?php print $title ?></h1> </div> <p>You found a non-existent page.</p> <p>You might have come here for a broken link. Please <a href="mailto:webmaster@aatf.us">contact us</a> if you think this is an error.</p> <?php include_once('footer.php'); ?>
PHP
Apache-2.0
AATF/aatf.us/404.php
41bb9e50-a6eb-4973-84cf-6bdedd1c69c3
[{"tag": "EMAIL", "value": "webmaster@aatf.us", "start": 257, "end": 274, "context": "me here for a broken link. Please <a href=\"mailto:webmaster@aatf.us\">contact us</a> if you think this is an error.</p"}]
[{"tag": "EMAIL", "value": "webmaster@aatf.us", "start": 257, "end": 274, "context": "me here for a broken link. Please <a href=\"mailto:webmaster@aatf.us\">contact us</a> if you think this is an error.</p"}]
/******************************************************************************* * Debrief - the Open Source Maritime Analysis Application * http://debrief.info * * (C) 2000-2020, Deep Blue C Technology Ltd * * This library is free software; you can redistribute it and/or * modify it under the terms of the Eclipse Public License v1.0 * (http://www.eclipse.org/legal/epl-v10.html) * * This library is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. *******************************************************************************/ package ASSET.Models.Decision; import ASSET.Models.Movement.SimpleDemandedStatus; import ASSET.Participants.DemandedStatus; import ASSET.Participants.Status; import ASSET.Util.SupportTesting; import MWC.GUI.Editable; import MWC.GenericData.WorldDistance; import MWC.GenericData.WorldSpeed; /** * Our implementation of user control, where the vessel is non-autonomous, * obeying the current demanded course, speed, depth */ public class UserControl extends CoreDecision implements java.io.Serializable { ////////////////////////////////////////////////// // property testnig ////////////////////////////////////////////////// public static class ControlTest extends SupportTesting.EditableTesting { /** * get an object which we can test * * @return Editable object which we can check the properties for */ @Override public Editable getEditable() { return new UserControl(0, null, null); } } static public class UserControlInfo extends MWC.GUI.Editable.EditorType { /** * constructor for editable details of a set of Layers * * @param data the Layers themselves */ public UserControlInfo(final UserControl data) { super(data, data.getName(), NAME); } /** * return the custom editor for this object */ @Override public java.beans.BeanDescriptor getBeanDescriptor() { final java.beans.BeanDescriptor bp = new 
java.beans.BeanDescriptor(UserControl.class, ASSET.GUI.Editors.Decisions.UserControlEditor.class); bp.setDisplayName(super.getData().toString()); return bp; } /** * editable GUI properties for our participant * * @return property descriptions */ @Override public java.beans.PropertyDescriptor[] getPropertyDescriptors() { try { final java.beans.PropertyDescriptor[] res = { prop("Active", "whether this control is active"), prop("Course", "whether this control is active"), prop("Speed", "whether this control is active"), prop("Depth", "whether this control is active"), }; return res; } catch (final java.beans.IntrospectionException e) { return super.getPropertyDescriptors(); } } // /** // * editable GUI properties for our participant // * // * @return property descriptions // */ // public java.beans.PropertyDescriptor[] getPropertyDescriptors() // { // try{ // final java.beans.PropertyDescriptor[] res={ // prop("Depth", "the demanded depth (m)"), // prop("Speed", "the demanded speed (kts)"), // prop("Course", "the demanded course (degs)"), // prop("Active", "whether user is in control"), // prop("Name", "the name of this user control model"), // }; // return res; // }catch(java.beans.IntrospectionException e){ // return super.getPropertyDescriptors(); // } // } } public static final String NAME = "User Control"; /** * */ private static final long serialVersionUID = 1L; static public final String UPDATED = "Updated"; /** * the current demanded course (Degs) */ private double _demandedCourse = 0; /** * the current demanded speed (kts) */ private WorldSpeed _demandedSpeed = null; /** * the current demanded depth (m) */ private WorldDistance _demandedDepth = null; /** * a local copy of our editable object */ private MWC.GUI.Editable.EditorType _myEditor = null; /** * handle my listeners */ private final java.beans.PropertyChangeSupport _pSupport = new java.beans.PropertyChangeSupport(this); /** * @param defaultCourse the initial course for this behaviour (degs) * @param 
defaultSpeed the initial speed for this behaviour (kts) * @param defaultDepth the initial depth for this behaviour (m) */ public UserControl(final double defaultCourse, final WorldSpeed defaultSpeed, final WorldDistance defaultDepth) { super(NAME); _demandedCourse = defaultCourse; _demandedSpeed = defaultSpeed; _demandedDepth = defaultDepth; } // private void fireUpdate(final String type, final ASSET.Models.DecisionType // decider) // { // _pSupport.firePropertyChange(type, null, decider); // } /** * somebody wants to know about us */ public void addListener(final String type, final java.beans.PropertyChangeListener listener) { _pSupport.addPropertyChangeListener(type, listener); } /** * decide * * @param status parameter for decide * @param time parameter for decide * @return the returned ASSET.Participants.DemandedStatus */ @Override public ASSET.Participants.DemandedStatus decide(final ASSET.Participants.Status status, final ASSET.Models.Movement.MovementCharacteristics chars, final DemandedStatus demStatus, final ASSET.Models.Detection.DetectionList detections, final ASSET.Scenario.ScenarioActivityMonitor monitor, final long time) { SimpleDemandedStatus res = null; String activity = ""; if (isActive()) { // set the demanded status to our demanded values res = new SimpleDemandedStatus(status.getId(), time); res.setCourse(_demandedCourse); res.setSpeed(_demandedSpeed); res.setHeight(-_demandedDepth.getValueIn(WorldDistance.METRES)); activity = "under control"; // ok, fire an updated event - in case there's a gui listeing _pSupport.firePropertyChange(UserControl.UPDATED, this, this); } else { // don't bother, the user isn't interested activity = "not controlled"; } super.setLastActivity(activity); return res; } public double getCourse() { return _demandedCourse; } public WorldDistance getDepth() { return _demandedDepth; } /** * get the editor for this item * * @return the BeanInfo data for this editable object */ @Override public MWC.GUI.Editable.EditorType 
getInfo() { if (_myEditor == null) _myEditor = new UserControlInfo(this); return _myEditor; } public WorldSpeed getSpeed() { return _demandedSpeed; } /** * get the version details for this model. * * <pre> * $Log: UserControl.java,v $ * Revision 1.1 2006/08/08 14:21:41 Ian.Mayo * Second import * * Revision 1.1 2006/08/07 12:25:49 Ian.Mayo * First versions * * Revision 1.14 2004/09/02 13:17:41 Ian.Mayo * Reflect CoreDecision handling the toString method * * Revision 1.13 2004/08/31 15:28:04 Ian.Mayo * Polish off test refactoring, start Intercept behaviour * <p/> * Revision 1.12 2004/08/31 09:36:34 Ian.Mayo * Rename inner static tests to match signature **Test to make automated testing more consistent * <p/> * Revision 1.11 2004/08/26 14:54:22 Ian.Mayo * Start switching to automated property editor testing. Correct property editor bugs where they arise. * <p/> * Revision 1.10 2004/08/20 13:32:41 Ian.Mayo * Implement inspection recommendations to overcome hidden parent objects, let CoreDecision handle the activity bits. 
* <p/> * Revision 1.9 2004/08/17 14:22:17 Ian.Mayo * Refactor to introduce parent class capable of storing name & isActive flag * <p/> * Revision 1.8 2004/08/06 12:55:51 Ian.Mayo * Include current status when firing interruption * <p/> * Revision 1.7 2004/08/06 11:14:36 Ian.Mayo * Introduce interruptable behaviours, and recalc waypoint route after interruption * <p/> * Revision 1.6 2004/05/24 15:59:44 Ian.Mayo * Commit updates from home * <p/> * Revision 1.1.1.1 2004/03/04 20:30:51 ian * no message * <p/> * Revision 1.5 2003/11/05 09:19:40 Ian.Mayo * Include MWC Model support * <p/> * </pre> */ @Override public String getVersion() { return "$Date$"; } ////////////////////////////////////////////////////////////////////// // editable data ////////////////////////////////////////////////////////////////////// /** * whether there is any edit information for this item this is a convenience * function to save creating the EditorType data first * * @return yes/no */ @Override public boolean hasEditor() { return true; } /** * indicate to this model that its execution has been interrupted by another * (prob higher priority) model * * @param currentStatus */ @Override public void interrupted(final Status currentStatus) { // ignore. } /** * somebody wants to know about us */ public void removeListener(final String type, final java.beans.PropertyChangeListener listener) { _pSupport.removePropertyChangeListener(type, listener); } /** * reset this decision model */ @Override public void restart() { // no detections, reset our variables } //////////////////////////////////////////////////////////// // model support //////////////////////////////////////////////////////////// public void setCourse(final double demandedCourse) { this._demandedCourse = demandedCourse; } public void setDepth(final WorldDistance demandedDepth) { this._demandedDepth = demandedDepth; } public void setSpeed(final WorldSpeed demandedSpeed) { this._demandedSpeed = demandedSpeed; } }
Java
EPL-1.0
JLLeitschuh/debrief/org.mwc.asset.legacy/src/ASSET/Models/Decision/UserControl.java
0a6c41a8-03f3-479a-b557-4f20cc9ac69a
[{"tag": "USERNAME", "value": "Ian.Mayo", "start": 7573, "end": 7581, "context": "\t * <p/>\r\n\t * Revision 1.10 2004/08/20 13:32:41 Ian.Mayo\r\n\t * Implement inspection recommendations to over"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7752, "end": 7760, "context": "\n\t * <p/>\r\n\t * Revision 1.9 2004/08/17 14:22:17 Ian.Mayo\r\n\t * Refactor to introduce parent class capable o"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7017, "end": 7025, "context": "ons\r\n\t *\r\n\t * Revision 1.14 2004/09/02 13:17:41 Ian.Mayo\r\n\t * Reflect CoreDecision handling the toString m"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7891, "end": 7899, "context": "\n\t * <p/>\r\n\t * Revision 1.8 2004/08/06 12:55:51 Ian.Mayo\r\n\t * Include current status when firing interrupt"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7406, "end": 7414, "context": "\t * <p/>\r\n\t * Revision 1.11 2004/08/26 14:54:22 Ian.Mayo\r\n\t * Start switching to automated property editor"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7127, "end": 7135, "context": "hod\r\n\t *\r\n\t * Revision 1.13 2004/08/31 15:28:04 Ian.Mayo\r\n\t * Polish off test refactoring, start Intercept"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7247, "end": 7255, "context": "\t * <p/>\r\n\t * Revision 1.12 2004/08/31 09:36:34 Ian.Mayo\r\n\t * Rename inner static tests to match signature"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 6942, "end": 6950, "context": "port\r\n\t *\r\n\t * Revision 1.1 2006/08/07 12:25:49 Ian.Mayo\r\n\t * First versions\r\n\t *\r\n\t * Revision 1.14 2004"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 6869, "end": 6877, "context": ".java,v $\r\n\t * Revision 1.1 2006/08/08 14:21:41 Ian.Mayo\r\n\t * Second import\r\n\t *\r\n\t * Revision 1.1 2006/0"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 8311, "end": 8319, "context": "\n\t * <p/>\r\n\t * Revision 1.5 2003/11/05 09:19:40 Ian.Mayo\r\n\t * Include MWC Model support\r\n\t * <p/>\r\n\t * 
</p"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 8003, "end": 8011, "context": "\n\t * <p/>\r\n\t * Revision 1.7 2004/08/06 11:14:36 Ian.Mayo\r\n\t * Introduce interruptable behaviours, and reca"}]
[{"tag": "USERNAME", "value": "Ian.Mayo", "start": 7573, "end": 7581, "context": "\t * <p/>\r\n\t * Revision 1.10 2004/08/20 13:32:41 Ian.Mayo\r\n\t * Implement inspection recommendations to over"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7752, "end": 7760, "context": "\n\t * <p/>\r\n\t * Revision 1.9 2004/08/17 14:22:17 Ian.Mayo\r\n\t * Refactor to introduce parent class capable o"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7017, "end": 7025, "context": "ons\r\n\t *\r\n\t * Revision 1.14 2004/09/02 13:17:41 Ian.Mayo\r\n\t * Reflect CoreDecision handling the toString m"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7891, "end": 7899, "context": "\n\t * <p/>\r\n\t * Revision 1.8 2004/08/06 12:55:51 Ian.Mayo\r\n\t * Include current status when firing interrupt"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7406, "end": 7414, "context": "\t * <p/>\r\n\t * Revision 1.11 2004/08/26 14:54:22 Ian.Mayo\r\n\t * Start switching to automated property editor"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7127, "end": 7135, "context": "hod\r\n\t *\r\n\t * Revision 1.13 2004/08/31 15:28:04 Ian.Mayo\r\n\t * Polish off test refactoring, start Intercept"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 7247, "end": 7255, "context": "\t * <p/>\r\n\t * Revision 1.12 2004/08/31 09:36:34 Ian.Mayo\r\n\t * Rename inner static tests to match signature"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 6942, "end": 6950, "context": "port\r\n\t *\r\n\t * Revision 1.1 2006/08/07 12:25:49 Ian.Mayo\r\n\t * First versions\r\n\t *\r\n\t * Revision 1.14 2004"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 6869, "end": 6877, "context": ".java,v $\r\n\t * Revision 1.1 2006/08/08 14:21:41 Ian.Mayo\r\n\t * Second import\r\n\t *\r\n\t * Revision 1.1 2006/0"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 8311, "end": 8319, "context": "\n\t * <p/>\r\n\t * Revision 1.5 2003/11/05 09:19:40 Ian.Mayo\r\n\t * Include MWC Model support\r\n\t * <p/>\r\n\t * 
</p"}, {"tag": "USERNAME", "value": "Ian.Mayo", "start": 8003, "end": 8011, "context": "\n\t * <p/>\r\n\t * Revision 1.7 2004/08/06 11:14:36 Ian.Mayo\r\n\t * Introduce interruptable behaviours, and reca"}]
/* SPDX-License-Identifier: GPL-2.0 */ #ifndef _SPARC64_VISASM_H #define _SPARC64_VISASM_H /* visasm.h: FPU saving macros for VIS routines * * Copyright (C) 1998 Jakub Jelinek (jj@ultra.linux.cz) */ #include <asm/pstate.h> #include <asm/ptrace.h> /* Clobbers %o5, %g1, %g2, %g3, %g7, %icc, %xcc */ #define VISEntry \ rd %fprs, %o5; \ andcc %o5, (FPRS_FEF|FPRS_DU), %g0; \ be,pt %icc, 297f; \ sethi %hi(297f), %g7; \ sethi %hi(VISenter), %g1; \ jmpl %g1 + %lo(VISenter), %g0; \ or %g7, %lo(297f), %g7; \ 297: wr %g0, FPRS_FEF, %fprs; \ #define VISExit \ wr %g0, 0, %fprs; /* Clobbers %o5, %g1, %g2, %g3, %g7, %icc, %xcc. * Must preserve %o5 between VISEntryHalf and VISExitHalf */ #define VISEntryHalf \ VISEntry #define VISExitHalf \ VISExit #define VISEntryHalfFast(fail_label) \ rd %fprs, %o5; \ andcc %o5, FPRS_FEF, %g0; \ be,pt %icc, 297f; \ nop; \ ba,a,pt %xcc, fail_label; \ 297: wr %o5, FPRS_FEF, %fprs; #define VISExitHalfFast \ wr %o5, 0, %fprs; #ifndef __ASSEMBLY__ static inline void save_and_clear_fpu(void) { __asm__ __volatile__ ( " rd %%fprs, %%o5\n" " andcc %%o5, %0, %%g0\n" " be,pt %%icc, 299f\n" " sethi %%hi(298f), %%g7\n" " sethi %%hi(VISenter), %%g1\n" " jmpl %%g1 + %%lo(VISenter), %%g0\n" " or %%g7, %%lo(298f), %%g7\n" " 298: wr %%g0, 0, %%fprs\n" " 299:\n" " " : : "i" (FPRS_FEF|FPRS_DU) : "o5", "g1", "g2", "g3", "g7", "cc"); } int vis_emul(struct pt_regs *, unsigned int); #endif #endif /* _SPARC64_ASI_H */
C
MIT
2016k8009929004/libmpk/kernel/arch/sparc/include/asm/visasm.h
23942dc0-43a4-4ab7-a808-1605c8e2360a
[{"tag": "NAME", "value": "Jakub Jelinek", "start": 166, "end": 179, "context": " macros for VIS routines\n *\n * Copyright (C) 1998 Jakub Jelinek (jj@ultra.linux.cz)\n */\n\n#include <asm/pstate.h>\n"}, {"tag": "EMAIL", "value": "jj@ultra.linux.cz", "start": 181, "end": 198, "context": " routines\n *\n * Copyright (C) 1998 Jakub Jelinek (jj@ultra.linux.cz)\n */\n\n#include <asm/pstate.h>\n#include <asm/ptrac"}]
[{"tag": "NAME", "value": "Jakub Jelinek", "start": 166, "end": 179, "context": " macros for VIS routines\n *\n * Copyright (C) 1998 Jakub Jelinek (jj@ultra.linux.cz)\n */\n\n#include <asm/pstate.h>\n"}, {"tag": "EMAIL", "value": "jj@ultra.linux.cz", "start": 181, "end": 198, "context": " routines\n *\n * Copyright (C) 1998 Jakub Jelinek (jj@ultra.linux.cz)\n */\n\n#include <asm/pstate.h>\n#include <asm/ptrac"}]
""" Implementation of the `CID spec <https://github.com/multiformats/cid>`_. This module differs from other modules of :mod:`~multiformats`, in that the functionality is completely encapsulated by a single class :class:`CID`, which is imported from top level instead of the module itself: >>> from multiformats import CID """ from typing import Any, cast, FrozenSet, Tuple, Type, TypeVar, Union from typing_extensions import Literal, Final from typing_validation import validate from bases import base58btc from multiformats import varint, multicodec, multibase, multihash from multiformats.multicodec import Multicodec from multiformats.multibase import Multibase from multiformats.multihash import Multihash, _validate_raw_digest_size from multiformats.varint import BytesLike, byteslike _CIDSubclass = TypeVar("_CIDSubclass", bound="CID") CIDVersion = Literal[0, 1] CIDVersionNumbers: Final[FrozenSet[int]] = frozenset({0, 1}) def _binary_cid_from_str(cid: str) -> Tuple[bytes, Multibase]: if len(cid) == 46 and cid.startswith("Qm"): # CIDv0 to be decoded as base58btc return base58btc.decode(cid), multibase.get("base58btc") mb, b = multibase.decode_raw(cid) if b[0] == 0x12: # CIDv0 may not be multibase encoded (0x12 is the first byte of sha2-256 multihashes) # CIDv18 (first byte 18=0x12) will be skipped to prevent ambiguity raise ValueError("CIDv0 may not be multibase encoded (found multibase encoded bytes starting with 0x12).") return b, mb def _CID_validate_multibase(base: Union[str, Multibase]) -> Multibase: if isinstance(base, str): base = multibase.get(base) else: multibase.validate_multibase(base) return base def _CID_validate_multicodec(codec: Union[str, int, Multicodec]) -> Multicodec: if isinstance(codec, str): codec = multicodec.get(codec) elif isinstance(codec, int): codec = multicodec.get(code=codec) else: multicodec.validate_multicodec(codec) return codec def _CID_validate_multihash(hashfun: Union[str, int, Multihash]) -> Multihash: if isinstance(hashfun, str): 
hashfun = multihash.get(hashfun) elif isinstance(hashfun, int): hashfun = multihash.get(code=hashfun) else: pass return hashfun def _CID_validate_raw_digest(raw_digest: Union[str, BytesLike], hashfun: Multihash) -> bytes: if isinstance(raw_digest, str): raw_digest = bytes.fromhex(raw_digest) else: validate(raw_digest, BytesLike) if not isinstance(raw_digest, bytes): raw_digest = bytes(raw_digest) _, max_digest_size = hashfun.implementation _validate_raw_digest_size(hashfun.name, raw_digest, max_digest_size) return raw_digest def _CID_validate_multihash_digest(digest: Union[str, BytesLike]) -> Tuple[Multihash, bytes]: if isinstance(digest, str): digest = bytes.fromhex(digest) raw_digest: BytesLike code, raw_digest = multihash.unwrap_raw(digest) hashfun = _CID_validate_multihash(code) raw_digest = _CID_validate_raw_digest(raw_digest, hashfun) return hashfun, raw_digest def _CID_validate_version(version: int, base: Multibase, codec: Multicodec, hashfun: Multihash) -> int: if version in (2, 3): raise ValueError("CID versions 2 and 3 are reserved for future use.") if version not in (0, 1): raise ValueError(f"CID version {version} is not allowed.") if version == 0: if base.name != 'base58btc': raise ValueError(f"CIDv0 multibase must be 'base58btc', found {repr(base.name)} instead.") if codec.name != "dag-pb": raise ValueError(f"CIDv0 multicodec must be 'dag-pb', found {repr(codec.name)} instead.") if hashfun.name != "sha2-256": raise ValueError(f"CIDv0 multihash must be 'sha2-256', found {repr(hashfun.name)} instead.") return version class CID: """ Container class for `Content IDentifiers <https://github.com/multiformats/cid>`_. CIDs can be explicitly instantiated by passing multibase, CID version, multicodec and multihash digest to the constructor: >>> cid = CID("base58btc", 1, "raw", ... 
"12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95") >>> str(cid) 'zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA' Alternatively, a pair of multihash codec and raw hash digest can be passed in lieu of the multihash digest: >>> raw_digest = bytes.fromhex( ... "6e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95") >>> cid = CID("base58btc", 1, "raw", ("sha2-256", raw_digest)) >>> str(cid) 'zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA' The multihash digest and raw digest values can be passed either as :obj:`bytes`-like objects or as the corresponding hex strings: >>> isinstance(raw_digest, bytes) True >>> raw_digest.hex() '6e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95' Note: the hex strings are not multibase encoded. Calling :obj:`bytes` on an instance of this class returns its binary representation, as a :obj:`bytes` object: >>> cid = CID("base58btc", 1, "raw", ... "12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95") >>> raw_digest.hex() '6e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95' >>> bytes(cid).hex() '015512206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95' #^^ 0x01 = CIDv1 # ^^ 0x55 = 'raw' codec >>> bytes(cid) :param base: default multibase to use when encoding this CID :type base: :obj:`str` or :class:`~multiformats.multibase.Multibase` :param version: the CID version :type version: 0 or 1 :param codec: the content multicodec :type codec: :obj:`str`, :obj:`int` or :class:`~multiformats.multicodec.Multicodec` :param digest: the content multihash digest, or a pair of multihash codec and raw content digest :type digest: see below The ``digest`` parameter can be specified in the following ways: - as a :obj:`str`, in which case it is treated as a hex-string and converted to :obj:`bytes` using :obj:`bytes.fromhex` - as a :obj:`~multiformats.varint.BytesLike`, in which case it is converted to :obj:`bytes` directly - as a pair ``(multihash_codec, 
raw_digest)`` of a multihash and raw hash digest, which are used to produce a multihash digest via the :meth:`~multiformats.multihash.Multihash.wrap` metho If ``digest`` is specified by a pair, the ``multihash_codec`` value can be specified in the following ways: - by multihash multicodec name, as a :obj:`str` - by multihash multicodec code, as a :obj:`int` - as a :class:`~multiformats.multihash.Multihash` object If ``digest`` is specified by a pair, the ``raw_digest`` value can be specified in the following ways: - as a :obj:`str`, in which case it is treated as a hex-string and converted to :obj:`bytes` using :obj:`bytes.fromhex` - as a :obj:`~multiformats.varint.BytesLike`, in which case it is converted to :obj:`bytes` directly :raises ValueError: if the CID version is unsupported :raises ValueError: if version is 0 but base is not 'base58btc' or codec is not 'dag-pb' :raises KeyError: if the multibase, multicodec or multihash are unknown """ _base: Multibase _version: CIDVersion _codec: Multicodec _hashfun: Multihash _digest: bytes __slots__ = ("__weakref__", "_base", "_version", "_codec", "_hashfun", "_digest") def __new__(cls: Type[_CIDSubclass], base: Union[str, Multibase], version: int, codec: Union[str, int, Multicodec], digest: Union[str, BytesLike, Tuple[Union[str, int, Multihash], Union[str, BytesLike]]], ) -> _CIDSubclass: # pylint: disable = too-many-arguments base = _CID_validate_multibase(base) codec = _CID_validate_multicodec(codec) raw_digest: Union[str, bytes] hashfun: Union[str, int, Multihash] if isinstance(digest, (str,)+byteslike): hashfun, raw_digest = _CID_validate_multihash_digest(digest) else: validate(digest, Tuple[Union[str, int, Multihash], Union[str, BytesLike]]) hashfun, raw_digest = digest hashfun = _CID_validate_multihash(hashfun) raw_digest = _CID_validate_raw_digest(raw_digest, hashfun) version = _CID_validate_version(version, base, codec, hashfun) if isinstance(digest, bytes): return CID._new_instance(cls, base, version, codec, 
hashfun, digest) return CID._new_instance(cls, base, version, codec, hashfun, (hashfun, raw_digest)) @staticmethod def _new_instance(CID_subclass: Type[_CIDSubclass], base: Multibase, version: int, codec: Multicodec, hashfun: Multihash, digest: Union[bytes, Tuple[Multihash, bytes]], ) -> _CIDSubclass: # pylint: disable = too-many-arguments instance: _CIDSubclass = super().__new__(CID_subclass) instance._base = base assert version in (0, 1) instance._version = cast(Literal[0, 1], version) instance._codec = codec instance._hashfun = hashfun if isinstance(digest, bytes): instance._digest = digest elif isinstance(digest, byteslike): instance._digest = bytes(digest) else: _hashfun, raw_digest = digest if not isinstance(raw_digest, bytes): raw_digest = bytes(raw_digest) assert _hashfun == hashfun, "You passed different multihashes to a _new_instance call with digest as a pair." instance._digest = hashfun.wrap(raw_digest) return instance @property def version(self) -> CIDVersion: """ CID version. Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.version 1 """ return self._version @property def base(self) -> Multibase: """ Multibase used to encode the CID: - if a CIDv1 was decoded from a multibase-encoded string, the encoding multibase is used - if a CIDv1 was decoded from a bytestring, the 'base58btc' multibase is used - for a CIDv0, 'base58btc' is always used Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.base Multibase(name='base58btc', code='z', status='default', description='base58 bitcoin') """ return self._base @property def codec(self) -> Multicodec: """ Codec that the multihash digest refers to. 
Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.codec Multicodec(name='raw', tag='ipld', code='0x55', status='permanent', description='raw binary') """ return self._codec @property def hashfun(self) -> Multihash: """ Multihash used to produce the multihash digest. Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.hashfun Multicodec(name='sha2-256', tag='multihash', code='0x12', status='permanent', description='') """ return self._hashfun @property def digest(self) -> bytes: """ Multihash digest. Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.digest.hex() '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95' """ return self._digest @property def raw_digest(self) -> bytes: """ Raw hash digest, decoded from the multihash digest. Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.raw_digest.hex() '6e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95' """ return multihash.unwrap(self._digest) @property def human_readable(self) -> str: """ Human-readable representation of the CID. Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.human_readable 'base58btc - cidv1 - raw - (sha2-256 : 256 : 6E6FF7950A36187A801613426E858DCE686CD7D7E3C0FC42EE0330072D245C95)' """ raw_digest = self.raw_digest hashfun_str = f"({self.hashfun.name} : {len(raw_digest)*8} : {raw_digest.hex().upper()})" return f"{self.base.name} - cidv{self.version} - {self.codec.name} - {hashfun_str}" def encode(self, base: Union[None, str, Multibase] = None) -> str: """ Encodes the CID using a given multibase. If :obj:`None` is given, the CID's own multibase is used by default. 
Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid.encode() # default: cid.base 'zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA' >>> cid.encode("base32") 'bafkreidon73zkcrwdb5iafqtijxildoonbwnpv7dyd6ef3qdgads2jc4su' :param base: the multibase to be used for encoding :type base: :obj:`None`, :obj:`str` or :class:`~multiformats.multibase.Multibase`, *optional* :raises KeyError: see :meth:`multiformats.multibase.Multibase.encode` """ if self.version == 0: if base is not None: raise ValueError("CIDv0 cannot be multibase-encoded, please set multibase=None.") return base58btc.encode(bytes(self)) if base is None or base == self.base: base = self.base # use CID's own multibase as default else: if isinstance(base, str): base = multibase.get(base) else: multibase.validate_multibase(base) return base.encode(bytes(self)) def set(self, *, base: Union[None, str, Multibase] = None, version: Union[None, int] = None, codec: Union[None, str, int, Multicodec] = None ) -> "CID": """ Returns a new CID obtained by setting new values for one or more of: ``base``, ``version``, or ``codec``. 
Example usage: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid CID('base58btc', 1, 'raw', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') >>> cid.set(base="base32") CID('base32', 1, 'raw', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') >>> cid.set(codec="dag-cbor") CID('base58btc', 1, 'dag-cbor', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') >>> cid.set(version=0, codec="dag-pb") CID('base58btc', 0, 'dag-pb', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') >>> cid CID('base58btc', 1, 'raw', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') # Note: 'CID.set' returns new instances, # the original 'cid' instance is unchanged If setting ``version`` to 0, ``base`` must be 'base58btc' and ``codec`` must be 'dag-pb'. >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> cid = CID.decode(s) >>> cid CID('base58btc', 1, 'raw', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') >>> cid.set(version=0, codec="dag-pb") CID('base58btc', 0, 'dag-pb', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') >>> cid.set(version=0) ValueError: CIDv0 multicodec must be 'dag-pb', found 'raw' instead. 
>>> cid.set(version=0, codec="dag-pb", base="base32") ValueError: CIDv0 multibase must be 'base58btc', found 'base32' instead :param base: the new CID multibase, or :obj:`None` if multibase unchanged :type base: :obj:`None`, :obj:`str` or :class:`~multiformats.multibase.Multibase`, *optional* :param version: the new CID version, or :obj:`None` if version unchanged :type version: :obj:`None`, 0 or 1, *optional* :param codec: the new content multicodec, or :obj:`None` if multicodec unchanged :type codec: :obj:`None`, :obj:`str` or :class:`~multiformats.multicodec.Multicodec`, *optional* :raises KeyError: if the multibase or multicodec are unknown """ hashfun = self.hashfun digest = self.digest if base is not None and base not in (self.base, self.base.name): base = _CID_validate_multibase(base) else: base = self.base if codec is not None and codec not in (self.codec, self.codec.name, self.codec.code): codec = _CID_validate_multicodec(codec) else: codec = self.codec if version is not None and version != self.version: _CID_validate_version(version, base, codec, hashfun) else: version = self.version return CID._new_instance(CID, base, version, codec, hashfun, digest) def __bytes__(self) -> bytes: if self.version == 0: return self.digest return varint.encode(self.version)+varint.encode(self.codec.code)+self.digest def __str__(self) -> str: return self.encode() def __repr__(self) -> str: mb = self.base.name v = self.version mc = self.codec.name d = self.digest return f"CID({repr(mb)}, {v}, {repr(mc)}, {repr(d.hex())})" @property def _as_tuple(self) -> Tuple[Type["CID"], int, Multicodec, bytes]: return (CID, self.version, self.codec, self.digest) def __hash__(self) -> int: return hash(self._as_tuple) def __eq__(self, other: Any) -> bool: if self is other: return True if not isinstance(other, CID): return NotImplemented return self._as_tuple == other._as_tuple @staticmethod def decode(cid: Union[str, BytesLike]) -> "CID": """ Decodes a CID from a bytestring or a hex string 
(which will be converted to :obj:`bytes` using :obj:`bytes.fromhex`). Note: the hex string is not multibase encoded. Example usage for CIDv1 multibase-encoded string: >>> s = "zb2rhe5P4gXftAwvA4eXQ5HJwsER2owDyS9sKaQRRVQPn93bA" >>> CID.decode(s) CID('base58btc', 1, 'raw', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') Example usage for CIDv1 bytestring (multibase always set to 'base58btc'): >>> b = bytes.fromhex( ... "015512206e6ff7950a36187a801613426e85" ... "8dce686cd7d7e3c0fc42ee0330072d245c95") >>> CID.decode(b) CID('base58btc', 1, 'raw', '12206e6ff7950a36187a801613426e858dce686cd7d7e3c0fc42ee0330072d245c95') Example usage for CIDv0 base58-encoded string: >>> s = "QmbWqxBEKC3P8tqsKc98xmWNzrzDtRLMiMPL8wBuTGsMnR" >>> CID.decode(s) CID('base58btc', 0, 'dag-pb', '1220c3c4733ec8affd06cf9e9ff50ffc6bcd2ec85a6170004bb709669c31de94391a') Example usage for CIDv0 bytestring (multibase always set to 'base58btc'): >>> b = bytes.fromhex( ... "1220c3c4733ec8affd06cf9e9ff50ffc6b" ... 
"cd2ec85a6170004bb709669c31de94391a") >>> CID.decode(b) CID('base58btc', 0, 'dag-pb', '1220c3c4733ec8affd06cf9e9ff50ffc6bcd2ec85a6170004bb709669c31de94391a') :param cid: the CID bytes or multibase-encoded string :type cid: :obj:`str` or :obj:`~multiformats.varint.BytesLike` :raises ValueError: if the CID is malformed or the CID version is unsupported :raises KeyError: if the multibase, multicodec or multihash are unknown """ if isinstance(cid, str): cid, mb = _binary_cid_from_str(cid) else: mb = multibase.get("base58btc") validate(cid, BytesLike) cid = memoryview(cid) # if len(cid) == 34 and cid.startswith(b"\x12\x20"): if len(cid) == 34 and cid[0] == 0x12 and cid[1] == 0x20: v = 0 # CID version mc_code = 0x70 # multicodec.get("dag-pb") digest = cid # multihash digest is what's left else: v, _, cid = varint.decode_raw(cid) # CID version if v == 0: raise ValueError("CIDv0 is malformed.") if v in (2, 3): raise ValueError("CID versions 2 and 3 are reserved for future use.") if v != 1: raise ValueError(f"CIDv{v} is currently not supported.") mc_code, _, cid = multicodec.unwrap_raw(cid) # multicodec digest = cid # multihash digest is what's left mc = multicodec.get(code=mc_code) mh_code, _ = multihash.unwrap_raw(digest) mh = multihash.get(code=mh_code) return CID._new_instance(CID, mb, v, mc, mh, digest) @staticmethod def peer_id(pk_bytes: Union[str, BytesLike]) -> "CID": """ Wraps the raw hash of a public key into a `PeerID <https://docs.libp2p.io/concepts/peer-id/>`_, as a CIDv1. The ``pk_bytes`` argument should be the binary public key, encoded according to the `PeerID spec <https://github.com/libp2p/specs/blob/master/peer-ids/peer-ids.md>`_. This can be passed as a bytestring or as a hex string (which will be converted to :obj:`bytes` using :obj:`bytes.fromhex`). Note: the hex string is not multibase encoded. Example usage with Ed25519 public key: >>> pk_bytes = bytes.fromhex( ... "1498b5467a63dffa2dc9d9e069caf075d16fc33fdd4c3b01bfadae6433767d93") ... 
# a 32-byte Ed25519 public key >>> peer_id = CID.peer_id(pk_bytes) >>> peer_id CID('base32', 1, 'libp2p-key', '00201498b5467a63dffa2dc9d9e069caf075d16fc33fdd4c3b01bfadae6433767d93') #^^ 0x00 = 'identity' multihash used (public key length <= 42) # ^^ 0x20 = 32-bytes of raw hash digestlength >>> str(peer_id) 'bafzaaiautc2um6td375c3soz4bu4v4dv2fx4gp65jq5qdp5nvzsdg5t5sm' Snippet showing how to obtain the `Ed25519 <https://cryptography.io/en/latest/hazmat/primitives/asymmetric/ed25519/>`_ public key bytestring using the `cryptography <https://github.com/pyca/cryptography>`_ library: >>> from cryptography.hazmat.primitives.asymmetric.ed25519 import Ed25519PrivateKey >>> from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat >>> private_key = Ed25519PrivateKey.generate() >>> public_key = private_key.public_key() >>> pk_bytes = public_key.public_bytes( ... encoding=Encoding.Raw, ... format=PublicFormat.Raw ... ) >>> pk_bytes.hex() "1498b5467a63dffa2dc9d9e069caf075d16fc33fdd4c3b01bfadae6433767d93" Example usage with DER-encoded RSA public key: >>> pk_bytes = bytes.fromhex( ... "30820122300d06092a864886f70d01010105000382010f003082010a02820101" ... "009a56a5c11e2705d0bfe0cd1fa66d5e519095cc741b62ed99ddf129c32e046e" ... "5ba3958bb8a068b05a95a6a0623cc3c889b1581793cd84a34cc2307e0dd74c70" ... "b4f230c74e5063ecd8e906d372be4eba13f47d04427a717ac78cb12b4b9c2ab5" ... "591f36f98021a70f84d782c36c51819054228ff35a45efa3f82b27849ec89036" ... "26b4a4c4b40f9f74b79caf55253687124c79cb10cd3bc73f0c44fbd341e5417d" ... "2e85e900d22849d2bc85ca6bf037f1f5b4f9759b4b6942fccdf1140b30ea7557" ... "87deb5c373c5953c14d64b523959a76a32a599903974a98cf38d4aaac7e359f8" ... "6b00a91dcf424bf794592139e7097d7e65889259227c07155770276b6eda4cec" ... "370203010001") ... 
# a 294-byte RSA public key >>> peer_id = CID.peer_id(pk_bytes) >>> peer_id CID('base32', 1, 'libp2p-key', '1220c1a6513ffb14f202f75453c49666a5b9d7ed9a1a068891daf824d477573f829f') #^^ 0x12 = 'sha2-256' multihash used (public key length > 42) # ^^ 0x20 = 32-bytes of raw hash digest length >>> str(peer_id) 'bafzbeigbuzit76yu6ibpovctyslgnjnz27wzugqgrci5v6be2r3vop4ct4' Snippet showing how to obtain the `RSA <https://cryptography.io/en/latest/hazmat/primitives/asymmetric/rsa/>`_ public key bytestring using the `cryptography <https://github.com/pyca/cryptography>`_ library: >>> from cryptography.hazmat.primitives.asymmetric import rsa >>> from cryptography.hazmat.primitives.serialization import Encoding, PublicFormat >>> private_key = rsa.generate_private_key( ... public_exponent=65537, ... key_size=2048, ... ) >>> public_key = private_key.public_key() >>> pk_bytes = public_key.public_bytes( ... encoding=Encoding.DER, ... format=PublicFormat.SubjectPublicKeyInfo ... ) >>> pk_bytes.hex() "30820122300d06092a864886f70d01010105000382010f003082010a02820101" "009a56a5c11e2705d0bfe0cd1fa66d5e519095cc741b62ed99ddf129c32e046e" "5ba3958bb8a068b05a95a6a0623cc3c889b1581793cd84a34cc2307e0dd74c70" "b4f230c74e5063ecd8e906d372be4eba13f47d04427a717ac78cb12b4b9c2ab5" "591f36f98021a70f84d782c36c51819054228ff35a45efa3f82b27849ec89036" "26b4a4c4b40f9f74b79caf55253687124c79cb10cd3bc73f0c44fbd341e5417d" "2e85e900d22849d2bc85ca6bf037f1f5b4f9759b4b6942fccdf1140b30ea7557" "87deb5c373c5953c14d64b523959a76a32a599903974a98cf38d4aaac7e359f8" "6b00a91dcf424bf794592139e7097d7e65889259227c07155770276b6eda4cec" "370203010001" :param pk_bytes: the public key bytes :type pk_bytes: :obj:`str` or :obj:`~multiformats.varint.BytesLike` :raises ValueError: if ``pk_bytes`` is passed as a string and is not the hex-string of some bytes """ if isinstance(pk_bytes, str): pk_bytes = bytes.fromhex(pk_bytes) else: validate(pk_bytes, BytesLike) if len(pk_bytes) <= 42: mh = multihash.get("identity") digest = 
multihash.digest(pk_bytes, mh) else: mh = multihash.get("sha2-256") digest = multihash.digest(pk_bytes, mh) mc = multicodec.get(code=0x72) # multicodec.get("libp2p-key") mb = multibase.get("base32") return CID._new_instance(CID, mb, 1, mc, mh, digest)
Python
MIT
hashberg-io/multiformats/multiformats/cid/__init__.py
cdc68ba4-da86-4a07-b76a-26c46d7383d8
[]
[]
<?php use yii\db\Schema; /** * Class m000000_000005_auth_item_child. * Migration class for auth_item_child. * * @author Agiel K. Saputra <13nightevil@gmail.com> * @since 0.1.0 */ class m000000_000005_auth_item_child extends \yii\db\Migration { /** * @inheritdoc */ public function up() { $tableOptions = null; if ($this->db->driverName === 'mysql') { $tableOptions = 'CHARACTER SET utf8 COLLATE utf8_general_ci ENGINE=InnoDB'; } $this->createTable('{{%auth_item_child}}', [ 'parent' => Schema::TYPE_STRING . '(64) NOT NULL', 'child' => Schema::TYPE_STRING . '(64) NOT NULL', 'PRIMARY KEY ([[parent]], [[child]])', 'FOREIGN KEY ([[parent]]) REFERENCES {{%auth_item}} ([[name]]) ON DELETE CASCADE ON UPDATE CASCADE', 'FOREIGN KEY ([[child]]) REFERENCES {{%auth_item}} ([[name]]) ON DELETE CASCADE ON UPDATE CASCADE', ], $tableOptions); /** * Level superadmin => adminstrator => editor => author => contributor => subscriber. */ $this->batchInsert('{{%auth_item_child}}', ['parent', 'child'], [ ['superadmin', 'administrator'], ['administrator', 'editor'], ['editor', 'author'], ['author', 'contributor'], ['contributor', 'subscriber'], ]); } /** * @inheritdoc */ public function down() { $this->dropTable('{{%auth_item_child}}'); } }
PHP
BSD-3-Clause
setiam3/nextgen/console/migrations/m000000_000005_auth_item_child.php
0897459d-9f7f-437a-bd59-ac8078f36e8d
[{"tag": "EMAIL", "value": "13nightevil@gmail.com", "start": 144, "end": 165, "context": " auth_item_child.\n *\n * @author Agiel K. Saputra <13nightevil@gmail.com>\n * @since 0.1.0\n */\nclass m000000_000005_auth_it"}, {"tag": "NAME", "value": "Agiel K. Saputra", "start": 126, "end": 142, "context": "igration class for auth_item_child.\n *\n * @author Agiel K. Saputra <13nightevil@gmail.com>\n * @since 0.1.0\n */\nclass"}]
[{"tag": "EMAIL", "value": "13nightevil@gmail.com", "start": 144, "end": 165, "context": " auth_item_child.\n *\n * @author Agiel K. Saputra <13nightevil@gmail.com>\n * @since 0.1.0\n */\nclass m000000_000005_auth_it"}, {"tag": "NAME", "value": "Agiel K. Saputra", "start": 126, "end": 142, "context": "igration class for auth_item_child.\n *\n * @author Agiel K. Saputra <13nightevil@gmail.com>\n * @since 0.1.0\n */\nclass"}]
""" Dynamic bicycle model. Use Dynamic class to: 1. simulate continuous model 2. linearize continuous model 3. discretize continuous model 4. simulate continuously linearized discrete model 5. compare continuous and discrete models """ __author__ = 'Achin Jain' __email__ = 'achinj@seas.upenn.edu' import numpy as np import casadi as cs from bayes_race.models.model import Model from bayes_race.params import F110 class Dynamic(Model): def __init__(self, lf, lr, mass, Iz, Cf, Cr, Bf=None, Br=None, Df=None, Dr=None, Cm1=None, Cm2=None, Cr0=None, Cr2=None, input_acc=False, **kwargs): """ specify model params here """ self.lf = lf self.lr = lr self.dr = lr/(lf+lr) self.mass = mass self.Iz = Iz self.Cf = Cf self.Cr = Cr self.Bf = Bf self.Br = Br self.Df = Df self.Dr = Dr self.Cm1 = Cm1 self.Cm2 = Cm2 self.Cr0 = Cr0 self.Cr2 = Cr2 self.approx = False if Bf is None or Br is None or Df is None or Dr is None: self.approx = True self.input_acc = input_acc self.n_states = 6 self.n_inputs = 2 Model.__init__(self) def sim_continuous(self, x0, u, t): """ simulates the nonlinear continuous model with given input vector by numerical integration using 6th order Runge Kutta method x0 is the initial state of size 6x1 u is the input vector of size 2xn t is the time vector of size 1x(n+1) """ n_steps = u.shape[1] x = np.zeros([6, n_steps+1]) dxdt = np.zeros([6, n_steps+1]) dxdt[:,0] = self._diffequation(None, x0, [0, 0]) x[:,0] = x0 for ids in range(1, n_steps+1): x[:,ids] = self._integrate(x[:,ids-1], u[:,ids-1], t[ids-1], t[ids]) dxdt[:,ids] = self._diffequation(None, x[:,ids], u[:,ids-1]) return x, dxdt def _diffequation(self, t, x, u): """ write dynamics as first order ODE: dxdt = f(x(t)) x is a 6x1 vector: [x, y, psi, vx, vy, omega]^T u is a 2x1 vector: [acc/pwm, steer]^T """ steer = u[1] psi = x[2] vx = x[3] vy = x[4] omega = x[5] Ffy, Frx, Fry = self.calc_forces(x, u) dxdt = np.zeros(6) dxdt[0] = vx*np.cos(psi) - vy*np.sin(psi) dxdt[1] = vx*np.sin(psi) + vy*np.cos(psi) dxdt[2] = 
omega dxdt[3] = 1/self.mass * (Frx - Ffy*np.sin(steer)) + vy*omega dxdt[4] = 1/self.mass * (Fry + Ffy*np.cos(steer)) - vx*omega dxdt[5] = 1/self.Iz * (Ffy*self.lf*np.cos(steer) - Fry*self.lr) return dxdt def calc_forces(self, x, u, return_slip=False): steer = u[1] psi = x[2] vx = x[3] vy = x[4] omega = x[5] if self.approx: # rolling friction and drag are ignored acc = u[0] Frx = self.mass*acc # See Vehicle Dynamics and Control (Rajamani) alphaf = steer - (self.lf*omega + vy)/vx alphar = (self.lr*omega - vy)/vx Ffy = 2 * self.Cf * alphaf Fry = 2 * self.Cr * alphar else: if self.input_acc: # rolling friction and drag are ignored acc = u[0] Frx = self.mass*acc else: # rolling friction and drag are modeled pwm = u[0] Frx = (self.Cm1-self.Cm2*vx)*pwm - self.Cr0 - self.Cr2*(vx**2) alphaf = steer - np.arctan2((self.lf*omega + vy), abs(vx)) alphar = np.arctan2((self.lr*omega - vy), abs(vx)) Ffy = self.Df * np.sin(self.Cf * np.arctan(self.Bf * alphaf)) Fry = self.Dr * np.sin(self.Cr * np.arctan(self.Br * alphar)) if return_slip: return Ffy, Frx, Fry, alphaf, alphar else: return Ffy, Frx, Fry def casadi(self, x, u, dxdt): """ write dynamics as first order ODE: dxdt = f(x(t)) x is a 6x1 vector: [x, y, psi, vx, vy, omega]^T u is a 2x1 vector: [acc/pwm, steer]^T dxdt is a casadi.SX variable """ pwm = u[0] steer = u[1] psi = x[2] vx = x[3] vy = x[4] omega = x[5] vmin = 0.05 vy = cs.if_else(vx<vmin, 0, vy) omega = cs.if_else(vx<vmin, 0, omega) steer = cs.if_else(vx<vmin, 0, steer) vx = cs.if_else(vx<vmin, vmin, vx) Frx = (self.Cm1-self.Cm2*vx)*pwm - self.Cr0 - self.Cr2*(vx**2) alphaf = steer - cs.atan2((self.lf*omega + vy), vx) alphar = cs.atan2((self.lr*omega - vy), vx) Ffy = self.Df * cs.sin(self.Cf * cs.arctan(self.Bf * alphaf)) Fry = self.Dr * cs.sin(self.Cr * cs.arctan(self.Br * alphar)) dxdt[0] = vx*cs.cos(psi) - vy*cs.sin(psi) dxdt[1] = vx*cs.sin(psi) + vy*cs.cos(psi) dxdt[2] = omega dxdt[3] = 1/self.mass * (Frx - Ffy*cs.sin(steer)) + vy*omega dxdt[4] = 1/self.mass * (Fry 
+ Ffy*cs.cos(steer)) - vx*omega dxdt[5] = 1/self.Iz * (Ffy*self.lf*cs.cos(steer) - Fry*self.lr) return dxdt def sim_discrete(self, x0, u, Ts): """ simulates a continuously linearized discrete model u is the input vector of size 2xn Ts is the sampling time """ n_steps = u.shape[1] x = np.zeros([6, n_steps+1]) dxdt = np.zeros([6, n_steps+1]) dxdt[:,0] = self._diffequation(None, x0, [0, 0]) x[:,0] = x0 for ids in range(1, n_steps+1): g = self._diffequation(None, x[:,ids-1], u[:,ids-1]).reshape(-1,) x[:,ids] = x[:,ids-1] + g*Ts dxdt[:,ids] = self._diffequation(None, x[:,ids], u[:,ids-1]) return x, dxdt def linearize(self, x0, u0): """ linearize at a given x0, u0 for a given continuous system dxdt = f(x(t)) calculate A = ∂f/∂x, B = ∂f/∂u, g = f evaluated at x0, u0 A is 6x6, B is 6x2, g is 6x1 """ steer = u0[1] psi = x0[2] vx = x0[3] vy = x0[4] omega = x0[5] # numerical correction for low speeds vmin = 0.05 if vx < vmin: vy = 0 omega = 0 steer = 0 vx = vmin sindelta = np.sin(steer) cosdelta = np.cos(steer) sinpsi = np.sin(psi) cospsi = np.cos(psi) Ffy, Frx, Fry, alphaf, alphar = self.calc_forces(x0, u0, return_slip=True) if self.approx: dFfy_dvx = 2 * self.Cf * (self.lf*omega + vy)/((self.lf*omega + vy)**2 + vx**2) dFfy_dvy = -2 * self.Cf * vx/((self.lf*omega + vy)**2 + vx**2) dFfy_domega = -2 * self.Cf * self.lf * vx/((self.lf*omega + vy)**2 + vx**2) dFrx_dvx = 0 dFrx_dvu1 = 1 dFry_dvx = -2 * self.Cr * (self.lr*omega - vy)/((self.lr*omega - vy)**2 + vx**2) dFry_dvy = -2 * self.Cr * vx/((self.lr*omega - vy)**2 + vx**2) dFry_domega = 2 * self.Cr * self.lr * vx/((self.lr*omega - vy)**2 + vx**2) dFfy_delta = 2*self.Cf else: dFfy_dalphaf = self.Bf * self.Cf * self.Df * np.cos(self.Cf * np.arctan(self.Bf * alphaf)) dFfy_dalphaf *= 1/(1+(self.Bf*alphaf)**2) dFry_dalphar = self.Br * self.Cr * self.Dr * np.cos(self.Cr * np.arctan(self.Br * alphar)) dFry_dalphar *= 1/(1+(self.Br*alphar)**2) dFfy_dvx = dFfy_dalphaf * (self.lf*omega + vy)/((self.lf*omega + vy)**2 + vx**2) dFfy_dvy 
= -dFfy_dalphaf * vx/((self.lf*omega + vy)**2 + vx**2) dFfy_domega = -dFfy_dalphaf * self.lf * vx/((self.lf*omega + vy)**2 + vx**2) if self.input_acc: raise NotImplementedError pwm = u0[0] dFrx_dvx = -self.Cm2*pwm - 2*self.Cr2*vx dFrx_dvu1 = self.Cm1-self.Cm2*vx dFry_dvx = -dFry_dalphar * (self.lr*omega - vy)/((self.lr*omega - vy)**2 + vx**2) dFry_dvy = -dFry_dalphar * vx/((self.lr*omega - vy)**2 + vx**2) dFry_domega = dFry_dalphar * self.lr * vx/((self.lr*omega - vy)**2 + vx**2) dFfy_delta = dFfy_dalphaf f1_psi = -vx*sinpsi-vy*cospsi f1_vx = cospsi f1_vy = -sinpsi f2_psi = vx*cospsi-vy*sinpsi f2_vx = sinpsi f2_vy = cospsi f4_vx = 1/self.mass * (dFrx_dvx -dFfy_dvx*sindelta) f4_vy = 1/self.mass * (-dFfy_dvy*sindelta + self.mass*omega) f4_omega = 1/self.mass * (-dFfy_domega*sindelta + self.mass*vy) f5_vx = 1/self.mass * (dFry_dvx + dFfy_dvx*cosdelta - self.mass*omega) f5_vy = 1/self.mass * (dFry_dvy + dFfy_dvy*cosdelta) f5_omega = 1/self.mass * (dFry_domega + dFfy_domega*cosdelta - self.mass*vx) f6_vx = 1/self.Iz * (dFfy_dvx*self.lf*cosdelta - dFry_dvx*self.lr) f6_vy = 1/self.Iz * (dFfy_dvy*self.lf*cosdelta - dFry_dvy*self.lr) f6_omega = 1/self.Iz * (dFfy_domega*self.lf*cosdelta - dFry_domega*self.lr) f4_u1 = dFrx_dvu1 f4_delta = 1/self.mass * (-dFfy_delta*sindelta - Ffy*cosdelta) f5_delta = 1/self.mass * (dFfy_delta*cosdelta - Ffy*sindelta) f6_delta = 1/self.Iz * (dFfy_delta*self.lf*cosdelta - Ffy*self.lf*sindelta) A = np.array([ [0, 0, f1_psi, f1_vx, f1_vy, 0], [0, 0, f2_psi, f2_vx, f2_vy, 0], [0, 0, 0, 0, 0, 1], [0, 0, 0, f4_vx, f4_vy, f4_omega], [0, 0, 0, f5_vx, f5_vy, f5_omega], [0, 0, 0, f6_vx, f6_vy, f6_omega], ]) B = np.array([ [0, 0], [0, 0], [0, 0], [f4_u1, f4_delta], [0, f5_delta], [0, f6_delta], ]) g = self._diffequation(None, x0, u0).reshape(-1,) return A, B, g if __name__ == '__main__': """ test cases 1-3 use 4 states continuous model test cases 4-6 use 4 states discrete model test pairs (1,4), (2,5) and (3,6) should give same results """ # vehicle 
parameters for F1/10 params = F110() model = Dynamic(**params) test_case = 3 ##################################################################### # CONTINUOUS MODEL 6 STATES # start at origin with init velocity [3, 3] m/s # apply constant acceleration 1 m/s^2 for 1s and then move at constant speed if test_case == 1: n_steps = 100 inputs = np.zeros([2, n_steps]) time = np.linspace(0, 2, n_steps+1) inputs[0,:50] = 1 x_init = np.array([0, 0, np.pi/4, 3*np.sqrt(2), 0, 0]) x_cont, dxdt_cont = model.sim_continuous(x_init, inputs, time) model.plot_results(time, x_cont, dxdt_cont, inputs) # start at origin with init velocity [3, 0] m/s # steer at constant angle 0.2 rad if test_case == 2: n_steps = 200 inputs = np.zeros([2, n_steps]) time = np.linspace(0, 4, n_steps+1) inputs[1,:] = 0.2 x_init = np.array([0, 0, 0, 3, 0, 0]) x_cont, dxdt_cont = model.sim_continuous(x_init, inputs, time) model.plot_results(time, x_cont, dxdt_cont, inputs) # start at origin with init velocity [3, 0] m/s # steer at constant angle 0.2 rad after 2 sec if test_case == 3: n_steps = 400 inputs = np.zeros([2, n_steps]) inputs[1,100:] = 0.2 time = np.linspace(0, 8, n_steps+1) x_init = np.array([0, 0, 0, 3, 0, 0]) x_cont, dxdt_cont = model.sim_continuous(x_init, inputs, time) model.plot_results(time, x_cont, dxdt_cont, inputs) ##################################################################### # DISCRETE MODEL 6 STATES # start at origin with init velocity [3, 3] m/s # apply constant acceleration 1 m/s^2 for 1s and then move at constant speed if test_case == 4: Ts = 0.02 n_steps = int(2/Ts) inputs = np.zeros([2, n_steps]) time = np.linspace(0, n_steps+1, n_steps+1)*Ts inputs[0,:int(n_steps/2)] = 1 x_init = np.array([0, 0, np.pi/4, 3*np.sqrt(2), 0, 0]) x_disc, dxdt_disc = model.sim_discrete(x_init, inputs, Ts) model.plot_results(time, x_disc, dxdt_disc, inputs) # start at origin with init velocity [3, 0] m/s # steer at constant angle 0.2 rad if test_case == 5: Ts = 0.02 n_steps = int(4/Ts) inputs = 
np.zeros([2, n_steps]) time = np.linspace(0, n_steps+1, n_steps+1)*Ts inputs[1,:] = 0.2 x_init = np.array([0, 0, 0, 3, 0, 0]) x_disc, dxdt_disc = model.sim_discrete(x_init, inputs, Ts) model.plot_results(time, x_disc, dxdt_disc, inputs) # start at origin with init velocity [3, 0] m/s # steer at constant angle 0.2 rad after 2 sec if test_case == 6: Ts = 0.02 n_steps = int(8/Ts) inputs = np.zeros([2, n_steps]) inputs[1,int(n_steps/4):] = 0.2 time = np.linspace(0, n_steps+1, n_steps+1)*Ts x_init = np.array([0, 0, 0, 3, 0, 0]) x_disc, dxdt_disc = model.sim_discrete(x_init, inputs, Ts) model.plot_results(time, x_disc, dxdt_disc, inputs)
Python
MIT
KlrShaK/bayesrace/bayes_race/models/dynamic.py
c2ad1fed-f18c-4f35-b0a4-aa26d062fe52
[{"tag": "EMAIL", "value": "achinj@seas.upenn.edu", "start": 285, "end": 306, "context": "dels\n\n\"\"\"\n\n__author__ = 'Achin Jain'\n__email__ = 'achinj@seas.upenn.edu'\n\n\nimport numpy as np\nimport casadi as cs\nfrom ba"}, {"tag": "NAME", "value": "Achin Jain", "start": 260, "end": 270, "context": "ontinuous and discrete models\n\n\"\"\"\n\n__author__ = 'Achin Jain'\n__email__ = 'achinj@seas.upenn.edu'\n\n\nimport num"}]
[{"tag": "EMAIL", "value": "achinj@seas.upenn.edu", "start": 285, "end": 306, "context": "dels\n\n\"\"\"\n\n__author__ = 'Achin Jain'\n__email__ = 'achinj@seas.upenn.edu'\n\n\nimport numpy as np\nimport casadi as cs\nfrom ba"}, {"tag": "NAME", "value": "Achin Jain", "start": 260, "end": 270, "context": "ontinuous and discrete models\n\n\"\"\"\n\n__author__ = 'Achin Jain'\n__email__ = 'achinj@seas.upenn.edu'\n\n\nimport num"}]
// Copyright (c) 2017-2018, The Enro Project // // All rights reserved. // // Redistribution and use in source and binary forms, with or without modification, are // permitted provided that the following conditions are met: // // 1. Redistributions of source code must retain the above copyright notice, this list of // conditions and the following disclaimer. // // 2. Redistributions in binary form must reproduce the above copyright notice, this list // of conditions and the following disclaimer in the documentation and/or other // materials provided with the distribution. // // 3. Neither the name of the copyright holder nor the names of its contributors may be // used to endorse or promote products derived from this software without specific // prior written permission. // // THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND ANY // EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF // MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL // THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, // SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, // PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS // INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, // STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF // THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE. 
#include "include_base_utils.h" #include "file_io_utils.h" #include "cryptonote_basic/blobdatatype.h" #include "cryptonote_basic/cryptonote_basic.h" #include "cryptonote_basic/cryptonote_format_utils.h" #include "wallet/wallet2.h" #include "fuzzer.h" class ColdOutputsFuzzer: public Fuzzer { public: ColdOutputsFuzzer(): wallet(cryptonote::TESTNET) {} virtual int init(); virtual int run(const std::string &filename); private: tools::wallet2 wallet; }; int ColdOutputsFuzzer::init() { static const char * const spendkey_hex = "0b4f47697ec99c3de6579304e5f25c68b07afbe55b71d99620bf6cbf4e45a80f"; crypto::secret_key spendkey; epee::string_tools::hex_to_pod(spendkey_hex, spendkey); try { wallet.init(""); wallet.set_subaddress_lookahead(1, 1); wallet.generate("", "", spendkey, true, false); } catch (const std::exception &e) { std::cerr << "Error on ColdOutputsFuzzer::init: " << e.what() << std::endl; return 1; } return 0; } int ColdOutputsFuzzer::run(const std::string &filename) { std::string s; if (!epee::file_io_utils::load_file_to_string(filename, s)) { std::cout << "Error: failed to load file " << filename << std::endl; return 1; } s = std::string("\x01\x16serialization::archive") + s; try { std::vector<tools::wallet2::transfer_details> outputs; std::stringstream iss; iss << s; boost::archive::portable_binary_iarchive ar(iss); ar >> outputs; size_t n_outputs = wallet.import_outputs(outputs); std::cout << boost::lexical_cast<std::string>(n_outputs) << " outputs imported" << std::endl; } catch (const std::exception &e) { std::cerr << "Failed to import outputs: " << e.what() << std::endl; return 1; } return 0; } int main(int argc, const char **argv) { ColdOutputsFuzzer fuzzer; return run_fuzzer(argc, argv, fuzzer); }
C++
BSD-3-Clause
enro-project/enro/tests/fuzz/cold-outputs.cpp
1a278e47-d16c-4cb3-93de-d0348c87d58e
[{"tag": "API_KEY", "value": "0b4f47697ec99c3de6579304e5f25c68b07afbe55b71d99620bf6cbf4e45a80f", "start": 2130, "end": 2194, "context": "t()\n{\n static const char * const spendkey_hex = \"0b4f47697ec99c3de6579304e5f25c68b07afbe55b71d99620bf6cbf4e45a80f\";\n crypto::secret_key spendkey;\n epee::string_t"}]
[{"tag": "KEY", "value": "0b4f47697ec99c3de6579304e5f25c68b07afbe55b71d99620bf6cbf4e45a80f", "start": 2130, "end": 2194, "context": "t()\n{\n static const char * const spendkey_hex = \"0b4f47697ec99c3de6579304e5f25c68b07afbe55b71d99620bf6cbf4e45a80f\";\n crypto::secret_key spendkey;\n epee::string_t"}]
<?php namespace Adapter\facades; use Adapter\ParcelAdapter; use Adapter\RequestHelper; use Adapter\ResponseHandler; use Yii; /** * Class ParcelDraftSortDiscardFacade * @author Adeyemi Olaoye <yemi@cottacush.com> * @package Adapter\facades */ class ParcelDraftSortDiscardFacade extends BulkOperationFacade { /** * @author Adeyemi Olaoye <yemi@cottacush.com> * @param $data * @return ResponseHandler */ public function doRequest($data) { $parcelsAdapter = new ParcelAdapter(RequestHelper::getClientID(), RequestHelper::getAccessToken()); $response = $parcelsAdapter->discardDraftSort($data); return $response; } /** * @author Adeyemi Olaoye <yemi@cottacush.com> * @return string */ public function getSuccessfulItemsMessage() { return parent::getSuccessfulItemsMessage('Discarded draft sortings'); } /** * @author Adeyemi Olaoye <yemi@cottacush.com> * @return string */ public function getFailedItemsMessage() { return parent::getFailedItemsMessage('Failed to discard some draft sortings'); } /** * Message to display when bulk operation is fully successful * @author Adeyemi Olaoye <yemi@cottacush.com> * @return mixed */ public function getFullySuccessfulMessage() { return 'Draft sortings successfully discarded'; } }
PHP
MIT
megamsquare/crm-client/app/Adapter/facades/ParcelDraftSortDiscardFacade.php
378b64a6-4597-4617-b6eb-b808bbd62781
[{"tag": "EMAIL", "value": "yemi@cottacush.com", "start": 197, "end": 215, "context": "DraftSortDiscardFacade\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @package Adapter\\facades\n */\nclass ParcelDraf"}, {"tag": "NAME", "value": "Adeyemi Olaoye", "start": 181, "end": 195, "context": "\n * Class ParcelDraftSortDiscardFacade\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @package Adapter\\facades\n"}, {"tag": "NAME", "value": "Adeyemi Olaoye", "start": 1229, "end": 1243, "context": "bulk operation is fully successful\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @return mixed\n */"}, {"tag": "EMAIL", "value": "yemi@cottacush.com", "start": 1245, "end": 1263, "context": "s fully successful\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @return mixed\n */\n public function"}]
[{"tag": "EMAIL", "value": "yemi@cottacush.com", "start": 197, "end": 215, "context": "DraftSortDiscardFacade\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @package Adapter\\facades\n */\nclass ParcelDraf"}, {"tag": "NAME", "value": "Adeyemi Olaoye", "start": 181, "end": 195, "context": "\n * Class ParcelDraftSortDiscardFacade\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @package Adapter\\facades\n"}, {"tag": "NAME", "value": "Adeyemi Olaoye", "start": 1229, "end": 1243, "context": "bulk operation is fully successful\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @return mixed\n */"}, {"tag": "EMAIL", "value": "yemi@cottacush.com", "start": 1245, "end": 1263, "context": "s fully successful\n * @author Adeyemi Olaoye <yemi@cottacush.com>\n * @return mixed\n */\n public function"}]
package logger import ( "fmt" "github.com/denisbrodbeck/machineid" "io/ioutil" "net" "os" "path" "runtime" "runtime/debug" "strings" "time" ) const ( Debug = "DEBUG" Info = "INFO " Warn = "WARN " Error = "ERROR" Fatal = "FATAL" ) const ( loggerPathPrefix = "utils/log/logger/logger.go" errorsFile = "gmc_errors.log" ) var ( baseFilePathLength = getBaseFileBathLength() StdLogger = NewLogger(fmt.Sprintf("%s:gmc:", getLogID())) ) type Logger struct { prefix string // prefix to write at beginning of each line isWindows bool udpLogger *net.UDPConn } func RedirectStdErrorToFile() { // The error log file in users home dir home, err := os.UserHomeDir() if err != nil { StdLogger.Fatal(err) } errorsPath := path.Join(home, errorsFile) // Log previous error for last instance if it exists previousErrorData, err := ioutil.ReadFile(errorsPath) previousError := string(previousErrorData) if previousError != "" { fileTime := time.Now() info, err2 := os.Stat(errorsPath) if err2 == nil { fileTime = info.ModTime() } StdLogger.Errorf("Previous instance error at %v:\n%s", fileTime, previousError) } _ = os.Remove(errorsPath) // Redirect std error to error file ef, err := os.OpenFile(errorsPath, os.O_CREATE|os.O_WRONLY|os.O_APPEND, 0660) if err != nil { StdLogger.Fatal(err) } redirectStdErrToFile(ef) } func NewLogger(prefix string) *Logger { // fmt.Printf("log target: %s\n", logTarget) remoteAddr, err := net.ResolveUDPAddr("udp", "192.168.0.9:40000") if err != nil { panic(err) } localAddr, err := net.ResolveUDPAddr("udp", "0.0.0.0:0") if err != nil { panic(err) } udp, err := net.DialUDP("udp", localAddr, remoteAddr) if err != nil { panic(err) } udpLogger := udp return &Logger{prefix: prefix, isWindows: runtime.GOOS == "windows", udpLogger: udpLogger} } func (l *Logger) Debugf(format string, v ...interface{}) { l.Output(Debug, fmt.Sprintf(format, v...)) } func (l *Logger) Infof(format string, v ...interface{}) { l.Output(Info, fmt.Sprintf(format, v...)) } func (l *Logger) Warnf(format string, 
v ...interface{}) { l.Output(Warn, fmt.Sprintf(format, v...)) } func (l *Logger) Errorf(format string, v ...interface{}) { l.Output(Error, fmt.Sprintf(format, v...)) } func (l *Logger) Fatalf(err error, format string, v ...interface{}) string { msg := fmt.Sprintf(format, v...) emsg := fmt.Sprintf("%s, %v\n%s", msg, err, debug.Stack()) l.Output(Fatal, emsg) l.FatalError(err, msg) return emsg } func (l *Logger) Fatal(err error, v ...interface{}) string { msg := fmt.Sprint(v...) emsg := err.Error() emsg = fmt.Sprintf("%s, %v\n%s", msg, err, debug.Stack()) l.Output(Fatal, emsg) l.FatalError(err, msg) return emsg } func (l *Logger) Output(level string, msg string) { l.output(level, msg) } func (l *Logger) Outputf(level string, format string, v ...interface{}) { l.output(level, fmt.Sprintf(format, v...)) } func (l *Logger) output(level, message string) { //now := time.Now() file, line, function := l.getCallerInfo() if len(file) > baseFilePathLength { file = file[baseFilePathLength:] } StdTelemetry.SendTrace(level, fmt.Sprintf("%s:%s(%d) %s", file, function, line, message)) lines := strings.Split(message, "\n") for _, ml := range lines { txt := fmt.Sprintf("%s%s %s:%s(%d) %s", l.prefix, level, file, function, line, ml) _, _ = l.udpLogger.Write([]byte(txt)) } } func (l *Logger) getCallerInfo() (string, int, string) { _, file, line, function, _ := caller(6) i := strings.LastIndex(function, ".") if i != -1 { function = function[i+1:] } return file, line, function } func (l *Logger) FatalError(err error, msg string) { StdTelemetry.SendFatalf(err, msg) } func getBaseFileBathLength() int { _, file, _, ok := runtime.Caller(0) if !ok { return 0 } if len(file) > len(loggerPathPrefix) { return len(file) - len(loggerPathPrefix) } return 0 } func caller(skip int) (pc uintptr, file string, line int, function string, ok bool) { rpc := make([]uintptr, 1) n := runtime.Callers(skip+1, rpc[:]) if n < 1 { return } frame, _ := runtime.CallersFrames(rpc).Next() return frame.PC, frame.File, 
frame.Line, frame.Function, frame.PC != 0 } func getLogID() string { id, err := machineid.ProtectedID("gmc") if err != nil { panic(err) } return strings.ToUpper(id[:4]) }
GO
MIT
michael-reichenauer/gmc/utils/log/logger/logger.go
70274ec5-aba2-4390-a607-6824cf8a1fa0
[{"tag": "USERNAME", "value": "denisbrodbeck", "start": 45, "end": 58, "context": "package logger\n\nimport (\n\t\"fmt\"\n\t\"github.com/denisbrodbeck/machineid\"\n\t\"io/ioutil\"\n\t\"net\"\n\t\"os\"\n\t\"path\"\n\t\"ru"}]
[{"tag": "USERNAME", "value": "denisbrodbeck", "start": 45, "end": 58, "context": "package logger\n\nimport (\n\t\"fmt\"\n\t\"github.com/denisbrodbeck/machineid\"\n\t\"io/ioutil\"\n\t\"net\"\n\t\"os\"\n\t\"path\"\n\t\"ru"}]
/* * Copyright (c) 2019 Jack Poulson <jack@hodgestar.com> * * This Source Code Form is subject to the terms of the Mozilla Public * License, v. 2.0. If a copy of the MPL was not distributed with this * file, You can obtain one at http://mozilla.org/MPL/2.0/. */ #include <iostream> #include "mantis.hpp" template <typename Real> void RunTest() { const int num_bits = std::numeric_limits<mantis::DoubleMantissa<Real>>::digits; const mantis::DoubleMantissa<Real> epsilon = std::numeric_limits<mantis::DoubleMantissa<Real>>::epsilon(); std::cout << "num bits: " << num_bits << ", epsilon: " << epsilon << std::endl; const mantis::DoubleMantissa<Real> x("1.2345678901234567890123456789012e1"); const mantis::DecimalNotation y_decimal{true, 1, std::vector<unsigned char>{ 1_uchar, 2_uchar, 3_uchar, 4_uchar, 5_uchar, 6_uchar, 7_uchar, 8_uchar, 9_uchar, 0_uchar, 1_uchar, 2_uchar, 3_uchar, 4_uchar, 5_uchar, 6_uchar, 7_uchar, 8_uchar, 9_uchar, 0_uchar, 1_uchar, 2_uchar, 3_uchar, 4_uchar, 5_uchar, 6_uchar, 7_uchar, 8_uchar, 9_uchar, 0_uchar, 1_uchar, 2_uchar}}; const mantis::DoubleMantissa<Real> y(y_decimal); const mantis::DoubleMantissa<Real> z = x - y; std::cout << "x: " << x << ",\ny: " << y << ",\nx - y: " << z << std::endl; const mantis::DoubleMantissa<Real> x_exp = std::exp(x); const mantis::DoubleMantissa<Real> x_exp_log = std::log(x_exp); const mantis::DoubleMantissa<Real> x_exp_log_error = x - x_exp_log; std::cout << "exp(x): " << x_exp << ",\nlog(exp(x)): " << x_exp_log << ",\nx - log(exp(x)): " << x_exp_log_error << std::endl; const mantis::BinaryNotation x_binary = x.ToBinary(num_bits); std::cout << "x binary: " << x_binary.ToString() << std::endl; std::mt19937 generator(17u); std::uniform_real_distribution<mantis::DoubleMantissa<Real>> uniform_dist; const int num_samples = 1000000; mantis::DoubleMantissa<Real> average; for (int sample = 0; sample < num_samples; ++sample) { average += uniform_dist(generator) / Real(num_samples); } std::cout << "Average of " << num_samples 
<< " uniform samples: " << average << std::endl; std::normal_distribution<mantis::DoubleMantissa<Real>> normal_dist; average = Real(0.); for (int sample = 0; sample < num_samples; ++sample) { average += normal_dist(generator) / Real(num_samples); } std::cout << "Average of " << num_samples << " normal samples: " << average << std::endl; } int main(int argc, char* argv[]) { std::cout << "Testing with DoubleMantissa<float>:" << std::endl; RunTest<float>(); std::cout << std::endl; std::cout << "Testing with DoubleMantissa<double>:" << std::endl; RunTest<double>(); return 0; }
C++
MPL-2.0
jpanetta/mantis/example/basic.cc
e06558fb-5b14-4ed3-896f-1ac6bd7df5b4
[{"tag": "NAME", "value": "Jack Poulson", "start": 25, "end": 37, "context": "/*\n * Copyright (c) 2019 Jack Poulson <jack@hodgestar.com>\n *\n * This Source Code Form "}, {"tag": "EMAIL", "value": "jack@hodgestar.com", "start": 39, "end": 57, "context": "/*\n * Copyright (c) 2019 Jack Poulson <jack@hodgestar.com>\n *\n * This Source Code Form is subject to the te"}]
[{"tag": "NAME", "value": "Jack Poulson", "start": 25, "end": 37, "context": "/*\n * Copyright (c) 2019 Jack Poulson <jack@hodgestar.com>\n *\n * This Source Code Form "}, {"tag": "EMAIL", "value": "jack@hodgestar.com", "start": 39, "end": 57, "context": "/*\n * Copyright (c) 2019 Jack Poulson <jack@hodgestar.com>\n *\n * This Source Code Form is subject to the te"}]
cask 'ubiquiti-unifi-controller' do version '5.6.26' sha256 'fab2a2b6c53dd90bd2aeb7a73b72ca60ff08f4fd12c031bbcfd5da995c0bec6b' url "https://dl.ubnt.com/unifi/#{version}/UniFi.pkg" name 'UniFi Controller' homepage 'https://www.ubnt.com/download/unifi/' conflicts_with cask: 'ubiquiti-unifi-controller-lts' pkg 'UniFi.pkg' postflight do set_ownership '~/Library/Application Support/UniFi' end uninstall pkgutil: 'com.ubnt.UniFi' end
Ruby
Unlicense
StephenDiIorio/homebrew-drivers/Casks/ubiquiti-unifi-controller.rb
611b1774-5a2f-4025-992e-bea100ddbf44
[]
[]
from operator import attrgetter import pyangbind.lib.xpathhelper as xpathhelper from pyangbind.lib.yangtypes import RestrictedPrecisionDecimalType, RestrictedClassType, TypedListType from pyangbind.lib.yangtypes import YANGBool, YANGListType, YANGDynClass, ReferenceType from pyangbind.lib.base import PybindBase from decimal import Decimal from bitarray import bitarray import __builtin__ import vlan class access(PybindBase): """ This class was auto-generated by the PythonClass plugin for PYANG from YANG module brocade-interface - based on the path /interface/hundredgigabitethernet/switchport/access-mac-group-rspan-vlan-classification/access. Each member element of the container is represented as a class variable - with a specific YANG type. YANG Description: The access layer characteristics of this interface. """ __slots__ = ('_pybind_generated_by', '_path_helper', '_yang_name', '_rest_name', '_extmethods', '__vlan',) _yang_name = 'access' _rest_name = 'access' _pybind_generated_by = 'container' def __init__(self, *args, **kwargs): path_helper_ = kwargs.pop("path_helper", None) if path_helper_ is False: self._path_helper = False elif path_helper_ is not None and isinstance(path_helper_, xpathhelper.YANGPathHelper): self._path_helper = path_helper_ elif hasattr(self, "_parent"): path_helper_ = getattr(self._parent, "_path_helper", False) self._path_helper = path_helper_ else: self._path_helper = False extmethods = kwargs.pop("extmethods", None) if extmethods is False: self._extmethods = False elif extmethods is not None and isinstance(extmethods, dict): self._extmethods = extmethods elif hasattr(self, "_parent"): extmethods = getattr(self._parent, "_extmethods", None) self._extmethods = extmethods else: self._extmethods = False self.__vlan = YANGDynClass(base=YANGListType("access_vlan_id access_mac_group",vlan.vlan, yang_name="vlan", rest_name="rspan-vlan", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='access-vlan-id 
access-mac-group', extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}), is_container='list', yang_name="vlan", rest_name="rspan-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True) load = kwargs.pop("load", None) if args: if len(args) > 1: raise TypeError("cannot create a YANG container with >1 argument") all_attr = True for e in self._pyangbind_elements: if not hasattr(args[0], e): all_attr = False break if not all_attr: raise ValueError("Supplied object did not have the correct attributes") for e in self._pyangbind_elements: nobj = getattr(args[0], e) if nobj._changed() is False: continue setmethod = getattr(self, "_set_%s" % e) if load is None: setmethod(getattr(args[0], e)) else: setmethod(getattr(args[0], e), load=load) def _path(self): if hasattr(self, "_parent"): return self._parent._path()+[self._yang_name] else: return [u'interface', u'hundredgigabitethernet', u'switchport', u'access-mac-group-rspan-vlan-classification', u'access'] def _rest_path(self): if hasattr(self, "_parent"): if self._rest_name: return self._parent._rest_path()+[self._rest_name] else: return self._parent._rest_path() else: return [u'interface', u'HundredGigabitEthernet', u'switchport', u'access'] def _get_vlan(self): """ Getter method for vlan, mapped from YANG variable /interface/hundredgigabitethernet/switchport/access_mac_group_rspan_vlan_classification/access/vlan (list) """ return self.__vlan def _set_vlan(self, v, load=False): """ Setter 
method for vlan, mapped from YANG variable /interface/hundredgigabitethernet/switchport/access_mac_group_rspan_vlan_classification/access/vlan (list) If this variable is read-only (config: false) in the source YANG file, then _set_vlan is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_vlan() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=YANGListType("access_vlan_id access_mac_group",vlan.vlan, yang_name="vlan", rest_name="rspan-vlan", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='access-vlan-id access-mac-group', extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}), is_container='list', yang_name="vlan", rest_name="rspan-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """vlan must be of a type compatible with list""", 'defined-type': "list", 'generated-type': """YANGDynClass(base=YANGListType("access_vlan_id access_mac_group",vlan.vlan, yang_name="vlan", rest_name="rspan-vlan", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='access-vlan-id access-mac-group', extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': 
u'rspan-vlan'}}), is_container='list', yang_name="vlan", rest_name="rspan-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True)""", }) self.__vlan = t if hasattr(self, '_set'): self._set() def _unset_vlan(self): self.__vlan = YANGDynClass(base=YANGListType("access_vlan_id access_mac_group",vlan.vlan, yang_name="vlan", rest_name="rspan-vlan", parent=self, is_container='list', user_ordered=False, path_helper=self._path_helper, yang_keys='access-vlan-id access-mac-group', extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}), is_container='list', yang_name="vlan", rest_name="rspan-vlan", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'callpoint': u'rspan-mac-group-vlan-classification-config-phy', u'cli-suppress-list-no': None, u'cli-no-key-completion': None, u'cli-suppress-mode': None, u'alt-name': u'rspan-vlan'}}, namespace='urn:brocade.com:mgmt:brocade-interface', defining_module='brocade-interface', yang_type='list', is_config=True) vlan = __builtin__.property(_get_vlan, _set_vlan) _pyangbind_elements = {'vlan': vlan, }
Python
Apache-2.0
extremenetworks/pybind/pybind/nos/v6_0_2f/interface/hundredgigabitethernet/switchport/access_mac_group_rspan_vlan_classification/access/__init__.py
46720a95-bdf9-466b-bd17-33c06424cb54
[]
[]
<?php namespace Realodix\Utils\Test\Numbers; trait IbanTestProvider { public function verifyProvider() { return [ ['CH9300762011623852957'], // Switzerland without spaces ['CH93 0076 2011 6238 5295 7'], // Switzerland with multiple spaces // Country list // http://www.rbs.co.uk/corporate/international/g0/guide-to-international-business/regulatory-information/iban/iban-example.ashx ['AL47 2121 1009 0000 0002 3569 8741'], // Albania ['AD12 0001 2030 2003 5910 0100'], // Andorra ['AT61 1904 3002 3457 3201'], // Austria ['AZ21 NABZ 0000 0000 1370 1000 1944'], // Azerbaijan ['BH67 BMAG 0000 1299 1234 56'], // Bahrain ['BE62 5100 0754 7061'], // Belgium ['BA39 1290 0794 0102 8494'], // Bosnia and Herzegovina ['BG80 BNBG 9661 1020 3456 78'], // Bulgaria ['BY 13 NBRB 3600 900000002Z00AB00'], // Belarus ['BY13 NBRB 3600 900000002Z00AB00'], // Belarus ['BY22NB23324232T78YR7823HR32U'], // Belarus ['HR12 1001 0051 8630 0016 0'], // Croatia ['CY17 0020 0128 0000 0012 0052 7600'], // Cyprus ['CZ65 0800 0000 1920 0014 5399'], // Czech Republic ['DK50 0040 0440 1162 43'], // Denmark ['EE38 2200 2210 2014 5685'], // Estonia ['FO97 5432 0388 8999 44'], // Faroe Islands ['FI21 1234 5600 0007 85'], // Finland ['FR14 2004 1010 0505 0001 3M02 606'], // France ['GE29 NB00 0000 0101 9049 17'], // Georgia ['DE89 3704 0044 0532 0130 00'], // Germany ['GI75 NWBK 0000 0000 7099 453'], // Gibraltar ['GR16 0110 1250 0000 0001 2300 695'], // Greece ['GL56 0444 9876 5432 10'], // Greenland ['HU42 1177 3016 1111 1018 0000 0000'], // Hungary ['IS14 0159 2600 7654 5510 7303 39'], // Iceland ['IE29 AIBK 9311 5212 3456 78'], // Ireland ['IL62 0108 0000 0009 9999 999'], // Israel ['IT40 S054 2811 1010 0000 0123 456'], // Italy ['LV80 BANK 0000 4351 9500 1'], // Latvia ['LB62 0999 0000 0001 0019 0122 9114'], // Lebanon ['LI21 0881 0000 2324 013A A'], // Liechtenstein ['LT12 1000 0111 0100 1000'], // Lithuania ['LU28 0019 4006 4475 0000'], // Luxembourg ['MK072 5012 0000 0589 84'], // Macedonia ['MT84 
MALT 0110 0001 2345 MTLC AST0 01S'], // Malta ['MU17 BOMM 0101 1010 3030 0200 000M UR'], // Mauritius ['MD24 AG00 0225 1000 1310 4168'], // Moldova ['MC93 2005 2222 1001 1223 3M44 555'], // Monaco ['ME25 5050 0001 2345 6789 51'], // Montenegro ['NL39 RABO 0300 0652 64'], // Netherlands ['NO93 8601 1117 947'], // Norway ['PK36 SCBL 0000 0011 2345 6702'], // Pakistan ['PL60 1020 1026 0000 0422 7020 1111'], // Poland ['PT50 0002 0123 1234 5678 9015 4'], // Portugal ['RO49 AAAA 1B31 0075 9384 0000'], // Romania ['SM86 U032 2509 8000 0000 0270 100'], // San Marino ['SA03 8000 0000 6080 1016 7519'], // Saudi Arabia ['RS35 2600 0560 1001 6113 79'], // Serbia ['SK31 1200 0000 1987 4263 7541'], // Slovak Republic ['SI56 1910 0000 0123 438'], // Slovenia ['ES80 2310 0001 1800 0001 2345'], // Spain ['SE35 5000 0000 0549 1000 0003'], // Sweden ['CH93 0076 2011 6238 5295 7'], // Switzerland ['TN59 1000 6035 1835 9847 8831'], // Tunisia ['TR33 0006 1005 1978 6457 8413 26'], // Turkey ['AE07 0331 2345 6789 0123 456'], // UAE ['GB12 CPBK 0892 9965 0449 91'], // United Kingdom // Extended country list // http://www.nordea.com/Our+services/International+products+and+services/Cash+Management/IBAN+countries/908462.html // https://www.swift.com/sites/default/files/resources/iban_registry.pdf ['AO06000600000100037131174'], // Angola ['AZ21NABZ00000000137010001944'], // Azerbaijan ['BH29BMAG1299123456BH00'], // Bahrain ['BJ11B00610100400271101192591'], // Benin ['BR9700360305000010009795493P1'], // Brazil ['BR1800000000141455123924100C2'], // Brazil ['VG96VPVG0000012345678901'], // British Virgin Islands ['BF1030134020015400945000643'], // Burkina Faso ['BI43201011067444'], // Burundi ['CM2110003001000500000605306'], // Cameroon ['CV64000300004547069110176'], // Cape Verde ['FR7630007000110009970004942'], // Central African Republic ['CG5230011000202151234567890'], // Congo ['CR05015202001026284066'], // Costa Rica ['DO28BAGR00000001212453611324'], // Dominican Republic 
['GT82TRAJ01020000001210029690'], // Guatemala ['IR580540105180021273113007'], // Iran ['IL620108000000099999999'], // Israel ['CI05A00060174100178530011852'], // Ivory Coast ['JO94CBJO0010000000000131000302'], // Jordan ['KZ176010251000042993'], // Kazakhstan ['KW74NBOK0000000000001000372151'], // Kuwait ['LB30099900000001001925579115'], // Lebanon ['MG4600005030010101914016056'], // Madagascar ['ML03D00890170001002120000447'], // Mali ['MR1300012000010000002037372'], // Mauritania ['MU17BOMM0101101030300200000MUR'], // Mauritius ['MZ59000100000011834194157'], // Mozambique ['PS92PALS000000000400123456702'], // Palestinian Territory ['QA58DOHB00001234567890ABCDEFG'], // Qatar ['XK051212012345678906'], // Republic of Kosovo ['PT50000200000163099310355'], // Sao Tome and Principe ['SA0380000000608010167519'], // Saudi Arabia ['SN12K00100152000025690007542'], // Senegal ['TL380080012345678910157'], // Timor-Leste ['TN5914207207100707129648'], // Tunisia ['TR330006100519786457841326'], // Turkey ['UA213223130000026007233566001'], // Ukraine ['AE260211000000230064016'], // United Arab Emirates ['VA59001123000012345678'], // Vatican City State ]; } public function verifyWithInvalidFormatProvider() { return [ ['AL47 2121 1009 0000 0002 3569 874'], // Albania ['AD12 0001 2030 2003 5910 010'], // Andorra ['AT61 1904 3002 3457 320'], // Austria ['AZ21 NABZ 0000 0000 1370 1000 194'], // Azerbaijan ['AZ21 N1BZ 0000 0000 1370 1000 1944'], // Azerbaijan ['BH67 BMAG 0000 1299 1234 5'], // Bahrain ['BH67 B2AG 0000 1299 1234 56'], // Bahrain ['BE62 5100 0754 7061 2'], // Belgium ['BA39 1290 0794 0102 8494 4'], // Bosnia and Herzegovina ['BG80 BNBG 9661 1020 3456 7'], // Bulgaria ['BG80 B2BG 9661 1020 3456 78'], // Bulgaria ['BY 13 NBRB 3600 900000002Z00AB001'], // Belarus ['BY 13 NBRB 3600 900000002Z00AB0'], // Belarus ['BYRO NBRB 3600 900000002Z00AB0'], // Belarus ['BY 13 3600 NBRB 900000002Z00AB05'], // Belarus ['HR12 1001 0051 8630 0016 01'], // Croatia ['CY17 0020 0128 0000 
0012 0052 7600 1'], // Cyprus ['CZ65 0800 0000 1920 0014 5399 1'], // Czech Republic ['DK50 0040 0440 1162 431'], // Denmark ['EE38 2200 2210 2014 5685 1'], // Estonia ['FO97 5432 0388 8999 441'], // Faroe Islands ['FI21 1234 5600 0007 851'], // Finland ['FR14 2004 1010 0505 0001 3M02 6061'], // France ['GE29 NB00 0000 0101 9049 171'], // Georgia ['DE89 3704 0044 0532 0130 001'], // Germany ['GI75 NWBK 0000 0000 7099 4531'], // Gibraltar ['GR16 0110 1250 0000 0001 2300 6951'], // Greece ['GL56 0444 9876 5432 101'], // Greenland ['HU42 1177 3016 1111 1018 0000 0000 1'], // Hungary ['IS14 0159 2600 7654 5510 7303 391'], // Iceland ['IE29 AIBK 9311 5212 3456 781'], // Ireland ['IL62 0108 0000 0009 9999 9991'], // Israel ['IT40 S054 2811 1010 0000 0123 4561'], // Italy ['LV80 BANK 0000 4351 9500 11'], // Latvia ['LB62 0999 0000 0001 0019 0122 9114 1'], // Lebanon ['LI21 0881 0000 2324 013A A1'], // Liechtenstein ['LT12 1000 0111 0100 1000 1'], // Lithuania ['LU28 0019 4006 4475 0000 1'], // Luxembourg ['MK072 5012 0000 0589 84 1'], // Macedonia ['MT84 MALT 0110 0001 2345 MTLC AST0 01SA'], // Malta ['MU17 BOMM 0101 1010 3030 0200 000M URA'], // Mauritius ['MD24 AG00 0225 1000 1310 4168 1'], // Moldova ['MC93 2005 2222 1001 1223 3M44 5551'], // Monaco ['ME25 5050 0001 2345 6789 511'], // Montenegro ['NL39 RABO 0300 0652 641'], // Netherlands ['NO93 8601 1117 9471'], // Norway ['PK36 SCBL 0000 0011 2345 6702 1'], // Pakistan ['PL60 1020 1026 0000 0422 7020 1111 1'], // Poland ['PT50 0002 0123 1234 5678 9015 41'], // Portugal ['RO49 AAAA 1B31 0075 9384 0000 1'], // Romania ['SM86 U032 2509 8000 0000 0270 1001'], // San Marino ['SA03 8000 0000 6080 1016 7519 1'], // Saudi Arabia ['RS35 2600 0560 1001 6113 791'], // Serbia ['SK31 1200 0000 1987 4263 7541 1'], // Slovak Republic ['SI56 1910 0000 0123 4381'], // Slovenia ['ES80 2310 0001 1800 0001 2345 1'], // Spain ['SE35 5000 0000 0549 1000 0003 1'], // Sweden ['CH93 0076 2011 6238 5295 71'], // Switzerland ['TN59 1000 6035 
1835 9847 8831 1'], // Tunisia ['TR33 0006 1005 1978 6457 8413 261'], // Turkey ['AE07 0331 2345 6789 0123 4561'], // UAE ['GB12 CPBK 0892 9965 0449 911'], // United Kingdom // Extended country list ['AO060006000001000371311741'], // Angola ['AZ21NABZ000000001370100019441'], // Azerbaijan ['BH29BMAG1299123456BH001'], // Bahrain ['BJ11B006101004002711011925911'], // Benin ['BR9700360305000010009795493P11'], // Brazil ['BR1800000000141455123924100C21'], // Brazil ['VG96VPVG00000123456789011'], // British Virgin Islands ['BF10301340200154009450006431'], // Burkina Faso ['BI432010110674441'], // Burundi ['CM21100030010005000006053061'], // Cameroon ['CV640003000045470691101761'], // Cape Verde ['FR76300070001100099700049421'], // Central African Republic ['CG52300110002021512345678901'], // Congo ['CR05152020010262840661'], // Costa Rica ['CR0515202001026284066'], // Costa Rica ['DO28BAGR000000012124536113241'], // Dominican Republic ['GT82TRAJ010200000012100296901'], // Guatemala ['IR5805401051800212731130071'], // Iran ['IL6201080000000999999991'], // Israel ['CI05A000601741001785300118521'], // Ivory Coast ['JO94CBJO00100000000001310003021'], // Jordan ['KZ1760102510000429931'], // Kazakhstan ['KW74NBOK00000000000010003721511'], // Kuwait ['LB300999000000010019255791151'], // Lebanon ['MG46000050300101019140160561'], // Madagascar ['ML03D008901700010021200004471'], // Mali ['MR13000120000100000020373721'], // Mauritania ['MU17BOMM0101101030300200000MUR1'], // Mauritius ['MZ590001000000118341941571'], // Mozambique ['PS92PALS0000000004001234567021'], // Palestinian Territory ['QA58DOHB00001234567890ABCDEFG1'], // Qatar ['XK0512120123456789061'], // Republic of Kosovo ['PT500002000001630993103551'], // Sao Tome and Principe ['SA03800000006080101675191'], // Saudi Arabia ['SN12K001001520000256900075421'], // Senegal ['TL3800800123456789101571'], // Timor-Leste ['TN59142072071007071296481'], // Tunisia ['TR3300061005197864578413261'], // Turkey 
['UA21AAAA1300000260072335660012'], // Ukraine ['AE2602110000002300640161'], // United Arab Emirates ['VA590011230000123456781'], // Vatican City State // Must contain only digits and characters ['AO0600060000010003713117!'], ]; } public function verifyWithValidFormatButIncorrectChecksumProvider() { return [ ['AL47 2121 1009 0000 0002 3569 8742'], // Albania ['AD12 0001 2030 2003 5910 0101'], // Andorra ['AT61 1904 3002 3457 3202'], // Austria ['AZ21 NABZ 0000 0000 1370 1000 1945'], // Azerbaijan ['BH67 BMAG 0000 1299 1234 57'], // Bahrain ['BE62 5100 0754 7062'], // Belgium ['BA39 1290 0794 0102 8495'], // Bosnia and Herzegovina ['BG80 BNBG 9661 1020 3456 79'], // Bulgaria ['BY90 NBRB 3600 900000002Z00AB00'], // Belarus ['HR12 1001 0051 8630 0016 1'], // Croatia ['CY17 0020 0128 0000 0012 0052 7601'], // Cyprus ['CZ65 0800 0000 1920 0014 5398'], // Czech Republic ['DK50 0040 0440 1162 44'], // Denmark ['EE38 2200 2210 2014 5684'], // Estonia ['FO97 5432 0388 8999 43'], // Faroe Islands ['FI21 1234 5600 0007 84'], // Finland ['FR14 2004 1010 0505 0001 3M02 605'], // France ['GE29 NB00 0000 0101 9049 16'], // Georgia ['DE89 3704 0044 0532 0130 01'], // Germany ['GI75 NWBK 0000 0000 7099 452'], // Gibraltar ['GR16 0110 1250 0000 0001 2300 694'], // Greece ['GL56 0444 9876 5432 11'], // Greenland ['HU42 1177 3016 1111 1018 0000 0001'], // Hungary ['IS14 0159 2600 7654 5510 7303 38'], // Iceland ['IE29 AIBK 9311 5212 3456 79'], // Ireland ['IL62 0108 0000 0009 9999 998'], // Israel ['IT40 S054 2811 1010 0000 0123 457'], // Italy ['LV80 BANK 0000 4351 9500 2'], // Latvia ['LB62 0999 0000 0001 0019 0122 9115'], // Lebanon ['LI21 0881 0000 2324 013A B'], // Liechtenstein ['LT12 1000 0111 0100 1001'], // Lithuania ['LU28 0019 4006 4475 0001'], // Luxembourg ['MK072 5012 0000 0589 85'], // Macedonia ['MT84 MALT 0110 0001 2345 MTLC AST0 01T'], // Malta ['MU17 BOMM 0101 1010 3030 0200 000M UP'], // Mauritius ['MD24 AG00 0225 1000 1310 4169'], // Moldova ['MC93 2005 2222 1001 
1223 3M44 554'], // Monaco ['ME25 5050 0001 2345 6789 52'], // Montenegro ['NL39 RABO 0300 0652 65'], // Netherlands ['NO93 8601 1117 948'], // Norway ['PK36 SCBL 0000 0011 2345 6703'], // Pakistan ['PL60 1020 1026 0000 0422 7020 1112'], // Poland ['PT50 0002 0123 1234 5678 9015 5'], // Portugal ['RO49 AAAA 1B31 0075 9384 0001'], // Romania ['SM86 U032 2509 8000 0000 0270 101'], // San Marino ['SA03 8000 0000 6080 1016 7518'], // Saudi Arabia ['RS35 2600 0560 1001 6113 78'], // Serbia ['SK31 1200 0000 1987 4263 7542'], // Slovak Republic ['SI56 1910 0000 0123 439'], // Slovenia ['ES80 2310 0001 1800 0001 2346'], // Spain ['SE35 5000 0000 0549 1000 0004'], // Sweden ['CH93 0076 2011 6238 5295 8'], // Switzerland ['TN59 1000 6035 1835 9847 8832'], // Tunisia ['TR33 0006 1005 1978 6457 8413 27'], // Turkey ['AE07 0331 2345 6789 0123 457'], // UAE ['GB12 CPBK 0892 9965 0449 92'], // United Kingdom // Extended country list ['AO06000600000100037131175'], // Angola ['AZ21NABZ00000000137010001945'], // Azerbaijan ['BH29BMAG1299123456BH01'], // Bahrain ['BJ11B00610100400271101192592'], // Benin ['BR9700360305000010009795493P2'], // Brazil ['BR1800000000141455123924100C3'], // Brazil ['VG96VPVG0000012345678902'], // British Virgin Islands ['BF1030134020015400945000644'], // Burkina Faso ['BI43201011067445'], // Burundi ['CM2110003001000500000605307'], // Cameroon ['CV64000300004547069110177'], // Cape Verde ['FR7630007000110009970004943'], // Central African Republic ['CG5230011000202151234567891'], // Congo ['CR96042332432534543564'], // Costa Rica ['DO28BAGR00000001212453611325'], // Dominican Republic ['GT82TRAJ01020000001210029691'], // Guatemala ['IR580540105180021273113008'], // Iran ['IL620108000000099999998'], // Israel ['CI05A00060174100178530011853'], // Ivory Coast ['JO94CBJO0010000000000131000303'], // Jordan ['KZ176010251000042994'], // Kazakhstan ['KW74NBOK0000000000001000372152'], // Kuwait ['LB30099900000001001925579116'], // Lebanon 
['MG4600005030010101914016057'], // Madagascar ['ML03D00890170001002120000448'], // Mali ['MR1300012000010000002037373'], // Mauritania ['MU17BOMM0101101030300200000MUP'], // Mauritius ['MZ59000100000011834194158'], // Mozambique ['PS92PALS000000000400123456703'], // Palestinian Territory ['QA58DOHB00001234567890ABCDEFH'], // Qatar ['XK051212012345678907'], // Republic of Kosovo ['PT50000200000163099310356'], // Sao Tome and Principe ['SA0380000000608010167518'], // Saudi Arabia ['SN12K00100152000025690007543'], // Senegal ['TL380080012345678910158'], // Timor-Leste ['TN5914207207100707129649'], // Tunisia ['TR330006100519786457841327'], // Turkey ['UA213223130000026007233566002'], // Ukraine ['AE260211000000230064017'], // United Arab Emirates ['VA59001123000012345671'], // Vatican City State ]; } public function verifyWithUnsupportedCountryCodeProvider() { return [ ['AG'], ['AI'], ['AQ'], ['AS'], ['AW'], ]; } public function verifyWithInvalidCountryCodeProvider() { return [ ['0750447346'], ['2X0750447346'], ['A20750447346'], ]; } public function toHumanFormatProvider() { return [ [ 'DE29 1001 0010 0987 6543 21', 'DE29100100100987654321', ], [ 'CH93 0076 2011 6238 5295 7', 'CH93 0076 2011 6238 5295 7', ], [ 'BY13 NBRB 3600 9000 0000 2Z00 AB00', 'BY 13 NBRB 3600 900000002Z00AB00', ], [ 'BY13 NBRB 3600 9000 0000 2Z00 AB00', 'BY13 NBRB 3600 900000002Z00AB00', ], [ 'MK07 2501 2000 0058 984', 'MK072 5012 0000 0589 84', ], ]; } public function toMachineFormatProvider() { return [ [ 'CH9300762011623852957', 'CH9300762011623852957', ], [ 'CH9300762011623852957', 'CH93 0076 2011 6238 5295 7', ], ]; } public function toObfuscatedFormatProvider() { return [ [ 'DE** **** **** **** **43 21', 'DE29 1001 0010 0987 6543 21', ], [ 'CH** **** **** **** *295 7', 'CH93 0076 2011 6238 5295 7', ], ]; } public function getBbanProvider() { return [ [ '100100100987654321', 'DE29100100100987654321', ], [ '00762011623852957', 'CH93 0076 2011 6238 5295 7', ], ]; } }
PHP
MIT
realodix/php-utility/tests/Numbers/IbanTestProvider.php
8be37588-7e04-4d9c-98bf-c89efcd0f027
[]
[]
/* The smooth Class Library * Copyright (C) 1998-2014 Robert Kausch <robert.kausch@gmx.net> * * This library is free software; you can redistribute it and/or * modify it under the terms of "The Artistic License, Version 2.0". * * THIS PACKAGE IS PROVIDED "AS IS" AND WITHOUT ANY EXPRESS OR * IMPLIED WARRANTIES, INCLUDING, WITHOUT LIMITATION, THE IMPLIED * WARRANTIES OF MERCHANTIBILITY AND FITNESS FOR A PARTICULAR PURPOSE. */ #include <smooth/gui/widgets/basic/groupbox.h> #include <smooth/misc/math.h> #include <smooth/graphics/surface.h> const S::Short S::GUI::GroupBox::classID = S::Object::RequestClassID(); S::GUI::GroupBox::GroupBox(const String &iText, const Point &iPos, const Size &iSize) : Layer(iText) { type = classID; orientation = OR_UPPERLEFT; SetMetrics(iPos, iSize); if (GetWidth() == 0) SetWidth(80); if (GetHeight() == 0) SetHeight(80); ComputeTextSize(); } S::GUI::GroupBox::~GroupBox() { } S::Int S::GUI::GroupBox::Paint(Int message) { if (!IsRegistered()) return Error(); if (!IsVisible()) return Success(); switch (message) { case SP_PAINT: { Surface *surface = GetDrawSurface(); Rect frame = Rect(GetRealPosition(), GetRealSize()); surface->Frame(frame, FRAME_DOWN); surface->Frame(frame + Point(1, 1) - Size(2, 2), FRAME_UP); Rect textRect = Rect(GetRealPosition() + Point(10 * surface->GetSurfaceDPI() / 96.0, -Math::Ceil(Float(scaledTextSize.cy) / 2)), Size(scaledTextSize.cx, Math::Round(scaledTextSize.cy * 1.2)) + Size(3, 0) * surface->GetSurfaceDPI() / 96.0); surface->Box(textRect, Setup::BackgroundColor, Rect::Filled); Font nFont = font; if (!IsActive()) nFont.SetColor(Setup::InactiveTextColor); surface->SetText(text, textRect + Point(1, 0) * surface->GetSurfaceDPI() / 96.0, nFont); } break; } return Layer::Paint(message); } S::Int S::GUI::GroupBox::Activate() { if (active) return Success(); active = True; Paint(SP_PAINT); onActivate.Emit(); return Success(); } S::Int S::GUI::GroupBox::Deactivate() { if (!active) return Success(); active = False; 
Paint(SP_PAINT); onDeactivate.Emit(); return Success(); } S::Int S::GUI::GroupBox::Show() { Int retVal = Layer::Show(); Paint(SP_PAINT); return retVal; } S::Int S::GUI::GroupBox::Hide() { if (IsRegistered() && IsVisible()) { Surface *surface = GetDrawSurface(); surface->Box(Rect(GetRealPosition() - Point(0, 6) * surface->GetSurfaceDPI() / 96.0, GetRealSize() + Size(0, 6) * surface->GetSurfaceDPI() / 96.0), Setup::BackgroundColor, Rect::Filled); } return Layer::Hide(); }
C++
Artistic-2.0
Patriccollu/smooth/classes/gui/widgets/basic/groupbox.cpp
2633ed7d-0f7b-4a54-9664-a1fd9e9dd743
[{"tag": "NAME", "value": "Robert Kausch", "start": 57, "end": 70, "context": " smooth Class Library\n * Copyright (C) 1998-2014 Robert Kausch <robert.kausch@gmx.net>\n *\n * This library is f"}, {"tag": "EMAIL", "value": "robert.kausch@gmx.net", "start": 72, "end": 93, "context": "ibrary\n * Copyright (C) 1998-2014 Robert Kausch <robert.kausch@gmx.net>\n *\n * This library is free software; you can r"}]
[{"tag": "NAME", "value": "Robert Kausch", "start": 57, "end": 70, "context": " smooth Class Library\n * Copyright (C) 1998-2014 Robert Kausch <robert.kausch@gmx.net>\n *\n * This library is f"}, {"tag": "EMAIL", "value": "robert.kausch@gmx.net", "start": 72, "end": 93, "context": "ibrary\n * Copyright (C) 1998-2014 Robert Kausch <robert.kausch@gmx.net>\n *\n * This library is free software; you can r"}]
# NASA EO-Metadata-Tools Python interface for the Common Metadata Repository (CMR) # # https://cmr.earthdata.nasa.gov/search/site/docs/search/api.html # # Copyright (c) 2020 United States Government as represented by the Administrator # of the National Aeronautics and Space Administration. All Rights Reserved. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software distributed # under the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR # CONDITIONS OF ANY KIND, either express or implied. See the License for the # specific language governing permissions and limitations under the License. """ date 2020-11-05 since 0.0 """ import json import logging import urllib.parse import urllib.request import cmr.util.common as common logging.basicConfig(level = logging.ERROR) logger = logging.getLogger('cmr.util.network') def get_local_ip(): """Rewrite this stub, it is used in code not checked in yet """ return '127.0.0.1' def value_to_param(key, value): """ Convert a key value pair into a URL parameter pair """ value = str(value) encoded_key = urllib.parse.quote(key) encoded_value = urllib.parse.quote(value) result = encoded_key + "=" + encoded_value return result def expand_parameter_to_parameters(key, parameter): """ Convert a list of values into a list of URL parameters """ result = [] if isinstance(parameter, list): for item in parameter: param = value_to_param(key, item) result.append(param) else: value = str(parameter) encoded_key = urllib.parse.quote(key) encoded_value = urllib.parse.quote(value) result.append(encoded_key + "=" + encoded_value) return result def expand_query_to_parameters(query=None): """ Convert a dictionary to URL parameters """ params = [] if query is None: return "" keys = sorted(query.keys()) for 
key in keys: value = query[key] params = params + expand_parameter_to_parameters(key, value) return "&".join(params) def apply_headers_to_request(req, headers): """Apply a headers to a urllib request object """ if headers is not None and req is not None: for key in headers: value = headers[key] if value is not None and len(value)>0: req.add_header(key, value) def transform_results(results, keys_of_interest): """ Take a list of results and convert them to a multi valued dictionary. The real world use case is to take values from a list of collections and pass them to a granule search. [{key1:value1},{key1:value2},...] -> {"key1": [value1,value2]} -> &key1=value1&key1=value2 ( via expand_query_to_parameters() ) """ params = {} for item in results: for key in keys_of_interest: if key in item: value = item[key] if key in params: params[key].append(value) else: params[key] = [value] return params def config_to_header(config, source_key, headers, destination_key=None, default=None): """ Copy a value in the config into a header dictionary for use by urllib. 
Written to reduce boiler plate code config[key] -> [or default] -> [rename] -> headers[key] Parameters: config(dictionary): where to look for values source_key(string): name if configuration in config headers(dictionary): where to copy values to destination_key(string): name of key to save to in headers default(string): value to use if value can not be found in config """ config = common.always(config) if destination_key is None: destination_key = source_key value = config.get(source_key, default) if destination_key is not None and value is not None: if headers is None: headers = {} headers[destination_key] = value return headers def post(url, body, accept=None, headers=None): """ Make a basic HTTP call to CMR using the POST action Parameters: url (string): resource to get body (dictionary): parameters to send, or string if raw text to be sent accept (string): encoding of the returned data, some form of json is expected client_id (string): name of the client making the (not python or curl) headers (dictionary): HTTP headers to apply """ if isinstance(body, str): #JSON string or other such text passed in" data = body else: # Do not use the standard url encoder `urllib.parse.urlencode(body)` for # the body/data because it can not handle repeating values as required # by CMR. 
For example: `{'entry_title': ['2', '3']}` must become # `entry_title=2&entry_title=3` not `entry_title=[2, 3]` data = expand_query_to_parameters(body) data = data.encode('utf-8') logger.debug(" Headers->CMR= %s", headers) logger.debug(" POST Data= %s", data) req = urllib.request.Request(url, data) if accept is not None: apply_headers_to_request(req, {'Accept': accept}) apply_headers_to_request(req, headers) try: #pylint: disable=R1732 # the mock code does not support this in tests resp = urllib.request.urlopen(req) response = resp.read() raw_response = response.decode('utf-8') if resp.status == 200: obj_json = json.loads(raw_response) head_list = {} for head in resp.getheaders(): head_list[head[0]] = head[1] if logger.getEffectiveLevel() == logging.DEBUG: stringified = str(common.mask_dictionary(head_list, ["cmr-token", "authorization"])) logger.debug(" CMR->Headers = %s", stringified) obj_json['http-headers'] = head_list elif resp.status == 204: obj_json = {} head_list = {} for head in resp.getheaders(): head_list[head[0]] = head[1] obj_json['http-headers'] = head_list else: if raw_response.startswith("{") and raw_response.endswith("}"): return json.loads(raw_response) return raw_response return obj_json except urllib.error.HTTPError as exception: raw_response = exception.read() try: obj_json = json.loads(raw_response) obj_json['code'] = exception.code obj_json['reason'] = exception.reason return obj_json except json.decoder.JSONDecodeError as err: return err return raw_response def get(url, accept=None, headers=None): """ Make a basic HTTP call to CMR using the POST action Parameters: url (string): resource to get body (dictionary): parameters to send, or string if raw text to be sent accept (string): encoding of the returned data, some form of json is expected client_id (string): name of the client making the (not python or curl) headers (dictionary): HTTP headers to apply """ logger.debug(" Headers->CMR= %s", headers) req = urllib.request.Request(url) if 
accept is not None: apply_headers_to_request(req, {'Accept': accept}) apply_headers_to_request(req, headers) try: #pylint: disable=R1732 # the mock code does not support this in tests resp = urllib.request.urlopen(req) response = resp.read() raw_response = response.decode('utf-8') if resp.status == 200: obj_json = json.loads(raw_response) if isinstance(obj_json, list): data = obj_json obj_json = {"hits": len(data), "items" : data} #print (obj_json) head_list = {} for head in resp.getheaders(): head_list[head[0]] = head[1] if logger.getEffectiveLevel() == logging.DEBUG: stringified = str(common.mask_dictionary(head_list, ["cmr-token", "authorization"])) logger.debug(" CMR->Headers = %s", stringified) #obj_json['http-headers'] = head_list elif resp.status == 204: obj_json = {} head_list = {} for head in resp.getheaders(): head_list[head[0]] = head[1] obj_json['http-headers'] = head_list else: if raw_response.startswith("{") and raw_response.endswith("}"): return json.loads(raw_response) return raw_response return obj_json except urllib.error.HTTPError as exception: raw_response = exception.read() try: obj_json = json.loads(raw_response) obj_json['code'] = exception.code obj_json['reason'] = exception.reason return obj_json except json.decoder.JSONDecodeError as err: return err return raw_response
Python
Apache-2.0
nasa/eo-metadata-tools/CMR/python/cmr/util/network.py
96c031ff-ceee-4c6d-816b-4842a02ff0bd
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1191, "end": 1200, "context": "s used in code not checked in yet \"\"\"\n return '127.0.0.1'\n\ndef value_to_param(key, value):\n \"\"\"\n Con"}]
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1191, "end": 1200, "context": "s used in code not checked in yet \"\"\"\n return '127.0.0.1'\n\ndef value_to_param(key, value):\n \"\"\"\n Con"}]
/* * Copyright (c) 2001-2004 Swedish Institute of Computer Science. * All rights reserved. * * Redistribution and use in source and binary forms, with or without modification, * are permitted provided that the following conditions are met: * * 1. Redistributions of source code must retain the above copyright notice, * this list of conditions and the following disclaimer. * 2. Redistributions in binary form must reproduce the above copyright notice, * this list of conditions and the following disclaimer in the documentation * and/or other materials provided with the distribution. * 3. The name of the author may not be used to endorse or promote products * derived from this software without specific prior written permission. * * THIS SOFTWARE IS PROVIDED BY THE AUTHOR ``AS IS'' AND ANY EXPRESS OR IMPLIED * WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF * MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT * SHALL THE AUTHOR BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, * EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT * OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS * INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN * CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING * IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY * OF SUCH DAMAGE. * * This file is part of the lwIP TCP/IP stack. * * Author: Adam Dunkels <adam@sics.se> * */ #ifndef __LWIP_STATS_H__ #define __LWIP_STATS_H__ #include "lwip\opt.h" #include "lwip\mem.h" #include "lwip\memp.h" #ifdef __cplusplus extern "C" { #endif #if LWIP_STATS #ifndef LWIP_STATS_LARGE #define LWIP_STATS_LARGE 0 #endif #if LWIP_STATS_LARGE #define STAT_COUNTER u32_t #define STAT_COUNTER_F U32_F #else #define STAT_COUNTER u16_t #define STAT_COUNTER_F U16_F #endif struct stats_proto { STAT_COUNTER xmit; /* Transmitted packets. 
*/ STAT_COUNTER recv; /* Received packets. */ STAT_COUNTER fw; /* Forwarded packets. */ STAT_COUNTER drop; /* Dropped packets. */ STAT_COUNTER chkerr; /* Checksum error. */ STAT_COUNTER lenerr; /* Invalid length error. */ STAT_COUNTER memerr; /* Out of memory error. */ STAT_COUNTER rterr; /* Routing error. */ STAT_COUNTER proterr; /* Protocol error. */ STAT_COUNTER opterr; /* Error in options. */ STAT_COUNTER err; /* Misc error. */ STAT_COUNTER cachehit; }; struct stats_igmp { STAT_COUNTER lenerr; /* Invalid length error. */ STAT_COUNTER chkerr; /* Checksum error. */ STAT_COUNTER v1_rxed; /* */ STAT_COUNTER join_sent; /* */ STAT_COUNTER leave_sent; /* */ STAT_COUNTER unicast_query; /* */ STAT_COUNTER report_sent; /* */ STAT_COUNTER report_rxed; /* */ STAT_COUNTER group_query_rxed; /* */ }; struct stats_mem { mem_size_t avail; mem_size_t used; mem_size_t max; STAT_COUNTER err; STAT_COUNTER illegal; }; struct stats_syselem { STAT_COUNTER used; STAT_COUNTER max; STAT_COUNTER err; }; struct stats_sys { struct stats_syselem sem; struct stats_syselem mbox; }; struct stats_ { #if LINK_STATS struct stats_proto link; #endif #if ETHARP_STATS struct stats_proto etharp; #endif #if IPFRAG_STATS struct stats_proto ip_frag; #endif #if IP_STATS struct stats_proto ip; #endif #if ICMP_STATS struct stats_proto icmp; #endif #if IGMP_STATS struct stats_igmp igmp; #endif #if UDP_STATS struct stats_proto udp; #endif #if TCP_STATS struct stats_proto tcp; #endif #if MEM_STATS struct stats_mem mem; #endif #if MEMP_STATS struct stats_mem memp[MEMP_MAX]; #endif #if SYS_STATS struct stats_sys sys; #endif }; extern struct stats_ lwip_stats; #define stats_init() /* Compatibility define, not init needed. 
*/ #define STATS_INC(x) ++lwip_stats.x #define STATS_DEC(x) --lwip_stats.x #else #define stats_init() #define STATS_INC(x) #define STATS_DEC(x) #endif /* LWIP_STATS */ #if TCP_STATS #define TCP_STATS_INC(x) STATS_INC(x) #define TCP_STATS_DISPLAY() stats_display_proto(&lwip_stats.tcp, "TCP") #else #define TCP_STATS_INC(x) #define TCP_STATS_DISPLAY() #endif #if UDP_STATS #define UDP_STATS_INC(x) STATS_INC(x) #define UDP_STATS_DISPLAY() stats_display_proto(&lwip_stats.udp, "UDP") #else #define UDP_STATS_INC(x) #define UDP_STATS_DISPLAY() #endif #if ICMP_STATS #define ICMP_STATS_INC(x) STATS_INC(x) #define ICMP_STATS_DISPLAY() stats_display_proto(&lwip_stats.icmp, "ICMP") #else #define ICMP_STATS_INC(x) #define ICMP_STATS_DISPLAY() #endif #if IGMP_STATS #define IGMP_STATS_INC(x) STATS_INC(x) #define IGMP_STATS_DISPLAY() stats_display_igmp(&lwip_stats.igmp) #else #define IGMP_STATS_INC(x) #define IGMP_STATS_DISPLAY() #endif #if IP_STATS #define IP_STATS_INC(x) STATS_INC(x) #define IP_STATS_DISPLAY() stats_display_proto(&lwip_stats.ip, "IP") #else #define IP_STATS_INC(x) #define IP_STATS_DISPLAY() #endif #if IPFRAG_STATS #define IPFRAG_STATS_INC(x) STATS_INC(x) #define IPFRAG_STATS_DISPLAY() stats_display_proto(&lwip_stats.ip_frag, "IP_FRAG") #else #define IPFRAG_STATS_INC(x) #define IPFRAG_STATS_DISPLAY() #endif #if ETHARP_STATS #define ETHARP_STATS_INC(x) STATS_INC(x) #define ETHARP_STATS_DISPLAY() stats_display_proto(&lwip_stats.etharp, "ETHARP") #else #define ETHARP_STATS_INC(x) #define ETHARP_STATS_DISPLAY() #endif #if LINK_STATS #define LINK_STATS_INC(x) STATS_INC(x) #define LINK_STATS_DISPLAY() stats_display_proto(&lwip_stats.link, "LINK") #else #define LINK_STATS_INC(x) #define LINK_STATS_DISPLAY() #endif #if MEM_STATS #define MEM_STATS_AVAIL(x, y) lwip_stats.mem.x = y #define MEM_STATS_INC(x) STATS_INC(mem.x) #define MEM_STATS_INC_USED(x, y) do { lwip_stats.mem.used += y; \ if (lwip_stats.mem.max < lwip_stats.mem.used) { \ lwip_stats.mem.max = 
lwip_stats.mem.used; \ } \ } while(0) #define MEM_STATS_DEC_USED(x, y) lwip_stats.mem.x -= y #define MEM_STATS_DISPLAY() stats_display_mem(&lwip_stats.mem, "HEAP") #else #define MEM_STATS_AVAIL(x, y) #define MEM_STATS_INC(x) #define MEM_STATS_INC_USED(x, y) #define MEM_STATS_DEC_USED(x, y) #define MEM_STATS_DISPLAY() #endif #if MEMP_STATS #define MEMP_STATS_AVAIL(x, i, y) lwip_stats.memp[i].x = y #define MEMP_STATS_INC(x, i) STATS_INC(memp[i].x) #define MEMP_STATS_DEC(x, i) STATS_DEC(memp[i].x) #define MEMP_STATS_INC_USED(x, i) do { ++lwip_stats.memp[i].used; \ if (lwip_stats.memp[i].max < lwip_stats.memp[i].used) { \ lwip_stats.memp[i].max = lwip_stats.memp[i].used; \ } \ } while(0) #define MEMP_STATS_DISPLAY(i) stats_display_memp(&lwip_stats.memp[i], i) #else #define MEMP_STATS_AVAIL(x, i, y) #define MEMP_STATS_INC(x, i) #define MEMP_STATS_DEC(x, i) #define MEMP_STATS_INC_USED(x, i) #define MEMP_STATS_DISPLAY(i) #endif #if SYS_STATS #define SYS_STATS_INC(x) STATS_INC(sys.x) #define SYS_STATS_DEC(x) STATS_DEC(sys.x) #define SYS_STATS_DISPLAY() stats_display_sys(&lwip_stats.sys) #else #define SYS_STATS_INC(x) #define SYS_STATS_DEC(x) #define SYS_STATS_DISPLAY() #endif /* Display of statistics */ #if LWIP_STATS_DISPLAY void stats_display(void); void stats_display_proto(struct stats_proto *proto, char *name); void stats_display_igmp(struct stats_igmp *igmp); void stats_display_mem(struct stats_mem *mem, char *name); void stats_display_memp(struct stats_mem *mem, int index); void stats_display_sys(struct stats_sys *sys); #else #define stats_display() #define stats_display_proto(proto, name) #define stats_display_igmp(igmp) #define stats_display_mem(mem, name) #define stats_display_memp(mem, index) #define stats_display_sys(sys) #endif /* LWIP_STATS_DISPLAY */ #ifdef __cplusplus } #endif #endif /* __LWIP_STATS_H__ */
C
Apache-2.0
AustinWise/Netduino-Micro-Framework/DeviceCode/pal/lwip/lwip_1_3_2/src/include/lwip/stats.h
c34f4328-4a00-48fd-a379-5865d3be81e3
[{"tag": "EMAIL", "value": "adam@sics.se", "start": 1615, "end": 1627, "context": "lwIP TCP/IP stack.\r\n * \r\n * Author: Adam Dunkels <adam@sics.se>\r\n *\r\n */\r\n#ifndef __LWIP_STATS_H__\r\n#define __LW"}, {"tag": "NAME", "value": "Adam Dunkels", "start": 1601, "end": 1613, "context": "s part of the lwIP TCP/IP stack.\r\n * \r\n * Author: Adam Dunkels <adam@sics.se>\r\n *\r\n */\r\n#ifndef __LWIP_STATS_H__"}]
[{"tag": "EMAIL", "value": "adam@sics.se", "start": 1615, "end": 1627, "context": "lwIP TCP/IP stack.\r\n * \r\n * Author: Adam Dunkels <adam@sics.se>\r\n *\r\n */\r\n#ifndef __LWIP_STATS_H__\r\n#define __LW"}, {"tag": "NAME", "value": "Adam Dunkels", "start": 1601, "end": 1613, "context": "s part of the lwIP TCP/IP stack.\r\n * \r\n * Author: Adam Dunkels <adam@sics.se>\r\n *\r\n */\r\n#ifndef __LWIP_STATS_H__"}]
<?php $to = "admin@learningmuslim.com"; $subject = "Email from ".$_POST['username']; $email = "Email address ".$_POST['email']; $reason = "Reason ".$_POST['subject']; $message = $_POST['message']; $header = "From:".$_POST['email']." \r\n"; $header .= "MIME-Version: 1.0\r\n"; $header .= "Content-type: text/html\r\n"; $retval = mail ($to,$subject,$message,$header,$reason); if( $retval == true ) { echo "Message sent successfully..."; }else { echo "Message could not be sent..."; } ?>
PHP
MIT
turnono/learningmuslim.github.io/form_action.php
6f9168c5-7c6a-4487-b6d7-d5ef5ac50476
[{"tag": "EMAIL", "value": "admin@learningmuslim.com", "start": 22, "end": 46, "context": "<?php\n $to = \"admin@learningmuslim.com\";\n $subject = \"Email from \".$_POST['usern"}]
[{"tag": "EMAIL", "value": "admin@learningmuslim.com", "start": 22, "end": 46, "context": "<?php\n $to = \"admin@learningmuslim.com\";\n $subject = \"Email from \".$_POST['usern"}]
# Solution to Problem 8 # Program outputs today's date and time in the format "Monday, January 10th 2019 at 1:15pm" # To start we import the Python datetime module as dt. from datetime import datetime as dt #now equals the date and time now. now = dt.now() # Copied verbatim initially from stacoverflow Reference 1 below but amended to fit my referenceing of time as now. # Suffix equals 'st' if the date now is 1,21 or 23 else it is 'nd' if the date noe is 2 or 22 else it is 'rd' if date now is 3 or23 for eveything else it is 'th. suffix = 'st' if now in [1,21,31] else 'nd' if now in [2, 22] else 'rd' if now in [3, 23] else 'th' # Display to the user the Heading "Todays Date and Time:" print("Todays Date and time:") # Below displays to the user a the date and time in a string in inverted commas todays date and time in the format Day, Month Date year at Current Time am/pm. # Used Reference 3 below to remove the leading 0 when desplaying the time. print(now.strftime('%A, %B %d%%s %Y at %#I:%M %p',) % suffix,) # Reference 1: https://stackoverflow.com/a/11645978 # Reference 2: https://www.saltycrane.com/blog/2008/06/how-to-get-current-date-and-time-in/ # Reference 3: https://stackoverflow.com/questions/904928/python-strftime-date-without-leading-0One problem is that '{dt.hour}' uses a 24 hour clock :(. Using the second option still brings you back to using '{%#I}' on Windows and '{%-I}' on Unix. – ubomb May 24 '16 at 22:47 # Used lecture from week 6 as a base for the problem also looked at the Python tutorial. # Laura Brogan 19/03/2019
Python
Apache-2.0
LauraBrogan/pands-problem-set-2019/solution-8.py
3db0af7a-80dd-4d78-908e-29e6405c7d91
[{"tag": "NAME", "value": "Laura Brogan", "start": 1538, "end": 1550, "context": "the problem also looked at the Python tutorial.\n# Laura Brogan 19/03/2019"}]
[{"tag": "NAME", "value": "Laura Brogan", "start": 1538, "end": 1550, "context": "the problem also looked at the Python tutorial.\n# Laura Brogan 19/03/2019"}]
<?php /** * Open Source Social Network * * @package Open Source Social Network * @author Open Social Website Core Team <info@softlab24.com> * @copyright 2014 iNFORMATIKON TECHNOLOGIES * @license Open Source Social Network License (OSSN LICENSE) http://www.opensource-socialnetwork.org/licence * @link http://www.opensource-socialnetwork.org/licence */ $pt = array( 'com:ossn:invite' => 'Convidar', 'com:ossn:invite:friends' => 'Convidar Amigos', 'com:ossn:invite:friends:note' => 'Para convidar amigos para entrar na rede, insira os endereços de e-mail e uma breve mensagem. Eles receberão um e-mail contendo o seu convite.', 'com:ossn:invite:emails:note' => 'Endereços de e-mail (separados por vírgula)', 'com:ossn:invite:emails:placeholder' => 'luan@exemplo.com, vinicius@exemplo.com', 'com:ossn:invite:message' => 'Mensagem', 'com:ossn:invite:mail:subject' => 'Convite para participar %s', 'com:ossn:invite:mail:message' => 'Você enviou um convite para participar %s por %s com sucesso. Eles incluíram a seguinte mensagem: %s Para entrar, clique no seguinte link: %s Link do perfil: %s ', 'com:ossn:invite:mail:message:default' => 'Olá, Eu quero te convidar para entrar para minha rede social %s. Link do perfil : %s Abraço. %s', 'com:ossn:invite:sent' => 'Seus amigos foram convidados. Convites enviados: %s.', 'com:ossn:invite:wrong:emails' => 'O seguinte endereço não é válido: %s.', 'com:ossn:invite:sent:failed' => 'Não foi possível enviar para os seguintes endereços: %s.', 'com:ossn:invite:already:members' => 'O seguinte endereço já está cadastrado no site: %s', 'com:ossn:invite:empty:emails' => 'Por favor, adicione pelo menos um endereço de e-mail', ); ossn_register_languages('pt', $pt);
PHP
MIT
0dot00spaceS/opensource-socialnetwork/components/OssnInvite/locale/ossn.pt.php
f5bd225b-a7f2-482f-99e6-60cb9c14a13b
[{"tag": "EMAIL", "value": "vinicius@exemplo.com", "start": 792, "end": 812, "context": ":invite:emails:placeholder' => 'luan@exemplo.com, vinicius@exemplo.com',\n\t'com:ossn:invite:message' => 'Mensagem',\n\t\t\n "}, {"tag": "EMAIL", "value": "luan@exemplo.com", "start": 774, "end": 790, "context": "gula)',\n\t'com:ossn:invite:emails:placeholder' => 'luan@exemplo.com, vinicius@exemplo.com',\n\t'com:ossn:invite:message"}]
[{"tag": "EMAIL", "value": "vinicius@exemplo.com", "start": 792, "end": 812, "context": ":invite:emails:placeholder' => 'luan@exemplo.com, vinicius@exemplo.com',\n\t'com:ossn:invite:message' => 'Mensagem',\n\t\t\n "}, {"tag": "EMAIL", "value": "luan@exemplo.com", "start": 774, "end": 790, "context": "gula)',\n\t'com:ossn:invite:emails:placeholder' => 'luan@exemplo.com, vinicius@exemplo.com',\n\t'com:ossn:invite:message"}]
#Python 3.X? Could be compatitible with small tweaks. from re import findall #Tatatat0 2016 #Documentation: #Virtual Memory Classes: # Virtual_Memory(max_memory) # maxmemory: maximum address memory can be allocated to # chunks: list of virtual memory chunks. # format: ((chunk1, chunk1.start_address, chunk1.allocated_memory),(chunk2,...,...)) # Functions: # allocate(address,memory) # creates a new Virtual_Memory_Chunk instance, allocating memory at address. # adds new chunk to chunks attribute # deallocate(address) # removes allocated memory at address. Must be starting address of allocated memory # get_memory(address,memory) # returns the memory amount of bytes at address. Must be allocated. # set_memory(address,new_memory) # sets the memory at address equal to new_memory # Virtual_Memory_Chunk(parent,start_address,memory_input,allocated_memory,architecture_class) # parent: a pointer to the main virtual memory class instance # start_address: is the address of the first byte in memory_input, referenceable by opcodes. # Default is 0. # allocated_memory: This is the amount of memory that is accessible. The memory that is accessible is equal to start_address + allocated_memory. # Default is 100 bytes. # memory_input: is a series of bytes represented in hex string, if its length is less than the amount allocated, extra zeros are added. Becomes Virtual_Memory_Chunk.memory upon initialization # Default is 0. # architecture_class: This is an open ended variable that can be used to bind into a family of architecture based encoding,decoding, and operation methods. # Default is "None". 
# Functions:
#   get_memory(address, amount)
#     gets amount of bytes of memory at the address specified by address
#     region specified must be within the allocated memory
#   set_memory(address, new_memory)
#     sets the memory at address to new_memory
#     region specified must be within the allocated memory
#     smallest data editable is a nibble
#   print_variables()
#     prints the useful variables of current instance of Virtual_Memory_Chunk
#
# Beginning of not yet implemented
# Architecture Classes:
#   Powerpc_Architecture()
#     registers: The registers are r0-31, f0-31, CR, LR, PC
#     Functions:
#       get_registers()
#         uses a generator to return a register's values.
#   Powerpc_Register(value, bits)
#     value = value of the register
#     bits = width of the register in bits

# Truncation helper: with nonreg=True, masks the plain int `reg` to `bits` bits;
# otherwise masks a Powerpc_Register's value to the register's own declared width.
cast = lambda reg, bits=0, nonreg=False: reg&((1<<bits)-1) if nonreg else reg.value&((1<<reg.bits)-1)


class Powerpc_Register():
    """A single PowerPC register: an integer value with a fixed bit width."""
    __slots__ = ['value', 'bits']
    __int__ = lambda this: int(this.value)

    def __init__(self, value, bits):
        self.value = value  # current contents of the register
        self.bits = bits    # register width in bits

    def set(self, value, casts=False, bits=16):
        """Store `value`, truncated to this register's declared width.

        When `casts` is True the result is additionally truncated to `bits` bits.
        Raises TypeError if `value` is itself a Powerpc_Register.
        """
        if value.__class__ == Powerpc_Register:  # value is a register
            raise TypeError('value is a register')
        self.value = value
        self.value = cast(self)  # clamp to the register's own width
        if casts:
            self.value = cast(self, bits)


class Powerpc_Architecture():
    """Register file for PowerPC: CR, LR, PC plus r0-r31 and f0-f31."""
    __slots__ = ['NS']

    def __init__(self):
        self.NS = dict(
            CR = Powerpc_Register(0, 32),
            LR = Powerpc_Register(0, 32),
            PC = Powerpc_Register(0, 32)
        )
        for n in range(32):
            # Lower- and upper-case names alias the same register object (r1 == R1).
            self.NS['r%i' % n] = self.NS['R%i' % n] = Powerpc_Register(0, 32)
            self.NS['f%i' % n] = self.NS['F%i' % n] = Powerpc_Register(0, 128)

    def get_registers(self):
        """Generator yielding every register object in the namespace.

        Note: each register is yielded twice, once per case-aliased key.
        """
        for name in list(self.NS):
            yield self.NS[name]
# End of not yet implemented


class Virtual_Memory:
    """A virtual address space managing non-overlapping allocated chunks.

    chunks is a list of (Virtual_Memory_Chunk, start_address, size) tuples.
    """

    def __init__(self, max_memory):
        if type(max_memory) != int:
            raise TypeError("Max memory of virtual memory class instance must be type 'int'")
        self.max_memory = max_memory  # highest address that may be allocated
        self.chunks = []

    def allocate(self, address, memory):
        """Allocate `memory` bytes starting at `address` as a new chunk.

        Raises ValueError for a negative address or non-positive size, and
        IndexError when the region exceeds max_memory or overlaps an
        existing chunk.
        """
        if (address < 0) or (memory <= 0):
            raise ValueError("Address or memory amount to be allocated in the Virtual Memory instance can not be negative.")
        if address + memory > self.max_memory:  # outside of max memory
            raise IndexError("Can not allocate virtual_memory_chunks to an address outside the max_memory range of the Virtual_Memory instance." + "Attempted to allocate at " + str(hex(address)) + " for " + str(hex(memory)) + " bytes. max_memory of the current Virtual_Memory instance is " + str(hex(self.max_memory)))
        for _, start, size in self.chunks:
            # Overlap: the new region straddles an existing chunk's start, or
            # begins inside (or exactly at the end of) an existing chunk.
            if ((address < start) and (address + memory >= start)) or ((address >= start) and (address <= (start + size))):
                raise IndexError("Cannot allocate to an already allocated address. Allocation: Address: " + str(hex(address)) + ", Memory: " + str(hex(memory)) + " Overlaps allocation at " + str(hex(start)) + " for " + str(hex(size)) + " Bytes.")
        self.chunks.append((Virtual_Memory_Chunk(self, address, memory), address, memory))

    def deallocate(self, address):
        """Remove the chunk whose start address is exactly `address`.

        Raises TypeError for a non-int address and IndexError when no chunk
        starts at that address.
        """
        if type(address) != int:
            raise TypeError("Address used to dellocate memory in Virtual_Memory instance must be type 'int'. Type: " + str(type(address)))
        for index in range(len(self.chunks)):
            if self.chunks[index][1] == address:
                del self.chunks[index]  # deletes memory chunk
                return
        raise IndexError("Given address to deallocate memory of Virtual_Memory instance is not a correct Virtual_Memory_Chunk starting address. Address to deallocate is " + str(hex(address)))

    def get_memory(self, address, memory):
        """Return `memory` bytes at `address` as a list of hex-byte strings.

        The whole range must fall inside a single allocated chunk.
        """
        if memory <= 0:
            raise ValueError("Must get a positive number of memory from the Virtual Memory instance. Attempted to get from " + str(hex(address)) + " for " + str(hex(memory)) + " bytes.")
        if address > self.max_memory:
            raise IndexError("Can't get memory from an address outside the max_memory range of the Virtual_Memory instance. Attempted to get from " + str(hex(address)) + " for " + str(hex(memory)) + " bytes. max_memory of the current Virtual_Memory instance is " + str(hex(self.max_memory)))
        for chunk, start, size in self.chunks:
            if start <= address and (address + memory < (start + size)):
                internal = address - start  # offset inside the chunk's memory list
                return chunk.memory[internal:internal + memory]
        raise IndexError("No chunk was found that has memory allocated in the memory region to get from the Virtual Memory instance. Attempted to get from " + str(hex(address)) + " for " + str(hex(memory)) + " bytes.")

    def set_memory(self, address, new_memory):
        """Overwrite memory at `address` with `new_memory` (hex string or byte list).

        The whole range must fall inside a single allocated chunk.
        """
        if type(new_memory) == str:
            new_memory = findall('..', new_memory.upper())  # split into byte pairs
        if len(new_memory) == 0:
            raise ValueError("Length of memory to set in the current Virtual Memory instance must be greater than 1 byte. Address to set " + str(hex(address)))
        if address > self.max_memory:
            raise IndexError("Can't set memory from an address outside the max_memory range of the Virtual_Memory instance. Attempted to set at " + str(hex(address)) + ". max_memory of the current Virtual_Memory instance is " + str(hex(self.max_memory)))
        for chunk, start, size in self.chunks:
            if start <= address and (address + len(new_memory) < (start + size)):
                internal = address - start  # offset inside the chunk's memory list
                chunk.memory[internal:internal + len(new_memory)] = new_memory
                return
        # BUG FIX: the original message here referenced an undefined name
        # `memory`, so this failure path raised NameError instead of IndexError.
        raise IndexError("No chunk was found that has memory allocated in the memory region to set in the Virtual Memory instance. Attempted to set at " + str(hex(address)) + " for " + str(hex(len(new_memory))) + " bytes.")


class Virtual_Memory_Chunk:
    """A contiguous allocated region stored as a list of two-character hex strings."""

    def __init__(self, parent, start_address=0, allocated_memory=100, memory_input="00", architecture_class="None"):
        """Build a chunk of `allocated_memory` bytes starting at `start_address`.

        parent: the owning Virtual_Memory instance.
        start_address: int, or hex string such as "0x80000000".
        memory_input: hex string of initial contents; zero-padded up to the
            allocated size.
        architecture_class: open-ended hook for architecture-specific
            encode/decode bindings.
        """
        # Error checking and formatting
        if type(memory_input) != str:  # memory input should be in hex, as a string
            raise TypeError("Incorrect type for memory input to create virtual memory. type: " + str(type(memory_input)))
        if type(start_address) != int:
            if type(start_address) == str:  # allows hex
                # BUG FIX: the original length checks rejected valid short hex
                # strings such as "0x5" with a TypeError.
                if start_address[0:2] == "0x":
                    if len(start_address) > 2:
                        start_address = int(start_address, 16)  # converts the hex to int
                    else:
                        raise ValueError("Input for starting address of virtual memory contains no hex after the 0x")
                else:
                    raise TypeError("Incorrect type for starting address to create virtual memory.")
            else:
                raise TypeError("Incorrect type for starting address to create virtual memory.")
        if memory_input[0:2] == "0x":  # removes "0x" from beginning if included
            memory_input = memory_input[2:]
        if len(memory_input) > (allocated_memory * 2):  # more memory given than allocated
            raise IndexError("Memory inputted for creation of virtual memory exceeds the length allowed by the allocated memory")
        elif len(memory_input) < (allocated_memory * 2):  # less memory given than allocated
            memory_input = memory_input + ("0" * ((allocated_memory * 2) - len(memory_input)))  # fills unspecified memory with zeros
        # else: memory given is equal to memory allocated
        # initialization
        self.parent = parent
        self.start_address = start_address  # address of the first byte, as referenced by opcodes
        self.memory = findall('..', memory_input)  # list of each individual byte
        self.allocated_memory = allocated_memory  # amount of memory available
        self.architecture_class = architecture_class  # architecture-specific bindings hook

    def get_memory(self, address, amount):
        """Return `amount` bytes starting at `address` (ints or "0x..." strings)."""
        if type(address) == str:
            if "0x" in address:
                address = int(address, 16)
        if type(amount) == str:
            if "0x" in amount:
                amount = int(amount, 16)
        if address < self.start_address or address > (self.start_address + self.allocated_memory):  # outside allocated memory range
            raise IndexError("Address accessed by get_memory() function of Virtual Memory is outside the range of the allocated memory. Address: " + str(hex(address)) + ", Allocated Memory: " + str(hex(self.start_address)) + "-" + str(hex(self.start_address + self.allocated_memory)))
        memory_start = address - self.start_address  # internal offset into this chunk
        return self.memory[memory_start:memory_start + amount]

    def set_memory(self, address, new_memory):
        """Write hex string `new_memory` at `address`.

        An odd trailing nibble is completed with the low nibble already stored
        at that position, so the smallest editable unit is a nibble.
        """
        if type(address) == str:
            if "0x" in address:
                address = int(address, 16)
        if type(new_memory) != str:
            # Kept as IndexError (not TypeError) so existing callers' handlers still match.
            raise IndexError("Memory Inputed by set_memory() function of Virtual Memory is not a valid type. Type: " + str(type(new_memory)))
        if new_memory[0:2] == "0x":
            new_memory = new_memory[2:]
        memory_start = address - self.start_address  # internal offset into this chunk
        if (address < self.start_address) or (address > (self.start_address + self.allocated_memory)) or (address + (len(new_memory) / 2) > (self.start_address + self.allocated_memory)):  # outside allocated memory range
            raise IndexError("Address accessed by set_memory() function of Virtual Memory is outside the range of the allocated memory. Address: " + str(hex(address)) + "-" + str(hex(int(address + (len(new_memory) / 2)))) + ", Allocated Memory: " + str(hex(self.start_address)) + "-" + str(hex(self.start_address + self.allocated_memory)))
        if len(new_memory) % 2 != 0:  # odd nibble count: borrow the stored low nibble
            new_memory = new_memory + self.memory[int(memory_start + (len(new_memory) / 2))][1]
        self.memory[memory_start:int(memory_start + (len(new_memory) / 2))] = findall('..', new_memory)  # updates memory

    def print_variables(self):
        """Dump the chunk's state for debugging."""
        print(self.start_address)
        print(self.memory)
        print(self.allocated_memory)
        print(self.architecture_class)


# Memory = Virtual_Memory_Chunk("0x80000000",100,"52AA6FBB52AA60BB52AA60BB52AA60BB")
# print(Memory.get_memory("0x80000000","0xF"))
# Memory.set_memory("0x80000000","0xFFF")
# print(Memory.get_memory("0x80000000","0xF"))

# Demonstration script.
Memory = Virtual_Memory(0xFFFFFFFF)
Memory.allocate(0x80000200, 100)
Memory.allocate(0x80000000, 100)
Memory.set_memory(0x80000002, 'FAEE00112255')
print(Memory.get_memory(0x80000002, 0x10))
newPPC = Powerpc_Architecture()
# for i in newPPC.get_registers():
#     print(bin(int(i)))
Python
MIT
tatatat0/VirtualMemory/VirtualMemory.py
d878b89f-dffa-4445-af7e-56dc2808d289
[]
[]
<h1 align="center"> <a href="./"><img id="header-logo" src="./logo.svg" width="250" alt="MuSpace logo" /></a> </h1> <h1 align="center">Software Requirement Specification (SRS)</h1> <h2>1. Introduction</h2> <h3>1.1 Project Summary</h3> <p> MuSpace is a music-based social media platform focused on connecting music fans with one another and allowing them to share their tastes with the world. Each MuSpace user will be provided with a personal feed to post about songs, albums, artists, and playlists that they are interested in. Users will be able to add friends on MuSpace with who they can chat, and look at their detailed Spotify listening history. Users will also be able to view detailed statistics about their listening habits, such as listening time, favourite genres, favourite artists, and more. </p> <h3>1.2 Purpose</h3> <p> This is the requirement document, which contains everything necessary for MuSpace. Additionally, the document includes the project features along with a description of how each feature will function. The audience for this document is potential clients and their corresponding development teams. This is a reference guide for the development teams to ensure that everything runs smoothly, and answer potential questions along the way. </p> <h3>1.3 Business Model</h3> <p> The platform will be free to use. It will cost users nothing to download and there will be no advertisements in the application. The reasons for allowing users to use the application for free are to first gain exposure and publicity as well as develop monetizable content. 
</p> <h2>1.4 Definitions, Acronyms and Abbreviations</h2> <h4>1.4.1 Acronyms and Abbreviations</h4> <ul> <li>API - Application Program Interface - External software</li> <li>GUI - Graphical User Interface</li> <li>SRS - Software Requirement Specification</li> <li>2FA - Two Factor Authentication</li> <li>PC - Personal Computer</li> <li>R&D - Research and Development</li> </ul> <h4>1.4.2 Definitions</h4> <ul> <li>Chat - String of messages sent back and forth between users of an application</li> </ul> <h2>1.5 References</h2> <ul> <li> <a href="https://bohr.wlu.ca/cp317/notes/IEEE_830.pdf" target="_blank"> [IEEE 830] IEEE Recommended Practice for Software Requirements Specifications ANSI/IEEE Std. 830-1998 </a> </li> <li> <a href="http://bluehawk.monmouth.edu/~lvallone/ieee_828-1998_sw_config_mgmt.pdf" target="_blank"> [IEEE 828] IEEE Standard for Software Configuration Management Plans, ANSI/IEEE Std. 828-1998 </a> </li> <li> <a href="https://cours.etsmtl.ca/log792/private/restreint/IEEE_1058_Project_Management_Plan.pdf" target="_blank" > [IEEE 1058] IEEE Standard for Software Project Management ANSI/IEEE Std. 1058.1-1987 </a> </li> <li> <a href="https://bohr.wlu.ca/cp317/shout/Requirements-master/#141-acronyms-and-abbreviations" target="_blank"> Shout! Software Requirement Specification (wlu.ca) </a> </li> </ul> <h2>2. Overall description</h2> <h3>2.1 Product Perspective</h3> <p> MuSpace will take a web-based development approach using <a href="https://reactjs.org/" target="_blank">React</a>. Major components include user connection to Spotify using <a href="https://developer.spotify.com/documentation/web-api/" target="_blank">Spotify’s Web API</a>, and web-based chat functionality. 
</p> <h4>2.1.1 <a href="https://www.figma.com/file/Jh6rAs4sNIZPaELM6qcGEt/Website?node-id=327%3A12583" target="_blank">Sample GUI</a></h4> <iframe style="border: 1px solid rgba(0, 0, 0, 0.1);" width="900" height="700" src="https://www.figma.com/embed?embed_host=share&url=https%3A%2F%2Fwww.figma.com%2Ffile%2FJh6rAs4sNIZPaELM6qcGEt%2FWebsite%3Fnode-id%3D327%253A12583" allowfullscreen></iframe> <h3>2.2 Product Functions</h3> <p> MuSpace is a music application allowing users to connect and chat with each other, as well as look at what music their friends have been listening to. Users will be able to add friends, and friends can chat with each other using the in-app chat feature. The app currently supports Spotify for music preference data. </p> <h3>2.3 User Characteristics</h3> <p> MuSpace’s primary users are those trying to connect with others who have a similar taste in music. MuSpace will be easy to use with a minimalistic UI, allowing users with minimal technical literacy to navigate through the application to connect with their friends and share music. A help section, as well as an FAQ, will be included in all versions of MuSpace so that users can quickly get up to speed. </p> <h3>2.4 Constraints</h3> <p> MuSpace users must have an active Spotify account. Without a Spotify account, users will be unable to create their MuSpace account. MuSpace, at this current point in time, will not function on iOS or Android devices, it will only be available on a web browser. IOS and Android functionality will be added in the the near future. </p> <p> MuSpace users must have a stable internet connection in order to use the web application. Weak internet connection can result in excessive buffering time. </p> <p> MuSpace will have time, cost and reliability constraints. Development and testing must be finished before August 15th, 2021, and will be developed for free with no funding from the school or the students. 
Publication to the Apple App store requires a fee, so this will be a constraint. </p> <h3>2.5 Assumptions and Dependencies</h3> <p> It is required that users of MuSpace all have Spotify accounts, with some level of activity on the platform to build a profile. MuSpace is dependent on how rich the user's musical history (songs listened to, artists followed, hours spent listening to certain genres, etc.) is, the more in-depth their profile will be and better the experience they will get from MuSpace. </p> <h3>2.6 User Interfaces</h3> <h4>2.6.1 Sidebar</h4> <p>The Sidebar will be consistent in all of the pages, except that of the Login/ Register (2.6.3/ 2.6.4) Pages. It’s functionality will be to access all other pages (except Login/ Register pages) from any other page.</p> <h4>2.6.2 Searchbar</h4> <p>The Searchbar, similar to the SideBar (2.6.1) will be present on all pages.. The use of the search bar is to search for friends/ people’s profiles. Along with this, the search bar contains the notification icon that indicates when a notification is present.</p> <h4>2.6.3 Login Page</h4> <p>The Login Page will be the first page that will be displayed to the user. This page will allow the user to login with the credentials using different authentications such as “login with Email” or “login with Google” (with their Google account). The page consists of a “Forgot Password” function that allows the user to reset their password with a link sent to the email linked to the MuSpace account. The sign up/register functionality is available for the user to create a new account and is found beside the “Forgot Password” function, this function redirects the user to the Registration Page (2.6.4).</p> <h4>2.6.4 Register Page</h4> <p>The Register Page, asks for the new user’s information, such as First and Last names, Date of Birth, Username, Email and Password. 
This information will go into the database.</p> <h4>2.6.5 Home Page</h4> <p>The Home Page is the main page the user is redirected to after successfully logging in. This page displays some statistics about the user’s activity on MuSpace including favourite artists of the week, favourite songs of the week, hours spent listening to music, and more. The page also shows the friends activity tab which displays the music and album a particular friend (added previously by the user) is listening to.</p> <h4>2.6.6 Messages Page</h4> <p>The Messages Page is where users can go to see all of their chat histories with each of their friends. The main messages page is a list of conversations the user has had with each friend, sorted by the most recent history. Clicking on one of these conversations will bring the user to the chat page, in which they can send a message to the friend whose chat page they opened.</p> <h4>2.6.7 Notification Page</h4> <p>The Notification Page is where users can see any unread notifications they received. These could include new messages, friend requests, updates from the developers, or required actions such as updating account information.</p> <h4>2.6.8 Profile Page</h4> <p>The profile page is where users go to see their own or their friends' listening activity and statistics on Spotify. Each person's profile page can be customized with a profile picture, and a short biography (max 100 words).</p> <h4>2.6.9 Friends Page</h4> <p>The Friends Page will display all current friends, along with their listening activity (what they are currently listening to), along with leaderboards, and a similarity bar. 
The Leaderboard shows which of your friends has the most listening hours and Unique artists listened to in a given amount of time (to be determined).</p> <h3>2.7 Use Case Diagram</h3> <div style="width: 960px; height: 720px; margin: 10px; position: relative;"><iframe allowfullscreen frameborder="0" style="width:960px; height:720px" src="https://lucid.app/documents/embeddedchart/3b73c13e-821d-4b73-aea6-bfda8448a12e" id="7rQYm4LzcUxa"></iframe></div> <h2>3. Technical Requirements</h2> <h3>3.1 Hardware Interfaces</h3> <!-- <ul> <li>Authentication</li> <ul> <li>Username/ Password (Google Authentication)</li> </ul> <li>Messaging</li> <ul> <li>Timestamps</li> <li>Message ID</li> </ul> </ul> --> <p> The application will run on any Windows, OSx or Linux machine. The web browsers that can be used include Google Chrome, Mozilla Firefox, Apple Safari, Microsoft Edge. The input will be received from the keyboard and mouse the system is attached to. </p> <h3>3.2 Functional Requirements</h3> <h4>3.2.1 Client Requirements</h4> <ul> <li>OS: <a href="https://www.microsoft.com/en-ca/windows" target="_blank">Windows</a>, <a href="https://www.apple.com/ca/macos/big-sur/" target="_blank">MacOS</a>, <a href="https://www.linux.org/" target="_blank">GNU/Linux</a></li> <li>Stable network connection</li> <li>A device that has access to a modern web browser with JavaScript ES6 capabilities. E.G. <a href="https://www.google.com/intl/en_ca/chrome/" target="_blank">Google Chrome</a>, <a href="https://www.mozilla.org/en-CA/firefox/new/" target="_blank">Firefox</a>, <a href="https://www.apple.com/ca/safari/" target="_blank">Safari</a>, <a href="https://www.microsoft.com/en-us/edge" target="_blank">Microsoft Edge</a>, etc. 
</li> <li><a href="https://www.spotify.com/us/home/" target="_blank">Active Spotify Account</a></li> </ul> <h4>3.2.2 Server/Development Requirements</h4> <ul> <li><a href="https://www.cloudflare.com/" target="_blank">Cloudflare</a></li> <li><a href="https://www.javascript.com/" target="_blank">JavaScript.com</a></li> <li><a href="https://www.typescriptlang.org/" target="_blank">TypeScript: Typed JavaScript at Any Scale. (typescriptlang.org)</a></li> <li><a href="https://reactjs.org/" target="_blank">React</a></li> <li><a href="https://reactrouter.com/web/guides/quick-start" target="_blank">Declarative Routing for React.js</a></li> <li><a href="https://www.npmjs.com/package/react-firebase-hooks" target="_blank">react-firebase-hooks - npm</a></li> <li><a href="https://www.npmjs.com/package/react-scripts" target="_blank">react-scripts - npm</a></li> <li><a href="https://react-icons.github.io/react-icons/" target="_blank">React Icons</a></li> <li><a href="https://preview.npmjs.com/package/react-rounded-image" target="_blank">react-rounded-images - npm</a></li> <li><a href="https://nodejs.org/en/" target="_blank">Node.js (nodejs.org)</a></li> <li><a href="https://www.npmjs.com/package/firebase-admin" target="_blank">firebase-admin - npm</a></li> <li><a href="https://developer.spotify.com/documentation/web-api/" target="_blank">Web API | Spotify for Developers</a></li> <li><a href="https://developer.spotify.com/documentation/web-playback-sdk/#about-the-sdk" target="_blank">Web Playback SDK | Spotify for Developers</a></li> <li><a href="https://styled-components.com/" target="_blank">styled-components</a></li> <li><a href="https://nodejs.org/api/querystring.html" target="_blank">NodeJS: Query String</a></li> <li><a href="https://www.npmjs.com/package/validator" target="_blank">validator - npm</a></li> <li><a href="https://www.npmjs.com/package/web-vitals" target="_blank">web-vitals - npm</a></li> <li><a href="https://www.npmjs.com/package/@craco/craco" 
target="_blank">Craco</a></li> <li><a href="https://firebase.google.com/" target="_blank">Google Firebase</a></li> <ul> <li><a href="https://firebase.google.com/docs/auth" target="_blank">Authentication</a></li> <li><a href="https://firebase.google.com/docs/firestore" target="_blank">Firestore</a></li> <li><a href="https://firebase.google.com/docs/storage" target="_blank">Cloud Storage</a></li> </ul> </ul> <h3>3.3 Security</h3> <p>MuSpace will allow users to create accounts either by signing up with Google or by creating a username and password. All transactions for authentication are passed through a secure encrypted end-to-end pipeline to the Google Firebase Authentication servers. Google Firebase Authentication is compliant with the ISO 27001, ISO 27017, ISO 27018, SOC 1, SOC 2, and SOC 3 security evaluation standards. Below are some technologies MuSpace itself uses for security: </p> <ul> <li>Cloudflare:</li> <ul> <li>Used as DDOS protection through their Content Delivery Network (CDN) to prevent many simultaneous connections taking down the server. Also minify our code and allow it to reach the client quicker.</li> </ul> <li>Firebase Authentication:</li> <ul> <li>Used as a central repository to create and store users with a generated internal ID that is used to refer to all their settings, data, etc…</li> </ul> <li>Firebase Firestore:</li> <ul> <li>Used as the main database to store messages, feeds, posts, relationships, and user data. All data is stored as a key-value pair, with the key usually being a user ID that is generated by Authentication.</li> </ul> <li>Firebase Cloud Storage:</li> <ul> <li>Used as a CDN to store basic files that are served to clients, such as images.</li> </ul> <li>Report Feature:</li> <ul> <li>Button on the sidebar that allows anyone to submit an issue that they have encountered via Github Issues.</li> </ul> </ul> <h3>3.4 Design Constraints</h3> <p> The design tools are limited solely by budget, as the budget is $0.00. 
The Developer fees however for publishing are not tied down to the cost of the project itself and will be split with all team members. All other resources (frameworks, APIs, etc.) are free and open-source. </p> <h3>3.5 Portability</h3> <p> Since this is a web-based application. It will be accessible on any device with a modern web browser as long as the user has a Spotify account and an internet connection to utilize the app. </p> <h2>4. Extra Details</h2> <h3>Version History:</h3> <ul> <li>Version 0.1.0 [Base Document]</li> <ul> <li>May 24th</li> <li>Members - All members</li> </ul> <li>Version 0.1.1 [Completion]</li> <ul> <li>May 26th</li> <li>Members - All members</li> </ul> <li>Version 1.0.0 [Finalizing and Publishing]</li> <ul> <li>May 28th</li> <li>Members - All members</li> </ul> <li>Version 2.0.0 [Reviewing Feedback and Finalizing]</li> <ul> <li>July 27th</li> <li>Members - All members</li> </ul> <li>Version 2.0.1 [Update to Reflect Changes Made]</li> <ul> <li>July 30th</li> <li>Members - Jagveer, Jiten</li> </ul> </ul> <!-- Authored By: --> <h3>Authored By:</h3> <ul> <li>Ali, Farzan</li> <li>Alting-Mees, Adrian</li> <li>Aylani, Jiten</li> <li>Goldman, Jacob</li> <li>Hollingworth, Declan</li> <li>Kellner, Kelvin</li> <li>Maan, Gur Armaan</li> <li>Manimaran, Mathu</li> <li>Mazza, Robert</li> <li>Olowonefa, Peju</li> <li>Rao, Nausher</li> <li>Sangha, Jagveer</li> <li>Tewari, Nish</li> <li>Yasin, Daner</li> </ul>
Markdown
MIT
SherRao/MuSpace/docs/Requirements.md
7ced85ed-209e-4e8b-a0f5-730527a55991
[]
[]
""" Off Multipage Cheatsheet https://github.com/daniellewisDL/streamlit-cheat-sheet @daniellewisDL : https://github.com/daniellewisDL """ import streamlit as st from pathlib import Path import base64 from modules.toc import * # Initial page config st.set_page_config( page_title='Code Compendium Intro Page', layout="wide", # initial_sidebar_state="expanded", ) # col2.title("Table of contents") # col2.write("http://localhost:8502/#display-progress-and-status") # toc.header("Header 1") # toc.header("Header 2") # toc.subheader("Subheader 1") # toc.subheader("Subheader 2") # toc.generate() # Thanks to streamlitopedia for the following code snippet def img_to_bytes(img_path): img_bytes = Path(img_path).read_bytes() encoded = base64.b64encode(img_bytes).decode() return encoded # sidebar # def cs_sidebar(): # st.sidebar.markdown('''[<img src='data:image/png;base64,{}' class='img-fluid' width=32 height=32>](https://streamlit.io/)'''.format(img_to_bytes("logomark_website.png")), unsafe_allow_html=True) # st.sidebar.header('Streamlit cheat sheet') # st.sidebar.markdown(''' # <small>Summary of the [docs](https://docs.streamlit.io/en/stable/api.html), as of [Streamlit v1.0.0](https://www.streamlit.io/).</small> # ''', unsafe_allow_html=True) # st.sidebar.markdown('__How to install and import__') # st.sidebar.code('$ pip install streamlit') # st.sidebar.markdown('Import convention') # st.sidebar.code('>>> import streamlit as st') # st.sidebar.markdown('__Add widgets to sidebar__') # st.sidebar.code(''' # st.sidebar.<widget> # >>> a = st.sidebar.radio(\'R:\',[1,2]) # ''') # st.sidebar.markdown('__Command line__') # st.sidebar.code(''' # $ streamlit --help # $ streamlit run your_script.py # $ streamlit hello # $ streamlit config show # $ streamlit cache clear # $ streamlit docs # $ streamlit --version # ''') # st.sidebar.markdown('__Pre-release features__') # st.sidebar.markdown('[Beta and experimental 
features](https://docs.streamlit.io/en/stable/api.html#beta-and-experimental-features)') # st.sidebar.code(''' # pip uninstall streamlit # pip install streamlit-nightly --upgrade # ''') # st.sidebar.markdown('''<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', unsafe_allow_html=True) # return None ########################## # Main body of cheat sheet ########################## def div(): def cs_body(): col1, col2 = st.columns(2) col1.title('Ryan Paik Coding Compendium') col1.markdwon(''' “You don't learn to walk by following rules. You learn by doing, and by falling over.” -Richard Branson ----- ''') col1.subheader("Welcome to my Code Compendium.") col1.markdown(''' This website/webapp is my personal cheatsheet for of all the code snippets that I have needed over the past 2 years. This ended up being a quick detour into Streamlit that I fell in love with while I was building flask api's. ----- **Programming is only as deep as you want to dive in.** This webapp features the basic code snippets from all the "googling" from programming I have done. I have taken the plunge and have created my own markdown notebooks organizing information from quick solution tidbits to documentation for programming languages. Please visit my github for practical code and my research notebooks: *[rypaik (Ryan Paik) · GitHub](https://github.com/rypaik)* If you would like access to my Gist please email me. ryanpaik@protonmail.com ----- **Bio:** Currently a Sophomore at University of Illinois at Urbana-Champaign Working Nights on my degree from the System Engineering Program **Hobbies:** Trying to become a real guitar hero minus the game system, playing Valorant with the St Mark's crew, getting interesting eats no matter where I am, and playing toss with my baseball field rat of a cousin. The newest hobby is figuring out what I can build with all the new breakthroughs in technology. 
**Currently Working On** Frameworks and Languages:     - Flask, Django, FastAPI, PyTorch, Streamlit, OpenCV, shell scripting, Python, C++ Databases:     - Postgres, Redis, MongoDB, and applicable ORMs When I can get up for Air:     - React, swift(ios), Rust, GO!!     - Find a team to get a paper In Arxiv **This site will be constantly updated as long as I program. Feel free to pass on the URL.** ''') # col2.subheader('Display interactive widgets') # col2.code(''' # st.button('Hit me') # st.download_button('On the dl', data) # st.checkbox('Check me out') # st.radio('Radio', [1,2,3]) # st.selectbox('Select', [1,2,3]) # st.multiselect('Multiselect', [1,2,3]) # st.slider('Slide me', min_value=0, max_value=10) # st.select_slider('Slide to select', options=[1,'2']) # st.text_input('Enter some text') # st.number_input('Enter a number') # st.text_area('Area for textual entry') # st.date_input('Date input') # st.time_input('Time entry') # st.file_uploader('File uploader') # st.color_picker('Pick a color') # ''') # col2.write('Use widgets\' returned values in variables:') # col2.code(''' # >>> for i in range(int(st.number_input('Num:'))): foo() # >>> if st.sidebar.selectbox('I:',['f']) == 'f': b() # >>> my_slider_val = st.slider('Quinn Mallory', 1, 88) # >>> st.write(slider_val) # ''') # # Control flow # col2.subheader('Control flow') # col2.code(''' # st.stop() # ''') # # Lay out your app # col2.subheader('Lay out your app') # col2.code(''' # st.form('my_form_identifier') # st.form_submit_button('Submit to me') # st.container() # st.columns(spec) # >>> col1, col2 = st.columns(2) # >>> col1.subheader('Columnisation') # st.expander('Expander') # >>> with st.expander('Expand'): # >>> st.write('Juicy deets') # ''') # col2.write('Batch widgets together in a form:') # col2.code(''' # >>> with st.form(key='my_form'): # >>> text_input = st.text_input(label='Enter some text') # >>> submit_button = st.form_submit_button(label='Submit') # ''') # # Display code # 
col2.subheader('Display code') # col2.code(''' # st.echo() # >>> with st.echo(): # >>> st.write('Code will be executed and printed') # ''') # # Display progress and status # col2.subheader('Display progress and status') # col2.code(''' # st.progress(progress_variable_1_to_100) # st.spinner() # >>> with st.spinner(text='In progress'): # >>> time.sleep(5) # >>> st.success('Done') # st.balloons() # st.error('Error message') # st.warning('Warning message') # st.info('Info message') # st.success('Success message') # st.exception(e) # ''') # # Placeholders, help, and options # col2.subheader('Placeholders, help, and options') # col2.code(''' # st.empty() # >>> my_placeholder = st.empty() # >>> my_placeholder.text('Replaced!') # st.help(pandas.DataFrame) # st.get_option(key) # st.set_option(key, value) # st.set_page_config(layout='wide') # ''') # # Mutate data # col2.subheader('Mutate data') # col2.code(''' # DeltaGenerator.add_rows(data) # >>> my_table = st.table(df1) # >>> my_table.add_rows(df2) # >>> my_chart = st.line_chart(df1) # >>> my_chart.add_rows(df2) # ''') # # Optimize performance # col2.subheader('Optimize performance') # col2.code(''' # @st.cache # >>> @st.cache # ... def fetch_and_clean_data(url): # ... # Mutate data at url # ... 
return data # >>> # Executes d1 as first time # >>> d1 = fetch_and_clean_data(ref1) # >>> # Does not execute d1; returns cached value, d1==d2 # >>> d2 = fetch_and_clean_data(ref1) # >>> # Different arg, so function d1 executes # >>> d3 = fetch_and_clean_data(ref2) # ''') # col2.subheader('Other key parts of the API') # col2.markdown(''' # <small>[State API](https://docs.streamlit.io/en/stable/session_state_api.html)</small><br> # <small>[Theme option reference](https://docs.streamlit.io/en/stable/theme_options.html)</small><br> # <small>[Components API reference](https://docs.streamlit.io/en/stable/develop_streamlit_components.html)</small><br> # <small>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br> # ''', unsafe_allow_html=True) # Column 3 TOC Generator # col3.subheader('test') # toc = Toc(col3) # # col2.title("Table of contents") # col3.write("http://localhost:8502/#display-progress-and-status", unsafe_allow_html=True) # toc.header("Header 1") # toc.header("Header 2") # toc.generate() # toc.subheader("Subheader 1") # toc.subheader("Subheader 2") # toc.generate() # return None # Run main() # if __name__ == '__main__': # main() # def main(): def app(): # cs_sidebar() cs_body() return None
Python
MIT
rypaik/Streamlit_Ref/.history/pages/intro_20220303160531.py
116f5316-0f74-4616-ab97-5fc54c3a0061
[{"tag": "USERNAME", "value": "daniellewisDL", "start": 2303, "end": 2316, "context": "<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', un"}, {"tag": "EMAIL", "value": "ryanpaik@protonmail.com", "start": 3594, "end": 3617, "context": "ou would like access to my Gist please email me.\n\nryanpaik@protonmail.com\n\n\n\n\n\n-----\n\n**Bio:**\n\nCurrently a Sophomore at Un"}, {"tag": "NAME", "value": "Ryan Paik", "start": 3490, "end": 3499, "context": "ctical code and my research notebooks:\n\n*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you wo"}, {"tag": "NAME", "value": "Quinn Mallory", "start": 5376, "end": 5389, "context": "f']) == 'f': b()\n# >>> my_slider_val = st.slider('Quinn Mallory', 1, 88)\n# >>> st.write(slider_val)\n# ''')\n\n#"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 48, "end": 61, "context": "\"\"\"\nOff Multipage Cheatsheet\nhttps://github.com/daniellewisDL/streamlit-cheat-sheet\n@daniellewisDL : https://gi"}, {"tag": "USERNAME", "value": "rypaik", "start": 3530, "end": 3536, "context": "*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you would like access to my Gist please ema"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 120, "end": 133, "context": "t-cheat-sheet\n@daniellewisDL : https://github.com/daniellewisDL\n\n\"\"\"\n\nimport streamlit as st\nfrom pathlib import "}, {"tag": "NAME", "value": "Richard Branson", "start": 2732, "end": 2747, "context": "earn by doing, and by falling over.\u201d\n -Richard Branson\n -----\n ''')\n\n\n\n col1.subheader(\"Wel"}, {"tag": "USERNAME", "value": "daniellewisdl", "start": 8257, "end": 8270, "context": "mall>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br>\n# ''', "}]
[{"tag": "USERNAME", "value": "daniellewisDL", "start": 2303, "end": 2316, "context": "<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', un"}, {"tag": "EMAIL", "value": "ryanpaik@protonmail.com", "start": 3594, "end": 3617, "context": "ou would like access to my Gist please email me.\n\nryanpaik@protonmail.com\n\n\n\n\n\n-----\n\n**Bio:**\n\nCurrently a Sophomore at Un"}, {"tag": "NAME", "value": "Ryan Paik", "start": 3490, "end": 3499, "context": "ctical code and my research notebooks:\n\n*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you wo"}, {"tag": "NAME", "value": "Quinn Mallory", "start": 5376, "end": 5389, "context": "f']) == 'f': b()\n# >>> my_slider_val = st.slider('Quinn Mallory', 1, 88)\n# >>> st.write(slider_val)\n# ''')\n\n#"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 48, "end": 61, "context": "\"\"\"\nOff Multipage Cheatsheet\nhttps://github.com/daniellewisDL/streamlit-cheat-sheet\n@daniellewisDL : https://gi"}, {"tag": "USERNAME", "value": "rypaik", "start": 3530, "end": 3536, "context": "*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you would like access to my Gist please ema"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 120, "end": 133, "context": "t-cheat-sheet\n@daniellewisDL : https://github.com/daniellewisDL\n\n\"\"\"\n\nimport streamlit as st\nfrom pathlib import "}, {"tag": "NAME", "value": "Richard Branson", "start": 2732, "end": 2747, "context": "earn by doing, and by falling over.\u201d\n -Richard Branson\n -----\n ''')\n\n\n\n col1.subheader(\"Wel"}, {"tag": "USERNAME", "value": "daniellewisdl", "start": 8257, "end": 8270, "context": "mall>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br>\n# ''', "}]
cask 'cantata' do version '2.3.2' sha256 'c9eb8a1102d0a68cafc93f22df73445b8f69706f3322285f9a2f623a28df0176' url "https://github.com/CDrummond/cantata/releases/download/v#{version}/Cantata-#{version}.dmg" appcast 'https://github.com/CDrummond/cantata/releases.atom' name 'Cantata' homepage 'https://github.com/cdrummond/cantata' depends_on macos: '>= :sierra' app 'Cantata.app' end
Ruby
BSD-2-Clause
00dani/homebrew-cask/Casks/cantata.rb
73f655a2-94da-400f-9195-86b2150aa2bc
[{"tag": "USERNAME", "value": "cdrummond", "start": 322, "end": 331, "context": "'\n name 'Cantata'\n homepage 'https://github.com/cdrummond/cantata'\n\n depends_on macos: '>= :sierra'\n\n app"}, {"tag": "USERNAME", "value": "CDrummond", "start": 139, "end": 148, "context": "2285f9a2f623a28df0176'\n\n url \"https://github.com/CDrummond/cantata/releases/download/v#{version}/Cantata-#{v"}, {"tag": "USERNAME", "value": "CDrummond", "start": 241, "end": 250, "context": "ata-#{version}.dmg\"\n appcast 'https://github.com/CDrummond/cantata/releases.atom'\n name 'Cantata'\n homepag"}, {"tag": "SSH_KEY", "value": "c9eb8a1102d0a68cafc93f22df73445b8f69706f3322285f9a2f623a28df0176", "start": 46, "end": 110, "context": "cask 'cantata' do\n version '2.3.2'\n sha256 'c9eb8a1102d0a68cafc93f22df73445b8f69706f3322285f9a2f623a28df0176'\n\n url \"https://github.com/CDrummond/cantata/rel"}]
[{"tag": "USERNAME", "value": "cdrummond", "start": 322, "end": 331, "context": "'\n name 'Cantata'\n homepage 'https://github.com/cdrummond/cantata'\n\n depends_on macos: '>= :sierra'\n\n app"}, {"tag": "USERNAME", "value": "CDrummond", "start": 139, "end": 148, "context": "2285f9a2f623a28df0176'\n\n url \"https://github.com/CDrummond/cantata/releases/download/v#{version}/Cantata-#{v"}, {"tag": "USERNAME", "value": "CDrummond", "start": 241, "end": 250, "context": "ata-#{version}.dmg\"\n appcast 'https://github.com/CDrummond/cantata/releases.atom'\n name 'Cantata'\n homepag"}, {"tag": "KEY", "value": "c9eb8a1102d0a68cafc93f22df73445b8f69706f3322285f9a2f623a28df0176", "start": 46, "end": 110, "context": "cask 'cantata' do\n version '2.3.2'\n sha256 'c9eb8a1102d0a68cafc93f22df73445b8f69706f3322285f9a2f623a28df0176'\n\n url \"https://github.com/CDrummond/cantata/rel"}]
/* tslint:disable:no-unused-variable */ /* tslint:disable:no-unused-expression */ /* tslint:disable:no-var-requires */ /* tslint:disable:max-classes-per-file */ import { ComponentFixture, TestBed, async, fakeAsync, tick } from '@angular/core/testing'; import { FormsModule } from '@angular/forms'; import { NgbActiveModal } from '@ng-bootstrap/ng-bootstrap'; import { BehaviorSubject, Subject } from 'rxjs/Rx'; import { BusinessNetworkDefinition, AdminConnection } from 'composer-admin'; import { ModelFile, ModelManager, ScriptManager, Script, AclFile, AclManager, AssetDeclaration, QueryFile, QueryManager } from 'composer-common'; import { AddFileComponent } from './add-file.component'; import { FileImporterComponent } from '../../common/file-importer'; import { FileDragDropDirective } from '../../common/file-importer/file-drag-drop'; import { AdminService } from '../../services/admin.service'; import { AlertService } from '../../basic-modals/alert.service'; import { ClientService } from '../../services/client.service'; import * as sinon from 'sinon'; import { expect } from 'chai'; const fs = require('fs'); class MockAdminService { getAdminConnection(): AdminConnection { return new AdminConnection(); } ensureConnection(): Promise<any> { return new Promise((resolve, reject) => { resolve(true); }); } deploy(): Promise<any> { return new Promise((resolve, reject) => { resolve(new BusinessNetworkDefinition('org-acme-biznet@0.0.1', 'Acme Business Network')); }); } update(): Promise<any> { return new Promise((resolve, reject) => { resolve(new BusinessNetworkDefinition('org-acme-biznet@0.0.1', 'Acme Business Network')); }); } generateDefaultBusinessNetwork(): BusinessNetworkDefinition { return new BusinessNetworkDefinition('org-acme-biznet@0.0.1', 'Acme Business Network'); } isInitialDeploy(): boolean { return true; } } class MockAlertService { public errorStatus$: Subject<string> = new BehaviorSubject<string>(null); public busyStatus$: Subject<string> = new 
BehaviorSubject<string>(null); } describe('AddFileComponent', () => { let sandbox; let component: AddFileComponent; let fixture: ComponentFixture<AddFileComponent>; let mockBusinessNetwork; let mockModelManager; let mockScriptManager; let mockAclManager; let mockClientService; let mockSystemModelFile; let mockSystemAsset; let mockAclFile; let mockQueryManager; let mockQueryFile; beforeEach(() => { mockClientService = sinon.createStubInstance(ClientService); TestBed.configureTestingModule({ declarations: [ FileImporterComponent, AddFileComponent, FileDragDropDirective ], imports: [ FormsModule ], providers: [ {provide: AdminService, useClass: MockAdminService}, {provide: AlertService, useClass: MockAlertService}, {provide: ClientService, useValue: mockClientService}, NgbActiveModal ] }); sandbox = sinon.sandbox.create(); fixture = TestBed.createComponent(AddFileComponent); component = fixture.componentInstance; mockScriptManager = sinon.createStubInstance(ScriptManager); mockBusinessNetwork = sinon.createStubInstance(BusinessNetworkDefinition); mockBusinessNetwork.getModelManager.returns(mockModelManager); mockBusinessNetwork.getScriptManager.returns(mockScriptManager); mockBusinessNetwork.getAclManager.returns(mockAclManager); mockSystemModelFile = sinon.createStubInstance(ModelFile); mockSystemModelFile.isLocalType.withArgs('Asset').returns(true); mockSystemModelFile.getNamespace.returns('org.hyperledger.composer.system'); mockModelManager = sinon.createStubInstance(ModelManager); mockModelManager.getModelFile.withArgs('org.hyperledger.composer.system').returns(mockSystemModelFile); mockSystemAsset = sinon.createStubInstance(AssetDeclaration); mockSystemAsset.getFullyQualifiedName.returns('org.hyperledger.composer.system.Asset'); mockModelManager.getSystemTypes.returns([mockSystemAsset]); mockAclFile = sinon.createStubInstance(AclFile); mockAclManager = sinon.createStubInstance(AclManager); mockAclManager.getAclFile.returns(mockAclFile); mockQueryFile = 
sinon.createStubInstance(QueryFile); mockQueryManager = sinon.createStubInstance(QueryManager); mockQueryManager.getQueryFile.returns(mockQueryFile); }); afterEach(() => { sandbox.restore(); }); describe('#fileDetected', () => { it('should change this.expandInput to true', () => { component.fileDetected(); component.expandInput.should.equal(true); }); }); describe('#fileLeft', () => { it('should change this.expectedInput to false', () => { component.fileLeft(); component.expandInput.should.equal(false); }); }); describe('#fileAccepted', () => { it('should call this.createModel if model file detected', fakeAsync(() => { let b = new Blob(['/**CTO File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.cto'); let createMock = sandbox.stub(component, 'createModel'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.resolve('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.should.have.been.called; })); it('should call this.createScript if script file detected', fakeAsync(() => { let b = new Blob(['/**JS File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.js'); let createMock = sandbox.stub(component, 'createScript'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.resolve('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.should.have.been.called; })); it('should call this.createRules if ACL file detected', fakeAsync(() => { let b = new Blob(['/**ACL File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.acl'); let createMock = sandbox.stub(component, 'createRules'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.resolve('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.should.have.been.called; })); it('should call this.createReadme if readme file detected', fakeAsync(() => { let b = new Blob(['/**README File*/'], {type: 
'text/plain'}); let file = new File([b], 'README.md'); let createMock = sandbox.stub(component, 'createReadme'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.resolve('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.should.have.been.called; })); it('should call this.createQuery if query file detected', fakeAsync(() => { let b = new Blob(['/**QUERY File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.qry'); let createMock = sandbox.stub(component, 'createQuery'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.resolve('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.should.have.been.called; })); it('should call this.fileRejected when there is an error reading the file', fakeAsync(() => { let b = new Blob(['/**CTO File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.cto'); let createMock = sandbox.stub(component, 'fileRejected'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.reject('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.called; })); it('should throw when given incorrect file type', fakeAsync(() => { let b = new Blob(['/**PNG File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.png'); let createMock = sandbox.stub(component, 'fileRejected'); let dataBufferMock = sandbox.stub(component, 'getDataBuffer') .returns(Promise.resolve('some data')); // Run method component.fileAccepted(file); tick(); // Assertions createMock.calledWith('Unexpected File Type: png'); })); }); describe('#fileRejected', () => { it('should return an error status', async(() => { component.fileRejected('long reason to reject file'); component['alertService'].errorStatus$.subscribe( (message) => { expect(message).to.be.equal('long reason to reject file'); } ); })); }); describe('#createScript', () => { it('should create a new 
script file', async(() => { let mockScript = sinon.createStubInstance(Script); mockScript.getIdentifier.returns('newfile.js'); mockClientService.createScriptFile.returns(mockScript); let b = new Blob(['/**JS File*/'], {type: 'text/plain'}); let file = new File([b], 'newfile.js'); // Run method component.createScript(file, file); // Assertions component.fileType.should.equal('js'); mockClientService.createScriptFile.calledWith(file.name, 'JS', file.toString()); component.currentFile.should.deep.equal(mockScript); component.currentFileName.should.equal(mockScript.getIdentifier()); })); it('should use the addScriptFileName variable as the file name if none passed in', () => { let fileName = 'testFileName.js'; component.addScriptFileName = fileName; let mockScript = sinon.createStubInstance(Script); mockScript.getIdentifier.returns(fileName); mockClientService.createScriptFile.returns(mockScript); let b = new Blob(['/**JS File*/'], {type: 'text/plain'}); let file = new File([b], ''); // Run method component.createScript(null, file); // Assertions component.fileType.should.equal('js'); mockClientService.createScriptFile.calledWith(fileName, 'JS', file.toString()); component.currentFile.should.deep.equal(mockScript); component.currentFileName.should.equal(mockScript.getIdentifier()); component.currentFileName.should.equal(fileName); }); }); describe('#createModel', () => { it('should create a new model file', async(() => { let b = new Blob( [`/**CTO File**/ namespace test`], {type: 'text/plain'} ); let file = new File([b], 'newfile.cto'); let dataBuffer = new Buffer('/**CTO File**/ namespace test'); let mockModel = new ModelFile(mockModelManager, dataBuffer.toString(), 'models/' + file.name); mockClientService.createModelFile.returns(mockModel); // Run method component.createModel(file, dataBuffer); // Assertions component.fileType.should.equal('cto'); mockClientService.createModelFile.should.have.been.calledWith(dataBuffer.toString(), 'models/' + file.name); 
component.currentFile.should.deep.equal(mockModel); component.currentFileName.should.equal(mockModel.getName()); })); it('should use the addModelFileName variable as the file name if none passed in', async(() => { let fileName = 'models/testFileName.cto'; component.addModelFileName = fileName; let b = new Blob( [`/**CTO File**/ namespace test`], {type: 'text/plain'} ); let file = new File([b], ''); let dataBuffer = new Buffer('/**CTO File**/ namespace test'); let mockModel = new ModelFile(mockModelManager, dataBuffer.toString(), fileName); mockClientService.createModelFile.returns(mockModel); // Run method component.createModel(null, dataBuffer); // Assertions component.fileType.should.equal('cto'); mockClientService.createModelFile.should.have.been.calledWith(dataBuffer.toString(), fileName); component.currentFile.should.deep.equal(mockModel); component.currentFileName.should.equal(mockModel.getName()); component.currentFileName.should.equal(fileName); })); }); describe('#createRules', () => { it('should create a new ACL file named permissions.acl', async(() => { let dataBuffer = new Buffer('/**RULE File**/ all the rules'); let filename = 'permissions.acl'; let mockRuleFile = sinon.createStubInstance(AclFile); mockClientService.createAclFile.returns(mockRuleFile); // Run method component.createRules(dataBuffer); // Assertions component.fileType.should.equal('acl'); mockClientService.createAclFile.should.have.been.calledWith(filename, dataBuffer.toString()); component.currentFile.should.deep.equal(mockRuleFile); component.currentFileName.should.equal(filename); })); }); describe('#createQuery', () => { it('should create a new query file named queries.qry', async(() => { let dataBuffer = new Buffer('/**QUERY File**/ query things'); let filename = 'queries.qry'; mockClientService.createQueryFile.returns(mockQueryFile); // Run method component.createQuery(dataBuffer); // Assertions component.fileType.should.equal('qry'); 
mockClientService.createQueryFile.should.have.been.calledWith(filename, dataBuffer.toString()); component.currentFile.should.deep.equal(mockQueryFile); component.currentFileName.should.equal(filename); })); }); describe('#createReadme', () => { it('should establish a readme file', async(() => { let dataBuffer = new Buffer('/**README File**/ read all the things'); // Run method component.createReadme(dataBuffer); // Assertions component.fileType.should.equal('md'); component.currentFileName.should.equal('README.md'); component.currentFile.should.equal(dataBuffer.toString()); })); }); describe('#changeCurrentFileType', () => { it('should set current file to a script file, created by calling createScript with correct parameters', async(() => { let mockScript = sinon.createStubInstance(Script); mockScript.getIdentifier.returns('lib/script.js'); mockClientService.getScripts.returns([]); mockClientService.createScriptFile.returns(mockScript); component.fileType = 'js'; // Run method component.changeCurrentFileType(); // Assertions mockClientService.createScriptFile.getCall(0).args[0].should.equal('lib/script.js'); })); it('should increment a script file name if one already exists', async(() => { let mockScript = sinon.createStubInstance(Script); let mockScript0 = sinon.createStubInstance(Script); let mockScript1 = sinon.createStubInstance(Script); mockScript.getIdentifier.returns('lib/script.js'); mockScript0.getIdentifier.returns('lib/script0.js'); mockScript1.getIdentifier.returns('lib/script1.js'); mockClientService.getScripts.returns([mockScript, mockScript0, mockScript1]); mockClientService.createScriptFile.returns(mockScript); component.fileType = 'js'; // Run method component.changeCurrentFileType(); // Assertions mockClientService.createScriptFile.getCall(0).args[0].should.equal('lib/script2.js'); })); it('should change this.currentFileType to a cto file', async(() => { mockClientService.getModelFiles.returns([]); let b = new Blob( [`/** * New model file */ 
namespace org.acme.model`], {type: 'text/plain'} ); let file = new File([b], 'models/org.acme.model.cto'); let dataBuffer = new Buffer(`/** * New model file */ namespace org.acme.model`); let mockModel = new ModelFile(mockModelManager, dataBuffer.toString(), file.name); mockClientService.createModelFile.returns(mockModel); component.fileType = 'cto'; // Run method component.changeCurrentFileType(); // Assertions component.currentFileName.should.equal('models/org.acme.model.cto'); component.currentFile.should.deep.equal(mockModel); })); it('should append the file number to the cto file name', () => { let b = new Blob( [`/** * New model file */ namespace org.acme.model`], {type: 'text/plain'} ); let file = new File([b], 'org.acme.model.cto'); let dataBuffer = new Buffer(`/** * New model file */ namespace org.acme.model`); let mockModel = new ModelFile(mockModelManager, dataBuffer.toString(), file.name); // One element, so the number 0 should be appended mockClientService.getModelFiles.returns([mockModel]); component.fileType = 'cto'; // Run method component.changeCurrentFileType(); // Assertions mockClientService.createModelFile.getCall(0).args[1].should.be.equal('models/org.acme.model0.cto'); component.currentFileName.should.equal('models/org.acme.model0.cto'); }); it('should fill in template model name indices for a cto file name', async(() => { let mockFile = sinon.createStubInstance(ModelFile); mockFile.getNamespace.returns('org.acme.model'); let mockFile0 = sinon.createStubInstance(ModelFile); mockFile0.getNamespace.returns('org.acme.model0'); let mockFile1 = sinon.createStubInstance(ModelFile); mockFile1.getNamespace.returns('org.acme.model1'); let mockFile3 = sinon.createStubInstance(ModelFile); mockFile3.getNamespace.returns('org.acme.model3'); let mockFile4 = sinon.createStubInstance(ModelFile); mockFile4.getNamespace.returns('org.acme.model4'); mockClientService.getModelFiles.returns([mockFile, mockFile0, mockFile1, mockFile3, mockFile4]); let b = new Blob( 
[`/** * New model file */ namespace org.acme.model`], {type: 'text/plain'} ); let file = new File([b], 'org.acme.model.cto'); let dataBuffer = new Buffer(`/** * New model file */ namespace org.acme.model`); let mockModel = new ModelFile(mockModelManager, dataBuffer.toString(), file.name); mockClientService.createModelFile.returns(mockModel); component.fileType = 'cto'; // Run method component.changeCurrentFileType(); // Assertions component.currentFileName.should.equal('models/org.acme.model2.cto'); })); it('should change current file to a query file upon calling createQueryFile', () => { let dataBuffer = new Buffer(`/** * New query file */`); let mockQuery = new QueryFile('queries.qry', mockQueryManager, dataBuffer.toString()); mockClientService.createAclFile.returns(mockQuery); component.fileType = 'qry'; component.changeCurrentFileType(); component.currentFileName.should.equal('queries.qry'); }); it('should change current file to an acl file upon calling createAclFile', () => { let dataBuffer = new Buffer(`/** * New access control file */ rule AllAccess { description: "AllAccess - grant everything to everybody." 
participant: "org.hyperledger.composer.system.Participant" operation: ALL resource: "org.hyperledger.composer.system.**" action: ALLOW }`); let mockAcl = new AclFile('permissions.acl', mockAclManager, dataBuffer.toString()); mockClientService.createAclFile.returns(mockAcl); component.fileType = 'acl'; component.changeCurrentFileType(); component.currentFileName.should.equal('permissions.acl'); }); }); describe('#removeFile', () => { it('should reset back to default values', async(() => { component.expandInput = true; component.currentFile = true; component.currentFileName = true; component.fileType = 'js'; // Run method component.removeFile(); // Assertions component.expandInput.should.not.be.true; expect(component.currentFile).to.be.null; expect(component.currentFileName).to.be.null; component.fileType.should.equal(''); })); }); describe('#getDataBuffer', () => { let file; let mockFileReadObj; let mockBuffer; let mockFileRead; let content; beforeEach(() => { content = 'hello world'; let data = new Blob([content], {type: 'text/plain'}); file = new File([data], 'mock.bna'); mockFileReadObj = { readAsArrayBuffer: sandbox.stub(), result: content, onload: sinon.stub(), onerror: sinon.stub() }; mockFileRead = sinon.stub(window, 'FileReader'); mockFileRead.returns(mockFileReadObj); }); afterEach(() => { mockFileRead.restore(); }); it('should return data from a file', () => { let promise = component.getDataBuffer(file); mockFileReadObj.onload(); return promise .then((data) => { // Assertions data.toString().should.equal(content); }); }); it('should give error in promise chain', () => { let promise = component.getDataBuffer(file); mockFileReadObj.onerror('error'); return promise .then((data) => { // Assertions data.should.be.null; }) .catch((err) => { // Assertions err.should.equal('error'); }); }); }); describe('#aclExists', () => { it('should return true if an acl file is present', () => { let fileArray = []; fileArray.push({acl: true, id: 'acl file', displayID: 
'acl0'}); fileArray.push({script: true, id: 'script 0', displayID: 'script0'}); component['files'] = fileArray; let result = component['aclExists'](); result.should.equal(true); }); it('should return false if an acl file is not present', () => { let fileArray = []; fileArray.push({script: true, id: 'script 0', displayID: 'script0'}); fileArray.push({script: true, id: 'script 0', displayID: 'script1'}); component['files'] = fileArray; let result = component['aclExists'](); result.should.equal(false); }); }); describe('#queryExists', () => { it('should return true if a query file is present', () => { let fileArray = []; fileArray.push({query: true, id: 'query file', displayID: 'query0'}); fileArray.push({script: true, id: 'script 0', displayID: 'script0'}); component['files'] = fileArray; let result = component['queryExists'](); result.should.equal(true); }); it('should return true if a query file is not present', () => { let fileArray = []; fileArray.push({script: true, id: 'script 0', displayID: 'script0'}); fileArray.push({script: true, id: 'script 0', displayID: 'script1'}); component['files'] = fileArray; let result = component['queryExists'](); result.should.equal(false); }); }); });
TypeScript
Apache-2.0
NunoEdgarGFlowHub/composer/packages/composer-playground/src/app/editor/add-file/add-file.component.spec.ts
a48a6c1a-ceed-4570-b8b3-b6308ca18c5a
[]
[]
// Copyright (c) 2021 Leandro T. C. Melo <ltcmelo@gmail.com> // // Permission is hereby granted, free of charge, to any person obtaining a copy // of this software and associated documentation files (the "Software"), to deal // in the Software without restriction, including without limitation the rights // to use, copy, modify, merge, publish, distribute, sublicense, and/or sell // copies of the Software, and to permit persons to whom the Software is // furnished to do so, subject to the following conditions: // // The above copyright notice and this permission notice shall be included in // all copies or substantial portions of the Software. // // THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR // IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, // FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE // AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER // LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, // OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN // THE SOFTWARE. #include "SyntaxReference.h" using namespace psy; using namespace C; const SyntaxTree* SyntaxReference::syntaxTree() const { return nullptr; } const SyntaxNode* SyntaxReference::syntax() const { return nullptr; }
C++
BSD-3-Clause
luigidcsoares/psychec/C/syntax/SyntaxReference.cpp
3ee16ef0-4d63-4784-a6fd-fa2df59b1b02
[{"tag": "EMAIL", "value": "ltcmelo@gmail.com", "start": 42, "end": 59, "context": "// Copyright (c) 2021 Leandro T. C. Melo <ltcmelo@gmail.com>\n//\n// Permission is hereby granted, free of char"}, {"tag": "NAME", "value": "Leandro T. C. Melo", "start": 22, "end": 40, "context": "// Copyright (c) 2021 Leandro T. C. Melo <ltcmelo@gmail.com>\n//\n// Permission is hereby gr"}]
[{"tag": "EMAIL", "value": "ltcmelo@gmail.com", "start": 42, "end": 59, "context": "// Copyright (c) 2021 Leandro T. C. Melo <ltcmelo@gmail.com>\n//\n// Permission is hereby granted, free of char"}, {"tag": "NAME", "value": "Leandro T. C. Melo", "start": 22, "end": 40, "context": "// Copyright (c) 2021 Leandro T. C. Melo <ltcmelo@gmail.com>\n//\n// Permission is hereby gr"}]
// -*- C++ -*- //========================================================== /** * Created_datetime : 10/15/2013 14:23 * File : ScopedLock.hpp * Author : GNUnix <Kingbug2010@gmail.com> * Description : * * <Change_list> */ //========================================================== #ifndef _ScopedLock_hpp_ #define _ScopedLock_hpp_ class ScopedLock { public: ScopedLock( CRITICAL_SECTION &lock ) :csLock(lock) { ::EnterCriticalSection(&csLock); } ~ScopedLock(void) { ::LeaveCriticalSection(&csLock); } private: CRITICAL_SECTION &csLock; }; #endif
C++
MIT
Zerak/ServerCore/src/ScopedLock.hpp
a2a850c7-f7cc-4eaa-821c-f6a70602b611
[{"tag": "EMAIL", "value": "Kingbug2010@gmail.com", "start": 192, "end": 213, "context": ": ScopedLock.hpp\n* Author : GNUnix <Kingbug2010@gmail.com>\n* Description : \n*\n* <Change_list>\n"}]
[{"tag": "EMAIL", "value": "Kingbug2010@gmail.com", "start": 192, "end": 213, "context": ": ScopedLock.hpp\n* Author : GNUnix <Kingbug2010@gmail.com>\n* Description : \n*\n* <Change_list>\n"}]
/* * Copyright (C) 2015 Menlo Park Innovation LLC * * This is licensed software, all rights as to the software * is reserved by Menlo Park Innovation LLC. * * A license included with the distribution provides certain limited * rights to a given distribution of the work. * * This distribution includes a copy of the license agreement and must be * provided along with any further distribution or copy thereof. * * If this license is missing, or you wish to license under different * terms please contact: * * menloparkinnovation.com * menloparkinnovation@gmail.com */ /* * Date: 02/22/2015 * File: DweetLightHouse.cpp * * Lighthouse Dweet support * * Refactored from example app as a separate module to support * multiple Dweet channels. */ // // MenloFramework // // Note: All these includes are required together due // to Arduino #include behavior. // #include <MenloPlatform.h> #include <MenloUtility.h> #include <MenloObject.h> #include <MenloMemoryMonitor.h> #include <MenloUtility.h> #include <MenloNMEA0183Stream.h> #include <MenloDebug.h> #include <MenloConfigStore.h> // NMEA 0183 support #include <MenloNMEA0183.h> // Dweet Support #include <MenloDweet.h> #include <DweetSerialChannel.h> // Lighthouse support #include <MenloLightHouse.h> #include "LightHouseApp.h" #include <DweetApp.h> #include "DweetLightHouse.h" #include "LightHouseHardwareBase.h" #define EDBG_PRINT_ENABLED 0 #if EDBG_PRINT_ENABLED #define eDBG_PRINT(x) (MenloDebug::Print(F(x))) #define eDBG_PRINT_STRING(x) (MenloDebug::Print(x)) #define eDBG_PRINT_HEX_STRING(x, l) (MenloDebug::PrintHexString(x, l)) #define eDBG_PRINT_HEX_STRING_NNL(x, l) (MenloDebug::PrintHexStringNoNewline(x, l)) #define eDBG_PRINT_NNL(x) (MenloDebug::PrintNoNewline(F(x))) #define eDBG_PRINT_INT(x) (MenloDebug::PrintHex(x)) #define eDBG_PRINT_INT_NNL(x) (MenloDebug::PrintHexNoNewline(x)) #else #define eDBG_PRINT(x) #define eDBG_PRINT_STRING(x) #define eDBG_PRINT_HEX_STRING(x, l) #define eDBG_PRINT_HEX_STRING_NNL(x, l) #define 
eDBG_PRINT_NNL(x) #define eDBG_PRINT_INT(x) #define eDBG_PRINT_INT_NNL(x) #endif #define DBG_PRINT_ENABLED 0 #if DBG_PRINT_ENABLED #define DBG_PRINT(x) (MenloDebug::Print(F(x))) #define DBG_PRINT_STRING(x) (MenloDebug::Print(x)) #define DBG_PRINT_HEX_STRING(x, l) (MenloDebug::PrintHexString(x, l)) #define DBG_PRINT_HEX_STRING_NNL(x, l) (MenloDebug::PrintHexStringNoNewline(x, l)) #define DBG_PRINT_NNL(x) (MenloDebug::PrintNoNewline(F(x))) #define DBG_PRINT_INT(x) (MenloDebug::PrintHex(x)) #define DBG_PRINT_INT_NNL(x) (MenloDebug::PrintHexNoNewline(x)) #else #define DBG_PRINT(x) #define DBG_PRINT_STRING(x) #define DBG_PRINT_HEX_STRING(x, l) #define DBG_PRINT_HEX_STRING_NNL(x, l) #define DBG_PRINT_NNL(x) #define DBG_PRINT_INT(x) #define DBG_PRINT_INT_NNL(x) #endif // // Allows selective print when debugging but just placing // an "x" in front of what you want output. // #define XDBG_PRINT_ENABLED 0 #if XDBG_PRINT_ENABLED #define xDBG_PRINT(x) (MenloDebug::Print(F(x))) #define xDBG_PRINT_STRING(x) (MenloDebug::Print(x)) #define xDBG_PRINT_HEX_STRING(x, l) (MenloDebug::PrintHexString(x, l)) #define xDBG_PRINT_HEX_STRING_NNL(x, l) (MenloDebug::PrintHexStringNoNewline(x, l)) #define xDBG_PRINT_NNL(x) (MenloDebug::PrintNoNewline(F(x))) #define xDBG_PRINT_INT(x) (MenloDebug::PrintHex(x)) #define xDBG_PRINT_INT_NNL(x) (MenloDebug::PrintHexNoNewline(x)) #else #define xDBG_PRINT(x) #define xDBG_PRINT_STRING(x) #define xDBG_PRINT_HEX_STRING(x, l) #define xDBG_PRINT_HEX_STRING_NNL(x, l) #define xDBG_PRINT_NNL(x) #define xDBG_PRINT_INT(x) #define xDBG_PRINT_INT_NNL(x) #endif // // Strings used in DweetLightHouse // const char lighthouse_module_name_string[] PROGMEM = "DweetLightHouse"; extern const char dweet_lightperiod_string[] PROGMEM = "LIGHTPERIOD"; // index 0 extern const char dweet_lighttick_string[] PROGMEM = "LIGHTTICK"; extern const char dweet_lightcolor_string[] PROGMEM = "LIGHTCOLOR"; extern const char dweet_lightramp_string[] PROGMEM = "LIGHTRAMP"; extern const char 
dweet_lightonlevel_string[] PROGMEM = "LIGHTONLEVEL"; // index 4 extern const char dweet_sensorrate_string[] PROGMEM = "SENSORRATE"; // Sensor/environmental support extern const char dweet_sensors_string[] PROGMEM = "SENSORS"; // // These try to keep the command short to maximize encoding of the sequence // extern const char dweet_lightsq_string[] PROGMEM = "LIGHTSQ"; // Set Light Sequence extern const char dweet_lightsp_string[] PROGMEM = "LIGHTSP"; // Set Light Sequence Persistent extern const char dweet_lightgq_string[] PROGMEM = "LIGHTGQ"; // Get Light Sequence extern const char dweet_lightgp_string[] PROGMEM = "LIGHTGP"; // Get Light Sequence Persistent // IMPROVE: Make these PSTR("")'s // NOTE: Currently used in call to char* function char* DweetLightHouse::onState = "ON"; char* DweetLightHouse::offState = "OFF"; const char* const lighthouse_string_table[] PROGMEM = { dweet_lightperiod_string, dweet_lighttick_string, dweet_lightcolor_string, dweet_lightramp_string, dweet_lightonlevel_string, // index 4 dweet_sensorrate_string, dweet_sensors_string }; // Locally typed version of state dispatch function typedef int (DweetLightHouse::*StateMethod)(char* buf, int size, bool isSet); PROGMEM const StateMethod lighthouse_function_table[] = { &DweetLightHouse::LightPeriod, &DweetLightHouse::LightTick, &DweetLightHouse::LightColor, &DweetLightHouse::LightRamp, &DweetLightHouse::LightOnLevel, // index 4 &DweetLightHouse::SensorUpdateRate, &DweetLightHouse::ProcessSensors }; // // These are defined in Libraries/DweetLightHouse/LightHouseConfig.h // PROGMEM const int lighthouse_index_table[] = { LIGHT_PERIOD, LIGHT_TICK, LIGHT_COLOR, LIGHT_RAMP, LIGHT_ONLEVEL, // index 4 LIGHT_SENSORRATE, 0 // SENSORS does not have an EEPROM setting }; PROGMEM const int lighthouse_size_table[] = { LIGHT_PERIOD_SIZE, LIGHT_TICK_SIZE, LIGHT_COLOR_SIZE, LIGHT_RAMP_SIZE, LIGHT_ONLEVEL_SIZE, // index 4 LIGHT_SENSORRATE_SIZE, 0 // SENSORS does not have an EEPROM setting }; // // 
LIGHTPERIOD:00000000 // int DweetLightHouse::LightPeriod(char* buf, int size, bool isSet) { char* ptr; bool error; int length; unsigned long tick; if (isSet) { // buf == lightperiod digits error = false; tick = MenloUtility::HexToULong(buf, &error); if (error) { return DWEET_INVALID_PARAMETER; } m_lightHouseApp->LightPeriod(&tick, isSet); return 0; } else { if (size < 9) { DBG_PRINT("LightPeriod: bad length on get"); return DWEET_INVALID_PARAMETER_LENGTH; } // Get current tick m_lightHouseApp->LightPeriod(&tick, false); ptr = buf; MenloUtility::UInt32ToHexBuffer(tick, ptr); ptr += 8; *ptr = '\0'; return 0; } } // // LIGHTTICK:0000 // int DweetLightHouse::LightTick(char* buf, int size, bool isSet) { char* ptr; bool error; int length; unsigned long tick; if (isSet) { // action == tick count digits error = false; tick = MenloUtility::HexToULong(buf, &error); if (error) { return DWEET_INVALID_PARAMETER; } m_lightHouseApp->LightTick(&tick, isSet); return 0; } else { if (size < 9) { DBG_PRINT("LightPeriod: bad length on get"); return DWEET_INVALID_PARAMETER_LENGTH; } // Get current tick m_lightHouseApp->LightTick(&tick, false); ptr = buf; MenloUtility::UInt32ToHexBuffer(tick, ptr); ptr += 8; *ptr = '\0'; return 0; } } // // RGB saturation values // LIGHTCOLOR:00.00.00 // // LIGHTCOLOR:GREEN // LIGHTCOLOR:RED // LIGHTCOLOR:AMBER // LIGHTCOLOR:WHITE // LIGHTCOLOR:BLUE // // NOTE: We can not modify the string in buffer because it can // later be passed to a SetConfig call. 
// int DweetLightHouse::LightColor(char* buf, int size, bool isSet) { char* ptr; int length; uint8_t red; uint8_t green; uint8_t blue; if (isSet) { xDBG_PRINT("LightColor set:"); // // RR.GG.BB // // Parse the argument string // action == rr.gg.bb RGB 8 bit hex values for PWM // length = strlen(buf); if (length < 8) { xDBG_PRINT("LightColor: bad length"); return DWEET_INVALID_PARAMETER_LENGTH; // 0xFFFD } ptr = buf; red = MenloUtility::HexToByte(ptr); ptr += 2; if (*ptr != '.') { xDBG_PRINT("LightColor: not . after RR"); return DWEET_INVALID_PARAMETER; } ptr++; // skip '.' green = MenloUtility::HexToByte(ptr); ptr += 2; if (*ptr != '.') { xDBG_PRINT("LightColor: not . after GG"); return DWEET_INVALID_PARAMETER; } ptr++; // skip '.' blue = MenloUtility::HexToByte(ptr); ptr += 2; m_lightHouseApp->LightColor(&red, &green, &blue, isSet); xDBG_PRINT("LightColor set"); return 0; } else { if (size < 9) { xDBG_PRINT("LightColor: bad length on get"); return DWEET_INVALID_PARAMETER_LENGTH; } m_lightHouseApp->LightColor(&red, &green, &blue, isSet); ptr = buf; MenloUtility::UInt8ToHexBuffer(red, ptr); ptr += 2; *ptr++ = '.'; MenloUtility::UInt8ToHexBuffer(green, ptr); ptr += 2; *ptr++ = '.'; MenloUtility::UInt8ToHexBuffer(blue, ptr); ptr += 2; *ptr = '\0'; return 0; } } // // LIGHTRAMP:00.00 rampup, ram down // int DweetLightHouse::LightRamp(char* buf, int size, bool isSet) { int length; char* ptr; uint16_t rampUpPeriod; uint16_t rampDownPeriod; if (isSet) { // // Parse the argument string // action == 0000.0000 16 bit hex values for ramp up + ramp down // length = strlen(buf); if (length < 9) { return DWEET_PARAMETER_TO_SHORT; } ptr = buf; // Reads first 4 characters rampUpPeriod = MenloUtility::HexToUShort(ptr); ptr += 4; if (*ptr != '.') { return DWEET_INVALID_PARAMETER; } ptr++; // skip '.' 
rampDownPeriod = MenloUtility::HexToUShort(ptr); m_lightHouseApp->RampPeriod(&rampUpPeriod, &rampDownPeriod, isSet); return 0; } else { if (size < 10) { return DWEET_INVALID_PARAMETER_LENGTH; } m_lightHouseApp->RampPeriod(&rampUpPeriod, &rampDownPeriod, false); ptr = buf; MenloUtility::UInt16ToHexBuffer(rampUpPeriod, ptr); ptr += 4; *ptr++ = '.'; MenloUtility::UInt16ToHexBuffer(rampDownPeriod, ptr); ptr += 4; *ptr = '\0'; return 0; } } // // Parse the argument string // buf == 0000 16 bit hex values for light on level // // LIGHTONLEVEL:0000 // int DweetLightHouse::LightOnLevel(char* buf, int size, bool isSet) { int length; char* ptr; uint16_t onLevel; if (isSet) { xDBG_PRINT("LightOnLevel set"); // // Parse the argument string // action == 0000 16 bit hex values for light intensity level // which triggers "nighttime mode". // length = strlen(buf); if (length < 4) { return DWEET_PARAMETER_TO_SHORT; } ptr = buf; // Reads first 4 characters onLevel = MenloUtility::HexToUShort(ptr); m_lightHouseApp->LightOnLevel(&onLevel, isSet); return 0; } else { if (size < 5) { DBG_PRINT("LightOnLevel: bad length on get"); return DWEET_INVALID_PARAMETER_LENGTH; } m_lightHouseApp->LightOnLevel(&onLevel, false); ptr = buf; MenloUtility::UInt16ToHexBuffer(onLevel, ptr); ptr += 4; *ptr = '\0'; return 0; } } // // SENSORRATE:0000 // int DweetLightHouse::SensorUpdateRate(char* buf, int size, bool isSet) { int length; char* ptr; uint16_t updateRate; if (isSet) { // // Parse the argument string // action == 0000 16 bit hex values for update rate in seconds // length = strlen(buf); if (length < 4) { return DWEET_PARAMETER_TO_SHORT; } ptr = buf; // Reads first 4 characters updateRate = MenloUtility::HexToUShort(ptr); m_lightHouseApp->SensorUpdateRate(&updateRate, isSet); return 0; } else { if (size < 5) { DBG_PRINT("SensorUpdateRate: bad length on get"); return DWEET_INVALID_PARAMETER_LENGTH; } m_lightHouseApp->SensorUpdateRate(&updateRate, false); ptr = buf; 
MenloUtility::UInt16ToHexBuffer(updateRate, ptr); ptr += 4; *ptr = '\0'; return 0; } } // // Lighthouse commands processing. // // Returns 1 if the command is recognized. // Returns 0 if not. // int DweetLightHouse::ProcessAppCommands(MenloDweet* dweet, char* name, char* value) { struct StateSettingsParameters parms; // Must be larger than any config values we fetch char workingBuffer[LIGHT_MAX_SIZE+1]; int tableEntries = sizeof(lighthouse_string_table) / sizeof(char*); //eDBG_PRINT("n "); eDBG_PRINT_STRING(name); //eDBG_PRINT_NNL("v "); eDBG_PRINT_STRING(value); // // To maximum space for the sequence the set/get light // sequence commands don't use GETSTATE/SETSTATE to // have a slightly more compact representation. // if (strncmp_P(name, dweet_lightsq_string, 7) == 0) { return ProcessSetLightSequence(dweet, name, value, false); } else if (strncmp_P(name, dweet_lightsp_string, 7) == 0) { // true for set persistent return ProcessSetLightSequence(dweet, name, value, true); } else if (strncmp_P(name, dweet_lightgq_string, 7) == 0) { return ProcessGetLightSequence(dweet, name, value, false); } else if (strncmp_P(name, dweet_lightgp_string, 7) == 0) { // true for set persistent return ProcessGetLightSequence(dweet, name, value, true); } else { // // Most lighthouse operationg modes and sensor commands // follow the GETSTATE/SETSTATE, GETCONFIG/SETCONFIG pattern // and use the table driven common code which saves at least 3K bytes // on an Atmeg328 for this one set of commands alone. 
// DBG_PRINT("DweetLightHouse calling table processor"); // // We dispatch on "this" because the method is part of the current // class instance as this function performs the specialization // required for DweetLightHouse // parms.ModuleName = (PGM_P)lighthouse_module_name_string; parms.stringTable = (PGM_P)lighthouse_string_table; parms.functionTable = (PGM_P)lighthouse_function_table; parms.defaultsTable = NULL; parms.object = this; parms.indexTable = lighthouse_index_table; parms.sizeTable = lighthouse_size_table; parms.tableEntries = tableEntries; parms.workingBuffer = workingBuffer; parms.checksumIndex = LIGHT_CHECKSUM; parms.checksumBlockStart = LIGHT_CHECKSUM_BEGIN; parms.checksumBlockSize = LIGHT_CHECKSUM_END - LIGHT_CHECKSUM_BEGIN; parms.name = name; parms.value = value; return dweet->ProcessStateCommandsTable(&parms); } } // // Application Structure: // // LightHouse.ino - Arduino setup, resource and hardware assignments. // Executes main loop. // // LightHouseApp.h // LightHouseApp.cpp - Implement application state. // Application state queried/set by DweetLightHouse. // Handle power on configuration from config store. // // LightHouseHardware.h // LightHouseHardware.cpp - Implement hardware state // // DweetLightHouse.h // DweetLightHouse.cpp - Implement the application specific Dweet commands // which allow query state, set state, and query and // modify power on/reset state configuration of the // application and hardware. 
// int DweetLightHouse::Initialize( LightHouseApp* lightHouseApp ) { int result; struct StateSettingsParameters parms; char workingBuffer[LIGHT_MAX_SIZE+1]; // Must be larger than any config values we fetch //MenloDebug::Print(F("DweetLightHouse: initialize")); // Initialize DweetApp for default event dispatching DweetApp::Initialize(); int tableEntries = sizeof(lighthouse_string_table) / sizeof(char*); // // Initialize our application state // m_lightHouseApp = lightHouseApp; // // Load the configuration settings from EEPROM if valid // // Note: "this" is used to refer to this class (DweetLightHouse) since // the handlers are on this class. // // Improve: These stubs can be eliminated and direct calls // to the application class used. // parms.ModuleName = (PGM_P)lighthouse_module_name_string; parms.stringTable = (PGM_P)lighthouse_string_table; parms.functionTable = (PGM_P)lighthouse_function_table; parms.defaultsTable = NULL; parms.object = this; parms.indexTable = lighthouse_index_table; parms.sizeTable = lighthouse_size_table; parms.tableEntries = tableEntries; parms.workingBuffer = workingBuffer; // Arduino/Libaries/DweetLightHouse/LightHouseConfig.h parms.checksumIndex = LIGHT_CHECKSUM; parms.checksumBlockStart = LIGHT_CHECKSUM_BEGIN; parms.checksumBlockSize = LIGHT_CHECKSUM_END - LIGHT_CHECKSUM_BEGIN; parms.name = NULL; parms.value = NULL; // DweetState.cpp result = MenloDweet::LoadConfigurationSettingsTable(&parms); if (result != 0) { if (result == DWEET_INVALID_CHECKSUM) { MenloDebug::Print(F("DweetLightHouse Stored settings checksum is invalid")); } else { MenloDebug::Print(F("DweetLightHouse A Stored setting is invalid")); } } else { MenloDebug::Print(F("DweetLightHouse Stored settings are valid")); } return result; } int DweetLightHouse::ProcessGetLightSequence(MenloDweet* dweet, char* name, char* value, bool persistent) { PGM_P command; int size; bool result; // Allocate extra char for '\0' char buffer[LIGHT_SEQUENCE_SIZE + 1]; // // No value with 
LIGHTGP | LIGHTGQ // if (persistent) { // LIGHTGP command = dweet_lightgp_string; } else { // LIGHTGQ command = dweet_lightgq_string; } result = m_lightHouseApp->GetLightSequence(buffer, sizeof(buffer), persistent); if (result) { // // A configured sequence can exceed the maximum NMEA 0183 buffer size // // LIGHTGQ_REPLY= // LIGHTGP_REPLY= // size = dweet->CalculateMaximumValueReply(command, dweet_reply_string, buffer); if ((int)strlen(buffer) > size) { // Update buffer size buffer[size] = '\0'; } dweet->SendDweetItemReplyType_P( command, dweet_reply_string, buffer ); } else { dweet->SendDweetItemReplyType_P( command, dweet_error_string, value ); } return 1; } // // Examples: // // All examples are using the default 250ms clock and 0 wait repeating period // $PDWT,SETSTATE=LIGHTTICK:00FA // $PDWT,SETSTATE=LIGHTPERIOD:0000 // // $PDWT,SETCONFIG=LIGHTTICK:00FA // save to EEPROM // $PDWT,SETCONFIG=LIGHTPERIOD:0000 // save to EEPROM // // New Dungeness Light is quick flash every 5 seconds // http://en.wikipedia.org/wiki/New_Dungeness_Light // // This setting uses 1/4 second for quick flash // // $PDWT,LIGHTSQ=28:0100000000*00 // $PDWT,LIGHTSP=28:0100000000*00 // save to EEPROM // // 1 second on, 1 second off (3) times, then 5 seconds pause // // $PDWT,LIGHTSQ=2C:0F0F0F000000*00 // $PDWT,LIGHTSP=2C:0F0F0F000000*00 // save to EEPROM // // 1/2 second intervals ISO // // $PDWT,LIGHTSQ=08:33*00 // int DweetLightHouse::ProcessSetLightSequence(MenloDweet* dweet, char* name, char* value, bool persistent) { int size; bool result; PGM_P command; char buf[3]; if (persistent) { command = dweet_lightsp_string; } else { command = dweet_lightsq_string; } // // LIGHTSQ=00:0000000 // LIGHTSQ_ERROR=00:00 // // This is in LightHouseApp* in xxx // result = m_lightHouseApp->SetLightSequence(value, persistent); if (!result) { // // error in format // // Note: The reply to this request can be long enough to exceed the maximum // NMEA 0183 sentence so we truncate it. 
// // LIGHTSQ_ERROR=00:00... // size = dweet->CalculateMaximumValueReply(command, dweet_error_string, value); if ((int)strlen(value) > size) { // Update in buffer value[size] = '\0'; } dweet->SendDweetItemReplyType_P( command, dweet_error_string, value ); return 1; } // We just reply with the bitCount buf[0] = value[0]; buf[1] = value[1]; buf[2] = '\0'; dweet->SendDweetItemReplyType_P( command, dweet_reply_string, buf ); return 1; } // // SENSORS:0000.0000.0000.0000 // Each sensor responds in position. Based on configuration. // This is designed to be generic, and not take up much code space. // int DweetLightHouse::ProcessSensors(char* buffer, int size, bool isSet) { int index; LightHouseSensors sensors; // SETSTATE is not supported if (isSet) return DWEET_ERROR_UNSUP; // // This function returns the following full Dweet // // lite bat slr mois temp // GETSTATE_REPLY=SENSORS:0000.0000.0000.0000.0000 // // this is 24 characters for basic data. // Allocate extra char for ':' and '\0' // index = 0; if (!m_lightHouseApp->readSensors(&sensors)) { return DWEET_APP_FAILURE; } MenloUtility::UInt16ToHexBuffer(sensors.lightIntensity, &buffer[index]); index += 4; buffer[index++] = '.'; MenloUtility::UInt16ToHexBuffer(sensors.battery, &buffer[index]); index += 4; buffer[index++] = '.'; MenloUtility::UInt16ToHexBuffer(sensors.solar, &buffer[index]); index += 4; buffer[index++] = '.'; MenloUtility::UInt16ToHexBuffer(sensors.moisture, &buffer[index]); index += 4; buffer[index++] = '.'; MenloUtility::UInt16ToHexBuffer(sensors.temperature, &buffer[index]); index += 4; buffer[index++] = '\0'; // 0000.0000.0000.0000 return 1; }
C++
Apache-2.0
menloparkinnovation/openpux/Arduino/Libraries/DweetLightHouse/DweetLightHouse.cpp
2cae1c8e-5c9e-4e36-8ec0-729e09edca25
[{"tag": "EMAIL", "value": "menloparkinnovation@gmail.com", "start": 552, "end": 581, "context": " please contact:\n *\n * menloparkinnovation.com\n * menloparkinnovation@gmail.com\n */\n\n/*\n * Date: 02/22/2015\n * File: DweetLight"}]
[{"tag": "EMAIL", "value": "menloparkinnovation@gmail.com", "start": 552, "end": 581, "context": " please contact:\n *\n * menloparkinnovation.com\n * menloparkinnovation@gmail.com\n */\n\n/*\n * Date: 02/22/2015\n * File: DweetLight"}]
version https://git-lfs.github.com/spec/v1 oid sha256:bf98dbc8c6536175912151fdc95e538f3e365051d9782a67b40c7fd1bf3d01b9 size 2358
C++
MIT
realtehcman/-UnderwaterSceneProject/dependencies/glm/gtx/matrix_operation.hpp
c7adc867-3c63-435e-94c3-de44b9198d26
[]
[]
#!/usr/bin/python # -*- coding: utf-8 -*- # thumbor imaging service # https://github.com/thumbor/thumbor/wiki # Licensed under the MIT license: # http://www.opensource.org/licenses/mit-license # Copyright (c) 2011 globo.com thumbor@googlegroups.com import os from io import BytesIO from subprocess import PIPE, Popen from tempfile import mkstemp from PIL import Image, ImageDraw, ImageFile, ImageSequence, JpegImagePlugin from PIL import features as pillow_features from thumbor.engines import BaseEngine from thumbor.engines.extensions.pil import GifWriter from thumbor.utils import deprecated, logger try: from thumbor.ext.filters import _composite FILTERS_AVAILABLE = True except ImportError: FILTERS_AVAILABLE = False FORMATS = { ".tif": "PNG", # serve tif as png ".jpg": "JPEG", ".jpeg": "JPEG", ".gif": "GIF", ".png": "PNG", ".webp": "WEBP", } ImageFile.MAXBLOCK = 2 ** 25 ImageFile.LOAD_TRUNCATED_IMAGES = True DECOMPRESSION_BOMB_EXCEPTIONS = (Image.DecompressionBombWarning,) if hasattr(Image, "DecompressionBombError"): DECOMPRESSION_BOMB_EXCEPTIONS += (Image.DecompressionBombError,) class Engine(BaseEngine): def __init__(self, context): super().__init__(context) self.subsampling = None self.qtables = None self.original_mode = None self.exif = None try: if self.context.config.MAX_PIXELS is None or int( self.context.config.MAX_PIXELS ): Image.MAX_IMAGE_PIXELS = self.context.config.MAX_PIXELS except (AttributeError, TypeError, ValueError): # invalid type logger.info( "MAX_PIXELS config variable set to invalid type. 
Has to be int on None" ) def gen_image(self, size, color): if color == "transparent": color = None img = Image.new("RGBA", size, color) return img def create_image(self, buffer): try: img = Image.open(BytesIO(buffer)) except DECOMPRESSION_BOMB_EXCEPTIONS as error: logger.warning("[PILEngine] create_image failed: %s", error) return None self.icc_profile = img.info.get("icc_profile") self.exif = img.info.get("exif") self.original_mode = img.mode self.subsampling = JpegImagePlugin.get_sampling(img) if self.subsampling == -1: # n/a for this file self.subsampling = None self.qtables = getattr(img, "quantization", None) if self.context.config.ALLOW_ANIMATED_GIFS and self.extension == ".gif": frames = [] for frame in ImageSequence.Iterator(img): frames.append(frame.convert("P")) img.seek(0) self.frame_count = len(frames) return frames return img def get_resize_filter(self): config = self.context.config resample = ( config.PILLOW_RESAMPLING_FILTER if config.PILLOW_RESAMPLING_FILTER is not None else "LANCZOS" ) available = { "LANCZOS": Image.LANCZOS, "NEAREST": Image.NEAREST, "BILINEAR": Image.BILINEAR, "BICUBIC": Image.BICUBIC, "HAMMING": Image.HAMMING, } return available.get(resample.upper(), Image.LANCZOS) def draw_rectangle(self, x, y, width, height): # Nasty retry if the image is loaded for the first time and it's truncated try: draw_image = ImageDraw.Draw(self.image) except IOError: draw_image = ImageDraw.Draw(self.image) draw_image.rectangle([x, y, x + width, y + height]) del draw_image def resize(self, width, height): # Indexed color modes (such as 1 and P) will be forced to use a # nearest neighbor resampling algorithm. So we convert them to # RGB(A) mode before resizing to avoid nasty scaling artifacts. 
if self.image.mode in ["1", "P"]: logger.debug( "converting image from 8-bit/1-bit palette to 32-bit RGB(A) for resize" ) if self.image.mode == "1": target_mode = "RGB" else: # convert() figures out RGB or RGBA based on palette used target_mode = None self.image = self.image.convert(mode=target_mode) size = (int(width), int(height)) # Tell image loader what target size we want (only JPG for a moment) self.image.draft(None, size) resample = self.get_resize_filter() self.image = self.image.resize(size, resample) def crop(self, left, top, right, bottom): self.image = self.image.crop((int(left), int(top), int(right), int(bottom))) def rotate(self, degrees): # PIL rotates counter clockwise if degrees == 90: self.image = self.image.transpose(Image.ROTATE_90) elif degrees == 180: self.image = self.image.transpose(Image.ROTATE_180) elif degrees == 270: self.image = self.image.transpose(Image.ROTATE_270) else: self.image = self.image.rotate(degrees, expand=1) def flip_vertically(self): self.image = self.image.transpose(Image.FLIP_TOP_BOTTOM) def flip_horizontally(self): self.image = self.image.transpose(Image.FLIP_LEFT_RIGHT) def get_default_extension(self): # extension is not present => force JPEG or PNG if self.image.mode in ["P", "RGBA", "LA"]: return ".png" return ".jpeg" # TODO: Refactor this - pylint: disable=too-many-statements,too-many-branches def read(self, extension=None, quality=None): # NOQA # returns image buffer in byte format. img_buffer = BytesIO() requested_extension = extension or self.extension # 1 and P mode images will be much smaller if converted back to # their original mode. So let's do that after resizing. Get $$. 
if ( self.context.config.PILLOW_PRESERVE_INDEXED_MODE and requested_extension in [None, ".png", ".gif"] and self.original_mode in ["P", "1"] and self.original_mode != self.image.mode ): if self.original_mode == "1": self.image = self.image.convert("1") else: # libimagequant might not be enabled on compile time # but it's better than default octree for RGBA images quantize_method = ( Image.LIBIMAGEQUANT if pillow_features.check("libimagequant") else None ) self.image = self.image.quantize(method=quantize_method) ext = requested_extension or self.get_default_extension() options = {"quality": quality} if ext in (".jpg", ".jpeg"): options["optimize"] = True if self.context.config.PROGRESSIVE_JPEG: # Can't simply set options['progressive'] to the value # of self.context.config.PROGRESSIVE_JPEG because save # operates on the presence of the key in **options, not # the value of that setting. options["progressive"] = True if self.image.mode != "RGB": self.image = self.image.convert("RGB") else: subsampling_config = self.context.config.PILLOW_JPEG_SUBSAMPLING qtables_config = self.context.config.PILLOW_JPEG_QTABLES if subsampling_config is not None or qtables_config is not None: # can't use 'keep' here as Pillow would try to extract # qtables/subsampling and fail options["quality"] = 0 orig_subsampling = self.subsampling orig_qtables = self.qtables if ( subsampling_config == "keep" or subsampling_config is None ) and (orig_subsampling is not None): options["subsampling"] = orig_subsampling else: options["subsampling"] = subsampling_config if (qtables_config == "keep" or qtables_config is None) and ( orig_qtables and 2 <= len(orig_qtables) <= 4 ): options["qtables"] = orig_qtables else: options["qtables"] = qtables_config if ext == ".png" and self.context.config.PNG_COMPRESSION_LEVEL is not None: options["compress_level"] = self.context.config.PNG_COMPRESSION_LEVEL if options["quality"] is None: options["quality"] = self.context.config.QUALITY if self.icc_profile is not 
None: options["icc_profile"] = self.icc_profile if self.context.config.PRESERVE_EXIF_INFO: if self.exif is not None: options["exif"] = self.exif try: if ext == ".webp": if options["quality"] == 100: logger.debug("webp quality is 100, using lossless instead") options["lossless"] = True options.pop("quality") if self.image.mode not in ["RGB", "RGBA"]: if self.image.mode == "P": mode = "RGBA" else: mode = "RGBA" if self.image.mode[-1] == "A" else "RGB" self.image = self.image.convert(mode) if ext in [".png", ".gif"] and self.image.mode == "CMYK": self.image = self.image.convert("RGBA") self.image.format = FORMATS.get(ext, FORMATS[self.get_default_extension()]) self.image.save(img_buffer, self.image.format, **options) except IOError: logger.exception( "Could not save as improved image, consider to increase ImageFile.MAXBLOCK" ) self.image.save(img_buffer, FORMATS[ext]) results = img_buffer.getvalue() img_buffer.close() self.extension = ext return results def read_multiple(self, images, extension=None): gif_writer = GifWriter() img_buffer = BytesIO() duration = [] converted_images = [] coordinates = [] dispose = [] for image in images: duration.append(image.info.get("duration", 80) / 1000) converted_images.append(image.convert("RGB")) coordinates.append((0, 0)) dispose.append(1) loop = int(self.image.info.get("loop", 1)) images = gif_writer.convertImagesToPIL(converted_images, False, None) gif_writer.writeGifToFile( img_buffer, images, duration, loop, coordinates, dispose ) results = img_buffer.getvalue() img_buffer.close() tmp_fd, tmp_file_path = mkstemp() temp_file = os.fdopen(tmp_fd, "wb") temp_file.write(results) temp_file.close() command = ["gifsicle", "--colors", "256", tmp_file_path] popen = Popen(command, stdout=PIPE) # pylint: disable=consider-using-with pipe = popen.stdout pipe_output = pipe.read() pipe.close() if popen.wait() == 0: results = pipe_output os.remove(tmp_file_path) return results @deprecated("Use image_data_as_rgb instead.") def 
get_image_data(self): return self.image.tobytes() def set_image_data(self, data): self.image.frombytes(data) @deprecated("Use image_data_as_rgb instead.") def get_image_mode(self): return self.image.mode def image_data_as_rgb(self, update_image=True): converted_image = self.image if converted_image.mode not in ["RGB", "RGBA"]: if "A" in converted_image.mode: converted_image = converted_image.convert("RGBA") elif converted_image.mode == "P": # convert() figures out RGB or RGBA based on palette used converted_image = converted_image.convert(None) else: converted_image = converted_image.convert("RGB") if update_image: self.image = converted_image return converted_image.mode, converted_image.tobytes() def convert_to_grayscale(self, update_image=True, alpha=True): if "A" in self.image.mode and alpha: image = self.image.convert("LA") else: image = self.image.convert("L") if update_image: self.image = image return image def has_transparency(self): has_transparency = "A" in self.image.mode or "transparency" in self.image.info if has_transparency: # If the image has alpha channel, # we check for any pixels that are not opaque (255) has_transparency = ( min(self.image.convert("RGBA").getchannel("A").getextrema()) < 255 ) return has_transparency def paste(self, other_engine, pos, merge=True): if merge and not FILTERS_AVAILABLE: raise RuntimeError( "You need filters enabled to use paste with merge. Please reinstall " + "thumbor with proper compilation of its filters." 
) self.enable_alpha() other_engine.enable_alpha() image = self.image other_image = other_engine.image if merge: image_size = self.size other_size = other_engine.size mode, data = self.image_data_as_rgb() _, other_data = other_engine.image_data_as_rgb() imgdata = _composite.apply( mode, data, image_size[0], image_size[1], other_data, other_size[0], other_size[1], int(pos[0]), int(pos[1]), ) self.set_image_data(imgdata) else: image.paste(other_image, pos) def enable_alpha(self): if self.image.mode != "RGBA": self.image = self.image.convert("RGBA") def strip_icc(self): self.icc_profile = None def strip_exif(self): self.exif = None
Python
MIT
TrueGameover/thumbor/thumbor/engines/pil.py
f49cc213-176f-44d2-a850-349615badb7d
[{"tag": "EMAIL", "value": "thumbor@googlegroups.com", "start": 226, "end": 250, "context": "censes/mit-license\n# Copyright (c) 2011 globo.com thumbor@googlegroups.com\n\nimport os\nfrom io import BytesIO\nfrom subprocess"}]
[{"tag": "EMAIL", "value": "thumbor@googlegroups.com", "start": 226, "end": 250, "context": "censes/mit-license\n# Copyright (c) 2011 globo.com thumbor@googlegroups.com\n\nimport os\nfrom io import BytesIO\nfrom subprocess"}]
from django.db import models from django.contrib.auth.models import User # Which data the user already has: # SuperUserInformation # User: Jose # Email: training@pieriandata.com # Password: testpassword # Create your models here. class UserProfileInfo(models.Model): # Create relationship (don't inherit from User!) user = models.OneToOneField(User, on_delete=models.CASCADE) # Add any additional attributes to the user you want portfolio_site = models.URLField(blank=True) # pip install pillow to use this, so that users do not need to upload their pic if they #...do not want it profile_pic = models.ImageField(upload_to='basic_app/profile_pics',blank=True) def __str__(self): # Built-in attribute of django.contrib.auth.models.User ! return self.user.username
Python
MIT
warpalatino/public/Python learnings/Django projects/learning_users/basic_app/models.py
44994e86-a411-4e79-a823-5aae2618e9b7
[{"tag": "PASSWORD", "value": "testpassword", "start": 196, "end": 208, "context": "e\r\n# Email: training@pieriandata.com\r\n# Password: testpassword\r\n\r\n# Create your models here.\r\nclass UserProfileI"}, {"tag": "EMAIL", "value": "training@pieriandata.com\r", "start": 158, "end": 183, "context": ":\r\n# SuperUserInformation\r\n# User: Jose\r\n# Email: training@pieriandata.com\r\n# Password: testpassword\r\n\r\n# Create your models "}, {"tag": "NAME", "value": "Jose", "start": 143, "end": 147, "context": "ser already has:\r\n# SuperUserInformation\r\n# User: Jose\r\n# Email: training@pieriandata.com\r\n# Password: t"}]
[{"tag": "PASSWORD", "value": "testpassword", "start": 196, "end": 208, "context": "e\r\n# Email: training@pieriandata.com\r\n# Password: testpassword\r\n\r\n# Create your models here.\r\nclass UserProfileI"}, {"tag": "EMAIL", "value": "training@pieriandata.com\r", "start": 158, "end": 183, "context": ":\r\n# SuperUserInformation\r\n# User: Jose\r\n# Email: training@pieriandata.com\r\n# Password: testpassword\r\n\r\n# Create your models "}, {"tag": "NAME", "value": "Jose", "start": 143, "end": 147, "context": "ser already has:\r\n# SuperUserInformation\r\n# User: Jose\r\n# Email: training@pieriandata.com\r\n# Password: t"}]
import {ITofUser} from './models/tof-request'; import {Bundle, Practitioner} from '../../../../libs/tof-lib/src/lib/stu3/fhir'; import {Globals} from '../../../../libs/tof-lib/src/lib/globals'; export function createTestUser(userId = 'test.user', name = 'test user', email = 'test@test.com'): ITofUser { return { clientID: 'test', email: email, name: name, sub: `auth0|${userId}` }; } export function createUserGroupResponse(): Bundle { return new Bundle({ total: 0, entry: [] }); } export function createUserPractitionerResponse(firstName = 'test', lastName = 'user', id = 'test-user-id', authId = 'test.user'): Bundle { return { "resourceType": "Bundle", "type": "searchset", "total": 1, "entry": [ { "fullUrl": "http://test.com/fhir/Practitioner/test-user-id", "resource": <Practitioner> { "resourceType": "Practitioner", "id": id, "identifier": [ { "system": Globals.authNamespace, "value": authId } ], "name": [ { "family": lastName, "given": [firstName] } ] } } ] }; }
TypeScript
Apache-2.0
jasmdk/trifolia-on-fhir/apps/server/src/app/test.helper.ts
1f0d3170-bd2b-44b2-9e54-d5adba52bff6
[]
[]
# Copyright 2016 Google Inc. # # Licensed under the Apache License, Version 2.0 (the "License"); # you may not use this file except in compliance with the License. # You may obtain a copy of the License at # # http://www.apache.org/licenses/LICENSE-2.0 # # Unless required by applicable law or agreed to in writing, software # distributed under the License is distributed on an "AS IS" BASIS, # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. # See the License for the specific language governing permissions and # limitations under the License. import os from setuptools import find_packages from setuptools import setup PACKAGE_ROOT = os.path.abspath(os.path.dirname(__file__)) with open(os.path.join(PACKAGE_ROOT, 'README.rst')) as file_obj: README = file_obj.read() # NOTE: This is duplicated throughout and we should try to # consolidate. SETUP_BASE = { 'author': 'Google Cloud Platform', 'author_email': 'jjg+google-cloud-python@google.com', 'scripts': [], 'url': 'https://github.com/GoogleCloudPlatform/google-cloud-python', 'license': 'Apache 2.0', 'platforms': 'Posix; MacOS X; Windows', 'include_package_data': True, 'zip_safe': False, 'classifiers': [ 'Development Status :: 4 - Beta', 'Intended Audience :: Developers', 'License :: OSI Approved :: Apache Software License', 'Operating System :: OS Independent', 'Programming Language :: Python :: 2', 'Programming Language :: Python :: 2.7', 'Programming Language :: Python :: 3', 'Programming Language :: Python :: 3.4', 'Programming Language :: Python :: 3.5', 'Topic :: Internet', ], } REQUIREMENTS = [ 'enum34', 'google-cloud-core >= 0.22.1, < 0.23dev', 'gapic-google-cloud-vision-v1 >= 0.14.0, < 0.15dev', ] setup( name='google-cloud-vision', version='0.22.0', description='Python Client for Google Cloud Vision', long_description=README, namespace_packages=[ 'google', 'google.cloud', ], packages=find_packages(), install_requires=REQUIREMENTS, **SETUP_BASE )
Python
Apache-2.0
ammayathrajeshnair/googlecloudpython/vision/setup.py
56bdee16-2735-4a72-a207-f77cd13cd98b
[{"tag": "EMAIL", "value": "jjg+google-cloud-python@google.com", "start": 964, "end": 998, "context": "r': 'Google Cloud Platform',\n 'author_email': 'jjg+google-cloud-python@google.com',\n 'scripts': [],\n 'url': 'https://github.c"}]
[{"tag": "EMAIL", "value": "jjg+google-cloud-python@google.com", "start": 964, "end": 998, "context": "r': 'Google Cloud Platform',\n 'author_email': 'jjg+google-cloud-python@google.com',\n 'scripts': [],\n 'url': 'https://github.c"}]
<?php /** * Copyright (c) 2018 Constantin Galbenu <xprt64@gmail.com> */ namespace Dudulina\CodeGeneration\Query; use Dudulina\Attributes\QueryHandler; use Dudulina\CodeGeneration\AttributeDetector; use Gica\CodeAnalysis\MethodListenerDiscovery\MessageClassDetector; class QueryHandlerDetector implements MessageClassDetector { public function isMessageClass(\ReflectionClass $typeHintedClass): bool { return true; } public function isMethodAccepted(\ReflectionMethod $reflectionMethod): bool { if (AttributeDetector::hasAttribute($reflectionMethod, QueryHandler::class)) { return true; } return 0 === stripos($reflectionMethod->name, 'whenAsked') || false !== stripos($reflectionMethod->getDocComment(), '@QueryHandler'); } }
PHP
MIT
xprt64/cqrs-es/src/Dudulina/CodeGeneration/Query/QueryHandlerDetector.php
bc9ebcdf-c08b-470a-b321-7126ec3a78be
[{"tag": "EMAIL", "value": "xprt64@gmail.com", "start": 52, "end": 68, "context": "php\n/**\n * Copyright (c) 2018 Constantin Galbenu <xprt64@gmail.com>\n */\n\nnamespace Dudulina\\CodeGeneration\\Query;\n\nu"}]
[{"tag": "EMAIL", "value": "xprt64@gmail.com", "start": 52, "end": 68, "context": "php\n/**\n * Copyright (c) 2018 Constantin Galbenu <xprt64@gmail.com>\n */\n\nnamespace Dudulina\\CodeGeneration\\Query;\n\nu"}]
<?php // autoload_real.php @generated by Composer class ComposerAutoloaderInit2bb9ea51ef615e19c4c067c99f11eca8 { private static $loader; public static function loadClassLoader($class) { if ('Composer\Autoload\ClassLoader' === $class) { require __DIR__ . '/ClassLoader.php'; } } public static function getLoader() { if (null !== self::$loader) { return self::$loader; } spl_autoload_register(array('ComposerAutoloaderInit2bb9ea51ef615e19c4c067c99f11eca8', 'loadClassLoader'), true, true); self::$loader = $loader = new \Composer\Autoload\ClassLoader(); spl_autoload_unregister(array('ComposerAutoloaderInit2bb9ea51ef615e19c4c067c99f11eca8', 'loadClassLoader')); $useStaticLoader = PHP_VERSION_ID >= 50600 && !defined('HHVM_VERSION') && (!function_exists('zend_loader_file_encoded') || !zend_loader_file_encoded()); if ($useStaticLoader) { require_once __DIR__ . '/autoload_static.php'; call_user_func(\Composer\Autoload\ComposerStaticInit2bb9ea51ef615e19c4c067c99f11eca8::getInitializer($loader)); } else { $map = require __DIR__ . '/autoload_namespaces.php'; foreach ($map as $namespace => $path) { $loader->set($namespace, $path); } $map = require __DIR__ . '/autoload_psr4.php'; foreach ($map as $namespace => $path) { $loader->setPsr4($namespace, $path); } $classMap = require __DIR__ . '/autoload_classmap.php'; if ($classMap) { $loader->addClassMap($classMap); } } $loader->register(true); if ($useStaticLoader) { $includeFiles = Composer\Autoload\ComposerStaticInit2bb9ea51ef615e19c4c067c99f11eca8::$files; } else { $includeFiles = require __DIR__ . '/autoload_files.php'; } foreach ($includeFiles as $fileIdentifier => $file) { composerRequire2bb9ea51ef615e19c4c067c99f11eca8($fileIdentifier, $file); } return $loader; } } function composerRequire2bb9ea51ef615e19c4c067c99f11eca8($fileIdentifier, $file) { if (empty($GLOBALS['__composer_autoload_files'][$fileIdentifier])) { require $file; $GLOBALS['__composer_autoload_files'][$fileIdentifier] = true; } }
PHP
Apache-2.0
pmSven/pm-shopware-base-project/src/public/custom/plugins/FroshProfiler/vendor/composer/autoload_real.php
a4f57676-d2cb-42eb-836a-bbc211487902
[]
[]
import { BAKED_BASE_URL, WORDPRESS_URL } from 'settings' import * as React from 'react' import { Head } from './Head' import { CitationMeta } from './CitationMeta' import { SiteHeader } from './SiteHeader' import { SiteFooter } from './SiteFooter' import { formatAuthors, FormattedPost, FormattingOptions } from '../formatting' import { CategoryWithEntries } from 'db/wpdb' import * as _ from 'lodash' import { SiteSubnavigation } from './SiteSubnavigation' export const LongFormPage = (props: { entries: CategoryWithEntries[], post: FormattedPost, formattingOptions: FormattingOptions }) => { const {entries, post, formattingOptions} = props const authorsText = formatAuthors(post.authors, true) const pageTitle = post.title const canonicalUrl = `${BAKED_BASE_URL}/${post.slug}` const pageDesc = post.excerpt const publishedYear = post.modifiedDate.getFullYear() const allEntries = _.flatten(_.values(entries).map(c => c.entries)) const isEntry = _.includes(allEntries.map(e => e.slug), post.slug) const classes = ["LongFormPage"] if (formattingOptions.bodyClassName) classes.push(formattingOptions.bodyClassName) const bibtex = `@article{owid${post.slug.replace(/-/g, '')}, author = {${authorsText}}, title = {${pageTitle}}, journal = {Our World in Data}, year = {${publishedYear}}, note = {${canonicalUrl}} }` return <html> <Head pageTitle={pageTitle} pageDesc={pageDesc} canonicalUrl={canonicalUrl} imageUrl={post.imageUrl}> {isEntry && <CitationMeta id={post.id} title={pageTitle} authors={post.authors} date={post.date} canonicalUrl={canonicalUrl}/>} </Head> <body className={classes.join(" ")}> <SiteHeader/> {formattingOptions.subnavId && <SiteSubnavigation subnavId={formattingOptions.subnavId} subnavCurrentId={formattingOptions.subnavCurrentId} />} <main> <article className="page"> <header className="articleHeader"> <h1 className="entry-title">{post.title}</h1> {!formattingOptions.hideAuthors && <div className="authors-byline"> <a href="/team">by {authorsText}</a> </div>} </header> 
<div className="contentContainer"> {post.tocHeadings.length > 0 && <aside className="entry-sidebar"> <nav className="entry-toc"> <ul> <li><a href="#">{pageTitle}</a></li> {post.tocHeadings.map((heading, i) => <li key={i} className={heading.isSubheading ? "subsection" : "section" + ((!post.tocHeadings[i+1] || !post.tocHeadings[i+1].isSubheading) ? " nosubs": "")}> <a href={`#${heading.slug}`}>{heading.text}</a> </li> )} {post.acknowledgements && <li key="acknowledgements" className="section nosubs"> <a href={`#acknowledgements`}>Acknowledgements</a> </li>} {post.footnotes.length ? <li key="references" className="section nosubs"> <a href={`#references`}>References</a> </li> : undefined} {isEntry && <li key="citation" className="section nosubs"> <a href={`#citation`}>Citation</a> </li>} </ul> </nav> </aside>} <div className="contentAndFootnotes"> <div className="article-content" dangerouslySetInnerHTML={{__html: post.html}}/> <footer className="article-footer"> {post.acknowledgements && <React.Fragment> <h3 id="acknowledgements">Acknowledgements</h3> <section dangerouslySetInnerHTML={{__html: post.acknowledgements}}/> </React.Fragment>} {post.footnotes.length ? <React.Fragment> <h3 id="references">References</h3> <ol className="references"> {post.footnotes.map((footnote, i) => <li key={i} id={`note-${i+1}`}> <p dangerouslySetInnerHTML={{__html: footnote}}/> </li> )} </ol> </React.Fragment> : undefined} {isEntry && <React.Fragment> <h3 id="citation">Citation</h3> <p> Our articles and data visualizations rely on work from many different people and organizations. When citing this entry, please also cite the underlying data sources. This entry can be cited as: </p> <pre className="citation"> {authorsText} ({publishedYear}) - "{pageTitle}". 
<em>Published online at OurWorldInData.org.</em> Retrieved from: '{canonicalUrl}' [Online Resource] </pre> <p> BibTeX citation </p> <pre className="citation"> {bibtex} </pre> </React.Fragment>} </footer> </div> </div> </article> </main> <div id="wpadminbar" style={{display: 'none'}}> <div className="quicklinks" id="wp-toolbar" role="navigation" aria-label="Toolbar"> <ul id="wp-admin-bar-root-default" className="ab-top-menu"> <li id="wp-admin-bar-site-name" className="menupop"> <a className="ab-item" aria-haspopup="true" href="/wp-admin/">Wordpress</a> </li>{" "} <li id="wp-admin-bar-edit"><a className="ab-item" href={`${WORDPRESS_URL}/wp-admin/post.php?post=${post.id}&action=edit`}>Edit Page</a></li> </ul> </div> </div> <SiteFooter hideDonate={formattingOptions.hideDonateFooter} /> </body> </html> }
TypeScript
MIT
richardgoater/owid-grapher/site/server/views/LongFormPage.tsx
b65757f4-efb2-49ed-bd00-4f24604ca5fc
[]
[]
/* * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org> * * SPDX-License-Identifier: BSD-2-Clause */ #include <AK/AnyOf.h> #include <AK/ByteBuffer.h> #include <AK/Find.h> #include <AK/FlyString.h> #include <AK/Function.h> #include <AK/Memory.h> #include <AK/String.h> #include <AK/StringView.h> #include <AK/Vector.h> namespace AK { StringView::StringView(const String& string) : m_characters(string.characters()) , m_length(string.length()) { } StringView::StringView(const FlyString& string) : m_characters(string.characters()) , m_length(string.length()) { } StringView::StringView(const ByteBuffer& buffer) : m_characters((const char*)buffer.data()) , m_length(buffer.size()) { } Vector<StringView> StringView::split_view(const char separator, bool keep_empty) const { if (is_empty()) return {}; Vector<StringView> v; size_t substart = 0; for (size_t i = 0; i < length(); ++i) { char ch = characters_without_null_termination()[i]; if (ch == separator) { size_t sublen = i - substart; if (sublen != 0 || keep_empty) v.append(substring_view(substart, sublen)); substart = i + 1; } } size_t taillen = length() - substart; if (taillen != 0 || keep_empty) v.append(substring_view(substart, taillen)); return v; } Vector<StringView> StringView::split_view(StringView separator, bool keep_empty) const { VERIFY(!separator.is_empty()); if (is_empty()) return {}; StringView view { *this }; Vector<StringView> parts; auto maybe_separator_index = find(separator); while (maybe_separator_index.has_value()) { auto separator_index = maybe_separator_index.value(); auto part_with_separator = view.substring_view(0, separator_index + separator.length()); if (keep_empty || separator_index > 0) parts.append(part_with_separator.substring_view(0, separator_index)); view = view.substring_view_starting_after_substring(part_with_separator); maybe_separator_index = view.find(separator); } if (keep_empty || !view.is_empty()) parts.append(view); return parts; } Vector<StringView> 
StringView::lines(bool consider_cr) const { if (is_empty()) return {}; if (!consider_cr) return split_view('\n', true); Vector<StringView> v; size_t substart = 0; bool last_ch_was_cr = false; bool split_view = false; for (size_t i = 0; i < length(); ++i) { char ch = characters_without_null_termination()[i]; if (ch == '\n') { split_view = true; if (last_ch_was_cr) { substart = i + 1; split_view = false; } } if (ch == '\r') { split_view = true; last_ch_was_cr = true; } else { last_ch_was_cr = false; } if (split_view) { size_t sublen = i - substart; v.append(substring_view(substart, sublen)); substart = i + 1; } split_view = false; } size_t taillen = length() - substart; if (taillen != 0) v.append(substring_view(substart, taillen)); return v; } bool StringView::starts_with(char ch) const { if (is_empty()) return false; return ch == characters_without_null_termination()[0]; } bool StringView::starts_with(StringView str, CaseSensitivity case_sensitivity) const { return StringUtils::starts_with(*this, str, case_sensitivity); } bool StringView::ends_with(char ch) const { if (is_empty()) return false; return ch == characters_without_null_termination()[length() - 1]; } bool StringView::ends_with(StringView str, CaseSensitivity case_sensitivity) const { return StringUtils::ends_with(*this, str, case_sensitivity); } bool StringView::matches(StringView mask, Vector<MaskSpan>& mask_spans, CaseSensitivity case_sensitivity) const { return StringUtils::matches(*this, mask, case_sensitivity, &mask_spans); } bool StringView::matches(StringView mask, CaseSensitivity case_sensitivity) const { return StringUtils::matches(*this, mask, case_sensitivity); } bool StringView::contains(char needle) const { for (char current : *this) { if (current == needle) return true; } return false; } bool StringView::contains(StringView needle, CaseSensitivity case_sensitivity) const { return StringUtils::contains(*this, needle, case_sensitivity); } bool StringView::equals_ignoring_case(StringView other) 
const { return StringUtils::equals_ignoring_case(*this, other); } String StringView::to_lowercase_string() const { return StringImpl::create_lowercased(characters_without_null_termination(), length()); } String StringView::to_uppercase_string() const { return StringImpl::create_uppercased(characters_without_null_termination(), length()); } String StringView::to_titlecase_string() const { return StringUtils::to_titlecase(*this); } StringView StringView::substring_view_starting_from_substring(StringView substring) const { const char* remaining_characters = substring.characters_without_null_termination(); VERIFY(remaining_characters >= m_characters); VERIFY(remaining_characters <= m_characters + m_length); size_t remaining_length = m_length - (remaining_characters - m_characters); return { remaining_characters, remaining_length }; } StringView StringView::substring_view_starting_after_substring(StringView substring) const { const char* remaining_characters = substring.characters_without_null_termination() + substring.length(); VERIFY(remaining_characters >= m_characters); VERIFY(remaining_characters <= m_characters + m_length); size_t remaining_length = m_length - (remaining_characters - m_characters); return { remaining_characters, remaining_length }; } template<typename T> Optional<T> StringView::to_int() const { return StringUtils::convert_to_int<T>(*this); } template Optional<i8> StringView::to_int() const; template Optional<i16> StringView::to_int() const; template Optional<i32> StringView::to_int() const; template Optional<long> StringView::to_int() const; template Optional<long long> StringView::to_int() const; template<typename T> Optional<T> StringView::to_uint() const { return StringUtils::convert_to_uint<T>(*this); } template Optional<u8> StringView::to_uint() const; template Optional<u16> StringView::to_uint() const; template Optional<u32> StringView::to_uint() const; template Optional<unsigned long> StringView::to_uint() const; template Optional<unsigned 
long long> StringView::to_uint() const; template Optional<long> StringView::to_uint() const; template Optional<long long> StringView::to_uint() const; bool StringView::operator==(const String& string) const { if (string.is_null()) return !m_characters; if (!m_characters) return false; if (m_length != string.length()) return false; if (m_characters == string.characters()) return true; return !__builtin_memcmp(m_characters, string.characters(), m_length); } String StringView::to_string() const { return String { *this }; } String StringView::replace(StringView needle, StringView replacement, bool all_occurrences) const { return StringUtils::replace(*this, needle, replacement, all_occurrences); } Vector<size_t> StringView::find_all(StringView needle) const { return StringUtils::find_all(*this, needle); } Vector<StringView> StringView::split_view_if(Function<bool(char)> const& predicate, bool keep_empty) const { if (is_empty()) return {}; Vector<StringView> v; size_t substart = 0; for (size_t i = 0; i < length(); ++i) { char ch = characters_without_null_termination()[i]; if (predicate(ch)) { size_t sublen = i - substart; if (sublen != 0 || keep_empty) v.append(substring_view(substart, sublen)); substart = i + 1; } } size_t taillen = length() - substart; if (taillen != 0 || keep_empty) v.append(substring_view(substart, taillen)); return v; } }
C++
BSD-2-Clause
3nt3/serenity/AK/StringView.cpp
363dc8e4-0c91-4b25-9ecb-e6bdc6ef3e7d
[{"tag": "NAME", "value": "Andreas Kling", "start": 31, "end": 44, "context": "/*\n * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>\n *\n * SPDX-License-Identif"}, {"tag": "EMAIL", "value": "kling@serenityos.org", "start": 46, "end": 66, "context": "/*\n * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>\n *\n * SPDX-License-Identifier: BSD-2-Clause\n */\n"}]
[{"tag": "NAME", "value": "Andreas Kling", "start": 31, "end": 44, "context": "/*\n * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>\n *\n * SPDX-License-Identif"}, {"tag": "EMAIL", "value": "kling@serenityos.org", "start": 46, "end": 66, "context": "/*\n * Copyright (c) 2018-2021, Andreas Kling <kling@serenityos.org>\n *\n * SPDX-License-Identifier: BSD-2-Clause\n */\n"}]
<?php return [ /* * Debug 模式,bool 值:true/false * * 当值为 false 时,所有的日志都不会记录 */ 'debug' => true, /* * 使用 Laravel 的缓存系统 */ 'use_laravel_cache' => true, /* * 账号基本信息,请从微信公众平台/开放平台获取 */ 'app_id' => env('WECHAT_APPID', 'wx85a771d5dac6c0d0'), // AppID 'secret' => env('WECHAT_SECRET', '05dc14834a17fab400da0b532fc8657f'), // AppSecret 'token' => env('WECHAT_TOKEN', '3Eowbj5dA4zkbprvK3nnp3NDz/pj6TSVdyVO+b/AX90='), // Token 'aes_key' => env('WECHAT_AES_KEY', ''), // EncodingAESKey /* * 日志配置 * * level: 日志级别,可选为: * debug/info/notice/warning/error/critical/alert/emergency * file:日志文件位置(绝对路径!!!),要求可写权限 */ 'log' => [ 'level' => env('WECHAT_LOG_LEVEL', 'error'), 'file' => env('WECHAT_LOG_FILE', storage_path('logs/wechat.log')), ], /* * 小程序配置 * */ "mini"=>[ 'app_id' => '', 'secret' => '', // 指定 API 调用返回结果的类型:array(default)/collection/object/raw/自定义类名 'response_type' => 'array', //商户号、api秘钥 'mch_id' =>'', 'key' =>'', 'cert_path'=> './apiclient_cert.pem', 'key_path' => './apiclient_key.pem', //回调地址 'notify_url'=>'https://domain/api/notify', 'log' => [ 'level' => 'error', 'file' => storage_path('logs/mini.log'), ] ], /* * OAuth 配置 * * scopes:公众平台(snsapi_userinfo / snsapi_base),开放平台:snsapi_login * callback:OAuth授权完成后的回调页地址(如果使用中间件,则随便填写。。。) */ // 'oauth' => [ // 'scopes' => array_map('trim', explode(',', env('WECHAT_OAUTH_SCOPES', 'snsapi_userinfo'))), // 'callback' => env('WECHAT_OAUTH_CALLBACK', '/examples/oauth_callback.php'), // ], /* * 微信支付 */ // 'payment' => [ // 'merchant_id' => env('WECHAT_PAYMENT_MERCHANT_ID', 'your-mch-id'), // 'key' => env('WECHAT_PAYMENT_KEY', 'key-for-signature'), // 'cert_path' => env('WECHAT_PAYMENT_CERT_PATH', 'path/to/your/cert.pem'), // XXX: 绝对路径!!!! // 'key_path' => env('WECHAT_PAYMENT_KEY_PATH', 'path/to/your/key'), // XXX: 绝对路径!!!! // // 'device_info' => env('WECHAT_PAYMENT_DEVICE_INFO', ''), // // 'sub_app_id' => env('WECHAT_PAYMENT_SUB_APP_ID', ''), // // 'sub_merchant_id' => env('WECHAT_PAYMENT_SUB_MERCHANT_ID', ''), // // ... 
// ], /* * 开发模式下的免授权模拟授权用户资料 * * 当 enable_mock 为 true 则会启用模拟微信授权,用于开发时使用,开发完成请删除或者改为 false 即可 */ // 'enable_mock' => env('WECHAT_ENABLE_MOCK', true), // 'mock_user' => [ // "openid" =>"odh7zsgI75iT8FRh0fGlSojc9PWM", // // 以下字段为 scope 为 snsapi_userinfo 时需要 // "nickname" => "overtrue", // "sex" =>"1", // "province" =>"北京", // "city" =>"北京", // "country" =>"中国", // "headimgurl" => "http://wx.qlogo.cn/mmopen/C2rEUskXQiblFYMUl9O0G05Q6pKibg7V1WpHX6CIQaic824apriabJw4r6EWxziaSt5BATrlbx1GVzwW2qjUCqtYpDvIJLjKgP1ug/0", // ], ];
PHP
Apache-2.0
coderlee/Trains/config/wechat.php
904540b8-c72a-4968-b147-82f14ee229d5
[{"tag": "API_KEY", "value": "3Eowbj5dA4zkbprvK3nnp3NDz/pj6TSVdyVO+b/AX90=", "start": 450, "end": 494, "context": "/ AppSecret\n 'token' => env('WECHAT_TOKEN', '3Eowbj5dA4zkbprvK3nnp3NDz/pj6TSVdyVO+b/AX90='), // Token\n 'aes_key' => env('WECHAT"}, {"tag": "API_KEY", "value": "05dc14834a17fab400da0b532fc8657f", "start": 359, "end": 391, "context": " // AppID\n 'secret' => env('WECHAT_SECRET', '05dc14834a17fab400da0b532fc8657f'), // AppSecret\n 'token' => env('WECHAT_"}, {"tag": "API_KEY", "value": "odh7zsgI75iT8FRh0fGlSojc9PWM", "start": 2741, "end": 2769, "context": "),\n // 'mock_user' => [\n // \"openid\" =>\"odh7zsgI75iT8FRh0fGlSojc9PWM\",\n // // \u4ee5\u4e0b\u5b57\u6bb5\u4e3a scope \u4e3a snsapi_userinfo \u65f6\u9700\u8981"}, {"tag": "API_KEY", "value": "wx85a771d5dac6c0d0", "start": 281, "end": 299, "context": "\u53f0\u83b7\u53d6\n */\n 'app_id' => env('WECHAT_APPID', 'wx85a771d5dac6c0d0'), // AppID\n 'secret' => env('WECHAT_"}]
[{"tag": "KEY", "value": "3Eowbj5dA4zkbprvK3nnp3NDz/pj6TSVdyVO+b/AX90=", "start": 450, "end": 494, "context": "/ AppSecret\n 'token' => env('WECHAT_TOKEN', '3Eowbj5dA4zkbprvK3nnp3NDz/pj6TSVdyVO+b/AX90='), // Token\n 'aes_key' => env('WECHAT"}, {"tag": "KEY", "value": "05dc14834a17fab400da0b532fc8657f", "start": 359, "end": 391, "context": " // AppID\n 'secret' => env('WECHAT_SECRET', '05dc14834a17fab400da0b532fc8657f'), // AppSecret\n 'token' => env('WECHAT_"}, {"tag": "KEY", "value": "odh7zsgI75iT8FRh0fGlSojc9PWM", "start": 2741, "end": 2769, "context": "),\n // 'mock_user' => [\n // \"openid\" =>\"odh7zsgI75iT8FRh0fGlSojc9PWM\",\n // // \u4ee5\u4e0b\u5b57\u6bb5\u4e3a scope \u4e3a snsapi_userinfo \u65f6\u9700\u8981"}, {"tag": "KEY", "value": "wx85a771d5dac6c0d0", "start": 281, "end": 299, "context": "\u53f0\u83b7\u53d6\n */\n 'app_id' => env('WECHAT_APPID', 'wx85a771d5dac6c0d0'), // AppID\n 'secret' => env('WECHAT_"}]
using System.Reflection; using System.Runtime.CompilerServices; using System.Runtime.InteropServices; // As informações gerais sobre um assembly são controladas por // conjunto de atributos. Altere estes valores de atributo para modificar as informações // associada a um assembly. [assembly: AssemblyTitle("Phobos.BLL")] [assembly: AssemblyDescription("")] [assembly: AssemblyConfiguration("")] [assembly: AssemblyCompany("")] [assembly: AssemblyProduct("Phobos.BLL")] [assembly: AssemblyCopyright("Copyright © 2021")] [assembly: AssemblyTrademark("")] [assembly: AssemblyCulture("")] // Definir ComVisible como false torna os tipos neste assembly invisíveis // para componentes COM. Caso precise acessar um tipo neste assembly de // COM, defina o atributo ComVisible como true nesse tipo. [assembly: ComVisible(false)] // O GUID a seguir será destinado à ID de typelib se este projeto for exposto para COM [assembly: Guid("5c5a47f9-79cd-47d7-861c-813a0f0186c7")] // As informações da versão de um assembly consistem nos quatro valores a seguir: // // Versão Principal // Versão Secundária // Número da Versão // Revisão // // É possível especificar todos os valores ou usar como padrão os Números de Build e da Revisão // usando o "*" como mostrado abaixo: // [assembly: AssemblyVersion("1.0.*")] [assembly: AssemblyVersion("1.0.0.0")] [assembly: AssemblyFileVersion("1.0.0.0")]
C#
MIT
EiLucas/Phobos/Sistema/PhobosAnomaly/Phobos.BLL/Properties/AssemblyInfo.cs
681677a8-0b57-437e-8f78-dcafaa7410ca
[]
[]
class Codec2 < Formula desc "Open source speech codec" homepage "https://www.rowetel.com/?page_id=452" # Linked from https://freedv.org/ url "https://hobbes1069.fedorapeople.org/freetel/codec2/codec2-0.8.1.tar.xz" sha256 "a07cdaacf59c3f7dbb1c63b769d443af486c434b3bd031fb4edd568ce3e613d6" bottle do cellar :any sha256 "3316417a3e0244dcdc81466af56be6d323169f38c3146075e9314da92c60c938" => :catalina sha256 "92031b75a027390385864b1c2a4bde522da712162b7c6f8187a1b2adf74f8504" => :mojave sha256 "37a6ae2407ae97ae632078020e89163e9b58d3613207bcf534401f6660128108" => :high_sierra sha256 "d90f5373ac39385b8fffee0605afe2e27c195f44ef211f98d7b5d89c7200508d" => :sierra sha256 "896b96db4b2d4349ca56dc0e4daaf2bebfc28908197c013aefe89d86fe57317c" => :el_capitan sha256 "d47daf8a3b22cacc6bbba12e456e0ab2bfad63ef28efc345de5c20afd7020906" => :x86_64_linux end depends_on "cmake" => :build def install mkdir "build_osx" do system "cmake", "..", *std_cmake_args system "make", "install" end end test do # 8 bytes of raw audio data (silence). (testpath/"test.raw").write([0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00].pack("C*")) system "#{bin}/c2enc", "2400", "test.raw", "test.c2" end end
Ruby
BSD-2-Clause
Char-Aznable/homebrew-core/Formula/codec2.rb
75f754bb-7bc9-45d7-8bbb-c474851e20fb
[]
[]
require 'active_support/core_ext/string/strip' module ActiveRecord module ConnectionAdapters class AbstractAdapter class SchemaCreation # :nodoc: def initialize(conn) @conn = conn @cache = {} end def accept(o) m = @cache[o.class] ||= "visit_#{o.class.name.split('::').last}" send m, o end def visit_AddColumn(o) "ADD #{accept(o)}" end private def visit_AlterTable(o) sql = "ALTER TABLE #{quote_table_name(o.name)} " sql << o.adds.map { |col| visit_AddColumn col }.join(' ') sql << o.foreign_key_adds.map { |fk| visit_AddForeignKey fk }.join(' ') sql << o.foreign_key_drops.map { |fk| visit_DropForeignKey fk }.join(' ') end def visit_ColumnDefinition(o) sql_type = type_to_sql(o.type, o.limit, o.precision, o.scale) column_sql = "#{quote_column_name(o.name)} #{sql_type}" add_column_options!(column_sql, column_options(o)) unless o.primary_key? column_sql end def visit_TableDefinition(o) create_sql = "CREATE#{' TEMPORARY' if o.temporary} TABLE " create_sql << "#{quote_table_name(o.name)} " create_sql << "(#{o.columns.map { |c| accept c }.join(', ')}) " unless o.as create_sql << "#{o.options}" create_sql << " AS #{@conn.to_sql(o.as)}" if o.as create_sql end def visit_AddForeignKey(o) sql = <<-SQL.strip_heredoc ADD CONSTRAINT #{quote_column_name(o.name)} FOREIGN KEY (#{quote_column_name(o.column)}) REFERENCES #{quote_table_name(o.to_table)} (#{quote_column_name(o.primary_key)}) SQL sql << " #{action_sql('DELETE', o.on_delete)}" if o.on_delete sql << " #{action_sql('UPDATE', o.on_update)}" if o.on_update sql end def visit_DropForeignKey(name) "DROP CONSTRAINT #{quote_column_name(name)}" end def column_options(o) column_options = {} column_options[:null] = o.null unless o.null.nil? column_options[:default] = o.default unless o.default.nil? 
column_options[:column] = o column_options[:first] = o.first column_options[:after] = o.after column_options end def quote_column_name(name) @conn.quote_column_name name end def quote_table_name(name) @conn.quote_table_name name end def type_to_sql(type, limit, precision, scale) @conn.type_to_sql type.to_sym, limit, precision, scale end def add_column_options!(sql, options) sql << " DEFAULT #{quote_value(options[:default], options[:column])}" if options_include_default?(options) # must explicitly check for :null to allow change_column to work on migrations if options[:null] == false sql << " NOT NULL" end if options[:auto_increment] == true sql << " AUTO_INCREMENT" end sql end def quote_value(value, column) column.sql_type ||= type_to_sql(column.type, column.limit, column.precision, column.scale) column.cast_type ||= type_for_column(column) @conn.quote(value, column) end def options_include_default?(options) options.include?(:default) && !(options[:null] == false && options[:default].nil?) end def action_sql(action, dependency) case dependency when :nullify then "ON #{action} SET NULL" when :cascade then "ON #{action} CASCADE" when :restrict then "ON #{action} RESTRICT" else raise ArgumentError, <<-MSG.strip_heredoc '#{dependency}' is not supported for :on_update or :on_delete. Supported values are: :nullify, :cascade, :restrict MSG end end def type_for_column(column) @conn.lookup_cast_type(column.sql_type) end end end end end
Ruby
MIT
AMHOL/activejob-lock/rails/activerecord/lib/active_record/connection_adapters/abstract/schema_creation.rb
8343dca8-207a-4ced-8c96-c61737f830f1
[]
[]
"""OAuth1 module written according to http://oauth.net/core/1.0/#signing_process""" import base64 import hmac import requests # requests must be loaded so that urllib receives the parse module import time import urllib from hashlib import sha1 from six import b from uuid import uuid4 use_parse_quote = not hasattr(urllib, 'quote') if use_parse_quote: _quote_func = urllib.parse.quote else: _quote_func = urllib.quote def _quote(obj): return _quote_func(str(obj), safe='') def normalize_query_parameters(params): """9.1.1. Normalize Request Parameters""" return '&'.join(map(lambda pair: '='.join([_quote(pair[0]), _quote(pair[1])]), sorted(params.items()))) def concatenate_request_elements(method, url, query): """9.1.3. Concatenate Request Elements""" return '&'.join(map(_quote, [str(method).upper(), url, query])) def hmac_sha1(base_string, hmac_key): """9.2. HMAC-SHA1""" hash = hmac.new(b(hmac_key), b(base_string), sha1) return hash.digest() def encode(digest): """9.2.1. Generating Signature""" return base64.b64encode(digest).decode('ascii').rstrip('\n') def add_oauth_entries_to_fields_dict(secret, params, nonce=None, timestamp=None): """ Adds dict entries to the user's params dict which are required for OAuth1.0 signature generation :param secret: API secret :param params: dictionary of values which will be sent in the query :param nonce: (Optional) random string used in signature creation, uuid4() is used if not provided :param timestamp: (Optional) integer-format timestamp, time.time() is used if not provided :return: dict containing params and the OAuth1.0 fields required before executing signature.create :type secret: str :type params: dict :type nonce: str :type timestamp: int :Example: >>> from emailage.signature import add_oauth_entries_to_fields_dict >>> query_params = dict(user_email='registered.account.user@yourcompany.com',\ query='email.you.are.interested.in@gmail.com'\ ) >>> query_params = add_oauth_entries_to_fields_dict('YOUR_API_SECRET', query_params) 
>>> query_params['oauth_consumer_key'] 'YOUR_API_SECRET' >>> query_params['oauth_signature_method'] 'HMAC-SHA1' >>> query_params['oauth_version'] 1.0 """ if nonce is None: nonce = uuid4() if timestamp is None: timestamp = int(time.time()) params['oauth_consumer_key'] = secret params['oauth_nonce'] = nonce params['oauth_signature_method'] = 'HMAC-SHA1' params['oauth_timestamp'] = timestamp params['oauth_version'] = 1.0 return params def create(method, url, params, hmac_key): """ Generates the OAuth1.0 signature used as the value for the query string parameter 'oauth_signature' :param method: HTTP method that will be used to send the request ( 'GET' | 'POST' ); EmailageClient uses GET :param url: API domain and endpoint up to the ? :param params: user-provided query string parameters and the OAuth1.0 parameters :method add_oauth_entries_to_fields_dict: :param hmac_key: for Emailage users, this is your consumer token with an '&' (ampersand) appended to the end :return: str value used for oauth_signature :type method: str :type url: str :type params: dict :type hmac_key: str :Example: >>> from emailage.signature import add_oauth_entries_to_fields_dict, create >>> your_api_key = 'SOME_KEY' >>> your_hmac_key = 'SOME_SECRET' + '&' >>> api_url = 'https://sandbox.emailage.com/emailagevalidator/' >>> query_params = { 'query': 'user.you.are.validating@gmail.com', 'user_email': 'admin@yourcompany.com' } >>> query_params = add_oauth_entries_to_fields_dict(your_api_key, query_params) >>> query_params['oauth_signature'] = create('GET', api_url, query_params, your_hmac_key) """ query = normalize_query_parameters(params) base_string = concatenate_request_elements(method, url, query) digest = hmac_sha1(base_string, hmac_key) return encode(digest)
Python
MIT
bluefish6/Emailage_Python/emailage/signature.py
b5bf7984-391e-4efa-b8c6-a90429afa862
[{"tag": "EMAIL", "value": "registered.account.user@yourcompany.com", "start": 1992, "end": 2031, "context": "_dict\n >>> query_params = dict(user_email='registered.account.user@yourcompany.com',\\\n query='email.you.are.interested.in"}, {"tag": "EMAIL", "value": "admin@yourcompany.com", "start": 3836, "end": 3857, "context": "ser.you.are.validating@gmail.com', 'user_email': 'admin@yourcompany.com' }\n >>> query_params = add_oauth_entries_t"}, {"tag": "EMAIL", "value": "email.you.are.interested.in@gmail.com", "start": 2054, "end": 2091, "context": "ccount.user@yourcompany.com',\\\n query='email.you.are.interested.in@gmail.com'\\\n )\n >>> query_params = add_oauth_"}, {"tag": "EMAIL", "value": "user.you.are.validating@gmail.com", "start": 3785, "end": 3818, "context": "alidator/'\n >>> query_params = { 'query': 'user.you.are.validating@gmail.com', 'user_email': 'admin@yourcompany.com' }\n "}]
[{"tag": "EMAIL", "value": "registered.account.user@yourcompany.com", "start": 1992, "end": 2031, "context": "_dict\n >>> query_params = dict(user_email='registered.account.user@yourcompany.com',\\\n query='email.you.are.interested.in"}, {"tag": "EMAIL", "value": "admin@yourcompany.com", "start": 3836, "end": 3857, "context": "ser.you.are.validating@gmail.com', 'user_email': 'admin@yourcompany.com' }\n >>> query_params = add_oauth_entries_t"}, {"tag": "EMAIL", "value": "email.you.are.interested.in@gmail.com", "start": 2054, "end": 2091, "context": "ccount.user@yourcompany.com',\\\n query='email.you.are.interested.in@gmail.com'\\\n )\n >>> query_params = add_oauth_"}, {"tag": "EMAIL", "value": "user.you.are.validating@gmail.com", "start": 3785, "end": 3818, "context": "alidator/'\n >>> query_params = { 'query': 'user.you.are.validating@gmail.com', 'user_email': 'admin@yourcompany.com' }\n "}]
__copyright__ = "Copyright (C) 2009-2013 Andreas Kloeckner" __license__ = """ Permission is hereby granted, free of charge, to any person obtaining a copy of this software and associated documentation files (the "Software"), to deal in the Software without restriction, including without limitation the rights to use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of the Software, and to permit persons to whom the Software is furnished to do so, subject to the following conditions: The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software. THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE. 
""" import pymbolic.primitives as prim import pytest from pymbolic import parse from pytools.lex import ParseError from pymbolic.mapper import IdentityMapper try: reduce except NameError: from functools import reduce # {{{ utilities def assert_parsed_same_as_python(expr_str): # makes sure that has only one line expr_str, = expr_str.split("\n") from pymbolic.interop.ast import ASTToPymbolic import ast ast2p = ASTToPymbolic() try: expr_parsed_by_python = ast2p(ast.parse(expr_str).body[0].value) except SyntaxError: with pytest.raises(ParseError): parse(expr_str) else: expr_parsed_by_pymbolic = parse(expr_str) assert expr_parsed_by_python == expr_parsed_by_pymbolic def assert_parse_roundtrip(expr_str): expr = parse(expr_str) from pymbolic.mapper.stringifier import StringifyMapper strified = StringifyMapper()(expr) assert strified == expr_str, (strified, expr_str) # }}} def test_integer_power(): from pymbolic.algorithm import integer_power for base, expn in [ (17, 5), (17, 2**10), (13, 20), (13, 1343), ]: assert base**expn == integer_power(base, expn) def test_expand(): from pymbolic import var, expand x = var("x") u = (x+1)**5 expand(u) def test_substitute(): from pymbolic import parse, substitute, evaluate u = parse("5+x.min**2") xmin = parse("x.min") assert evaluate(substitute(u, {xmin: 25})) == 630 def test_no_comparison(): from pymbolic import parse x = parse("17+3*x") y = parse("12-5*y") def expect_typeerror(f): try: f() except TypeError: pass else: raise AssertionError expect_typeerror(lambda: x < y) expect_typeerror(lambda: x <= y) expect_typeerror(lambda: x > y) expect_typeerror(lambda: x >= y) def test_structure_preservation(): x = prim.Sum((5, 7)) from pymbolic.mapper import IdentityMapper x2 = IdentityMapper()(x) assert x == x2 def test_sympy_interaction(): pytest.importorskip("sympy") import sympy as sp x, y = sp.symbols("x y") f = sp.Function("f") s1_expr = 1/f(x/sp.sqrt(x**2+y**2)).diff(x, 5) # pylint:disable=not-callable from pymbolic.interop.sympy 
import ( SympyToPymbolicMapper, PymbolicToSympyMapper) s2p = SympyToPymbolicMapper() p2s = PymbolicToSympyMapper() p1_expr = s2p(s1_expr) s2_expr = p2s(p1_expr) assert sp.ratsimp(s1_expr - s2_expr) == 0 p2_expr = s2p(s2_expr) s3_expr = p2s(p2_expr) assert sp.ratsimp(s1_expr - s3_expr) == 0 # {{{ fft def test_fft_with_floats(): numpy = pytest.importorskip("numpy") import numpy.linalg as la from pymbolic.algorithm import fft, ifft for n in [2**i for i in range(4, 10)]+[17, 12, 948]: a = numpy.random.rand(n) + 1j*numpy.random.rand(n) f_a = fft(a) a2 = ifft(f_a) assert la.norm(a-a2) < 1e-10 f_a_numpy = numpy.fft.fft(a) assert la.norm(f_a-f_a_numpy) < 1e-10 class NearZeroKiller(IdentityMapper): def map_constant(self, expr): if isinstance(expr, complex): r = expr.real i = expr.imag if abs(r) < 1e-15: r = 0 if abs(i) < 1e-15: i = 0 return complex(r, i) else: return expr def test_fft(): numpy = pytest.importorskip("numpy") from pymbolic import var from pymbolic.algorithm import fft, sym_fft vars = numpy.array([var(chr(97+i)) for i in range(16)], dtype=object) print(vars) print(fft(vars)) traced_fft = sym_fft(vars) from pymbolic.mapper.stringifier import PREC_NONE from pymbolic.mapper.c_code import CCodeMapper ccm = CCodeMapper() code = [ccm(tfi, PREC_NONE) for tfi in traced_fft] for cse_name, cse_str in enumerate(ccm.cse_name_list): print(f"{cse_name} = {cse_str}") for i, line in enumerate(code): print("result[%d] = %s" % (i, line)) # }}} def test_sparse_multiply(): numpy = pytest.importorskip("numpy") pytest.importorskip("scipy") import scipy.sparse as ss la = numpy.linalg mat = numpy.random.randn(10, 10) s_mat = ss.csr_matrix(mat) vec = numpy.random.randn(10) mat_vec = s_mat*vec from pymbolic.algorithm import csr_matrix_multiply mat_vec_2 = csr_matrix_multiply(s_mat, vec) assert la.norm(mat_vec-mat_vec_2) < 1e-14 # {{{ parser def test_parser(): from pymbolic import parse parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) " 
"-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) " "+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)") print(repr(parse("d4knl0"))) print(repr(parse("0."))) print(repr(parse("0.e1"))) assert parse("0.e1") == 0 assert parse("1e-12") == 1e-12 print(repr(parse("a >= 1"))) print(repr(parse("a <= 1"))) print(repr(parse(":"))) print(repr(parse("1:"))) print(repr(parse(":2"))) print(repr(parse("1:2"))) print(repr(parse("::"))) print(repr(parse("1::"))) print(repr(parse(":1:"))) print(repr(parse("::1"))) print(repr(parse("3::1"))) print(repr(parse(":5:1"))) print(repr(parse("3:5:1"))) assert_parse_roundtrip("()") assert_parse_roundtrip("(3,)") assert_parse_roundtrip("[x + 3, 3, 5]") assert_parse_roundtrip("[]") assert_parse_roundtrip("[x]") assert_parse_roundtrip("g[i, k] + 2.0*h[i, k]") parse("g[i,k]+(+2.0)*h[i, k]") print(repr(parse("a - b - c"))) print(repr(parse("-a - -b - -c"))) print(repr(parse("- - - a - - - - b - - - - - c"))) print(repr(parse("~(a ^ b)"))) print(repr(parse("(a | b) | ~(~a & ~b)"))) print(repr(parse("3 << 1"))) print(repr(parse("1 >> 3"))) print(parse("3::1")) assert parse("e1") == prim.Variable("e1") assert parse("d1") == prim.Variable("d1") from pymbolic import variables f, x, y, z = variables("f x y z") assert parse("f((x,y),z)") == f((x, y), z) assert parse("f((x,),z)") == f((x,), z) assert parse("f(x,(y,z),z)") == f(x, (y, z), z) assert parse("f(x,(y,z),z, name=15)") == f(x, (y, z), z, name=15) assert parse("f(x,(y,z),z, name=15, name2=17)") == f( x, (y, z), z, name=15, name2=17) assert_parsed_same_as_python("5+i if i>=0 else (0 if i<-1 else 10)") assert_parsed_same_as_python("0 if 1 if 2 else 3 else 4") assert_parsed_same_as_python("0 if (1 if 2 else 3) else 4") assert_parsed_same_as_python("(2, 3,)") with pytest.deprecated_call(): parse("1+if(0, 1, 2)") # }}} def test_mappers(): from pymbolic import variables f, x, y, z = variables("f x y z") for expr in [ f(x, (y, z), name=z**2) ]: from pymbolic.mapper import WalkMapper from 
pymbolic.mapper.dependency import DependencyMapper str(expr) IdentityMapper()(expr) WalkMapper()(expr) DependencyMapper()(expr) def test_func_dep_consistency(): from pymbolic import var from pymbolic.mapper.dependency import DependencyMapper f = var("f") x = var("x") dep_map = DependencyMapper(include_calls="descend_args") assert dep_map(f(x)) == {x} assert dep_map(f(x=x)) == {x} def test_conditions(): from pymbolic import var x = var("x") y = var("y") assert str(x.eq(y).and_(x.le(5))) == "x == y and x <= 5" def test_graphviz(): from pymbolic import parse expr = parse("(2*a[1]*b[1]+2*a[0]*b[0])*(hankel_1(-1,sqrt(a[1]**2+a[0]**2)*k) " "-hankel_1(1,sqrt(a[1]**2+a[0]**2)*k))*k /(4*sqrt(a[1]**2+a[0]**2)) " "+hankel_1(0,sqrt(a[1]**2+a[0]**2)*k)") from pymbolic.mapper.graphviz import GraphvizMapper gvm = GraphvizMapper() gvm(expr) print(gvm.get_dot_code()) # {{{ geometric algebra @pytest.mark.parametrize("dims", [2, 3, 4, 5]) # START_GA_TEST def test_geometric_algebra(dims): pytest.importorskip("numpy") import numpy as np from pymbolic.geometric_algebra import MultiVector as MV # noqa vec1 = MV(np.random.randn(dims)) vec2 = MV(np.random.randn(dims)) vec3 = MV(np.random.randn(dims)) vec4 = MV(np.random.randn(dims)) vec5 = MV(np.random.randn(dims)) # Fundamental identity assert ((vec1 ^ vec2) + (vec1 | vec2)).close_to(vec1*vec2) # Antisymmetry assert (vec1 ^ vec2 ^ vec3).close_to(- vec2 ^ vec1 ^ vec3) vecs = [vec1, vec2, vec3, vec4, vec5] if len(vecs) > dims: from operator import xor as outer assert reduce(outer, vecs).close_to(0) assert (vec1.inv()*vec1).close_to(1) assert (vec1*vec1.inv()).close_to(1) assert ((1/vec1)*vec1).close_to(1) assert (vec1/vec1).close_to(1) for a, b, c in [ (vec1, vec2, vec3), (vec1*vec2, vec3, vec4), (vec1, vec2*vec3, vec4), (vec1, vec2, vec3*vec4), (vec1, vec2, vec3*vec4*vec5), (vec1, vec2*vec1, vec3*vec4*vec5), ]: # Associativity assert ((a*b)*c).close_to(a*(b*c)) assert ((a ^ b) ^ c).close_to(a ^ (b ^ c)) # The inner product is not 
associative. # scalar product assert ((c*b).project(0)) .close_to(b.scalar_product(c)) assert ((c.rev()*b).project(0)) .close_to(b.rev().scalar_product(c)) assert ((b.rev()*b).project(0)) .close_to(b.norm_squared()) assert b.norm_squared() >= 0 assert c.norm_squared() >= 0 # Cauchy's inequality assert b.scalar_product(c) <= abs(b)*abs(c) + 1e-13 # contractions # (3.18) in [DFM] assert abs(b.scalar_product(a ^ c) - (b >> a).scalar_product(c)) < 1e-13 # duality, (3.20) in [DFM] assert ((a ^ b) << c) .close_to(a << (b << c)) # two definitions of the dual agree: (1.2.26) in [HS] # and (sec 3.5.3) in [DFW] assert (c << c.I.rev()).close_to(c | c.I.rev()) # inverse for div in list(b.gen_blades()) + [vec1, vec1.I]: assert (div.inv()*div).close_to(1) assert (div*div.inv()).close_to(1) assert ((1/div)*div).close_to(1) assert (div/div).close_to(1) assert ((c/div)*div).close_to(c) assert ((c*div)/div).close_to(c) # reverse properties (Sec 2.9.5 [DFM]) assert c.rev().rev() == c assert (b ^ c).rev() .close_to(c.rev() ^ b.rev()) # dual properties # (1.2.26) in [HS] assert c.dual() .close_to(c | c.I.rev()) assert c.dual() .close_to(c*c.I.rev()) # involution properties (Sec 2.9.5 DFW) assert c.invol().invol() == c assert (b ^ c).invol() .close_to(b.invol() ^ c.invol()) # commutator properties # Jacobi identity (1.1.56c) in [HS] or (8.2) in [DFW] assert (a.x(b.x(c)) + b.x(c.x(a)) + c.x(a.x(b))).close_to(0) # (1.57) in [HS] assert a.x(b*c) .close_to(a.x(b)*c + b*a.x(c)) # END_GA_TEST # }}} def test_ast_interop(): src = """ def f(): xx = 3*y + z * (12 if x < 13 else 13) yy = f(x, y=y) """ import ast mod = ast.parse(src.replace("\n ", "\n")) print(ast.dump(mod)) from pymbolic.interop.ast import ASTToPymbolic ast2p = ASTToPymbolic() for f in mod.body: if not isinstance(f, ast.FunctionDef): continue for stmt in f.body: if not isinstance(stmt, ast.Assign): continue lhs, = stmt.targets lhs = ast2p(lhs) rhs = ast2p(stmt.value) print(lhs, rhs) def test_compile(): from pymbolic import parse, 
compile code = compile(parse("x ** y"), ["x", "y"]) assert code(2, 5) == 32 # Test pickling of compiled code. import pickle code = pickle.loads(pickle.dumps(code)) assert code(3, 3) == 27 def test_unifier(): from pymbolic import var from pymbolic.mapper.unifier import UnidirectionalUnifier a, b, c, d, e, f = [var(s) for s in "abcdef"] def match_found(records, eqns): for record in records: if eqns <= set(record.equations): return True return False recs = UnidirectionalUnifier("abc")(a+b*c, d+e*f) assert len(recs) == 2 assert match_found(recs, {(a, d), (b, e), (c, f)}) assert match_found(recs, {(a, d), (b, f), (c, e)}) recs = UnidirectionalUnifier("abc")(a+b, d+e+f) assert len(recs) == 6 assert match_found(recs, {(a, d), (b, e+f)}) assert match_found(recs, {(a, e), (b, d+f)}) assert match_found(recs, {(a, f), (b, d+e)}) assert match_found(recs, {(b, d), (a, e+f)}) assert match_found(recs, {(b, e), (a, d+f)}) assert match_found(recs, {(b, f), (a, d+e)}) vals = [var("v" + str(i)) for i in range(100)] recs = UnidirectionalUnifier("a")(sum(vals[1:]) + a, sum(vals)) assert len(recs) == 1 assert match_found(recs, {(a, var("v0"))}) recs = UnidirectionalUnifier("abc")(a+b+c, d+e) assert len(recs) == 0 recs = UnidirectionalUnifier("abc")(f(a+b, f(a+c)), f(b+c, f(b+d))) assert len(recs) == 1 assert match_found(recs, {(a, b), (b, c), (c, d)}) def test_long_sympy_mapping(): sp = pytest.importorskip("sympy") from pymbolic.interop.sympy import SympyToPymbolicMapper SympyToPymbolicMapper()(sp.sympify(int(10**20))) SympyToPymbolicMapper()(sp.sympify(int(10))) def test_stringifier_preserve_shift_order(): for expr in [ parse("(a << b) >> 2"), parse("a << (b >> 2)") ]: assert parse(str(expr)) == expr LATEX_TEMPLATE = r"""\documentclass{article} \usepackage{amsmath} \begin{document} %s \end{document}""" def test_latex_mapper(): from pymbolic import parse from pymbolic.mapper.stringifier import LaTeXMapper, StringifyMapper tm = LaTeXMapper() sm = StringifyMapper() equations = [] def 
add(expr): # Add an equation to the list of tests. equations.append(r"\[{}\] % from: {}".format(tm(expr), sm(expr))) add(parse("a * b + c")) add(parse("f(a,b,c)")) add(parse("a ** b ** c")) add(parse("(a | b) ^ ~c")) add(parse("a << b")) add(parse("a >> b")) add(parse("a[i,j,k]")) add(parse("a[1:3]")) add(parse("a // b")) add(parse("not (a or b) and c")) add(parse("(a % b) % c")) add(parse("(a >= b) or (b <= c)")) add(prim.Min((1,)) + prim.Max((1, 2))) add(prim.Substitution(prim.Variable("x") ** 2, ("x",), (2,))) add(prim.Derivative(parse("x**2"), ("x",))) # Run LaTeX and ensure the file compiles. import os import tempfile import subprocess import shutil latex_dir = tempfile.mkdtemp("pymbolic") try: tex_file_path = os.path.join(latex_dir, "input.tex") with open(tex_file_path, "w") as tex_file: contents = LATEX_TEMPLATE % "\n".join(equations) tex_file.write(contents) try: subprocess.check_output( ["latex", "-interaction=nonstopmode", "-output-directory=%s" % latex_dir, tex_file_path], universal_newlines=True) except OSError: # FIXME: Should be FileNotFoundError on Py3 pytest.skip("latex command not found") except subprocess.CalledProcessError as err: raise AssertionError(str(err.output)) finally: shutil.rmtree(latex_dir) def test_flop_counter(): x = prim.Variable("x") y = prim.Variable("y") z = prim.Variable("z") subexpr = prim.CommonSubexpression(3 * (x**2 + y + z)) expr = 3*subexpr + subexpr from pymbolic.mapper.flop_counter import FlopCounter, CSEAwareFlopCounter assert FlopCounter()(expr) == 4 * 2 + 2 assert CSEAwareFlopCounter()(expr) == 4 + 2 def test_make_sym_vector(): numpy = pytest.importorskip("numpy") from pymbolic.primitives import make_sym_vector assert len(make_sym_vector("vec", 2)) == 2 assert len(make_sym_vector("vec", numpy.int32(2))) == 2 assert len(make_sym_vector("vec", [1, 2, 3])) == 3 def test_multiplicative_stringify_preserves_association(): for inner in ["*", " / ", " // ", " % "]: for outer in ["*", " / ", " // ", " % "]: if outer == inner: 
continue assert_parse_roundtrip(f"x{outer}(y{inner}z)") assert_parse_roundtrip(f"(y{inner}z){outer}x") assert_parse_roundtrip("(-1)*(((-1)*x) / 5)") def test_differentiator_flags_for_nonsmooth_and_discontinuous(): import pymbolic.functions as pf from pymbolic.mapper.differentiator import differentiate x = prim.Variable("x") with pytest.raises(ValueError): differentiate(pf.fabs(x), x) result = differentiate(pf.fabs(x), x, allowed_nonsmoothness="continuous") assert result == pf.sign(x) with pytest.raises(ValueError): differentiate(pf.sign(x), x) result = differentiate(pf.sign(x), x, allowed_nonsmoothness="discontinuous") assert result == 0 def test_np_bool_handling(): from pymbolic.mapper.evaluator import evaluate numpy = pytest.importorskip("numpy") expr = prim.LogicalNot(numpy.bool_(False)) assert evaluate(expr) is True if __name__ == "__main__": import sys if len(sys.argv) > 1: exec(sys.argv[1]) else: from pytest import main main([__file__]) # vim: fdm=marker
Python
MIT
sv2518/pymbolic/test/test_pymbolic.py
d0bc4ec1-752e-44cb-8689-9211f87f80c9
[{"tag": "NAME", "value": "Andreas Kloeckner", "start": 41, "end": 58, "context": "__copyright__ = \"Copyright (C) 2009-2013 Andreas Kloeckner\"\n\n__license__ = \"\"\"\nPermission is hereby granted,"}]
[{"tag": "NAME", "value": "Andreas Kloeckner", "start": 41, "end": 58, "context": "__copyright__ = \"Copyright (C) 2009-2013 Andreas Kloeckner\"\n\n__license__ = \"\"\"\nPermission is hereby granted,"}]
# -*- coding: utf-8 -*- """Subspace Outlier Detection (SOD) """ # Author: Yahya Almardeny <almardeny@gmail.com> # License: BSD 2 clause import numpy as np import numba as nb from sklearn.neighbors import NearestNeighbors from sklearn.utils import check_array from ..utils.utility import check_parameter from .base import BaseDetector @nb.njit(parallel=True) def _snn_imp(ind, ref_set_): """Internal function for fast snn calculation Parameters ---------- ind : int Indices return by kNN. ref_set_ : int, optional (default=10) specifies the number of shared nearest neighbors to create the reference set. Note that ref_set must be smaller than n_neighbors. """ n = ind.shape[0] _count = np.zeros(shape=(n, ref_set_), dtype=np.uint32) for i in nb.prange(n): temp = np.empty(n, dtype=np.uint32) test_element_set = set(ind[i]) for j in nb.prange(n): temp[j] = len(set(ind[j]).intersection(test_element_set)) temp[i] = np.iinfo(np.uint32).max _count[i] = np.argsort(temp)[::-1][1:ref_set_ + 1] return _count class SOD(BaseDetector): """Subspace outlier detection (SOD) schema aims to detect outlier in varying subspaces of a high dimensional feature space. For each data object, SOD explores the axis-parallel subspace spanned by the data object's neighbors and determines how much the object deviates from the neighbors in this subspace. See :cite:`kriegel2009outlier` for details. Parameters ---------- n_neighbors : int, optional (default=20) Number of neighbors to use by default for k neighbors queries. ref_set: int, optional (default=10) specifies the number of shared nearest neighbors to create the reference set. Note that ref_set must be smaller than n_neighbors. alpha: float in (0., 1.), optional (default=0.8) specifies the lower limit for selecting subspace. 0.8 is set as default as suggested in the original paper. contamination : float in (0., 0.5), optional (default=0.1) The amount of contamination of the data set, i.e. the proportion of outliers in the data set. 
Used when fitting to define the threshold on the decision function. Attributes ---------- decision_scores_ : numpy array of shape (n_samples,) The outlier scores of the training data. The higher, the more abnormal. Outliers tend to have higher scores. This value is available once the detector is fitted. threshold_ : float The threshold is based on ``contamination``. It is the ``n_samples * contamination`` most abnormal samples in ``decision_scores_``. The threshold is calculated for generating binary outlier labels. labels_ : int, either 0 or 1 The binary labels of the training data. 0 stands for inliers and 1 for outliers/anomalies. It is generated by applying ``threshold_`` on ``decision_scores_``. """ def __init__(self, contamination=0.1, n_neighbors=20, ref_set=10, alpha=0.8): super(SOD, self).__init__(contamination=contamination) if isinstance(n_neighbors, int): check_parameter(n_neighbors, low=1, param_name='n_neighbors') else: raise ValueError( "n_neighbors should be int. Got %s" % type(n_neighbors)) if isinstance(ref_set, int): check_parameter(ref_set, low=1, high=n_neighbors, param_name='ref_set') else: raise ValueError("ref_set should be int. Got %s" % type(ref_set)) if isinstance(alpha, float): check_parameter(alpha, low=0.0, high=1.0, param_name='alpha') else: raise ValueError("alpha should be float. Got %s" % type(alpha)) self.n_neighbors_ = n_neighbors self.ref_set_ = ref_set self.alpha_ = alpha self.decision_scores_ = None def fit(self, X, y=None): """Fit detector. y is ignored in unsupervised methods. Parameters ---------- X : numpy array of shape (n_samples, n_features) The input samples. y : Ignored Not used, present for API consistency by convention. Returns ------- self : object Fitted estimator. 
""" # validate inputs X and y (optional) X = check_array(X) self._set_n_classes(y) self.decision_scores_ = self.decision_function(X) self._process_decision_scores() return self def decision_function(self, X): """Predict raw anomaly score of X using the fitted detector. The anomaly score of an input sample is computed based on different detector algorithms. For consistency, outliers are assigned with larger anomaly scores. Parameters ---------- X : numpy array of shape (n_samples, n_features) The training input samples. Sparse matrices are accepted only if they are supported by the base estimator. Returns ------- anomaly_scores : numpy array of shape (n_samples,) The anomaly score of the input samples. """ return self._sod(X) def _snn(self, X): """This function is called internally to calculate the shared nearest neighbors (SNN). SNN is reported to be more robust than k nearest neighbors. Returns ------- snn_indices : numpy array of shape (n_shared_nearest_neighbors,) The indices of top k shared nearest neighbors for each observation. """ knn = NearestNeighbors(n_neighbors=self.n_neighbors_) knn.fit(X) # Get the knn index ind = knn.kneighbors(return_distance=False) return _snn_imp(ind, self.ref_set_) def _sod(self, X): """This function is called internally to perform subspace outlier detection algorithm. Returns ------- anomaly_scores : numpy array of shape (n_samples,) The anomaly score of the input samples. 
""" ref_inds = self._snn(X) anomaly_scores = np.zeros(shape=(X.shape[0],)) for i in range(X.shape[0]): obs = X[i] ref = X[ref_inds[i,],] means = np.mean(ref, axis=0) # mean of each column # average squared distance of the reference to the mean var_total = np.sum(np.sum(np.square(ref - means))) / self.ref_set_ var_expect = self.alpha_ * var_total / X.shape[1] var_actual = np.var(ref, axis=0) # variance of each attribute var_inds = [1 if (j < var_expect) else 0 for j in var_actual] rel_dim = np.sum(var_inds) if rel_dim != 0: anomaly_scores[i] = np.sqrt( np.dot(var_inds, np.square(obs - means)) / rel_dim) return anomaly_scores
Python
BSD-2-Clause
BillyGareth/pyod/pyod/models/sod.py
9b698080-cf21-4c7a-a0e5-3f5e858dc09e
[{"tag": "EMAIL", "value": "almardeny@gmail.com", "start": 91, "end": 110, "context": "er Detection (SOD)\n\"\"\"\n# Author: Yahya Almardeny <almardeny@gmail.com>\n# License: BSD 2 clause\n\nimport numpy as np\nimpo"}, {"tag": "NAME", "value": "Yahya Almardeny", "start": 74, "end": 89, "context": "\"\"\"Subspace Outlier Detection (SOD)\n\"\"\"\n# Author: Yahya Almardeny <almardeny@gmail.com>\n# License: BSD 2 clause\n\nim"}]
[{"tag": "EMAIL", "value": "almardeny@gmail.com", "start": 91, "end": 110, "context": "er Detection (SOD)\n\"\"\"\n# Author: Yahya Almardeny <almardeny@gmail.com>\n# License: BSD 2 clause\n\nimport numpy as np\nimpo"}, {"tag": "NAME", "value": "Yahya Almardeny", "start": 74, "end": 89, "context": "\"\"\"Subspace Outlier Detection (SOD)\n\"\"\"\n# Author: Yahya Almardeny <almardeny@gmail.com>\n# License: BSD 2 clause\n\nim"}]
import { Injectable } from '@angular/core'; import { Observable } from 'rxjs/Observable'; import 'rxjs/add/observable/of'; import { User } from './models/user'; @Injectable() export class AppUserService { private users: User[] = [{ id: 1, firstName: 'user', lastName: 'user', email: 'user@easylocatus.com', phone: '0021612345614', password: '123456' }] constructor() { // this.userArray = Object.values(this.users); } add(user: User) { this.users.push(user); } all(): User[] { return this.users; } existe(user): boolean { const result = this.users.filter(u => { return user.email === u.email && user.password === u.password; }); if(result.length > 0) { return true; } return false; } }
TypeScript
MIT
AkremiKhayreddine27/Locatus/src/app/@core/data/app-user.service.ts
a053364f-05d7-4fca-a047-d6128aaed87b
[{"tag": "PASSWORD", "value": "123456", "start": 396, "end": 402, "context": " phone: '0021612345614',\n password: '123456'\n }]\n\n\n constructor() {\n // this.use"}, {"tag": "EMAIL", "value": "user@easylocatus.com", "start": 322, "end": 342, "context": "'user',\n lastName: 'user',\n email: 'user@easylocatus.com',\n phone: '0021612345614',\n passwor"}]
[{"tag": "PASSWORD", "value": "123456", "start": 396, "end": 402, "context": " phone: '0021612345614',\n password: '123456'\n }]\n\n\n constructor() {\n // this.use"}, {"tag": "EMAIL", "value": "user@easylocatus.com", "start": 322, "end": 342, "context": "'user',\n lastName: 'user',\n email: 'user@easylocatus.com',\n phone: '0021612345614',\n passwor"}]
/* * uicbm2model.h - CBM2 model selection UI for MS-DOS. * * Written by * Marco van den Heuvel <blackystardust68@yahoo.com> * * This file is part of VICE, the Versatile Commodore Emulator. * See README for copyright notice. * * This program is free software; you can redistribute it and/or modify * it under the terms of the GNU General Public License as published by * the Free Software Foundation; either version 2 of the License, or * (at your option) any later version. * * This program is distributed in the hope that it will be useful, * but WITHOUT ANY WARRANTY; without even the implied warranty of * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the * GNU General Public License for more details. * * You should have received a copy of the GNU General Public License * along with this program; if not, write to the Free Software * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA * 02111-1307 USA. * */ #ifndef UICBM2MODEL_H #define UICBM2MODEL_H struct tui_menu; extern void uicbm2model_init(struct tui_menu *parent_submenu); #endif
C
Apache-2.0
Classicmods/C64_mini_VICE/src/arch/msdos/uicbm2model.h
25705e56-2fc5-4cef-85da-3acb09a41a99
[{"tag": "EMAIL", "value": "blackystardust68@yahoo.com", "start": 101, "end": 127, "context": "S-DOS.\n *\n * Written by\n * Marco van den Heuvel <blackystardust68@yahoo.com>\n *\n * This file is part of VICE, the Versatile C"}, {"tag": "NAME", "value": "Marco van den Heuvel", "start": 79, "end": 99, "context": "del selection UI for MS-DOS.\n *\n * Written by\n * Marco van den Heuvel <blackystardust68@yahoo.com>\n *\n * This file is p"}]
[{"tag": "EMAIL", "value": "blackystardust68@yahoo.com", "start": 101, "end": 127, "context": "S-DOS.\n *\n * Written by\n * Marco van den Heuvel <blackystardust68@yahoo.com>\n *\n * This file is part of VICE, the Versatile C"}, {"tag": "NAME", "value": "Marco van den Heuvel", "start": 79, "end": 99, "context": "del selection UI for MS-DOS.\n *\n * Written by\n * Marco van den Heuvel <blackystardust68@yahoo.com>\n *\n * This file is p"}]
/* TiMidity -- Experimental MIDI to WAVE converter Copyright (C) 1995 Tuukka Toivonen <toivonen@clinet.fi> This program is free software; you can redistribute it and/or modify it under the terms of the Perl Artistic License, available in COPYING. filter.h : written by Vincent Pagel ( pagel@loria.fr ) implements fir antialiasing filter : should help when setting sample rates as low as 8Khz. */ /* Order of the FIR filter = 20 should be enough ! */ #define ORDER 20 #define ORDER2 ORDER/2 #ifndef PI #define PI 3.14159265 #endif extern void antialiasing(Sample *sp, int32 output_rate);
C
MIT
0sara0/ring/extensions/android/ringlibsdl/project/jni/SDL2_mixer/timidity/filter.h
b5615a6f-6dcb-43b6-a66f-aa274a6cb16a
[{"tag": "NAME", "value": "Tuukka Toivonen", "start": 78, "end": 93, "context": "tal MIDI to WAVE converter\n Copyright (C) 1995 Tuukka Toivonen <toivonen@clinet.fi>\n\n This program is free so"}, {"tag": "EMAIL", "value": "toivonen@clinet.fi", "start": 95, "end": 113, "context": "converter\n Copyright (C) 1995 Tuukka Toivonen <toivonen@clinet.fi>\n\n This program is free software; you can redi"}]
[{"tag": "NAME", "value": "Tuukka Toivonen", "start": 78, "end": 93, "context": "tal MIDI to WAVE converter\n Copyright (C) 1995 Tuukka Toivonen <toivonen@clinet.fi>\n\n This program is free so"}, {"tag": "EMAIL", "value": "toivonen@clinet.fi", "start": 95, "end": 113, "context": "converter\n Copyright (C) 1995 Tuukka Toivonen <toivonen@clinet.fi>\n\n This program is free software; you can redi"}]
// This file was procedurally generated from the following sources: // - src/dstr-binding/obj-ptrn-prop-ary-trailing-comma.case // - src/dstr-binding/default/gen-func-decl-dflt.template /*--- description: Trailing comma is allowed following BindingPropertyList (generator function declaration (default parameter)) esid: sec-generator-function-definitions-runtime-semantics-instantiatefunctionobject features: [generators, destructuring-binding, default-parameters] flags: [generated] info: | GeneratorDeclaration : function * ( FormalParameters ) { GeneratorBody } [...] 2. Let F be GeneratorFunctionCreate(Normal, FormalParameters, GeneratorBody, scope, strict). [...] 9.2.1 [[Call]] ( thisArgument, argumentsList) [...] 7. Let result be OrdinaryCallEvaluateBody(F, argumentsList). [...] 9.2.1.3 OrdinaryCallEvaluateBody ( F, argumentsList ) 1. Let status be FunctionDeclarationInstantiation(F, argumentsList). [...] 9.2.12 FunctionDeclarationInstantiation(func, argumentsList) [...] 23. Let iteratorRecord be Record {[[iterator]]: CreateListIterator(argumentsList), [[done]]: false}. 24. If hasDuplicates is true, then [...] 25. Else, b. Let formalStatus be IteratorBindingInitialization for formals with iteratorRecord and env as arguments. [...] 13.3.3 Destructuring Binding Patterns ObjectBindingPattern[Yield] : { } { BindingPropertyList[?Yield] } { BindingPropertyList[?Yield] , } ---*/ var callCount = 0; function* f({ x: [y], } = { x: [45] }) { assert.sameValue(y,45); callCount = callCount + 1; }; f().next(); assert.sameValue(callCount, 1, 'generator function invoked exactly once'); reportCompare(0, 0);
JavaScript
MPL-2.0
dothq/browser-ff-old/js/src/tests/test262/language/statements/generators/dstr/dflt-obj-ptrn-prop-ary-trailing-comma.js
c9eb9003-cd79-493d-8912-545b4107c461
[]
[]
(window.webpackJsonp=window.webpackJsonp||[]).push([[7],{"0pAz":function(e,t,n){"use strict";n.r(t);var a=n("q1tI"),l=n.n(a),i=n("Wbzz"),c=n("vOnD"),o=n("VeD8"),m=n("pQ8y"),r=n("Kvkj"),u=n("7Qib"),s=n("InJ6"),d=c.d.main.withConfig({displayName:"snake__StyledMainContainer",componentId:"ywz0ll-0"})(["",";flex-direction:column;"],(function(e){return e.theme.mixins.flexCenter})),f=c.d.div.withConfig({displayName:"snake__StyledContent",componentId:"ywz0ll-1"})(["display:flex;flex-direction:column;min-height:100vh;"]);Object(c.d)(i.Link).withConfig({displayName:"snake__StyledHomeButton",componentId:"ywz0ll-2"})(["",";margin-top:0px;"],(function(e){return e.theme.mixins.bigButton}));t.default=function(e){e.location;var t=Object(a.useState)(!1),n=t[0],i=t[1];Object(a.useState)(!0)[0];return Object(a.useEffect)((function(){var e=setTimeout((function(){return i(!0)}),u.c);return function(){return clearTimeout(e)}}),[]),l.a.createElement(l.a.Fragment,null,l.a.createElement(r.e,null),l.a.createElement("div",{id:"root"},l.a.createElement(c.a,{theme:s.b},l.a.createElement(s.a,null),l.a.createElement(f,null,l.a.createElement(r.j,{isHome:!0}),l.a.createElement(r.o,{isHome:!0}),l.a.createElement(r.c,{isHome:!0}),l.a.createElement("div",{id:"content"},l.a.createElement(o.a,{component:null},n&&l.a.createElement(m.a,{timeout:500,classNames:"fadeup"},l.a.createElement(d,{className:"fillHeight"},l.a.createElement(r.n,null)))))))))}}}]); //# sourceMappingURL=component---src-pages-snake-js-43bf58f4310d68287b25.js.map
JavaScript
MIT
RodrigoNeves95/RodrigoNeves95.github.io/docs/component---src-pages-snake-js-43bf58f4310d68287b25.js
db928b49-3e2f-45c2-b83f-fef2f5ee699f
[]
[]
import tensorflow as tf from tensorflow.keras.callbacks import ModelCheckpoint import os class TensorBoardFix(tf.keras.callbacks.TensorBoard): """ This fixes incorrect step values when using the TensorBoard callback with custom summary ops https://stackoverflow.com/questions/64642944/steps-of-tf-summary-operations-in-tensorboard-are-always-0 """ def on_train_begin(self, *args, **kwargs): super(TensorBoardFix, self).on_train_begin(*args, **kwargs) tf.summary.experimental.set_step(self._train_step) def on_test_begin(self, *args, **kwargs): super(TensorBoardFix, self).on_test_begin(*args, **kwargs) tf.summary.experimental.set_step(self._val_step) def get_callbacks(model_name='model',root_dir='logs/fit/', monitor='val_categorical_accuracy',mode='max', save_freq='epoch',save_best_only=True, ): log_dir = os.path.join(root_dir,model_name) tensorboard = TensorBoardFix(log_dir=log_dir, histogram_freq=1, update_freq=50, ) save_model = ModelCheckpoint(filepath=os.path.join(log_dir,'model.h5'), save_weights_only=False, monitor=monitor, mode=mode, save_best_only=save_best_only, save_freq=save_freq) return [tensorboard,save_model]
Python
MIT
aguirrejuan/ConvRFF/convRFF/utils/utils.py
7ef1932c-d642-4447-9c7b-101c4cd2320e
[]
[]
module LeesToolbox class UniteU < CSV_Formatter def parse case @target_type.downcase when "shopify" @source_file.each do |row| @products << Shopifile.new(row, {:source_type=>"uniteu", :data_type=>"products", :map=>@mapper.map}).process end if @merge == true merge_to_file(@products) else write_to_file(@products) end when "dynalog" @products = 'Dynalogger.new(row, {:source=>"uniteu", :data_type=>"products"}).process' when "google" @products = 'Googlefy.new(row, {:source=>"uniteu", :data_type=>"products"}).process' else @products = 'Something else' end @products end private def write_to_file(products) @target_file << headers = @mapper.headers products.each do |product| row = [] headers.each do |header| row << product[header] end @target_file << row end @target_file.close end def merge_to_file(products) headers = @mapper.headers rows = Rows.new @target_data.each do |row| rows << row.to_hash end file_name = File.absolute_path(@target_file.get_path) @target_file << headers products.each do |product| this_row = rows.by_id(product["pf_id"]) array = [] this_row.each do |k,v| if k == "variant_inventory_tracker" array << "shopify" elsif k == "published" array << "FALSE" elsif product[k].nil? array << v else array << product[k] end end @target_file << array end if !@target_file.closed? @target_file.close end end end end class CSV def get_path return @io.path end end class Rows < Array # Get appropriate row by its pf_id def by_id(id) hash = Hash.new self.each do |row| if row["pf_id"] == id hash = row end end return hash end end
Ruby
MIT
Duckfloss/lees-cli/lib/lees_toolbox/tools/csv_formatter/uniteu.rb
8fefb97d-39ef-4fe1-89d4-c6aa03714310
[]
[]
class PkgConfig < Formula desc "Manage compile and link flags for libraries" homepage "https://freedesktop.org/wiki/Software/pkg-config/" url "https://pkgconfig.freedesktop.org/releases/pkg-config-0.29.2.tar.gz" sha256 "6fc69c01688c9458a57eb9a1664c9aba372ccda420a02bf4429fe610e7e7d591" license "GPL-2.0-or-later" revision OS.mac? ? 3 : 4 livecheck do url "https://pkg-config.freedesktop.org/releases/" regex(/href=.*?pkg-config[._-]v?(\d+(?:\.\d+)+)\./i) end bottle do sha256 cellar: :any_skip_relocation, arm64_big_sur: "ffd4491f62201d14b7eca6beff954a2ab265351589cd5b3b79b8bbb414485574" sha256 cellar: :any_skip_relocation, big_sur: "0040b6ebe07f60549800b211343fd5fb3cf83c866d9f62e40f5fb2f38b71e161" sha256 cellar: :any_skip_relocation, catalina: "80f141e695f73bd058fd82e9f539dc67471666ff6800c5e280b5af7d3050f435" sha256 cellar: :any_skip_relocation, mojave: "0d14b797dba0e0ab595c9afba8ab7ef9c901b60b4f806b36580ef95ebb370232" sha256 cellar: :any_skip_relocation, high_sierra: "8c6160305abd948b8cf3e0d5c6bb0df192fa765bbb9535dda0b573cb60abbe52" sha256 cellar: :any_skip_relocation, x86_64_linux: "bc8ac04f3d8e42a748f40544f8e1b7f2471f32608f33e666e903d6108eb4dab2" end def install pc_path = %W[ #{HOMEBREW_PREFIX}/lib/pkgconfig #{HOMEBREW_PREFIX}/share/pkgconfig ] on_macos do pc_path << "/usr/local/lib/pkgconfig" pc_path << "/usr/lib/pkgconfig" pc_path << "#{HOMEBREW_LIBRARY}/Homebrew/os/mac/pkgconfig/#{MacOS.version}" end on_linux do pc_path << "#{HOMEBREW_LIBRARY}/Homebrew/os/linux/pkgconfig" end pc_path = pc_path.uniq.join(File::PATH_SEPARATOR) system "./configure", "--disable-debug", "--prefix=#{prefix}", "--disable-host-tool", "--with-internal-glib", "--with-pc-path=#{pc_path}", "--with-system-include-path=#{MacOS.sdk_path_if_needed}/usr/include" system "make" system "make", "install" end test do (testpath/"foo.pc").write <<~EOS prefix=/usr exec_prefix=${prefix} includedir=${prefix}/include libdir=${exec_prefix}/lib Name: foo Description: The foo library Version: 1.0.0 Cflags: 
-I${includedir}/foo Libs: -L${libdir} -lfoo EOS ENV["PKG_CONFIG_LIBDIR"] = testpath system bin/"pkg-config", "--validate", "foo" assert_equal "1.0.0\n", shell_output("#{bin}/pkg-config --modversion foo") assert_equal "-lfoo\n", shell_output("#{bin}/pkg-config --libs foo") assert_equal "-I/usr/include/foo\n", shell_output("#{bin}/pkg-config --cflags foo") end end
Ruby
BSD-2-Clause
abitrolly/linuxbrew-core/Formula/pkg-config.rb
e51818f0-a7a4-4fc6-9cfe-8027385350c3
[]
[]
from tests.common import reboot, port_toggle import os import time import random import logging import pprint import pytest import json import ptf.testutils as testutils import ptf.mask as mask import ptf.packet as packet from abc import ABCMeta, abstractmethod from collections import defaultdict from tests.common import reboot, port_toggle from tests.common.helpers.assertions import pytest_require from tests.common.plugins.loganalyzer.loganalyzer import LogAnalyzer, LogAnalyzerError from tests.common.fixtures.duthost_utils import backup_and_restore_config_db_on_duts from tests.common.fixtures.ptfhost_utils import copy_arp_responder_py, run_garp_service, change_mac_addresses from tests.common.utilities import wait_until from tests.common.dualtor.dual_tor_mock import mock_server_base_ip_addr from tests.common.helpers.assertions import pytest_assert logger = logging.getLogger(__name__) pytestmark = [ pytest.mark.acl, pytest.mark.disable_loganalyzer, # Disable automatic loganalyzer, since we use it for the test pytest.mark.topology("any"), pytest.mark.usefixtures('backup_and_restore_config_db_on_duts') ] BASE_DIR = os.path.dirname(os.path.realpath(__file__)) DUT_TMP_DIR = "acl_test_dir" # Keep it under home dir so it persists through reboot FILES_DIR = os.path.join(BASE_DIR, "files") TEMPLATE_DIR = os.path.join(BASE_DIR, "templates") ACL_TABLE_TEMPLATE = "acltb_table.j2" ACL_REMOVE_RULES_FILE = "acl_rules_del.json" # TODO: We really shouldn't have two separate templates for v4 and v6, need to combine them somehow ACL_RULES_FULL_TEMPLATE = { "ipv4": "acltb_test_rules.j2", "ipv6": "acltb_v6_test_rules.j2" } ACL_RULES_PART_TEMPLATES = { "ipv4": tuple("acltb_test_rules_part_{}.j2".format(i) for i in xrange(1, 3)), "ipv6": tuple("acltb_v6_test_rules_part_{}.j2".format(i) for i in xrange(1, 3)) } DEFAULT_SRC_IP = { "ipv4": "20.0.0.1", "ipv6": "60c0:a800::5" } # TODO: These routes don't match the VLAN interface from the T0 topology. 
# This needs to be addressed before we can enable the v6 tests for T0 DOWNSTREAM_DST_IP = { "ipv4": "192.168.0.253", "ipv6": "20c0:a800::2" } DOWNSTREAM_IP_TO_ALLOW = { "ipv4": "192.168.0.252", "ipv6": "20c0:a800::4" } DOWNSTREAM_IP_TO_BLOCK = { "ipv4": "192.168.0.251", "ipv6": "20c0:a800::8" } DOWNSTREAM_IP_PORT_MAP = {} UPSTREAM_DST_IP = { "ipv4": "192.168.128.1", "ipv6": "40c0:a800::2" } UPSTREAM_IP_TO_ALLOW = { "ipv4": "192.168.136.1", "ipv6": "40c0:a800::4" } UPSTREAM_IP_TO_BLOCK = { "ipv4": "192.168.144.1", "ipv6": "40c0:a800::8" } VLAN_BASE_MAC_PATTERN = "72060001{:04}" LOG_EXPECT_ACL_TABLE_CREATE_RE = ".*Created ACL table.*" LOG_EXPECT_ACL_TABLE_REMOVE_RE = ".*Successfully deleted ACL table.*" LOG_EXPECT_ACL_RULE_CREATE_RE = ".*Successfully created ACL rule.*" LOG_EXPECT_ACL_RULE_REMOVE_RE = ".*Successfully deleted ACL rule.*" PACKETS_COUNT = "packets_count" BYTES_COUNT = "bytes_count" @pytest.fixture(scope="module") def setup(duthosts, ptfhost, rand_selected_dut, rand_unselected_dut, tbinfo, ptfadapter): """Gather all required test information from DUT and tbinfo. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. tbinfo: A fixture to gather information about the testbed. Yields: A Dictionary with required test information. 
""" mg_facts = rand_selected_dut.get_extended_minigraph_facts(tbinfo) topo = tbinfo["topo"]["type"] vlan_ports = [] vlan_mac = None if topo == "t0": vlan_ports = [mg_facts["minigraph_ptf_indices"][ifname] for ifname in mg_facts["minigraph_vlans"].values()[0]["members"]] config_facts = rand_selected_dut.get_running_config_facts() vlan_table = config_facts["VLAN"] vlan_name = list(vlan_table.keys())[0] if "mac" in vlan_table[vlan_name]: vlan_mac = vlan_table[vlan_name]["mac"] # Get the list of upstream/downstream ports downstream_ports = defaultdict(list) upstream_ports = defaultdict(list) downstream_port_ids = [] upstream_port_ids = [] upstream_port_id_to_router_mac_map = {} downstream_port_id_to_router_mac_map = {} # For T0/dual ToR testbeds, we need to use the VLAN MAC to interact with downstream ports # For T1 testbeds, no VLANs are present so using the router MAC is acceptable downlink_dst_mac = vlan_mac if vlan_mac is not None else rand_selected_dut.facts["router_mac"] for interface, neighbor in mg_facts["minigraph_neighbors"].items(): port_id = mg_facts["minigraph_ptf_indices"][interface] if (topo == "t1" and "T0" in neighbor["name"]) or (topo == "t0" and "Server" in neighbor["name"]): downstream_ports[neighbor['namespace']].append(interface) downstream_port_ids.append(port_id) downstream_port_id_to_router_mac_map[port_id] = downlink_dst_mac elif (topo == "t1" and "T2" in neighbor["name"]) or (topo == "t0" and "T1" in neighbor["name"]): upstream_ports[neighbor['namespace']].append(interface) upstream_port_ids.append(port_id) upstream_port_id_to_router_mac_map[port_id] = rand_selected_dut.facts["router_mac"] # stop garp service for single tor if 'dualtor' not in tbinfo['topo']['name']: logging.info("Stopping GARP service on single tor") ptfhost.shell("supervisorctl stop garp_service", module_ignore_errors=True) # If running on a dual ToR testbed, any uplink for either ToR is an acceptable # source or destination port if 'dualtor' in tbinfo['topo']['name'] and 
rand_unselected_dut is not None: peer_mg_facts = rand_unselected_dut.get_extended_minigraph_facts(tbinfo) for interface, neighbor in peer_mg_facts['minigraph_neighbors'].items(): if (topo == "t1" and "T2" in neighbor["name"]) or (topo == "t0" and "T1" in neighbor["name"]): port_id = peer_mg_facts["minigraph_ptf_indices"][interface] upstream_port_ids.append(port_id) upstream_port_id_to_router_mac_map[port_id] = rand_unselected_dut.facts["router_mac"] # Get the list of LAGs port_channels = mg_facts["minigraph_portchannels"] # TODO: We should make this more robust (i.e. bind all active front-panel ports) acl_table_ports = defaultdict(list) if topo == "t0" or tbinfo["topo"]["name"] in ("t1", "t1-lag"): for namespace, port in downstream_ports.iteritems(): acl_table_ports[namespace] += port # In multi-asic we need config both in host and namespace. if namespace: acl_table_ports[''] += port if topo == "t0" or tbinfo["topo"]["name"] in ("t1-lag", "t1-64-lag", "t1-64-lag-clet"): for k, v in port_channels.iteritems(): acl_table_ports[v['namespace']].append(k) # In multi-asic we need config both in host and namespace. if v['namespace']: acl_table_ports[''].append(k) else: for namespace, port in upstream_ports.iteritems(): acl_table_ports[namespace] += port # In multi-asic we need config both in host and namespace. 
if namespace: acl_table_ports[''] += port dest_mac_mapping = { "downlink->uplink": downstream_port_id_to_router_mac_map, "uplink->downlink": upstream_port_id_to_router_mac_map } setup_information = { "destination_mac": dest_mac_mapping, "downstream_port_ids": downstream_port_ids, "upstream_port_ids": upstream_port_ids, "acl_table_ports": acl_table_ports, "vlan_ports": vlan_ports, "topo": topo, "vlan_mac": vlan_mac } logger.info("Gathered variables for ACL test:\n{}".format(pprint.pformat(setup_information))) logger.info("Creating temporary folder \"{}\" for ACL test".format(DUT_TMP_DIR)) for duthost in duthosts: duthost.command("mkdir -p {}".format(DUT_TMP_DIR)) yield setup_information logger.info("Removing temporary directory \"{}\"".format(DUT_TMP_DIR)) for duthost in duthosts: duthost.command("rm -rf {}".format(DUT_TMP_DIR)) @pytest.fixture(scope="module", params=["ipv4", "ipv6"]) def ip_version(request, tbinfo, duthosts, rand_one_dut_hostname): if tbinfo["topo"]["type"] == "t0" and request.param == "ipv6": pytest.skip("IPV6 ACL test not currently supported on t0 testbeds") return request.param @pytest.fixture(scope="module") def populate_vlan_arp_entries(setup, ptfhost, duthosts, rand_one_dut_hostname, ip_version): """Set up the ARP responder utility in the PTF container.""" duthost = duthosts[rand_one_dut_hostname] if setup["topo"] != "t0": def noop(): pass yield noop return # Don't fall through to t0 case addr_list = [DOWNSTREAM_DST_IP[ip_version], DOWNSTREAM_IP_TO_ALLOW[ip_version], DOWNSTREAM_IP_TO_BLOCK[ip_version]] vlan_host_map = defaultdict(dict) for i in range(len(addr_list)): mac = VLAN_BASE_MAC_PATTERN.format(i) port = random.choice(setup["vlan_ports"]) addr = addr_list[i] vlan_host_map[port][str(addr)] = mac DOWNSTREAM_IP_PORT_MAP[addr] = port arp_responder_conf = {} for port in vlan_host_map: arp_responder_conf['eth{}'.format(port)] = vlan_host_map[port] with open("/tmp/from_t1.json", "w") as ar_config: json.dump(arp_responder_conf, ar_config) 
ptfhost.copy(src="/tmp/from_t1.json", dest="/tmp/from_t1.json") ptfhost.host.options["variable_manager"].extra_vars.update({"arp_responder_args": "-e"}) ptfhost.template(src="templates/arp_responder.conf.j2", dest="/etc/supervisor/conf.d/arp_responder.conf") ptfhost.shell("supervisorctl reread && supervisorctl update") ptfhost.shell("supervisorctl restart arp_responder") def populate_arp_table(): for dut in duthosts: dut.command("sonic-clear fdb all") dut.command("sonic-clear arp") # Wait some time to ensure the async call of clear is completed time.sleep(20) for addr in addr_list: dut.command("ping {} -c 3".format(addr), module_ignore_errors=True) populate_arp_table() yield populate_arp_table logging.info("Stopping ARP responder") ptfhost.shell("supervisorctl stop arp_responder") duthost.command("sonic-clear fdb all") duthost.command("sonic-clear arp") @pytest.fixture(scope="module", params=["ingress", "egress"]) def stage(request, duthosts, rand_one_dut_hostname): """Parametrize tests for Ingress/Egress stage testing. Args: request: A fixture to interact with Pytest data. duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. Returns: str: The ACL stage to be tested. 
""" duthost = duthosts[rand_one_dut_hostname] pytest_require( request.param == "ingress" or duthost.facts["asic_type"] not in ("broadcom"), "Egress ACLs are not currently supported on \"{}\" ASICs".format(duthost.facts["asic_type"]) ) return request.param def create_or_remove_acl_table(duthost, acl_table_config, setup, op): for sonic_host_or_asic_inst in duthost.get_sonic_host_and_frontend_asic_instance(): namespace = sonic_host_or_asic_inst.namespace if hasattr(sonic_host_or_asic_inst, 'namespace') else '' if op == "add": logger.info("Creating ACL table: \"{}\" in namespace {} on device {}".format(acl_table_config["table_name"], namespace, duthost)) sonic_host_or_asic_inst.command( "config acl add table {} {} -s {} -p {}".format( acl_table_config["table_name"], acl_table_config["table_type"], acl_table_config["table_stage"], ",".join(setup["acl_table_ports"][namespace]), ) ) else: logger.info("Removing ACL table \"{}\" in namespace {} on device {}".format(acl_table_config["table_name"], namespace, duthost)) sonic_host_or_asic_inst.command("config acl remove table {}".format(acl_table_config["table_name"])) @pytest.fixture(scope="module") def acl_table(duthosts, rand_one_dut_hostname, setup, stage, ip_version): """Apply ACL table configuration and remove after tests. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. setup: Parameters for the ACL tests. stage: The ACL stage under test. ip_version: The IP version under test Yields: The ACL table configuration. 
""" table_name = "DATA_{}_{}_TEST".format(stage.upper(), ip_version.upper()) acl_table_config = { "table_name": table_name, "table_ports": ",".join(setup["acl_table_ports"]['']), "table_stage": stage, "table_type": "L3" if ip_version == "ipv4" else "L3V6" } logger.info("Generated ACL table configuration:\n{}".format(pprint.pformat(acl_table_config))) dut_to_analyzer_map = {} for duthost in duthosts: loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix="acl") loganalyzer.load_common_config() dut_to_analyzer_map[duthost] = loganalyzer try: loganalyzer.expect_regex = [LOG_EXPECT_ACL_TABLE_CREATE_RE] with loganalyzer: create_or_remove_acl_table(duthost, acl_table_config, setup, "add") except LogAnalyzerError as err: # Cleanup Config DB if table creation failed logger.error("ACL table creation failed, attempting to clean-up...") create_or_remove_acl_table(duthost, acl_table_config, setup, "remove") raise err try: yield acl_table_config finally: for duthost, loganalyzer in dut_to_analyzer_map.items(): loganalyzer.expect_regex = [LOG_EXPECT_ACL_TABLE_REMOVE_RE] with loganalyzer: create_or_remove_acl_table(duthost, acl_table_config, setup, "remove") class BaseAclTest(object): """Base class for testing ACL rules. Subclasses must provide `setup_rules` method to prepare ACL rules for traffic testing. They can optionally override `teardown_rules`, which will otherwise remove the rules by applying an empty configuration file. """ __metaclass__ = ABCMeta ACL_COUNTERS_UPDATE_INTERVAL_SECS = 10 @abstractmethod def setup_rules(self, dut, acl_table, ip_version): """Setup ACL rules for testing. Args: dut: The DUT having ACLs applied. acl_table: Configuration info for the ACL table. """ pass def post_setup_hook(self, dut, localhost, populate_vlan_arp_entries, tbinfo): """Perform actions after rules have been applied. Args: dut: The DUT having ACLs applied. localhost: The host from which tests are run. 
populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces. """ pass def teardown_rules(self, dut): """Tear down ACL rules once the tests have completed. Args: dut: The DUT having ACLs applied. """ logger.info("Finished with tests, removing all ACL rules...") # Copy empty rules configuration dut.copy(src=os.path.join(FILES_DIR, ACL_REMOVE_RULES_FILE), dest=DUT_TMP_DIR) remove_rules_dut_path = os.path.join(DUT_TMP_DIR, ACL_REMOVE_RULES_FILE) # Remove the rules logger.info("Applying \"{}\"".format(remove_rules_dut_path)) dut.command("config acl update full {}".format(remove_rules_dut_path)) @pytest.fixture(scope="class", autouse=True) def acl_rules(self, duthosts, localhost, setup, acl_table, populate_vlan_arp_entries, tbinfo, ip_version): """Setup/teardown ACL rules for the current set of tests. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. localhost: The host from which tests are run. setup: Parameters for the ACL tests. acl_table: Configuration info for the ACL table. populate_vlan_arp_entries: A function to populate ARP/FDB tables for VLAN interfaces. """ dut_to_analyzer_map = {} for duthost in duthosts: loganalyzer = LogAnalyzer(ansible_host=duthost, marker_prefix="acl_rules") loganalyzer.load_common_config() dut_to_analyzer_map[duthost] = loganalyzer try: loganalyzer.expect_regex = [LOG_EXPECT_ACL_RULE_CREATE_RE] with loganalyzer: self.setup_rules(duthost, acl_table, ip_version) self.post_setup_hook(duthost, localhost, populate_vlan_arp_entries, tbinfo) assert self.check_rule_counters(duthost), "Rule counters should be ready!" 
except LogAnalyzerError as err: # Cleanup Config DB if rule creation failed logger.error("ACL rule application failed, attempting to clean-up...") self.teardown_rules(duthost) raise err try: yield finally: for duthost, loganalyzer in dut_to_analyzer_map.items(): loganalyzer.expect_regex = [LOG_EXPECT_ACL_RULE_REMOVE_RE] with loganalyzer: logger.info("Removing ACL rules") self.teardown_rules(duthost) @pytest.yield_fixture(scope="class", autouse=True) def counters_sanity_check(self, duthosts, acl_rules, acl_table): """Validate that the counters for each rule in the rules list increased as expected. This fixture yields a list of rule IDs. The test case should add on to this list if it is required to check the rule for increased counters. After the test cases pass, the fixture will wait for the ACL counters to update and then check if the counters for each rule in the list were increased. Args: duthosts: All DUTs belong to the testbed. rand_one_dut_hostname: hostname of a random chosen dut to run test. acl_rules: Fixture that sets up the ACL rules. acl_table: Fixture that sets up the ACL table. 
""" acl_facts = defaultdict(dict) table_name = acl_table["table_name"] for duthost in duthosts: acl_facts[duthost]['before']= duthost.acl_facts()["ansible_facts"]["ansible_acl_facts"][table_name]["rules"] rule_list = [] yield rule_list if not rule_list: return # Wait for orchagent to update the ACL counters time.sleep(self.ACL_COUNTERS_UPDATE_INTERVAL_SECS) for duthost in duthosts: acl_facts[duthost]['after']= duthost.acl_facts()["ansible_facts"]["ansible_acl_facts"][table_name]["rules"] for duthost in duthosts: assert len(acl_facts[duthost]['before']) == len(acl_facts[duthost]['after']) for rule in rule_list: rule = "RULE_{}".format(rule) counters_before = { PACKETS_COUNT: 0, BYTES_COUNT: 0 } for duthost in duthosts: counters_before[PACKETS_COUNT] += acl_facts[duthost]['before'][rule][PACKETS_COUNT] counters_before[BYTES_COUNT] += acl_facts[duthost]['before'][rule][BYTES_COUNT] logger.info("Counters for ACL rule \"{}\" before traffic:\n{}" .format(rule, pprint.pformat(counters_before))) counters_after = { PACKETS_COUNT: 0, BYTES_COUNT: 0 } for duthost in duthosts: counters_after[PACKETS_COUNT] += acl_facts[duthost]['after'][rule][PACKETS_COUNT] counters_after[BYTES_COUNT] += acl_facts[duthost]['after'][rule][BYTES_COUNT] logger.info("Counters for ACL rule \"{}\" after traffic:\n{}" .format(rule, pprint.pformat(counters_after))) assert counters_after[PACKETS_COUNT] > counters_before[PACKETS_COUNT] assert counters_after[BYTES_COUNT] > counters_before[BYTES_COUNT] @pytest.fixture(params=["downlink->uplink", "uplink->downlink"]) def direction(self, request): """Parametrize test based on direction of traffic.""" return request.param def check_rule_counters(self, duthost): logger.info('Wait all rule counters are ready') return wait_until(60, 2, 0, self.check_rule_counters_internal, duthost) def check_rule_counters_internal(self, duthost): for asic_id in duthost.get_frontend_asic_ids(): res = duthost.asic_instance(asic_id).command('aclshow -a') num_of_lines = 
len(res['stdout'].split('\n')) if num_of_lines <= 2 or 'N/A' in res['stdout']: return False return True @pytest.fixture(autouse=True) def get_src_port(self, setup, direction): """Get a source port for the current test.""" src_ports = setup["downstream_port_ids"] if direction == "downlink->uplink" else setup["upstream_port_ids"] src_port = random.choice(src_ports) logger.info("Selected source port {}".format(src_port)) self.src_port = src_port def get_dst_ports(self, setup, direction): """Get the set of possible destination ports for the current test.""" return setup["upstream_port_ids"] if direction == "downlink->uplink" else setup["downstream_port_ids"] def get_dst_ip(self, direction, ip_version): """Get the default destination IP for the current test.""" return UPSTREAM_DST_IP[ip_version] if direction == "downlink->uplink" else DOWNSTREAM_DST_IP[ip_version] def tcp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, proto=None, sport=0x4321, dport=0x51, flags=None): """Generate a TCP packet for testing.""" src_ip = src_ip or DEFAULT_SRC_IP[ip_version] dst_ip = dst_ip or self.get_dst_ip(direction, ip_version) if ip_version == "ipv4": pkt = testutils.simple_tcp_packet( eth_dst=setup["destination_mac"][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, tcp_sport=sport, tcp_dport=dport, ip_ttl=64 ) if proto: pkt["IP"].proto = proto else: pkt = testutils.simple_tcpv6_packet( eth_dst=setup["destination_mac"][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, tcp_sport=sport, tcp_dport=dport, ipv6_hlim=64 ) if proto: pkt["IPv6"].nh = proto if flags: pkt["TCP"].flags = flags return pkt def udp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, sport=1234, dport=80): """Generate a UDP packet for testing.""" src_ip = src_ip or DEFAULT_SRC_IP[ip_version] dst_ip = dst_ip or self.get_dst_ip(direction, ip_version) if 
ip_version == "ipv4": return testutils.simple_udp_packet( eth_dst=setup["destination_mac"][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, udp_sport=sport, udp_dport=dport, ip_ttl=64 ) else: return testutils.simple_udpv6_packet( eth_dst=setup["destination_mac"][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, udp_sport=sport, udp_dport=dport, ipv6_hlim=64 ) def icmp_packet(self, setup, direction, ptfadapter, ip_version, src_ip=None, dst_ip=None, icmp_type=8, icmp_code=0): """Generate an ICMP packet for testing.""" src_ip = src_ip or DEFAULT_SRC_IP[ip_version] dst_ip = dst_ip or self.get_dst_ip(direction, ip_version) if ip_version == "ipv4": return testutils.simple_icmp_packet( eth_dst=setup["destination_mac"][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ip_dst=dst_ip, ip_src=src_ip, icmp_type=icmp_type, icmp_code=icmp_code, ip_ttl=64, ) else: return testutils.simple_icmpv6_packet( eth_dst=setup["destination_mac"][direction][self.src_port], eth_src=ptfadapter.dataplane.get_mac(0, 0), ipv6_dst=dst_ip, ipv6_src=src_ip, icmp_type=icmp_type, icmp_code=icmp_code, ipv6_hlim=64, ) def expected_mask_routed_packet(self, pkt, ip_version): """Generate the expected mask for a routed packet.""" exp_pkt = pkt.copy() exp_pkt = mask.Mask(exp_pkt) exp_pkt.set_do_not_care_scapy(packet.Ether, "dst") exp_pkt.set_do_not_care_scapy(packet.Ether, "src") if ip_version == "ipv4": exp_pkt.set_do_not_care_scapy(packet.IP, "chksum") # In multi-asic we cannot determine this so ignore. exp_pkt.set_do_not_care_scapy(packet.IP, 'ttl') else: # In multi-asic we cannot determine this so ignore. 
exp_pkt.set_do_not_care_scapy(packet.IPv6, 'hlim') return exp_pkt def test_ingress_unmatched_blocked(self, setup, direction, ptfadapter, ip_version, stage): """Verify that unmatched packets are dropped for ingress.""" if stage == "egress": pytest.skip("Only run for ingress") pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) def test_egress_unmatched_forwarded(self, setup, direction, ptfadapter, ip_version, stage): """Verify that default egress rule allow all traffics""" if stage == "ingress": pytest.skip("Only run for egress") pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) def test_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward a packet on source IP.""" src_ip = "20.0.0.2" if ip_version == "ipv4" else "60c0:a800::6" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(1) def test_rules_priority_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we respect rule priorites in the forwarding case.""" src_ip = "20.0.0.7" if ip_version == "ipv4" else "60c0:a800::7" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(20) def test_rules_priority_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we respect rule priorites in the drop case.""" src_ip = "20.0.0.3" if ip_version == "ipv4" else "60c0:a800::4" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, 
ip_version) counters_sanity_check.append(7) def test_dest_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward a packet on destination IP.""" dst_ip = DOWNSTREAM_IP_TO_ALLOW[ip_version] if direction == "uplink->downlink" else UPSTREAM_IP_TO_ALLOW[ip_version] pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dst_ip=dst_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(2 if direction == "uplink->downlink" else 3) def test_dest_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop a packet on destination IP.""" dst_ip = DOWNSTREAM_IP_TO_BLOCK[ip_version] if direction == "uplink->downlink" else UPSTREAM_IP_TO_BLOCK[ip_version] pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dst_ip=dst_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(15 if direction == "uplink->downlink" else 16) def test_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop a packet on source IP.""" src_ip = "20.0.0.6" if ip_version == "ipv4" else "60c0:a800::3" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(14) def test_udp_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward a UDP packet on source IP.""" src_ip = "20.0.0.4" if ip_version == "ipv4" else "60c0:a800::8" pkt = self.udp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(13) def test_udp_source_ip_match_dropped(self, setup, 
direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop a UDP packet on source IP.""" src_ip = "20.0.0.8" if ip_version == "ipv4" else "60c0:a800::2" pkt = self.udp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(26) def test_icmp_source_ip_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop an ICMP packet on source IP.""" src_ip = "20.0.0.8" if ip_version == "ipv4" else "60c0:a800::2" pkt = self.icmp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(25) def test_icmp_source_ip_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward an ICMP packet on source IP.""" src_ip = "20.0.0.4" if ip_version == "ipv4" else "60c0:a800::8" pkt = self.icmp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(12) def test_l4_dport_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward on L4 destination port.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dport=0x1217) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(5) def test_l4_sport_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward on L4 source port.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, sport=0x120D) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(4) def 
test_l4_dport_range_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward on a range of L4 destination ports.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dport=0x123B) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(11) def test_l4_sport_range_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward on a range of L4 source ports.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, sport=0x123A) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(10) def test_l4_dport_range_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on a range of L4 destination ports.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dport=0x127B) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(22) def test_l4_sport_range_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on a range of L4 source ports.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, sport=0x1271) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(17) def test_ip_proto_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and forward on the IP protocol.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, proto=0x7E) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(5) def test_tcp_flags_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and 
forward on the TCP flags.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, flags=0x1B) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, False, ip_version) counters_sanity_check.append(6) def test_l4_dport_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on L4 destination port.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, dport=0x127B) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(22) def test_l4_sport_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on L4 source port.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, sport=0x1271) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(10) def test_ip_proto_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on the IP protocol.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, proto=0x7F) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(18) def test_tcp_flags_match_dropped(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on the TCP flags.""" pkt = self.tcp_packet(setup, direction, ptfadapter, ip_version, flags=0x24) self._verify_acl_traffic(setup, direction, ptfadapter, pkt, True, ip_version) counters_sanity_check.append(5) def test_icmp_match_forwarded(self, setup, direction, ptfadapter, counters_sanity_check, ip_version): """Verify that we can match and drop on the TCP flags.""" src_ip = "20.0.0.10" if ip_version == "ipv4" else "60c0:a800::10" pkt = self.icmp_packet(setup, direction, ptfadapter, ip_version, src_ip=src_ip, icmp_type=3, icmp_code=1) self._verify_acl_traffic(setup, direction, 
ptfadapter, pkt, False, ip_version) counters_sanity_check.append(29) def _verify_acl_traffic(self, setup, direction, ptfadapter, pkt, dropped, ip_version): exp_pkt = self.expected_mask_routed_packet(pkt, ip_version) if ip_version == "ipv4": downstream_dst_port = DOWNSTREAM_IP_PORT_MAP.get(pkt[packet.IP].dst) else: downstream_dst_port = DOWNSTREAM_IP_PORT_MAP.get(pkt[packet.IPv6].dst) ptfadapter.dataplane.flush() testutils.send(ptfadapter, self.src_port, pkt) if direction == "uplink->downlink" and downstream_dst_port: if dropped: testutils.verify_no_packet(ptfadapter, exp_pkt, downstream_dst_port) else: testutils.verify_packet(ptfadapter, exp_pkt, downstream_dst_port) else: if dropped: testutils.verify_no_packet_any(ptfadapter, exp_pkt, ports=self.get_dst_ports(setup, direction)) else: testutils.verify_packet_any_port(ptfadapter, exp_pkt, ports=self.get_dst_ports(setup, direction), timeout=20) class TestBasicAcl(BaseAclTest): """Test Basic functionality of ACL rules (i.e. setup with full update on a running device).""" def setup_rules(self, dut, acl_table, ip_version): """Setup ACL rules for testing. Args: dut: The DUT having ACLs applied. acl_table: Configuration info for the ACL table. """ table_name = acl_table["table_name"] dut.host.options["variable_manager"].extra_vars.update({"acl_table_name": table_name}) logger.info("Generating basic ACL rules config for ACL table \"{}\" on {}".format(table_name, dut)) dut_conf_file_path = os.path.join(DUT_TMP_DIR, "acl_rules_{}.json".format(table_name)) dut.template(src=os.path.join(TEMPLATE_DIR, ACL_RULES_FULL_TEMPLATE[ip_version]), dest=dut_conf_file_path) logger.info("Applying ACL rules config \"{}\"".format(dut_conf_file_path)) dut.command("config acl update full {}".format(dut_conf_file_path)) class TestIncrementalAcl(BaseAclTest): """Test ACL rule functionality with an incremental configuration. Verify that everything still works as expected when an ACL configuration is applied in multiple parts. 
""" def setup_rules(self, dut, acl_table, ip_version): """Setup ACL rules for testing. Args: dut: The DUT having ACLs applied. acl_table: Configuration info for the ACL table. """ table_name = acl_table["table_name"] dut.host.options["variable_manager"].extra_vars.update({"acl_table_name": table_name}) logger.info("Generating incremental ACL rules config for ACL table \"{}\"" .format(table_name)) for part, config_file in enumerate(ACL_RULES_PART_TEMPLATES[ip_version]): dut_conf_file_path = os.path.join(DUT_TMP_DIR, "acl_rules_{}_part_{}.json".format(table_name, part)) dut.template(src=os.path.join(TEMPLATE_DIR, config_file), dest=dut_conf_file_path) logger.info("Applying ACL rules config \"{}\"".format(dut_conf_file_path)) dut.command("config acl update incremental {}".format(dut_conf_file_path)) @pytest.mark.reboot class TestAclWithReboot(TestBasicAcl): """Test ACL rule functionality with a reboot. Verify that configuration persists correctly after reboot and is applied properly upon startup. """ def post_setup_hook(self, dut, localhost, populate_vlan_arp_entries, tbinfo): """Save configuration and reboot after rules are applied. Args: dut: The DUT having ACLs applied. localhost: The host from which tests are run. populate_vlan_arp_entries: A fixture to populate ARP/FDB tables for VLAN interfaces. """ dut.command("config save -y") reboot(dut, localhost, wait=240) # We need some additional delay on e1031 if dut.facts["platform"] == "x86_64-cel_e1031-r0": time.sleep(240) populate_vlan_arp_entries() @pytest.mark.port_toggle class TestAclWithPortToggle(TestBasicAcl): """Test ACL rule functionality after toggling ports. Verify that ACLs still function as expected after links flap. """ def post_setup_hook(self, dut, localhost, populate_vlan_arp_entries, tbinfo): """Toggle ports after rules are applied. Args: dut: The DUT having ACLs applied. localhost: The host from which tests are run. populate_vlan_arp_entries: A fixture to populate ARP/FDB tables for VLAN interfaces. 
""" port_toggle(dut, tbinfo) populate_vlan_arp_entries()
Python
Apache-2.0
KostiantynYarovyiBf/sonic-mgmt/tests/acl/test_acl.py
2cb65a8f-815d-4937-9299-fb6c03001846
[{"tag": "IP_ADDRESS", "value": "192.168.136.1", "start": 2476, "end": 2489, "context": ":a800::2\"\n}\nUPSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.168.136.1\",\n \"ipv6\": \"40c0:a800::4\"\n}\nUPSTREAM_IP_TO_BLO"}, {"tag": "IP_ADDRESS", "value": "20c0:a800::4", "start": 2229, "end": 2241, "context": "LOW = {\n \"ipv4\": \"192.168.0.252\",\n \"ipv6\": \"20c0:a800::4\"\n}\nDOWNSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.16"}, {"tag": "IP_ADDRESS", "value": "20.0.0.8", "start": 30801, "end": 30809, "context": "p a UDP packet on source IP.\"\"\"\n src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n "}, {"tag": "IP_ADDRESS", "value": "40c0:a800::2", "start": 2422, "end": 2434, "context": "_IP = {\n \"ipv4\": \"192.168.128.1\",\n \"ipv6\": \"40c0:a800::2\"\n}\nUPSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.168."}, {"tag": "IP_ADDRESS", "value": "192.168.128.1", "start": 2393, "end": 2406, "context": "P_PORT_MAP = {}\n\nUPSTREAM_DST_IP = {\n \"ipv4\": \"192.168.128.1\",\n \"ipv6\": \"40c0:a800::2\"\n}\nUPSTREAM_IP_TO_ALL"}, {"tag": "IP_ADDRESS", "value": "20.0.0.8", "start": 31280, "end": 31288, "context": "an ICMP packet on source IP.\"\"\"\n src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n "}, {"tag": "IP_ADDRESS", "value": "192.168.0.252", "start": 2200, "end": 2213, "context": "800::2\"\n}\nDOWNSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.168.0.252\",\n \"ipv6\": \"20c0:a800::4\"\n}\nDOWNSTREAM_IP_TO_B"}, {"tag": "IP_ADDRESS", "value": "20c0:a800::8", "start": 2314, "end": 2326, "context": "OCK = {\n \"ipv4\": \"192.168.0.251\",\n \"ipv6\": \"20c0:a800::8\"\n}\n\nDOWNSTREAM_IP_PORT_MAP = {}\n\nUPSTREAM_DST_IP "}, {"tag": "IP_ADDRESS", "value": "60c0:a800::10", "start": 37065, "end": 37078, "context": "rc_ip = \"20.0.0.10\" if ip_version == \"ipv4\" else \"60c0:a800::10\"\n pkt = self.icmp_packet(setup, direction,"}, {"tag": "IP_ADDRESS", "value": "40c0:a800::4", "start": 2505, "end": 2517, "context": "LOW = {\n \"ipv4\": 
\"192.168.136.1\",\n \"ipv6\": \"40c0:a800::4\"\n}\nUPSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.168."}, {"tag": "IP_ADDRESS", "value": "60c0:a800::2", "start": 30841, "end": 30853, "context": "src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n pkt = self.udp_packet(setup, direction, "}, {"tag": "IP_ADDRESS", "value": "60c0:a800::2", "start": 31320, "end": 31332, "context": "src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n pkt = self.icmp_packet(setup, direction,"}, {"tag": "IP_ADDRESS", "value": "20.0.0.4", "start": 31765, "end": 31773, "context": "an ICMP packet on source IP.\"\"\"\n src_ip = \"20.0.0.4\" if ip_version == \"ipv4\" else \"60c0:a800::8\"\n "}, {"tag": "IP_ADDRESS", "value": "192.168.144.1", "start": 2559, "end": 2572, "context": ":a800::4\"\n}\nUPSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.168.144.1\",\n \"ipv6\": \"40c0:a800::8\"\n}\n\nVLAN_BASE_MAC_PAT"}, {"tag": "IP_ADDRESS", "value": "60c0:a800::5", "start": 1918, "end": 1930, "context": "T_SRC_IP = {\n \"ipv4\": \"20.0.0.1\",\n \"ipv6\": \"60c0:a800::5\"\n}\n\n\n# TODO: These routes don't match the VLAN in"}, {"tag": "IP_ADDRESS", "value": "192.168.0.253", "start": 2115, "end": 2128, "context": "6 tests for T0\nDOWNSTREAM_DST_IP = {\n \"ipv4\": \"192.168.0.253\",\n \"ipv6\": \"20c0:a800::2\"\n}\nDOWNSTREAM_IP_TO_A"}, {"tag": "IP_ADDRESS", "value": "192.168.0.251", "start": 2285, "end": 2298, "context": "800::4\"\n}\nDOWNSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.168.0.251\",\n \"ipv6\": \"20c0:a800::8\"\n}\n\nDOWNSTREAM_IP_POR"}, {"tag": "IP_ADDRESS", "value": "40c0:a800::8", "start": 2588, "end": 2600, "context": "OCK = {\n \"ipv4\": \"192.168.144.1\",\n \"ipv6\": \"40c0:a800::8\"\n}\n\nVLAN_BASE_MAC_PATTERN = \"72060001{:04}\"\n\nLOG_"}, {"tag": "IP_ADDRESS", "value": "20.0.0.1", "start": 1894, "end": 1902, "context": " xrange(1, 3))\n}\n\nDEFAULT_SRC_IP = {\n \"ipv4\": \"20.0.0.1\",\n \"ipv6\": \"60c0:a800::5\"\n}\n\n\n# TODO: These ro"}, {"tag": 
"IP_ADDRESS", "value": "60c0:a800::8", "start": 31805, "end": 31817, "context": "src_ip = \"20.0.0.4\" if ip_version == \"ipv4\" else \"60c0:a800::8\"\n pkt = self.icmp_packet(setup, direction,"}, {"tag": "IP_ADDRESS", "value": "20c0:a800::2", "start": 2144, "end": 2156, "context": "_IP = {\n \"ipv4\": \"192.168.0.253\",\n \"ipv6\": \"20c0:a800::2\"\n}\nDOWNSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.16"}, {"tag": "IP_ADDRESS", "value": "20.0.0.10", "start": 37024, "end": 37033, "context": "h and drop on the TCP flags.\"\"\"\n src_ip = \"20.0.0.10\" if ip_version == \"ipv4\" else \"60c0:a800::10\"\n "}]
[{"tag": "IP_ADDRESS", "value": "192.168.136.1", "start": 2476, "end": 2489, "context": ":a800::2\"\n}\nUPSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.168.136.1\",\n \"ipv6\": \"40c0:a800::4\"\n}\nUPSTREAM_IP_TO_BLO"}, {"tag": "IP_ADDRESS", "value": "20c0:a800::4", "start": 2229, "end": 2241, "context": "LOW = {\n \"ipv4\": \"192.168.0.252\",\n \"ipv6\": \"20c0:a800::4\"\n}\nDOWNSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.16"}, {"tag": "IP_ADDRESS", "value": "20.0.0.8", "start": 30801, "end": 30809, "context": "p a UDP packet on source IP.\"\"\"\n src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n "}, {"tag": "IP_ADDRESS", "value": "40c0:a800::2", "start": 2422, "end": 2434, "context": "_IP = {\n \"ipv4\": \"192.168.128.1\",\n \"ipv6\": \"40c0:a800::2\"\n}\nUPSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.168."}, {"tag": "IP_ADDRESS", "value": "192.168.128.1", "start": 2393, "end": 2406, "context": "P_PORT_MAP = {}\n\nUPSTREAM_DST_IP = {\n \"ipv4\": \"192.168.128.1\",\n \"ipv6\": \"40c0:a800::2\"\n}\nUPSTREAM_IP_TO_ALL"}, {"tag": "IP_ADDRESS", "value": "20.0.0.8", "start": 31280, "end": 31288, "context": "an ICMP packet on source IP.\"\"\"\n src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n "}, {"tag": "IP_ADDRESS", "value": "192.168.0.252", "start": 2200, "end": 2213, "context": "800::2\"\n}\nDOWNSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.168.0.252\",\n \"ipv6\": \"20c0:a800::4\"\n}\nDOWNSTREAM_IP_TO_B"}, {"tag": "IP_ADDRESS", "value": "20c0:a800::8", "start": 2314, "end": 2326, "context": "OCK = {\n \"ipv4\": \"192.168.0.251\",\n \"ipv6\": \"20c0:a800::8\"\n}\n\nDOWNSTREAM_IP_PORT_MAP = {}\n\nUPSTREAM_DST_IP "}, {"tag": "IP_ADDRESS", "value": "60c0:a800::10", "start": 37065, "end": 37078, "context": "rc_ip = \"20.0.0.10\" if ip_version == \"ipv4\" else \"60c0:a800::10\"\n pkt = self.icmp_packet(setup, direction,"}, {"tag": "IP_ADDRESS", "value": "40c0:a800::4", "start": 2505, "end": 2517, "context": "LOW = {\n \"ipv4\": 
\"192.168.136.1\",\n \"ipv6\": \"40c0:a800::4\"\n}\nUPSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.168."}, {"tag": "IP_ADDRESS", "value": "60c0:a800::2", "start": 30841, "end": 30853, "context": "src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n pkt = self.udp_packet(setup, direction, "}, {"tag": "IP_ADDRESS", "value": "60c0:a800::2", "start": 31320, "end": 31332, "context": "src_ip = \"20.0.0.8\" if ip_version == \"ipv4\" else \"60c0:a800::2\"\n pkt = self.icmp_packet(setup, direction,"}, {"tag": "IP_ADDRESS", "value": "20.0.0.4", "start": 31765, "end": 31773, "context": "an ICMP packet on source IP.\"\"\"\n src_ip = \"20.0.0.4\" if ip_version == \"ipv4\" else \"60c0:a800::8\"\n "}, {"tag": "IP_ADDRESS", "value": "192.168.144.1", "start": 2559, "end": 2572, "context": ":a800::4\"\n}\nUPSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.168.144.1\",\n \"ipv6\": \"40c0:a800::8\"\n}\n\nVLAN_BASE_MAC_PAT"}, {"tag": "IP_ADDRESS", "value": "60c0:a800::5", "start": 1918, "end": 1930, "context": "T_SRC_IP = {\n \"ipv4\": \"20.0.0.1\",\n \"ipv6\": \"60c0:a800::5\"\n}\n\n\n# TODO: These routes don't match the VLAN in"}, {"tag": "IP_ADDRESS", "value": "192.168.0.253", "start": 2115, "end": 2128, "context": "6 tests for T0\nDOWNSTREAM_DST_IP = {\n \"ipv4\": \"192.168.0.253\",\n \"ipv6\": \"20c0:a800::2\"\n}\nDOWNSTREAM_IP_TO_A"}, {"tag": "IP_ADDRESS", "value": "192.168.0.251", "start": 2285, "end": 2298, "context": "800::4\"\n}\nDOWNSTREAM_IP_TO_BLOCK = {\n \"ipv4\": \"192.168.0.251\",\n \"ipv6\": \"20c0:a800::8\"\n}\n\nDOWNSTREAM_IP_POR"}, {"tag": "IP_ADDRESS", "value": "40c0:a800::8", "start": 2588, "end": 2600, "context": "OCK = {\n \"ipv4\": \"192.168.144.1\",\n \"ipv6\": \"40c0:a800::8\"\n}\n\nVLAN_BASE_MAC_PATTERN = \"72060001{:04}\"\n\nLOG_"}, {"tag": "IP_ADDRESS", "value": "20.0.0.1", "start": 1894, "end": 1902, "context": " xrange(1, 3))\n}\n\nDEFAULT_SRC_IP = {\n \"ipv4\": \"20.0.0.1\",\n \"ipv6\": \"60c0:a800::5\"\n}\n\n\n# TODO: These ro"}, {"tag": 
"IP_ADDRESS", "value": "60c0:a800::8", "start": 31805, "end": 31817, "context": "src_ip = \"20.0.0.4\" if ip_version == \"ipv4\" else \"60c0:a800::8\"\n pkt = self.icmp_packet(setup, direction,"}, {"tag": "IP_ADDRESS", "value": "20c0:a800::2", "start": 2144, "end": 2156, "context": "_IP = {\n \"ipv4\": \"192.168.0.253\",\n \"ipv6\": \"20c0:a800::2\"\n}\nDOWNSTREAM_IP_TO_ALLOW = {\n \"ipv4\": \"192.16"}, {"tag": "IP_ADDRESS", "value": "20.0.0.10", "start": 37024, "end": 37033, "context": "h and drop on the TCP flags.\"\"\"\n src_ip = \"20.0.0.10\" if ip_version == \"ipv4\" else \"60c0:a800::10\"\n "}]
/* TEMPLATE GENERATED TESTCASE FILE Filename: CWE23_Relative_Path_Traversal__wchar_t_listen_socket_w32CreateFile_18.cpp Label Definition File: CWE23_Relative_Path_Traversal.label.xml Template File: sources-sink-18.tmpl.cpp */ /* * @description * CWE: 23 Relative Path Traversal * BadSource: listen_socket Read data using a listen socket (server side) * GoodSource: Use a fixed file name * Sink: w32CreateFile * BadSink : Open the file named in data using CreateFile() * Flow Variant: 18 Control flow: goto statements * * */ #include "std_testcase.h" #ifdef _WIN32 #define BASEPATH L"c:\\temp\\" #else #include <wchar.h> #define BASEPATH L"/tmp/" #endif #ifdef _WIN32 #include <winsock2.h> #include <windows.h> #include <direct.h> #pragma comment(lib, "ws2_32") /* include ws2_32.lib when linking */ #define CLOSE_SOCKET closesocket #else #include <sys/types.h> #include <sys/socket.h> #include <netinet/in.h> #include <arpa/inet.h> #include <unistd.h> #define INVALID_SOCKET -1 #define SOCKET_ERROR -1 #define CLOSE_SOCKET close #define SOCKET int #endif #define TCP_PORT 27015 #define LISTEN_BACKLOG 5 namespace CWE23_Relative_Path_Traversal__wchar_t_listen_socket_w32CreateFile_18 { #ifndef OMITBAD void bad() { wchar_t * data; wchar_t dataBuffer[FILENAME_MAX] = BASEPATH; data = dataBuffer; goto source; source: { #ifdef _WIN32 WSADATA wsaData; int wsaDataInit = 0; #endif int recvResult; struct sockaddr_in service; wchar_t *replace; SOCKET listenSocket = INVALID_SOCKET; SOCKET acceptSocket = INVALID_SOCKET; size_t dataLen = wcslen(data); do { #ifdef _WIN32 if (WSAStartup(MAKEWORD(2,2), &wsaData) != NO_ERROR) { break; } wsaDataInit = 1; #endif /* POTENTIAL FLAW: Read data using a listen socket */ listenSocket = socket(AF_INET, SOCK_STREAM, IPPROTO_TCP); if (listenSocket == INVALID_SOCKET) { break; } memset(&service, 0, sizeof(service)); service.sin_family = AF_INET; service.sin_addr.s_addr = INADDR_ANY; service.sin_port = htons(TCP_PORT); if (bind(listenSocket, (struct 
sockaddr*)&service, sizeof(service)) == SOCKET_ERROR) { break; } if (listen(listenSocket, LISTEN_BACKLOG) == SOCKET_ERROR) { break; } acceptSocket = accept(listenSocket, NULL, NULL); if (acceptSocket == SOCKET_ERROR) { break; } /* Abort on error or the connection was closed */ recvResult = recv(acceptSocket, (char *)(data + dataLen), sizeof(wchar_t) * (FILENAME_MAX - dataLen - 1), 0); if (recvResult == SOCKET_ERROR || recvResult == 0) { break; } /* Append null terminator */ data[dataLen + recvResult / sizeof(wchar_t)] = L'\0'; /* Eliminate CRLF */ replace = wcschr(data, L'\r'); if (replace) { *replace = L'\0'; } replace = wcschr(data, L'\n'); if (replace) { *replace = L'\0'; } } while (0); if (listenSocket != INVALID_SOCKET) { CLOSE_SOCKET(listenSocket); } if (acceptSocket != INVALID_SOCKET) { CLOSE_SOCKET(acceptSocket); } #ifdef _WIN32 if (wsaDataInit) { WSACleanup(); } #endif } { HANDLE hFile; /* POTENTIAL FLAW: Possibly creating and opening a file without validating the file name or path */ hFile = CreateFileW(data, (GENERIC_WRITE|GENERIC_READ), 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL); if (hFile != INVALID_HANDLE_VALUE) { CloseHandle(hFile); } } } #endif /* OMITBAD */ #ifndef OMITGOOD /* goodG2B() - use goodsource and badsink by reversing the blocks on the goto statement */ static void goodG2B() { wchar_t * data; wchar_t dataBuffer[FILENAME_MAX] = BASEPATH; data = dataBuffer; goto source; source: /* FIX: Use a fixed file name */ wcscat(data, L"file.txt"); { HANDLE hFile; /* POTENTIAL FLAW: Possibly creating and opening a file without validating the file name or path */ hFile = CreateFileW(data, (GENERIC_WRITE|GENERIC_READ), 0, NULL, OPEN_ALWAYS, FILE_ATTRIBUTE_NORMAL, NULL); if (hFile != INVALID_HANDLE_VALUE) { CloseHandle(hFile); } } } void good() { goodG2B(); } #endif /* OMITGOOD */ } /* close namespace */ /* Below is the main(). 
It is only used when building this testcase on its own for testing or for building a binary to use in testing binary analysis tools. It is not used when compiling all the testcases as one application, which is how source code analysis tools are tested. */ #ifdef INCLUDEMAIN using namespace CWE23_Relative_Path_Traversal__wchar_t_listen_socket_w32CreateFile_18; /* so that we can use good and bad easily */ int main(int argc, char * argv[]) { /* seed randomness */ srand( (unsigned)time(NULL) ); #ifndef OMITGOOD printLine("Calling good()..."); good(); printLine("Finished good()"); #endif /* OMITGOOD */ #ifndef OMITBAD printLine("Calling bad()..."); bad(); printLine("Finished bad()"); #endif /* OMITBAD */ return 0; } #endif
C++
BSD-3-Clause
JianpingZeng/xcc/xcc/test/juliet/testcases/CWE23_Relative_Path_Traversal/s05/CWE23_Relative_Path_Traversal__wchar_t_listen_socket_w32CreateFile_18.cpp
7d3355d9-0e76-491c-8a35-a4205f33f462
[]
[]