def reverse_words(sentence):
    words = sentence.split()
    reversed_words = words[::-1]
    return ' '.join(reversed_words)


sentence = "Hello World"
reversed_sentence = reverse_words(sentence)
print(reversed_sentence)
export enum LogLevel {
  INFO = "INFO",
  WARN = "WARN",
  ERROR = "ERROR",
  DEBUG = "DEBUG",
  UNKNOWN = "UNKNOWN",
}

export function logLevelColor(logLevel: LogLevel): string {
  switch (logLevel) {
    case LogLevel.INFO:
      return "#73BD15";
    case LogLevel.WARN:
      return "#E0A903";
    case LogLevel.ERROR:
      return "#C72000";
    case LogLevel.DEBUG:
      return "#A896C7";
    case LogLevel.UNKNOWN:
      return "#F0F";
    default:
      return "#FFF";
  }
}
export default {
  elem: 'svg',
  attrs: {
    xmlns: 'http://www.w3.org/2000/svg',
    viewBox: '0 0 32 32',
    width: 32,
    height: 32,
  },
  content: [
    {
      elem: 'path',
      attrs: {
        d: 'M28 26H4a2 2 0 0 1-2-2V10a2 2 0 0 1 2-2h24a2 2 0 0 1 2 2v14a2 2 0 0 1-2 2zM4 10v14h24V10z',
      },
    },
    {
      elem: 'path',
      attrs: {
        d: 'M10 20h11v2H10zm-4-8h2v2H6zm4 0h2v2h-2zm4 0h2v2h-2zm4 0h2v2h-2zM6 20h2v2H6zm0-4h2v2H6zm4 0h2v2h-2zm4 0h2v2h-2zm8-4h4v2h-4zm0 4h4v2h-4zm-4 0h2v2h-2zm5 4h3v2h-3z',
      },
    },
  ],
  name: 'keyboard',
  size: 32,
};
/**
 * Copyright (C) 2015 The Gravitee team (http://gravitee.io)
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *         http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package io.gravitee.rest.api.service.impl.configuration.identity;

import io.gravitee.rest.api.model.configuration.identity.*;
import io.gravitee.rest.api.model.configuration.identity.am.AMIdentityProviderEntity;
import io.gravitee.rest.api.model.configuration.identity.github.GitHubIdentityProviderEntity;
import io.gravitee.rest.api.model.configuration.identity.google.GoogleIdentityProviderEntity;
import io.gravitee.rest.api.model.configuration.identity.oidc.OIDCIdentityProviderEntity;
import io.gravitee.rest.api.service.SocialIdentityProviderService;
import io.gravitee.rest.api.service.configuration.identity.IdentityProviderActivationService;
import io.gravitee.rest.api.service.configuration.identity.IdentityProviderService;
import io.gravitee.rest.api.service.exceptions.TechnicalManagementException;
import io.gravitee.rest.api.service.impl.AbstractService;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.stereotype.Component;

import java.util.List;
import java.util.Set;
import java.util.regex.Pattern;
import java.util.stream.Collectors;
import java.util.stream.Stream;

/**
 * @author <NAME> (david.brassely at graviteesource.com)
 * @author <NAME> (nicolas.geraud at graviteesource.com)
 * @author GraviteeSource Team
 */
@Component
public class SocialIdentityProviderImpl extends AbstractService implements SocialIdentityProviderService {

    private final Logger LOGGER = LoggerFactory.getLogger(SocialIdentityProviderImpl.class);

    // Pattern reused for duplicate slash removal
    private static final Pattern DUPLICATE_SLASH_REMOVER = Pattern.compile("(?<!(http:|https:))[//]+");
    private static final String URI_PATH_SEPARATOR = "/";

    private final static String CLIENT_ID = "clientId";
    private final static String CLIENT_SECRET = "clientSecret";

    @Autowired
    private IdentityProviderService identityProviderService;

    @Autowired
    private IdentityProviderActivationService identityProviderActivationService;

    @Override
    public Set<SocialIdentityProviderEntity> findAll(IdentityProviderActivationService.ActivationTarget target) {
        try {
            Set<String> allIdpByTarget = identityProviderActivationService.findAllByTarget(target)
                    .stream()
                    .map(IdentityProviderActivationEntity::getIdentityProvider)
                    .collect(Collectors.toSet());

            Stream<IdentityProviderEntity> identityProviderEntityStream = identityProviderService.findAll()
                    .stream()
                    .filter(idp -> allIdpByTarget.contains(idp.getId()));

            if (target.getReferenceType() == IdentityProviderActivationReferenceType.ENVIRONMENT) {
                identityProviderEntityStream = identityProviderEntityStream.filter(IdentityProviderEntity::isEnabled);
            }

            return identityProviderEntityStream
                    .sorted((idp1, idp2) -> String.CASE_INSENSITIVE_ORDER.compare(idp1.getName(), idp2.getName()))
                    .map(this::convert)
                    .collect(Collectors.toSet());
        } catch (Exception ex) {
            LOGGER.error("An error occurs while trying to retrieve identity providers", ex);
            throw new TechnicalManagementException("An error occurs while trying to retrieve identity providers", ex);
        }
    }

    @Override
    public SocialIdentityProviderEntity findById(String id, IdentityProviderActivationService.ActivationTarget target) {
        try {
            LOGGER.debug("Find identity provider by ID: {}", id);

            Set<String> allIdpByTarget = identityProviderActivationService.findAllByTarget(target)
                    .stream()
                    .map(IdentityProviderActivationEntity::getIdentityProvider)
                    .collect(Collectors.toSet());

            if (!allIdpByTarget.contains(id)) {
                throw new IdentityProviderNotFoundException(id);
            }

            IdentityProviderEntity identityProvider = identityProviderService.findById(id);
            if (target.getReferenceType() == IdentityProviderActivationReferenceType.ENVIRONMENT
                    && !identityProvider.isEnabled()) {
                throw new IdentityProviderNotFoundException(identityProvider.getId());
            }

            return convert(identityProvider);
        } catch (IdentityProviderNotFoundException ex) {
            throw ex;
        } catch (Exception ex) {
            LOGGER.error("An error occurs while trying to find an identity provider using its ID {}", id, ex);
            throw new TechnicalManagementException("An error occurs while trying to find an identity provider using its ID " + id, ex);
        }
    }

    private SocialIdentityProviderEntity convert(IdentityProviderEntity identityProvider) {
        SocialIdentityProviderEntity provider = null;

        if (identityProvider.getType() == IdentityProviderType.GOOGLE) {
            provider = new GoogleIdentityProviderEntity();
        } else if (identityProvider.getType() == IdentityProviderType.GITHUB) {
            provider = new GitHubIdentityProviderEntity();
        } else if (identityProvider.getType() == IdentityProviderType.OIDC) {
            provider = new OIDCIdentityProviderEntity();
            ((OIDCIdentityProviderEntity) provider).setColor((String) identityProvider.getConfiguration().get("color"));
            ((OIDCIdentityProviderEntity) provider).setDiscoveryEndpoint((String) identityProvider.getConfiguration().get("discoveryEndpoint"));
            ((OIDCIdentityProviderEntity) provider).setTokenEndpoint((String) identityProvider.getConfiguration().get("tokenEndpoint"));
            ((OIDCIdentityProviderEntity) provider).setAuthorizationEndpoint((String) identityProvider.getConfiguration().get("authorizeEndpoint"));
            ((OIDCIdentityProviderEntity) provider).setTokenIntrospectionEndpoint((String) identityProvider.getConfiguration().get("tokenIntrospectionEndpoint"));
            ((OIDCIdentityProviderEntity) provider).setUserInfoEndpoint((String) identityProvider.getConfiguration().get("userInfoEndpoint"));
            ((OIDCIdentityProviderEntity) provider).setUserLogoutEndpoint((String) identityProvider.getConfiguration().get("userLogoutEndpoint"));
            ((OIDCIdentityProviderEntity) provider).setScopes((List<String>) identityProvider.getConfiguration().get("scopes"));
            ((OIDCIdentityProviderEntity) provider).setUserProfileMapping(identityProvider.getUserProfileMapping());
        } else if (identityProvider.getType() == IdentityProviderType.GRAVITEEIO_AM) {
            String serverBaseUrl = (String) identityProvider.getConfiguration().get("serverURL");
            String domain = (String) identityProvider.getConfiguration().get("domain");

            // Remove duplicate slash
            String serverUrl = DUPLICATE_SLASH_REMOVER.matcher(serverBaseUrl + '/' + domain).replaceAll(URI_PATH_SEPARATOR);
            if (serverUrl.lastIndexOf(URI_PATH_SEPARATOR) == serverUrl.length() - 1) {
                serverUrl = serverUrl.substring(0, serverUrl.length() - 1);
            }

            provider = new AMIdentityProviderEntity(serverUrl);
            ((AMIdentityProviderEntity) provider).setColor((String) identityProvider.getConfiguration().get("color"));
            ((AMIdentityProviderEntity) provider).setDiscoveryEndpoint((String) identityProvider.getConfiguration().get("discoveryEndpoint"));
            ((AMIdentityProviderEntity) provider).setScopes((List<String>) identityProvider.getConfiguration().get("scopes"));
            ((AMIdentityProviderEntity) provider).setUserProfileMapping(identityProvider.getUserProfileMapping());
        }

        if (provider != null) {
            provider.setId(identityProvider.getId());
            provider.setName(identityProvider.getName());
            provider.setDescription(identityProvider.getDescription());
            provider.setClientId((String) identityProvider.getConfiguration().get(CLIENT_ID));
            provider.setClientSecret((String) identityProvider.getConfiguration().get(CLIENT_SECRET));
            provider.setGroupMappings(identityProvider.getGroupMappings());
            provider.setRoleMappings(identityProvider.getRoleMappings());
            provider.setEmailRequired(identityProvider.isEmailRequired());
            provider.setSyncMappings(identityProvider.isSyncMappings());
            return provider;
        }

        return null;
    }
}
angular.module('myApp').service('ConfigService', ['$http', '$q', 'baseApi', function ($http, $q, baseApi) { var ConfigService = {}; var resourceUrl = '/api/admin-config'; function buildBaucisQuery(opts) { var q ='?'; var prefix=''; if (opts.page === null && opts.blockSize === null) { opts.page = opts.page || 1; opts.pageSize = opts.pageSize || 20; } else { opts.page = opts.page || 1; opts.pageSize = opts.pageSize || 20; var skip = (opts.page-1)*opts.pageSize; if(skip > 0) { q += prefix + 'skip=' + skip; prefix='&'; } q += prefix + '&limit=' + opts.pageSize; prefix='&'; } if (opts.sort) { q += prefix + 'sort=' + encodeURIComponent(opts.sort) + ''; prefix='&'; } if (opts.criteria) { q += prefix + 'conditions={' + encodeURIComponent(opts.criteria) + '}'; prefix='&'; } if (opts.select) { q += prefix + 'select={' + encodeURIComponent(opts.select) + '}'; prefix='&'; } if (opts.populate) { q += prefix + 'populate={' + encodeURIComponent(opts.populate) + '}'; prefix='&'; } if (opts.hint) { q += prefix + 'hint={' + encodeURIComponent(opts.hint) + '}'; prefix='&'; } if (opts.count === true) { q += prefix + 'count=true'; prefix='&'; } if (opts.searchText && opts.searchText!=='') { //Do a custom like query var likeQuery = buildLikeQuery(opts.searchText); q += prefix + 'conditions={' + encodeURIComponent(likeQuery) + '}'; prefix='&'; } return q; } function buildLikeQuery(searchText) { var res='"$or":['; //add string fields var clauses = []; var clause = null; //Process each property clause = addStringLike('key', searchText); if (clause !== null) { clauses.push(clause); } clause = addStringLike('value', searchText); if (clause !== null) { clauses.push(clause); } var prefix=''; clauses.forEach(function(item) { res+=prefix+item; prefix=','; }); res += ']'; if (clauses.length>0) { return res; } return ''; } function addStringLike(property, searchValue) { if (searchValue === null) { return null; } return '{"'+ property +'":{"$regex":"' + escapeForRegex(searchValue) + '","$options":"i"}}'; } function escapeForRegex(candidate) { //escape values for regex return candidate; } function buildMongooseQuery(opts) { var q = ''; if (opts.searchText && opts.searchText!=='') { var likeQuery = buildMoongooseLikeQuery(opts.searchText); q = '{' + likeQuery + '}'; } return q; } function buildMoongooseLikeQuery(searchText) { var res='"$or":['; //add string fields var clauses = []; var clause = null; //Process each property clause = addStringLike('key', searchText); if (clause !== null) { clauses.push(clause); } clause = addStringLike('value', searchText); if (clause !== null) { clauses.push(clause); } var prefix=''; clauses.forEach(function(item) { res+=prefix+item; prefix=','; }); res += ']'; if (clauses.length>0) { return res; } return ''; } //-- Public API ----- ConfigService.getCount = function (opts) { opts = opts || {}; opts.count = true; var q = buildBaucisQuery(opts); return $http.get(resourceUrl + q); }; ConfigService.getList = function (opts) { opts = opts || {}; var q = buildBaucisQuery(opts); return $http.get(resourceUrl + q); }; ConfigService.getListAsCsv = function () { return $http({ method: 'GET', url: resourceUrl, headers: {'Accept': 'text/csv'} }); }; ConfigService.getFileAsCsv = function () { return $http({ method: 'GET', url: resourceUrl + '/download/csv/', headers: {'Accept': 'text/csv'} }); }; ConfigService.getFileAsXml = function () { return $http({ method: 'GET', url: resourceUrl + '/download/xml/', headers: {'Accept': 'text/xml'} }); }; ConfigService.getFileAsXlsx = function () { return $http({ 
method: 'GET', url: resourceUrl + '/download/xlsx/', headers: {'Accept': 'application/vnd.openxmlformats-officedocument.spreadsheetml.sheet'}, responseType: 'blob' }); }; ConfigService.getToEdit = function (id) { return $http.get(resourceUrl + '/' + id ); }; ConfigService.get = function (link) { return $http.get(link); }; ConfigService.add = function (item) { return $http.post(resourceUrl, JSON.stringify(item)); }; ConfigService.update = function (item) { return $http.put(resourceUrl + '/' + item._id, JSON.stringify(item)); }; ConfigService.delete = function (id) { return $http.delete(resourceUrl + '/' + id); }; ConfigService.deleteMany = function (ids) { var msg = { 'className' : 'admin_config', 'ids' : ids }; return $http.post(baseApi + '/delete', JSON.stringify(msg)); }; ConfigService.deleteAll = function (opts) { var msg = { 'className' : 'admin_config', 'conditions' : buildMongooseQuery(opts) }; return $http.post(baseApi + '/deleteAll', JSON.stringify(msg)); }; ConfigService.setKey = function (key, value) { var item = { 'key': key, 'value': value }; return $http.post('/api/setConfigKey', JSON.stringify(item)); }; ConfigService.getByKey = function (configKey) { var q = buildBaucisQuery({ 'searchText': configKey }); return $http.get(resourceUrl + q); }; return ConfigService; }]);
/*
 * @Date: 2022-03-30 16:34:56
 * @LastEditors: huangzh873
 * @LastEditTime: 2022-04-02 10:47:18
 * @FilePath: /vt-cesium2.0/src/store/modules/jt-cesium-vue/modules/locationbar/actions.ts
 */
import { ActionTree } from 'vuex'
import { RootState } from '@/store'
import type { State } from './state'
import { LocationbarHandlerEnum } from './enum-constant'

export const actions: ActionTree<State, RootState> = {
  async [LocationbarHandlerEnum.RESET_STATE]({ commit }) {
    commit(LocationbarHandlerEnum.RESET_STATE)
  },
  async [LocationbarHandlerEnum.SET_SHOW_CAMERA_LOCATION](
    { commit },
    payload: boolean
  ) {
    commit(LocationbarHandlerEnum.SET_SHOW_CAMERA_LOCATION, payload)
  },
  async [LocationbarHandlerEnum.SET_SHOW_MOUSE_LOCATION](
    { commit },
    payload: boolean
  ) {
    commit(LocationbarHandlerEnum.SET_SHOW_MOUSE_LOCATION, payload)
  },
  async [LocationbarHandlerEnum.SET_SHOW_FPS]({ commit }, payload: boolean) {
    commit(LocationbarHandlerEnum.SET_SHOW_FPS, payload)
  },
}
export {default as configureCompression} from './configureCompression';
import React, { useState } from "react"; import { useDispatch } from "react-redux"; import tradeApi from "../../../../services/tradeApiClient"; import { showErrorAlert } from "../../../../store/actions/ui"; import useInterval from "../../../../hooks/useInterval"; import { Box, CircularProgress } from "@material-ui/core"; import ManagementTable from "./ManagementTable"; /** * @typedef {import("../../../../services/tradeApiClient.types").ManagementPositionsEntity} ManagementPositionsEntity * @typedef {import("../../../../services/tradeApiClient.types").DefaultProviderGetObject} DefaultProviderGetObject * @typedef {Object} DefaultProps * @property {DefaultProviderGetObject} provider Balance */ /** * @param {DefaultProps} props Default props. * @returns {JSX.Element} Component JSX. */ const Management = ({ provider }) => { const [tablePositions, setTablePositions] = useState([]); const [allPositions, setAllPositions] = useState([]); const [positionsLoading, setPositionsLoading] = useState(true); const dispatch = useDispatch(); const loadPositions = () => { if (provider.id) { const payload = { providerId: provider.id, }; tradeApi .providerManagementPositions(payload) .then((response) => { setPositionsLoading(false); setAllPositions(response); setTablePositions(prepareTableList(response)); }) .catch((e) => { dispatch(showErrorAlert(e)); }); } }; useInterval(loadPositions, 5000, true); /** * Function to prepare list of the table. * * @param {Array<ManagementPositionsEntity>} data default data from backend. * @returns {Array<Position>} Array of position entities. */ const prepareTableList = (data) => { /** * @type {Array<Position>} */ let list = []; data.forEach((item) => { list.push(item.position); }); return list; }; return ( <> {positionsLoading && ( <Box alignItems="center" className="loadingBox" display="flex" flexDirection="row" justifyContent="center" > <CircularProgress color="primary" size={40} /> </Box> )} {!positionsLoading && ( <ManagementTable allPositions={allPositions} list={tablePositions} provider={provider} setLoading={setPositionsLoading} /> )} </> ); }; export default Management;
docker build . -t 2022_terra_group1
import App from "next/app";
import Head from "next/head";
import Router from "next/router";

import styles from "~/components/styles.module.scss";
import { inc, dec } from "~/components/hooks/use-nprogress";
import Header from "~/components/header";
import Footer from "~/components/footer";

Router.events.on("routeChangeStart", inc);
Router.events.on("routeChangeComplete", dec);
Router.events.on("routeChangeError", dec);

// I only need a custom app so I can override the default viewport.
const MyApp = (props: any) => {
  return (
    <>
      <Head>
        <meta name="viewport" content="width=600" />
        <meta name="apple-mobile-web-app-capable" content="yes" />
      </Head>
      <Header />
      <main>
        <section className={styles.content}>
          <App {...props} />
        </section>
      </main>
      <Footer />
    </>
  );
};

export default MyApp;
var searchData=
[
  ['error',['error',['../Misc_8h.html#a5924a4a2ac1a09a8c6679579dc22249e',1,'Misc.h']]]
];
#!/bin/bash set -e usage() { cat <<EOF Generate certificate suitable for use with an sidecar-injector webhook service. This script uses k8s' CertificateSigningRequest API to a generate a certificate signed by k8s CA suitable for use with sidecar-injector webhook services. This requires permissions to create and approve CSR. See https://kubernetes.io/docs/tasks/tls/managing-tls-in-a-cluster for detailed explantion and additional instructions. The server key/cert k8s CA cert are stored in a k8s secret. usage: ${0} [OPTIONS] The following flags are required. --service Service name of webhook. --namespace Namespace where webhook service and secret reside. --secret Secret name for CA certificate and server certificate/key pair. EOF exit 1 } while [[ $# -gt 0 ]]; do case ${1} in --service) service="$2" shift ;; --secret) secret="$2" shift ;; --namespace) namespace="$2" shift ;; *) usage ;; esac shift done [ -z ${service} ] && service=gcp-cred-webhook [ -z ${secret} ] && secret=gcp-cred-webhook-certs [ -z ${namespace} ] && namespace=${NAMESPACE} [ -z ${namespace} ] && namespace=default echo ${service} echo ${namespace} echo ${secret} if [ ! -x "$(command -v openssl)" ]; then echo "openssl not found" exit 1 fi csrName=${service}.${namespace} tmpdir=$(mktemp -d) echo "creating certs in tmpdir ${tmpdir} " # x509 outputs a self signed certificate instead of certificate request, later used as self signed root CA openssl req -x509 -newkey rsa:2048 -keyout ${tmpdir}/self_ca.key -out ${tmpdir}/self_ca.crt -days 365 -nodes -subj /C=/ST=/L=/O=/OU=/CN=test-certificate-authority cat <<EOF >> ${tmpdir}/csr.conf [req] req_extensions = v3_req distinguished_name = req_distinguished_name [req_distinguished_name] [ v3_req ] basicConstraints = CA:FALSE keyUsage = nonRepudiation, digitalSignature, keyEncipherment extendedKeyUsage = serverAuth subjectAltName = @alt_names [alt_names] DNS.1 = ${service} DNS.2 = ${service}.${namespace} DNS.3 = ${service}.${namespace}.svc EOF openssl genrsa -out ${tmpdir}/server-key.pem 2048 openssl req -new -key ${tmpdir}/server-key.pem -subj "/CN=${service}.${namespace}.svc" -out ${tmpdir}/server.csr -config ${tmpdir}/csr.conf # Self sign openssl x509 -req -days 365 -in ${tmpdir}/server.csr -CA ${tmpdir}/self_ca.crt -CAkey ${tmpdir}/self_ca.key -CAcreateserial -out ${tmpdir}/server-cert.pem # create the secret with CA cert and server cert/key kubectl create secret generic ${secret} \ --from-file=key.pem=${tmpdir}/server-key.pem \ --from-file=cert.pem=${tmpdir}/server-cert.pem \ --dry-run -o yaml | kubectl -n ${namespace} apply -f - # restart the webhook server if it already exists, ignore the error otherwise # this is for https://github.com/kubeflow/kubeflow/issues/3227. # Webhook pod once created loads the secret in the begining and starts serving. 
# Therefore, if secret is updated, then Webhook pod needs to be restarted webhookPod=$(kubectl get pods -n ${namespace} | grep gcp-cred-webhook- |awk '{print $1;}') kubectl delete pod ${webhookPod} 2>/dev/null || true echo "webhook ${webhookPod} is restarted to utilize the new secret" cat ${tmpdir}/self_ca.crt # -a means base64 encode caBundle=`cat ${tmpdir}/self_ca.crt | openssl enc -a -A` echo ${caBundle} patchString='[{"op": "replace", "path": "/webhooks/0/clientConfig/caBundle", "value":"{{CA_BUNDLE}}"}]' patchString=`echo ${patchString} | sed "s|{{CA_BUNDLE}}|${caBundle}|g"` echo ${patchString} checkWebhookConfig() { currentBundle=$(kubectl get mutatingwebhookconfigurations -n ${namespace} gcp-cred-webhook -o jsonpath='{.webhooks[0].clientConfig.caBundle}') [[ "$currentBundle" == "$caBundle" ]] } while true; do if ! checkWebhookConfig; then echo "patching ca bundle for webhook configuration..." kubectl patch mutatingwebhookconfiguration gcp-cred-webhook \ --type='json' -p="${patchString}" fi sleep 10 done
#!/bin/bash

echo "Monosodium Glutamate version $MSG_VERSION"
$(document).ready(function () { "use strict"; var av_name = "RegularLangQuestionsFF"; var av = new JSAV(av_name); var Frames = PIFRAMES.init(av_name); // Load the config object with interpreter and code created by odsaUtils.js var config = ODSA.UTILS.loadConfig({ av_name: av_name }), interpret = config.interpreter, // get the interpreter code = config.code; // get the code object var goNext = false; //frame 1 av.umsg("Let us answer some questions about Regular Languages."); av.displayInit(); //frame 2 av.umsg("Suppose we have $L$ is a regular language."); av.step(); //frame 3 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $w \\in L$?"); av.step(); //frame 4 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $w \\in L$?<br/>Answer: Construct a FA and test if it accepts $w$"); av.step(); //frame 5 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $L$ is empty?"); av.step(); //frame 6 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $L$ is empty?<br/>Construct a FA. If there is a path from the start state to any final state, then $L$ is not empty."); av.step(); //frame 7 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if the complement of $L$ is regular?<br/>Simply take the DFA and reverse the final and non-final states."); av.step(); //frame 8 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $L$ is finite?"); av.step(); //frame 9 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $L$ is finite?<br/>This was easy! But we will see in other contexts that complement is not so simple to decide."); av.step(); //frame 10 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $L$ is infinite?"); av.step(); //frame 11 av.umsg("Given $L, \\Sigma, w \\in \\Sigma^*$. How can we determine if $L$ is finite?<br/>Construct a FA. Determine if any of the vertices on a path from the start state to a final state are the base of some cycle. If so, then $L$ is infinite"); av.step(); //frame 3 av.umsg("Given $L_1$ and $L_2$. How can we determine if $L_1 = L_2$?"); av.step(); //frame 3 av.umsg("Given $L_1$ and $L_2$. How can we determine if $L_1 = L_2$?<br/>Construct $L_3 = (L_1 \\cap \\bar{L_2}) \\cup (\\bar{L_1} \\cap L_2)$, then $L_1 = L_2$"); av.step(); //frame av.step("Completed."); av.recorded(); });
import React from 'react'
import { StaticQuery, graphql } from 'gatsby'
import Img from 'gatsby-image'

import { Title } from '../../utils'

const chars = [
  {
    imgIndex: 1,
    title: 'Подхоящ за всеки тип кожа и косъм',
    text: 'Сигурен и бърз, необходими сесии 8-10 с интервал от 4 седмици'
  },
  {
    imgIndex: 2,
    title: 'Индивидуална настройка на лазера, според типа кожа',
    text: 'Намалява риска от увреждане на повърхностния слой на кожата (епидермиса)'
  },
  {
    imgIndex: 3,
    title: 'Ефективност на лазерна процедура',
    text: 'Получавате бързи, качествени и дълготрайни резултати'
  },
  {
    imgIndex: 4,
    title: 'Безопасна и безболезнена лазерна епилация',
    text: 'Процедури дори в разгара на лятото'
  }
]

const Characterisitcs = () => (
  <StaticQuery
    query={graphql`
      query {
        img1: file(relativePath: { eq: "slide1.jpg" }) {
          childImageSharp {
            fluid(maxWidth: 1500) {
              ...GatsbyImageSharpFluid_tracedSVG
            }
          }
        }
        img2: file(relativePath: { eq: "slide2.jpg" }) {
          childImageSharp {
            fluid(maxWidth: 1500) {
              ...GatsbyImageSharpFluid_tracedSVG
            }
          }
        }
        img3: file(relativePath: { eq: "slide3.jpg" }) {
          childImageSharp {
            fluid(maxWidth: 1500) {
              ...GatsbyImageSharpFluid_tracedSVG
            }
          }
        }
        img4: file(relativePath: { eq: "slide4.jpg" }) {
          childImageSharp {
            fluid(maxWidth: 1500) {
              ...GatsbyImageSharpFluid_tracedSVG
            }
          }
        }
      }
    `}
    render={(data) => (
      <section>
        <Title title="Ефективност и сигурност" imgUrl="slide1.jpg" />
        {chars.map(({ imgIndex, title, text }) => (
          <div key={imgIndex} className="card border-0">
            <Img fluid={data[`img${imgIndex}`].childImageSharp.fluid} className="card-img-top" />
            <div className="card-body">
              <h5 className="card-title">{title}</h5>
              <p className="card-text">{text}</p>
            </div>
          </div>
        ))}
      </section>
    )}
  />
)

export default Characterisitcs
#!/bin/bash SCRIPTPATH="$( cd "$(dirname "$0")"; cd .. ; pwd -P )" version=ds001705 nrm=${version}-download #################### # Default Settings # #################### appian_dir="/opt/APPIAN/" source_dir="/opt/APPIAN/Test/$nrm" target_dir="/opt/APPIAN/Test/out_${nrm}" threads=1 use_docker=0 docker_image="tffunck/appian:latest" function useage(){ echo Name : quantitative_validation.sh echo About: Evaluate quantitative accuracy of APPIAN using NRM2018 Grand Challenge Dataset. echo Options: echo " -a Set APPIAN directory where Launcher is located (default=$appian_dir)" echo " -s Set source directory where NRM2018 data will be downloaded (default=$source_dir)" echo " -t Set target directory where results will be saved (default=$target_dir)" echo " -d Use docker container (default=False)" echo " -i Docker image name (default=$docker_image)" echo " -r Number of threads to use (default=$threads)" echo " -h Print this help menu" } ################### # Parse Arguments # ################### while getopts "a:s:t:r:i:dh" opt; do case $opt in a) appian_dir=$OPTARG 1>&2 ;; s) source_dir=$OPTARG 1>&2 ;; t) target_dir=$OPTARG 1>&2 ;; r) threads=$OPTARG 1>&2 ;; i) docker_image=$OPTARG 1>&2 ;; d) use_docker=1 ;; h) useage exit 0 ;; \?) echo "Warning -- Invalid option: -$OPTARG" 1>&2 useage exit 1 ;; :) echo "Error -- Option -$OPTARG requires argument " 1>&2 useage exit 1 esac done ########################################## # Download data from Amazon Web Services # ########################################## # pip install awscli --upgrade --user > /dev/null && export PATH="${PATH}:/root/.local/bin/" > /dev/null # aws s3 sync --no-sign-request s3://openneuro.org/$version $source_dir ################## # Run validation # ################## echo echo Quantitative Validation Settings echo ------------------------------- echo " APPIAN Directory : $appian_dir" echo " Source Directory : $source_dir" echo " Target Directory : $target_dir" if [[ $use_docker == 1 ]]; then echo " Docker image : $docker_image" fi echo " Threads : $threads" echo pvcMethods="idSURF VC" #quantMethods="lp lp-roi suv suvr srtm srtm-bf" quantMethods="lp srtm " #Run Quant cmd_base="python ${appian_dir}/Launcher.py -s ${source_dir} -t ${target_dir} --start-time 7 --threads $threads --tka-label-img /APPIAN/Atlas/MNI152/dka.nii --quant-label 8 47 --quant-labels-ones-only --quant-label-erosion 3 --pvc-fwhm 2.5 2.5 2.5 " cmd_quant="$cmd_base --tka-method suvr " cmd_pvc="$cmd_quant" # --pvc-method VC " echo docker run -v "$SCRIPTPATH":"/APPIAN" --rm $docker_image bash -c "$cmd_pvc" docker run -v "$SCRIPTPATH":"/APPIAN" --rm $docker_image bash -c "$cmd_pvc" exit 0 for quant in $quantMethods; do cmd_quant="$cmd_base --tka-method $quant " if [[ $use_docker != 0 ]]; then docker run -v "$SCRIPTPATH":"/APPIAN" --rm $docker_image bash -c "$cmd_quant" else bash -c "$cmd" fi #Run PVC for pvc in $pvcMethods; do echo Testing $pvc $quant # Setup command to run APPIAN cmd_pvc="$cmd_quant --pvc-method $pvc " if [[ $use_docker != 0 ]]; then # Run command in docker container docker run -v "$SCRIPTPATH":"/APPIAN" --rm $docker_image bash -c "$cmd_pvc" else # Assumes you are already in an environment that can run APPIAN bash -c "$cmd_pvc" fi done done
#!/bin/sh sudo rm -rf /etc/goxlr sudo mkdir /etc/goxlr cd /etc/goxlr || exit 1 sudo git clone https://github.com/GoXLR-on-Linux/goxlr-on-linux.git || skip cd goxlr-on-linux || exit 1 #Config location CONFIG="$HOME/GoXLR.cfg" #Create config if it doesn't exist if [ ! -e $CONFIG ]; then sudo cp "/etc/goxlr/goxlr-on-linux/bin/raw.cfg" $CONFIG sudo chown -c $USER $CONFIG fi #Source config . $CONFIG #Function to edit config file #Use is: set_config "valueToChange" "changeToWhat" set_config(){ sudo sed -i "s/^\($1\s*=\s*\).*\$/\1$2/" $CONFIG #Source the config again so information is up to date . $CONFIG } #Ask user a config question #Use is: ask_config configOption question option1 option2 ask_config(){ #Dummy var to trap the script in loop until an acceptable answer is input allowed= while [ ! $allowed ]; do echo -n "$2 Options: $3, $4: " read REPLY #Lowercase reply and check against options to see if it's acceptable fixReply="$(echo ${REPLY} | tr 'A-Z' 'a-z')" if [ $fixReply = $3 ] || [ $fixReply = $4 ]; then set_config $1 $fixReply allowed='true' else echo "$REPLY isn't an option." fi done } echo #Ask type of GoXLR, full or mini ask_config "device" "GoXLR Full or Mini?" "full" "mini" #Ask which sound system to use, pulseaudio or pipewire #ask_config "type" "Which sound system?" "pulseaudio" "pipewire" echo "Working..." #Install APT_GET_CMD=$(which apt-get) PACMAN_CMD=$(which pacman) if [ -n "$APT_GET_CMD" ]; then cd $HOME || exit 1 dpkg -s jackd2 >word 2>&1 || sudo apt-get install jackd2 dpkg -s pulseaudio-module-jack >word 2>&1 || sudo apt-get install pulseaudio-module-jack grep -iq "source /etc/goxlr/goxlr-on-linux/run_goxlr.sh" \.profile || sudo echo "source /etc/goxlr/goxlr-on-linux/run_goxlr.sh" | sudo tee -a ".profile" elif [ -n "$PACMAN_CMD" ]; then sudo pacman -Qs jack2 || sudo pacman -S jack2 sudo pacman -Qs jack2-dbus || sudo pacman -S jack2-dbus sudo pacman -Qs pulseaudio-jack || sudo pacman -S pulseaudio-jack cd $HOME || exit 1 grep -iq "source /etc/goxlr/goxlr-on-linux/run_goxlr.sh" \.bash_profile || sudo echo "source /etc/goxlr/goxlr-on-linux/run_goxlr.sh" | sudo tee -a ".bash_profile" cd /etc/goxlr/goxlr-on-linux || exit 1 sudo cp audio.conf /etc/security/limits.d else echo "error can't install packages" exit 1; fi #Restart PA pulseaudio --kill #Default cmode for testing both options set_config "cmode" "false" #Run GoXLR sh /etc/goxlr/goxlr-on-linux/run_goxlr.sh|grep "not a valid port" && set_config "cmode" "true" && sh /etc/goxlr/goxlr-on-linux/run_goxlr|grep "not a valid port" && printf "Your GoXLR has been powercycled or was not found.\nPlease look in the wiki for other known issues,\nif it isn't a know issue Please create one on github\nand attach the GoXLR_Log.txt found in your home directory.\n" && sh /etc/goxlr/goxlr-on-linux/genlog.sh #clear console clear #Config default output device echo "Output Devices" case "${1:-}" in (""|list) outputs=$(pacmd list-sinks | #Filter for jack.client_name, remove quotes, number results grep -E 'jack.client_name' | sed 's/ jack.client_name = //g' | sed 's/"//g' | nl -ba -s') ') echo "$outputs" #Dummy var to trap the script in loop until an acceptable answer is input allowed= while [ ! $allowed ]; do #Ask which device to use echo -n "Please type a number to pick a default output device (0 to skip): " read REPLY #Set selection selected=$(echo "$outputs" | grep -E $REPLY | sed 's/ '$REPLY') //g') #Check if valid option and name found if [ $REPLY = '0' ]; then echo "Skipping option." allowed="true" elif [ ! 
$selected ]; then echo "Invalid option selected." else echo $selected "was selected." echo allowed="true" fi done ;; esac if [ $selected ]; then #Filter for both names, remove $selected and get line before it found=$(pacmd list-sinks | grep -E "$selected|name:" | grep -B 1 $selected | sed '/'$selected'/d' | #Sed off unneeded characters sed 's/[ <>]//g' | sed 's/name://g') #Set config and apply default device set_config "ouput" $selected pacmd "set-default-sink $found" fi #Config default input device echo "Input Devices" case "${1:-}" in (""|list) inputs=$(pacmd list-sources | #Filter for jack.client_name, remove quotes, number results grep -E 'jack.client_name' | sed 's/ jack.client_name = //g' | sed 's/"//g' | nl -ba -s') ') echo "$inputs" #Dummy var to trap the script in loop until an acceptable answer is input allowed= while [ ! $allowed ]; do #Ask which device to use echo -n "Please type a number to pick a default input device (0 to skip): " read REPLY #Set selection selected=$(echo "$inputs" | grep -E $REPLY | sed 's/ '$REPLY') //g') #Check if valid option and name found if [ $REPLY = '0' ]; then echo "Skipping option." allowed="true" elif [ ! $selected ]; then echo "Invalid option selected." else echo $selected "was selected." echo allowed="true" fi done ;; esac if [ $selected ]; then #Filter for both names, remove $selected and get line before it found=$(pacmd list-sources | grep -E "$selected|name:" | grep -B 1 $selected | sed '/'$selected'/d' | #Sed off unneeded characters sed 's/[ <>]//g' | sed 's/name://g') #Set config and apply default device set_config "input" $selected pacmd "set-default-source $found" fi #Finished echo "Install complete. Configured using a '$device' GoXLR with '$ouput' as your default output and '$input' as your default input."
# Generated by Powerlevel10k configuration wizard on 2021-04-21 at 23:04 CDT. # Based on romkatv/powerlevel10k/config/p10k-rainbow.zsh, checksum 23931. # Wizard options: nerdfont-complete + powerline, small icons, rainbow, unicode, # 12h time, angled separators, sharp heads, blurred tails, 2 lines, dotted, full frame, # lightest-ornaments, sparse, many icons, fluent, transient_prompt, # instant_prompt=verbose. # Type `p10k configure` to generate another config. # # Config for Powerlevel10k with powerline prompt style with colorful background. # Type `p10k configure` to generate your own config based on it. # # Tip: Looking for a nice color? Here's a one-liner to print colormap. # # for i in {0..255}; do print -Pn "%K{$i} %k%F{$i}${(l:3::0:)i}%f " ${${(M)$((i%6)):#3}:+$'\n'}; done # Temporarily change options. 'builtin' 'local' '-a' 'p10k_config_opts' [[ ! -o 'aliases' ]] || p10k_config_opts+=('aliases') [[ ! -o 'sh_glob' ]] || p10k_config_opts+=('sh_glob') [[ ! -o 'no_brace_expand' ]] || p10k_config_opts+=('no_brace_expand') 'builtin' 'setopt' 'no_aliases' 'no_sh_glob' 'brace_expand' () { emulate -L zsh -o extended_glob # Unset all configuration options. This allows you to apply configuration changes without # restarting zsh. Edit ~/.p10k.zsh and type `source ~/.p10k.zsh`. unset -m '(POWERLEVEL9K_*|DEFAULT_USER)~POWERLEVEL9K_GITSTATUS_DIR' # Zsh >= 5.1 is required. autoload -Uz is-at-least && is-at-least 5.1 || return # The list of segments shown on the left. Fill it with the most important segments. typeset -g POWERLEVEL9K_LEFT_PROMPT_ELEMENTS=( # =========================[ Line #1 ]========================= os_icon # os identifier dir # current directory vcs # git status # =========================[ Line #2 ]========================= newline # \n # prompt_char # prompt symbol ) # The list of segments shown on the right. Fill it with less important segments. # Right prompt on the last prompt line (where you are typing your commands) gets # automatically hidden when the input line reaches it. Right prompt above the # last prompt line gets hidden if it would overlap with left prompt. 
typeset -g POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS=( # =========================[ Line #1 ]========================= status # exit code of the last command command_execution_time # duration of the last command background_jobs # presence of background jobs direnv # direnv status (https://direnv.net/) asdf # asdf version manager (https://github.com/asdf-vm/asdf) virtualenv # python virtual environment (https://docs.python.org/3/library/venv.html) anaconda # conda environment (https://conda.io/) pyenv # python environment (https://github.com/pyenv/pyenv) goenv # go environment (https://github.com/syndbg/goenv) nodenv # node.js version from nodenv (https://github.com/nodenv/nodenv) nvm # node.js version from nvm (https://github.com/nvm-sh/nvm) nodeenv # node.js environment (https://github.com/ekalinin/nodeenv) # node_version # node.js version # go_version # go version (https://golang.org) # rust_version # rustc version (https://www.rust-lang.org) # dotnet_version # .NET version (https://dotnet.microsoft.com) # php_version # php version (https://www.php.net/) # laravel_version # laravel php framework version (https://laravel.com/) # java_version # java version (https://www.java.com/) # package # name@version from package.json (https://docs.npmjs.com/files/package.json) rbenv # ruby version from rbenv (https://github.com/rbenv/rbenv) rvm # ruby version from rvm (https://rvm.io) fvm # flutter version management (https://github.com/leoafarias/fvm) luaenv # lua version from luaenv (https://github.com/cehoffman/luaenv) jenv # java version from jenv (https://github.com/jenv/jenv) plenv # perl version from plenv (https://github.com/tokuhirom/plenv) phpenv # php version from phpenv (https://github.com/phpenv/phpenv) scalaenv # scala version from scalaenv (https://github.com/scalaenv/scalaenv) haskell_stack # haskell version from stack (https://haskellstack.org/) kubecontext # current kubernetes context (https://kubernetes.io/) terraform # terraform workspace (https://www.terraform.io) aws # aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) aws_eb_env # aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) azure # azure account name (https://docs.microsoft.com/en-us/cli/azure) gcloud # google cloud cli account and project (https://cloud.google.com/) google_app_cred # google application credentials (https://cloud.google.com/docs/authentication/production) context # user@hostname nordvpn # nordvpn connection status, linux only (https://nordvpn.com/) ranger # ranger shell (https://github.com/ranger/ranger) nnn # nnn shell (https://github.com/jarun/nnn) vim_shell # vim shell indicator (:sh) midnight_commander # midnight commander shell (https://midnight-commander.org/) nix_shell # nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) vi_mode # vi mode (you don't need this if you've enabled prompt_char) # vpn_ip # virtual private network indicator # load # CPU load # disk_usage # disk usage # ram # free RAM # swap # used swap todo # todo items (https://github.com/todotxt/todo.txt-cli) timewarrior # timewarrior tracking status (https://timewarrior.net/) taskwarrior # taskwarrior task count (https://taskwarrior.org/) time # current time # =========================[ Line #2 ]========================= newline # ip # ip address and bandwidth usage for a specified network interface # public_ip # public IP address # proxy # system-wide http/https/ftp proxy # battery # internal battery # wifi # wifi speed # example # example 
user-defined segment (see prompt_example function below) ) # Defines character set used by powerlevel10k. It's best to let `p10k configure` set it for you. typeset -g POWERLEVEL9K_MODE=nerdfont-complete # When set to `moderate`, some icons will have an extra space after them. This is meant to avoid # icon overlap when using non-monospace fonts. When set to `none`, spaces are not added. typeset -g POWERLEVEL9K_ICON_PADDING=none # When set to true, icons appear before content on both sides of the prompt. When set # to false, icons go after content. If empty or not set, icons go before content in the left # prompt and after content in the right prompt. # # You can also override it for a specific segment: # # POWERLEVEL9K_STATUS_ICON_BEFORE_CONTENT=false # # Or for a specific segment in specific state: # # POWERLEVEL9K_DIR_NOT_WRITABLE_ICON_BEFORE_CONTENT=false typeset -g POWERLEVEL9K_ICON_BEFORE_CONTENT= # Add an empty line before each prompt. typeset -g POWERLEVEL9K_PROMPT_ADD_NEWLINE=true # Connect left prompt lines with these symbols. You'll probably want to use the same color # as POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND below. typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_PREFIX='%244F╭─' typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_PREFIX='%244F├─' typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_PREFIX='%244F╰─' # Connect right prompt lines with these symbols. typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_SUFFIX='%244F─╮' typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_SUFFIX='%244F─┤' typeset -g POWERLEVEL9K_MULTILINE_LAST_PROMPT_SUFFIX='%244F─╯' # Filler between left and right prompt on the first prompt line. You can set it to ' ', '·' or # '─'. The last two make it easier to see the alignment between left and right prompt and to # separate prompt from command output. You might want to set POWERLEVEL9K_PROMPT_ADD_NEWLINE=false # for more compact prompt if using using this option. typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR='·' typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_BACKGROUND= typeset -g POWERLEVEL9K_MULTILINE_NEWLINE_PROMPT_GAP_BACKGROUND= if [[ $POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_CHAR != ' ' ]]; then # The color of the filler. You'll probably want to match the color of POWERLEVEL9K_MULTILINE # ornaments defined above. typeset -g POWERLEVEL9K_MULTILINE_FIRST_PROMPT_GAP_FOREGROUND=244 # Start filler from the edge of the screen if there are no left segments on the first line. typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_FIRST_SEGMENT_END_SYMBOL='%{%}' # End filler on the edge of the screen if there are no right segments on the first line. typeset -g POWERLEVEL9K_EMPTY_LINE_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='%{%}' fi # Separator between same-color segments on the left. typeset -g POWERLEVEL9K_LEFT_SUBSEGMENT_SEPARATOR='\uE0B1' # Separator between same-color segments on the right. typeset -g POWERLEVEL9K_RIGHT_SUBSEGMENT_SEPARATOR='\uE0B3' # Separator between different-color segments on the left. typeset -g POWERLEVEL9K_LEFT_SEGMENT_SEPARATOR='\uE0B0' # Separator between different-color segments on the right. typeset -g POWERLEVEL9K_RIGHT_SEGMENT_SEPARATOR='\uE0B2' # The right end of left prompt. typeset -g POWERLEVEL9K_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL='\uE0B0' # The left end of right prompt. typeset -g POWERLEVEL9K_RIGHT_PROMPT_FIRST_SEGMENT_START_SYMBOL='\uE0B2' # The left end of left prompt. typeset -g POWERLEVEL9K_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL='░▒▓' # The right end of right prompt. 
typeset -g POWERLEVEL9K_RIGHT_PROMPT_LAST_SEGMENT_END_SYMBOL='▓▒░' # Left prompt terminator for lines without any segments. typeset -g POWERLEVEL9K_EMPTY_LINE_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL= #################################[ os_icon: os identifier ]################################## # OS identifier color. typeset -g POWERLEVEL9K_OS_ICON_FOREGROUND=232 typeset -g POWERLEVEL9K_OS_ICON_BACKGROUND=7 # Custom icon. # typeset -g POWERLEVEL9K_OS_ICON_CONTENT_EXPANSION='⭐' ################################[ prompt_char: prompt symbol ]################################ # Transparent background. typeset -g POWERLEVEL9K_PROMPT_CHAR_BACKGROUND= # Green prompt symbol if the last command succeeded. typeset -g POWERLEVEL9K_PROMPT_CHAR_OK_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=76 # Red prompt symbol if the last command failed. typeset -g POWERLEVEL9K_PROMPT_CHAR_ERROR_{VIINS,VICMD,VIVIS,VIOWR}_FOREGROUND=196 # Default prompt symbol. typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIINS_CONTENT_EXPANSION='❯' # Prompt symbol in command vi mode. typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VICMD_CONTENT_EXPANSION='❮' # Prompt symbol in visual vi mode. typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIVIS_CONTENT_EXPANSION='V' # Prompt symbol in overwrite vi mode. typeset -g POWERLEVEL9K_PROMPT_CHAR_{OK,ERROR}_VIOWR_CONTENT_EXPANSION='▶' typeset -g POWERLEVEL9K_PROMPT_CHAR_OVERWRITE_STATE=true # No line terminator if prompt_char is the last segment. typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_LAST_SEGMENT_END_SYMBOL= # No line introducer if prompt_char is the first segment. typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_PROMPT_FIRST_SEGMENT_START_SYMBOL= # No surrounding whitespace. typeset -g POWERLEVEL9K_PROMPT_CHAR_LEFT_{LEFT,RIGHT}_WHITESPACE= ##################################[ dir: current directory ]################################## # Current directory background color. typeset -g POWERLEVEL9K_DIR_BACKGROUND=4 # Default current directory foreground color. typeset -g POWERLEVEL9K_DIR_FOREGROUND=254 # If directory is too long, shorten some of its segments to the shortest possible unique # prefix. The shortened directory can be tab-completed to the original. typeset -g POWERLEVEL9K_SHORTEN_STRATEGY=truncate_to_unique # Replace removed segment suffixes with this symbol. typeset -g POWERLEVEL9K_SHORTEN_DELIMITER= # Color of the shortened directory segments. typeset -g POWERLEVEL9K_DIR_SHORTENED_FOREGROUND=250 # Color of the anchor directory segments. Anchor segments are never shortened. The first # segment is always an anchor. typeset -g POWERLEVEL9K_DIR_ANCHOR_FOREGROUND=255 # Display anchor directory segments in bold. typeset -g POWERLEVEL9K_DIR_ANCHOR_BOLD=true # Don't shorten directories that contain any of these files. They are anchors. local anchor_files=( .bzr .citc .git .hg .node-version .python-version .go-version .ruby-version .lua-version .java-version .perl-version .php-version .tool-version .shorten_folder_marker .svn .terraform CVS Cargo.toml composer.json go.mod package.json stack.yaml ) typeset -g POWERLEVEL9K_SHORTEN_FOLDER_MARKER="(${(j:|:)anchor_files})" # If set to "first" ("last"), remove everything before the first (last) subdirectory that contains # files matching $POWERLEVEL9K_SHORTEN_FOLDER_MARKER. For example, when the current directory is # /foo/bar/git_repo/nested_git_repo/baz, prompt will display git_repo/nested_git_repo/baz (first) # or nested_git_repo/baz (last). This assumes that git_repo and nested_git_repo contain markers # and other directories don't. 
# # Optionally, "first" and "last" can be followed by ":<offset>" where <offset> is an integer. # This moves the truncation point to the right (positive offset) or to the left (negative offset) # relative to the marker. Plain "first" and "last" are equivalent to "first:0" and "last:0" # respectively. typeset -g POWERLEVEL9K_DIR_TRUNCATE_BEFORE_MARKER=false # Don't shorten this many last directory segments. They are anchors. typeset -g POWERLEVEL9K_SHORTEN_DIR_LENGTH=1 # Shorten directory if it's longer than this even if there is space for it. The value can # be either absolute (e.g., '80') or a percentage of terminal width (e.g, '50%'). If empty, # directory will be shortened only when prompt doesn't fit or when other parameters demand it # (see POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS and POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT below). # If set to `0`, directory will always be shortened to its minimum length. typeset -g POWERLEVEL9K_DIR_MAX_LENGTH=80 # When `dir` segment is on the last prompt line, try to shorten it enough to leave at least this # many columns for typing commands. typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS=40 # When `dir` segment is on the last prompt line, try to shorten it enough to leave at least # COLUMNS * POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT * 0.01 columns for typing commands. typeset -g POWERLEVEL9K_DIR_MIN_COMMAND_COLUMNS_PCT=50 # If set to true, embed a hyperlink into the directory. Useful for quickly # opening a directory in the file manager simply by clicking the link. # Can also be handy when the directory is shortened, as it allows you to see # the full directory that was used in previous commands. typeset -g POWERLEVEL9K_DIR_HYPERLINK=false # Enable special styling for non-writable and non-existent directories. See POWERLEVEL9K_LOCK_ICON # and POWERLEVEL9K_DIR_CLASSES below. typeset -g POWERLEVEL9K_DIR_SHOW_WRITABLE=v3 # The default icon shown next to non-writable and non-existent directories when # POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3. # typeset -g POWERLEVEL9K_LOCK_ICON='⭐' # POWERLEVEL9K_DIR_CLASSES allows you to specify custom icons and colors for different # directories. It must be an array with 3 * N elements. Each triplet consists of: # # 1. A pattern against which the current directory ($PWD) is matched. Matching is done with # extended_glob option enabled. # 2. Directory class for the purpose of styling. # 3. An empty string. # # Triplets are tried in order. The first triplet whose pattern matches $PWD wins. # # If POWERLEVEL9K_DIR_SHOW_WRITABLE is set to v3, non-writable and non-existent directories # acquire class suffix _NOT_WRITABLE and NON_EXISTENT respectively. # # For example, given these settings: # # typeset -g POWERLEVEL9K_DIR_CLASSES=( # '~/work(|/*)' WORK '' # '~(|/*)' HOME '' # '*' DEFAULT '') # # Whenever the current directory is ~/work or a subdirectory of ~/work, it gets styled with one # of the following classes depending on its writability and existence: WORK, WORK_NOT_WRITABLE or # WORK_NON_EXISTENT. # # Simply assigning classes to directories doesn't have any visible effects. It merely gives you an # option to define custom colors and icons for different directory classes. # # # Styling for WORK. # typeset -g POWERLEVEL9K_DIR_WORK_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_DIR_WORK_BACKGROUND=4 # typeset -g POWERLEVEL9K_DIR_WORK_FOREGROUND=254 # typeset -g POWERLEVEL9K_DIR_WORK_SHORTENED_FOREGROUND=250 # typeset -g POWERLEVEL9K_DIR_WORK_ANCHOR_FOREGROUND=255 # # # Styling for WORK_NOT_WRITABLE. 
# typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_BACKGROUND=4 # typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND=254 # typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_SHORTENED_FOREGROUND=250 # typeset -g POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_ANCHOR_FOREGROUND=255 # # # Styling for WORK_NON_EXISTENT. # typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_BACKGROUND=4 # typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_FOREGROUND=254 # typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_SHORTENED_FOREGROUND=250 # typeset -g POWERLEVEL9K_DIR_WORK_NON_EXISTENT_ANCHOR_FOREGROUND=255 # # If a styling parameter isn't explicitly defined for some class, it falls back to the classless # parameter. For example, if POWERLEVEL9K_DIR_WORK_NOT_WRITABLE_FOREGROUND is not set, it falls # back to POWERLEVEL9K_DIR_FOREGROUND. # # typeset -g POWERLEVEL9K_DIR_CLASSES=() # Custom prefix. # typeset -g POWERLEVEL9K_DIR_PREFIX='in ' #####################################[ vcs: git status ]###################################### # Version control system colors. typeset -g POWERLEVEL9K_VCS_CLEAN_BACKGROUND=2 typeset -g POWERLEVEL9K_VCS_MODIFIED_BACKGROUND=3 typeset -g POWERLEVEL9K_VCS_UNTRACKED_BACKGROUND=2 typeset -g POWERLEVEL9K_VCS_CONFLICTED_BACKGROUND=3 typeset -g POWERLEVEL9K_VCS_LOADING_BACKGROUND=8 # Branch icon. Set this parameter to '\uF126 ' for the popular Powerline branch icon. typeset -g POWERLEVEL9K_VCS_BRANCH_ICON='\uF126 ' # Untracked files icon. It's really a question mark, your font isn't broken. # Change the value of this parameter to show a different icon. typeset -g POWERLEVEL9K_VCS_UNTRACKED_ICON='?' # Formatter for Git status. # # Example output: master ⇣42⇡42 *42 merge ~42 +42 !42 ?42. # # You can edit the function to customize how Git status looks. # # VCS_STATUS_* parameters are set by gitstatus plugin. See reference: # https://github.com/romkatv/gitstatus/blob/master/gitstatus.plugin.zsh. function my_git_formatter() { emulate -L zsh if [[ -n $P9K_CONTENT ]]; then # If P9K_CONTENT is not empty, use it. It's either "loading" or from vcs_info (not from # gitstatus plugin). VCS_STATUS_* parameters are not available in this case. typeset -g my_git_format=$P9K_CONTENT return fi # Styling for different parts of Git status. local meta='%7F' # white foreground local clean='%0F' # black foreground local modified='%0F' # black foreground local untracked='%0F' # black foreground local conflicted='%1F' # red foreground local res if [[ -n $VCS_STATUS_LOCAL_BRANCH ]]; then local branch=${(V)VCS_STATUS_LOCAL_BRANCH} # If local branch name is at most 32 characters long, show it in full. # Otherwise show the first 12 … the last 12. # Tip: To always show local branch name in full without truncation, delete the next line. (( $#branch > 32 )) && branch[13,-13]="…" # <-- this line res+="${clean}${(g::)POWERLEVEL9K_VCS_BRANCH_ICON}${branch//\%/%%}" fi if [[ -n $VCS_STATUS_TAG # Show tag only if not on a branch. # Tip: To always show tag, delete the next line. && -z $VCS_STATUS_LOCAL_BRANCH # <-- this line ]]; then local tag=${(V)VCS_STATUS_TAG} # If tag name is at most 32 characters long, show it in full. # Otherwise show the first 12 … the last 12. # Tip: To always show tag name in full without truncation, delete the next line. 
(( $#tag > 32 )) && tag[13,-13]="…" # <-- this line res+="${meta}#${clean}${tag//\%/%%}" fi # Display the current Git commit if there is no branch and no tag. # Tip: To always display the current Git commit, delete the next line. [[ -z $VCS_STATUS_LOCAL_BRANCH && -z $VCS_STATUS_LOCAL_BRANCH ]] && # <-- this line res+="${meta}@${clean}${VCS_STATUS_COMMIT[1,8]}" # Show tracking branch name if it differs from local branch. if [[ -n ${VCS_STATUS_REMOTE_BRANCH:#$VCS_STATUS_LOCAL_BRANCH} ]]; then res+="${meta}:${clean}${(V)VCS_STATUS_REMOTE_BRANCH//\%/%%}" fi # ⇣42 if behind the remote. (( VCS_STATUS_COMMITS_BEHIND )) && res+=" ${clean}⇣${VCS_STATUS_COMMITS_BEHIND}" # ⇡42 if ahead of the remote; no leading space if also behind the remote: ⇣42⇡42. (( VCS_STATUS_COMMITS_AHEAD && !VCS_STATUS_COMMITS_BEHIND )) && res+=" " (( VCS_STATUS_COMMITS_AHEAD )) && res+="${clean}⇡${VCS_STATUS_COMMITS_AHEAD}" # ⇠42 if behind the push remote. (( VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" ${clean}⇠${VCS_STATUS_PUSH_COMMITS_BEHIND}" (( VCS_STATUS_PUSH_COMMITS_AHEAD && !VCS_STATUS_PUSH_COMMITS_BEHIND )) && res+=" " # ⇢42 if ahead of the push remote; no leading space if also behind: ⇠42⇢42. (( VCS_STATUS_PUSH_COMMITS_AHEAD )) && res+="${clean}⇢${VCS_STATUS_PUSH_COMMITS_AHEAD}" # *42 if have stashes. (( VCS_STATUS_STASHES )) && res+=" ${clean}*${VCS_STATUS_STASHES}" # 'merge' if the repo is in an unusual state. [[ -n $VCS_STATUS_ACTION ]] && res+=" ${conflicted}${VCS_STATUS_ACTION}" # ~42 if have merge conflicts. (( VCS_STATUS_NUM_CONFLICTED )) && res+=" ${conflicted}~${VCS_STATUS_NUM_CONFLICTED}" # +42 if have staged changes. (( VCS_STATUS_NUM_STAGED )) && res+=" ${modified}+${VCS_STATUS_NUM_STAGED}" # !42 if have unstaged changes. (( VCS_STATUS_NUM_UNSTAGED )) && res+=" ${modified}!${VCS_STATUS_NUM_UNSTAGED}" # ?42 if have untracked files. It's really a question mark, your font isn't broken. # See POWERLEVEL9K_VCS_UNTRACKED_ICON above if you want to use a different icon. # Remove the next line if you don't want to see untracked files at all. (( VCS_STATUS_NUM_UNTRACKED )) && res+=" ${untracked}${(g::)POWERLEVEL9K_VCS_UNTRACKED_ICON}${VCS_STATUS_NUM_UNTRACKED}" # "─" if the number of unstaged files is unknown. This can happen due to # POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY (see below) being set to a non-negative number lower # than the number of files in the Git index, or due to bash.showDirtyState being set to false # in the repository config. The number of staged and untracked files may also be unknown # in this case. (( VCS_STATUS_HAS_UNSTAGED == -1 )) && res+=" ${modified}─" typeset -g my_git_format=$res } functions -M my_git_formatter 2>/dev/null # Don't count the number of unstaged, untracked and conflicted files in Git repositories with # more than this many files in the index. Negative value means infinity. # # If you are working in Git repositories with tens of millions of files and seeing performance # sagging, try setting POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY to a number lower than the output # of `git ls-files | wc -l`. Alternatively, add `bash.showDirtyState = false` to the repository's # config: `git config bash.showDirtyState false`. typeset -g POWERLEVEL9K_VCS_MAX_INDEX_SIZE_DIRTY=-1 # Don't show Git status in prompt for repositories whose workdir matches this pattern. # For example, if set to '~', the Git repository at $HOME/.git will be ignored. # Multiple patterns can be combined with '|': '~(|/foo)|/bar/baz/*'. 
  typeset -g POWERLEVEL9K_VCS_DISABLED_WORKDIR_PATTERN='~'

  # Disable the default Git status formatting.
  typeset -g POWERLEVEL9K_VCS_DISABLE_GITSTATUS_FORMATTING=true
  # Install our own Git status formatter.
  typeset -g POWERLEVEL9K_VCS_CONTENT_EXPANSION='${$((my_git_formatter()))+${my_git_format}}'
  # Enable counters for staged, unstaged, etc.
  typeset -g POWERLEVEL9K_VCS_{STAGED,UNSTAGED,UNTRACKED,CONFLICTED,COMMITS_AHEAD,COMMITS_BEHIND}_MAX_NUM=-1

  # Custom icon.
  # typeset -g POWERLEVEL9K_VCS_VISUAL_IDENTIFIER_EXPANSION='⭐'
  # Custom prefix.
  typeset -g POWERLEVEL9K_VCS_PREFIX='on '

  # Show status of repositories of these types. You can add svn and/or hg if you are
  # using them. If you do, your prompt may become slow even when your current directory
  # isn't in an svn or hg repository.
  typeset -g POWERLEVEL9K_VCS_BACKENDS=(git)

  ##########################[ status: exit code of the last command ]###########################
  # Enable OK_PIPE, ERROR_PIPE and ERROR_SIGNAL status states to allow us to enable, disable and
  # style them independently from the regular OK and ERROR state.
  typeset -g POWERLEVEL9K_STATUS_EXTENDED_STATES=true

  # Status on success. No content, just an icon. No need to show it if prompt_char is enabled as
  # it will signify success by turning green.
  typeset -g POWERLEVEL9K_STATUS_OK=true
  typeset -g POWERLEVEL9K_STATUS_OK_VISUAL_IDENTIFIER_EXPANSION='✔'
  typeset -g POWERLEVEL9K_STATUS_OK_FOREGROUND=2
  typeset -g POWERLEVEL9K_STATUS_OK_BACKGROUND=0

  # Status when some part of a pipe command fails but the overall exit status is zero. It may look
  # like this: 1|0.
  typeset -g POWERLEVEL9K_STATUS_OK_PIPE=true
  typeset -g POWERLEVEL9K_STATUS_OK_PIPE_VISUAL_IDENTIFIER_EXPANSION='✔'
  typeset -g POWERLEVEL9K_STATUS_OK_PIPE_FOREGROUND=2
  typeset -g POWERLEVEL9K_STATUS_OK_PIPE_BACKGROUND=0

  # Status when it's just an error code (e.g., '1'). No need to show it if prompt_char is enabled as
  # it will signify error by turning red.
  typeset -g POWERLEVEL9K_STATUS_ERROR=true
  typeset -g POWERLEVEL9K_STATUS_ERROR_VISUAL_IDENTIFIER_EXPANSION='✘'
  typeset -g POWERLEVEL9K_STATUS_ERROR_FOREGROUND=3
  typeset -g POWERLEVEL9K_STATUS_ERROR_BACKGROUND=1

  # Status when the last command was terminated by a signal.
  typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL=true
  # Use terse signal names: "INT" instead of "SIGINT(2)".
  typeset -g POWERLEVEL9K_STATUS_VERBOSE_SIGNAME=false
  typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_VISUAL_IDENTIFIER_EXPANSION='✘'
  typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_FOREGROUND=3
  typeset -g POWERLEVEL9K_STATUS_ERROR_SIGNAL_BACKGROUND=1

  # Status when some part of a pipe command fails and the overall exit status is also non-zero.
  # It may look like this: 1|0.
  typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE=true
  typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_VISUAL_IDENTIFIER_EXPANSION='✘'
  typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_FOREGROUND=3
  typeset -g POWERLEVEL9K_STATUS_ERROR_PIPE_BACKGROUND=1

  ###################[ command_execution_time: duration of the last command ]###################
  # Execution time color.
  typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FOREGROUND=0
  typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_BACKGROUND=3
  # Show duration of the last command if it takes at least this many seconds.
  typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_THRESHOLD=3
  # Show this many fractional digits. Zero means round to seconds.
  typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PRECISION=0
  # Duration format: 1d 2h 3m 4s.
  typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_FORMAT='d h m s'
  # Custom icon.
# typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐' # Custom prefix. typeset -g POWERLEVEL9K_COMMAND_EXECUTION_TIME_PREFIX='took ' #######################[ background_jobs: presence of background jobs ]####################### # Background jobs color. typeset -g POWERLEVEL9K_BACKGROUND_JOBS_FOREGROUND=6 typeset -g POWERLEVEL9K_BACKGROUND_JOBS_BACKGROUND=0 # Don't show the number of background jobs. typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VERBOSE=false # Custom icon. # typeset -g POWERLEVEL9K_BACKGROUND_JOBS_VISUAL_IDENTIFIER_EXPANSION='⭐' #######################[ direnv: direnv status (https://direnv.net/) ]######################## # Direnv color. typeset -g POWERLEVEL9K_DIRENV_FOREGROUND=3 typeset -g POWERLEVEL9K_DIRENV_BACKGROUND=0 # Custom icon. # typeset -g POWERLEVEL9K_DIRENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ###############[ asdf: asdf version manager (https://github.com/asdf-vm/asdf) ]############### # Default asdf color. Only used to display tools for which there is no color override (see below). # Tip: Override these parameters for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_FOREGROUND and # POWERLEVEL9K_ASDF_${TOOL}_BACKGROUND. typeset -g POWERLEVEL9K_ASDF_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_BACKGROUND=7 # There are four parameters that can be used to hide asdf tools. Each parameter describes # conditions under which a tool gets hidden. Parameters can hide tools but not unhide them. If at # least one parameter decides to hide a tool, that tool gets hidden. If no parameter decides to # hide a tool, it gets shown. # # Special note on the difference between POWERLEVEL9K_ASDF_SOURCES and # POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW. Consider the effect of the following commands: # # asdf local python 3.8.1 # asdf global python 3.8.1 # # After running both commands the current python version is 3.8.1 and its source is "local" as # it takes precedence over "global". If POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW is set to false, # it'll hide python version in this case because 3.8.1 is the same as the global version. # POWERLEVEL9K_ASDF_SOURCES will hide python version only if the value of this parameter doesn't # contain "local". # Hide tool versions that don't come from one of these sources. # # Available sources: # # - shell `asdf current` says "set by ASDF_${TOOL}_VERSION environment variable" # - local `asdf current` says "set by /some/not/home/directory/file" # - global `asdf current` says "set by /home/username/file" # # Note: If this parameter is set to (shell local global), it won't hide tools. # Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SOURCES. typeset -g POWERLEVEL9K_ASDF_SOURCES=(shell local global) # If set to false, hide tool versions that are the same as global. # # Note: The name of this parameter doesn't reflect its meaning at all. # Note: If this parameter is set to true, it won't hide tools. # Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_PROMPT_ALWAYS_SHOW. typeset -g POWERLEVEL9K_ASDF_PROMPT_ALWAYS_SHOW=false # If set to false, hide tool versions that are equal to "system". # # Note: If this parameter is set to true, it won't hide tools. # Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_SYSTEM. typeset -g POWERLEVEL9K_ASDF_SHOW_SYSTEM=true # If set to non-empty value, hide tools unless there is a file matching the specified file pattern # in the current directory, or its parent directory, or its grandparent directory, and so on. 
# # Note: If this parameter is set to empty value, it won't hide tools. # Note: SHOW_ON_UPGLOB isn't specific to asdf. It works with all prompt segments. # Tip: Override this parameter for ${TOOL} with POWERLEVEL9K_ASDF_${TOOL}_SHOW_ON_UPGLOB. # # Example: Hide nodejs version when there is no package.json and no *.js files in the current # directory, in `..`, in `../..` and so on. # # typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.js|package.json' typeset -g POWERLEVEL9K_ASDF_SHOW_ON_UPGLOB= # Ruby version from asdf. typeset -g POWERLEVEL9K_ASDF_RUBY_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_RUBY_BACKGROUND=1 # typeset -g POWERLEVEL9K_ASDF_RUBY_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_RUBY_SHOW_ON_UPGLOB='*.foo|*.bar' # Python version from asdf. typeset -g POWERLEVEL9K_ASDF_PYTHON_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_PYTHON_BACKGROUND=4 # typeset -g POWERLEVEL9K_ASDF_PYTHON_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_PYTHON_SHOW_ON_UPGLOB='*.foo|*.bar' # Go version from asdf. typeset -g POWERLEVEL9K_ASDF_GOLANG_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_GOLANG_BACKGROUND=4 # typeset -g POWERLEVEL9K_ASDF_GOLANG_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_GOLANG_SHOW_ON_UPGLOB='*.foo|*.bar' # Node.js version from asdf. typeset -g POWERLEVEL9K_ASDF_NODEJS_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_NODEJS_BACKGROUND=2 # typeset -g POWERLEVEL9K_ASDF_NODEJS_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_NODEJS_SHOW_ON_UPGLOB='*.foo|*.bar' # Rust version from asdf. typeset -g POWERLEVEL9K_ASDF_RUST_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_RUST_BACKGROUND=208 # typeset -g POWERLEVEL9K_ASDF_RUST_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_RUST_SHOW_ON_UPGLOB='*.foo|*.bar' # .NET Core version from asdf. typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_BACKGROUND=5 # typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_DOTNET_CORE_SHOW_ON_UPGLOB='*.foo|*.bar' # Flutter version from asdf. typeset -g POWERLEVEL9K_ASDF_FLUTTER_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_FLUTTER_BACKGROUND=4 # typeset -g POWERLEVEL9K_ASDF_FLUTTER_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_FLUTTER_SHOW_ON_UPGLOB='*.foo|*.bar' # Lua version from asdf. typeset -g POWERLEVEL9K_ASDF_LUA_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_LUA_BACKGROUND=4 # typeset -g POWERLEVEL9K_ASDF_LUA_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_LUA_SHOW_ON_UPGLOB='*.foo|*.bar' # Java version from asdf. typeset -g POWERLEVEL9K_ASDF_JAVA_FOREGROUND=1 typeset -g POWERLEVEL9K_ASDF_JAVA_BACKGROUND=7 # typeset -g POWERLEVEL9K_ASDF_JAVA_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_JAVA_SHOW_ON_UPGLOB='*.foo|*.bar' # Perl version from asdf. typeset -g POWERLEVEL9K_ASDF_PERL_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_PERL_BACKGROUND=4 # typeset -g POWERLEVEL9K_ASDF_PERL_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_PERL_SHOW_ON_UPGLOB='*.foo|*.bar' # Erlang version from asdf. typeset -g POWERLEVEL9K_ASDF_ERLANG_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_ERLANG_BACKGROUND=1 # typeset -g POWERLEVEL9K_ASDF_ERLANG_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_ERLANG_SHOW_ON_UPGLOB='*.foo|*.bar' # Elixir version from asdf. 
typeset -g POWERLEVEL9K_ASDF_ELIXIR_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_ELIXIR_BACKGROUND=5 # typeset -g POWERLEVEL9K_ASDF_ELIXIR_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_ELIXIR_SHOW_ON_UPGLOB='*.foo|*.bar' # Postgres version from asdf. typeset -g POWERLEVEL9K_ASDF_POSTGRES_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_POSTGRES_BACKGROUND=6 # typeset -g POWERLEVEL9K_ASDF_POSTGRES_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_POSTGRES_SHOW_ON_UPGLOB='*.foo|*.bar' # PHP version from asdf. typeset -g POWERLEVEL9K_ASDF_PHP_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_PHP_BACKGROUND=5 # typeset -g POWERLEVEL9K_ASDF_PHP_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_PHP_SHOW_ON_UPGLOB='*.foo|*.bar' # Haskell version from asdf. typeset -g POWERLEVEL9K_ASDF_HASKELL_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_HASKELL_BACKGROUND=3 # typeset -g POWERLEVEL9K_ASDF_HASKELL_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_HASKELL_SHOW_ON_UPGLOB='*.foo|*.bar' # Julia version from asdf. typeset -g POWERLEVEL9K_ASDF_JULIA_FOREGROUND=0 typeset -g POWERLEVEL9K_ASDF_JULIA_BACKGROUND=2 # typeset -g POWERLEVEL9K_ASDF_JULIA_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_ASDF_JULIA_SHOW_ON_UPGLOB='*.foo|*.bar' ##########[ nordvpn: nordvpn connection status, linux only (https://nordvpn.com/) ]########### # NordVPN connection indicator color. typeset -g POWERLEVEL9K_NORDVPN_FOREGROUND=7 typeset -g POWERLEVEL9K_NORDVPN_BACKGROUND=4 # Hide NordVPN connection indicator when not connected. typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_CONTENT_EXPANSION= typeset -g POWERLEVEL9K_NORDVPN_{DISCONNECTED,CONNECTING,DISCONNECTING}_VISUAL_IDENTIFIER_EXPANSION= # Custom icon. # typeset -g POWERLEVEL9K_NORDVPN_VISUAL_IDENTIFIER_EXPANSION='⭐' #################[ ranger: ranger shell (https://github.com/ranger/ranger) ]################## # Ranger shell color. typeset -g POWERLEVEL9K_RANGER_FOREGROUND=3 typeset -g POWERLEVEL9K_RANGER_BACKGROUND=0 # Custom icon. # typeset -g POWERLEVEL9K_RANGER_VISUAL_IDENTIFIER_EXPANSION='⭐' ######################[ nnn: nnn shell (https://github.com/jarun/nnn) ]####################### # Nnn shell color. typeset -g POWERLEVEL9K_NNN_FOREGROUND=0 typeset -g POWERLEVEL9K_NNN_BACKGROUND=6 # Custom icon. # typeset -g POWERLEVEL9K_NNN_VISUAL_IDENTIFIER_EXPANSION='⭐' ###########################[ vim_shell: vim shell indicator (:sh) ]########################### # Vim shell indicator color. typeset -g POWERLEVEL9K_VIM_SHELL_FOREGROUND=0 typeset -g POWERLEVEL9K_VIM_SHELL_BACKGROUND=2 # Custom icon. # typeset -g POWERLEVEL9K_VIM_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐' ######[ midnight_commander: midnight commander shell (https://midnight-commander.org/) ]###### # Midnight Commander shell color. typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_FOREGROUND=3 typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_BACKGROUND=0 # Custom icon. # typeset -g POWERLEVEL9K_MIDNIGHT_COMMANDER_VISUAL_IDENTIFIER_EXPANSION='⭐' #[ nix_shell: nix shell (https://nixos.org/nixos/nix-pills/developing-with-nix-shell.html) ]## # Nix shell color. typeset -g POWERLEVEL9K_NIX_SHELL_FOREGROUND=0 typeset -g POWERLEVEL9K_NIX_SHELL_BACKGROUND=4 # Tip: If you want to see just the icon without "pure" and "impure", uncomment the next line. # typeset -g POWERLEVEL9K_NIX_SHELL_CONTENT_EXPANSION= # Custom icon. 
# typeset -g POWERLEVEL9K_NIX_SHELL_VISUAL_IDENTIFIER_EXPANSION='⭐' ##################################[ disk_usage: disk usage ]################################## # Colors for different levels of disk usage. typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_FOREGROUND=3 typeset -g POWERLEVEL9K_DISK_USAGE_NORMAL_BACKGROUND=0 typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_FOREGROUND=0 typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_BACKGROUND=3 typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_FOREGROUND=7 typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_BACKGROUND=1 # Thresholds for different levels of disk usage (percentage points). typeset -g POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL=90 typeset -g POWERLEVEL9K_DISK_USAGE_CRITICAL_LEVEL=95 # If set to true, hide disk usage when below $POWERLEVEL9K_DISK_USAGE_WARNING_LEVEL percent. typeset -g POWERLEVEL9K_DISK_USAGE_ONLY_WARNING=false # Custom icon. # typeset -g POWERLEVEL9K_DISK_USAGE_VISUAL_IDENTIFIER_EXPANSION='⭐' ###########[ vi_mode: vi mode (you don't need this if you've enabled prompt_char) ]########### # Foreground color. typeset -g POWERLEVEL9K_VI_MODE_FOREGROUND=0 # Text and color for normal (a.k.a. command) vi mode. typeset -g POWERLEVEL9K_VI_COMMAND_MODE_STRING=NORMAL typeset -g POWERLEVEL9K_VI_MODE_NORMAL_BACKGROUND=2 # Text and color for visual vi mode. typeset -g POWERLEVEL9K_VI_VISUAL_MODE_STRING=VISUAL typeset -g POWERLEVEL9K_VI_MODE_VISUAL_BACKGROUND=4 # Text and color for overtype (a.k.a. overwrite and replace) vi mode. typeset -g POWERLEVEL9K_VI_OVERWRITE_MODE_STRING=OVERTYPE typeset -g POWERLEVEL9K_VI_MODE_OVERWRITE_BACKGROUND=3 # Text and color for insert vi mode. typeset -g POWERLEVEL9K_VI_INSERT_MODE_STRING= typeset -g POWERLEVEL9K_VI_MODE_INSERT_FOREGROUND=8 ######################################[ ram: free RAM ]####################################### # RAM color. typeset -g POWERLEVEL9K_RAM_FOREGROUND=0 typeset -g POWERLEVEL9K_RAM_BACKGROUND=3 # Custom icon. # typeset -g POWERLEVEL9K_RAM_VISUAL_IDENTIFIER_EXPANSION='⭐' #####################################[ swap: used swap ]###################################### # Swap color. typeset -g POWERLEVEL9K_SWAP_FOREGROUND=0 typeset -g POWERLEVEL9K_SWAP_BACKGROUND=3 # Custom icon. # typeset -g POWERLEVEL9K_SWAP_VISUAL_IDENTIFIER_EXPANSION='⭐' ######################################[ load: CPU load ]###################################### # Show average CPU load over this many last minutes. Valid values are 1, 5 and 15. typeset -g POWERLEVEL9K_LOAD_WHICH=5 # Load color when load is under 50%. typeset -g POWERLEVEL9K_LOAD_NORMAL_FOREGROUND=0 typeset -g POWERLEVEL9K_LOAD_NORMAL_BACKGROUND=2 # Load color when load is between 50% and 70%. typeset -g POWERLEVEL9K_LOAD_WARNING_FOREGROUND=0 typeset -g POWERLEVEL9K_LOAD_WARNING_BACKGROUND=3 # Load color when load is over 70%. typeset -g POWERLEVEL9K_LOAD_CRITICAL_FOREGROUND=0 typeset -g POWERLEVEL9K_LOAD_CRITICAL_BACKGROUND=1 # Custom icon. # typeset -g POWERLEVEL9K_LOAD_VISUAL_IDENTIFIER_EXPANSION='⭐' ################[ todo: todo items (https://github.com/todotxt/todo.txt-cli) ]################ # Todo color. typeset -g POWERLEVEL9K_TODO_FOREGROUND=0 typeset -g POWERLEVEL9K_TODO_BACKGROUND=8 # Hide todo when the total number of tasks is zero. typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_TOTAL=true # Hide todo when the number of tasks after filtering is zero. typeset -g POWERLEVEL9K_TODO_HIDE_ZERO_FILTERED=false # Todo format. The following parameters are available within the expansion. # # - P9K_TODO_TOTAL_TASK_COUNT The total number of tasks. 
# - P9K_TODO_FILTERED_TASK_COUNT The number of tasks after filtering. # # These variables correspond to the last line of the output of `todo.sh -p ls`: # # TODO: 24 of 42 tasks shown # # Here 24 is P9K_TODO_FILTERED_TASK_COUNT and 42 is P9K_TODO_TOTAL_TASK_COUNT. # # typeset -g POWERLEVEL9K_TODO_CONTENT_EXPANSION='$P9K_TODO_FILTERED_TASK_COUNT' # Custom icon. # typeset -g POWERLEVEL9K_TODO_VISUAL_IDENTIFIER_EXPANSION='⭐' ###########[ timewarrior: timewarrior tracking status (https://timewarrior.net/) ]############ # Timewarrior color. typeset -g POWERLEVEL9K_TIMEWARRIOR_FOREGROUND=255 typeset -g POWERLEVEL9K_TIMEWARRIOR_BACKGROUND=8 # If the tracked task is longer than 24 characters, truncate and append "…". # Tip: To always display tasks without truncation, delete the following parameter. # Tip: To hide task names and display just the icon when time tracking is enabled, set the # value of the following parameter to "". typeset -g POWERLEVEL9K_TIMEWARRIOR_CONTENT_EXPANSION='${P9K_CONTENT:0:24}${${P9K_CONTENT:24}:+…}' # Custom icon. # typeset -g POWERLEVEL9K_TIMEWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐' ##############[ taskwarrior: taskwarrior task count (https://taskwarrior.org/) ]############## # Taskwarrior color. typeset -g POWERLEVEL9K_TASKWARRIOR_FOREGROUND=0 typeset -g POWERLEVEL9K_TASKWARRIOR_BACKGROUND=6 # Taskwarrior segment format. The following parameters are available within the expansion. # # - P9K_TASKWARRIOR_PENDING_COUNT The number of pending tasks: `task +PENDING count`. # - P9K_TASKWARRIOR_OVERDUE_COUNT The number of overdue tasks: `task +OVERDUE count`. # # Zero values are represented as empty parameters. # # The default format: # # '${P9K_TASKWARRIOR_OVERDUE_COUNT:+"!$P9K_TASKWARRIOR_OVERDUE_COUNT/"}$P9K_TASKWARRIOR_PENDING_COUNT' # # typeset -g POWERLEVEL9K_TASKWARRIOR_CONTENT_EXPANSION='$P9K_TASKWARRIOR_PENDING_COUNT' # Custom icon. # typeset -g POWERLEVEL9K_TASKWARRIOR_VISUAL_IDENTIFIER_EXPANSION='⭐' ##################################[ context: user@hostname ]################################## # Context color when running with privileges. typeset -g POWERLEVEL9K_CONTEXT_ROOT_FOREGROUND=1 typeset -g POWERLEVEL9K_CONTEXT_ROOT_BACKGROUND=0 # Context color in SSH without privileges. typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_FOREGROUND=3 typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_BACKGROUND=0 # Default context color (no privileges, no SSH). typeset -g POWERLEVEL9K_CONTEXT_FOREGROUND=3 typeset -g POWERLEVEL9K_CONTEXT_BACKGROUND=0 # Context format when running with privileges: user@hostname. typeset -g POWERLEVEL9K_CONTEXT_ROOT_TEMPLATE='%n@%m' # Context format when in SSH without privileges: user@hostname. typeset -g POWERLEVEL9K_CONTEXT_{REMOTE,REMOTE_SUDO}_TEMPLATE='%n@%m' # Default context format (no privileges, no SSH): user@hostname. typeset -g POWERLEVEL9K_CONTEXT_TEMPLATE='%n@%m' # Don't show context unless running with privileges or in SSH. # Tip: Remove the next line to always show context. typeset -g POWERLEVEL9K_CONTEXT_{DEFAULT,SUDO}_{CONTENT,VISUAL_IDENTIFIER}_EXPANSION= # Custom icon. # typeset -g POWERLEVEL9K_CONTEXT_VISUAL_IDENTIFIER_EXPANSION='⭐' # Custom prefix. typeset -g POWERLEVEL9K_CONTEXT_PREFIX='with ' ###[ virtualenv: python virtual environment (https://docs.python.org/3/library/venv.html) ]### # Python virtual environment color. typeset -g POWERLEVEL9K_VIRTUALENV_FOREGROUND=0 typeset -g POWERLEVEL9K_VIRTUALENV_BACKGROUND=4 # Don't show Python version next to the virtual environment name. 
typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_PYTHON_VERSION=false # If set to "false", won't show virtualenv if pyenv is already shown. # If set to "if-different", won't show virtualenv if it's the same as pyenv. typeset -g POWERLEVEL9K_VIRTUALENV_SHOW_WITH_PYENV=false # Separate environment name from Python version only with a space. typeset -g POWERLEVEL9K_VIRTUALENV_{LEFT,RIGHT}_DELIMITER= # Custom icon. # typeset -g POWERLEVEL9K_VIRTUALENV_VISUAL_IDENTIFIER_EXPANSION='⭐' #####################[ anaconda: conda environment (https://conda.io/) ]###################### # Anaconda environment color. typeset -g POWERLEVEL9K_ANACONDA_FOREGROUND=0 typeset -g POWERLEVEL9K_ANACONDA_BACKGROUND=4 # Anaconda segment format. The following parameters are available within the expansion. # # - CONDA_PREFIX Absolute path to the active Anaconda/Miniconda environment. # - CONDA_DEFAULT_ENV Name of the active Anaconda/Miniconda environment. # - CONDA_PROMPT_MODIFIER Configurable prompt modifier (see below). # - P9K_ANACONDA_PYTHON_VERSION Current python version (python --version). # # CONDA_PROMPT_MODIFIER can be configured with the following command: # # conda config --set env_prompt '({default_env}) ' # # The last argument is a Python format string that can use the following variables: # # - prefix The same as CONDA_PREFIX. # - default_env The same as CONDA_DEFAULT_ENV. # - name The last segment of CONDA_PREFIX. # - stacked_env Comma-separated list of names in the environment stack. The first element is # always the same as default_env. # # Note: '({default_env}) ' is the default value of env_prompt. # # The default value of POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION expands to $CONDA_PROMPT_MODIFIER # without the surrounding parentheses, or to the last path component of CONDA_PREFIX if the former # is empty. typeset -g POWERLEVEL9K_ANACONDA_CONTENT_EXPANSION='${${${${CONDA_PROMPT_MODIFIER#\(}% }%\)}:-${CONDA_PREFIX:t}}' # Custom icon. # typeset -g POWERLEVEL9K_ANACONDA_VISUAL_IDENTIFIER_EXPANSION='⭐' ################[ pyenv: python environment (https://github.com/pyenv/pyenv) ]################ # Pyenv color. typeset -g POWERLEVEL9K_PYENV_FOREGROUND=0 typeset -g POWERLEVEL9K_PYENV_BACKGROUND=4 # Hide python version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_PYENV_SOURCES=(shell local global) # If set to false, hide python version if it's the same as global: # $(pyenv version-name) == $(pyenv global). typeset -g POWERLEVEL9K_PYENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide python version if it's equal to "system". typeset -g POWERLEVEL9K_PYENV_SHOW_SYSTEM=true # Pyenv segment format. The following parameters are available within the expansion. # # - P9K_CONTENT Current pyenv environment (pyenv version-name). # - P9K_PYENV_PYTHON_VERSION Current python version (python --version). # # The default format has the following logic: # # 1. Display "$P9K_CONTENT $P9K_PYENV_PYTHON_VERSION" if $P9K_PYENV_PYTHON_VERSION is not # empty and unequal to $P9K_CONTENT. # 2. Otherwise display just "$P9K_CONTENT". typeset -g POWERLEVEL9K_PYENV_CONTENT_EXPANSION='${P9K_CONTENT}${${P9K_PYENV_PYTHON_VERSION:#$P9K_CONTENT}:+ $P9K_PYENV_PYTHON_VERSION}' # Custom icon. # typeset -g POWERLEVEL9K_PYENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ################[ goenv: go environment (https://github.com/syndbg/goenv) ]################ # Goenv color. typeset -g POWERLEVEL9K_GOENV_FOREGROUND=0 typeset -g POWERLEVEL9K_GOENV_BACKGROUND=4 # Hide go version if it doesn't come from one of these sources. 
typeset -g POWERLEVEL9K_GOENV_SOURCES=(shell local global) # If set to false, hide go version if it's the same as global: # $(goenv version-name) == $(goenv global). typeset -g POWERLEVEL9K_GOENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide go version if it's equal to "system". typeset -g POWERLEVEL9K_GOENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_GOENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ##########[ nodenv: node.js version from nodenv (https://github.com/nodenv/nodenv) ]########## # Nodenv color. typeset -g POWERLEVEL9K_NODENV_FOREGROUND=2 typeset -g POWERLEVEL9K_NODENV_BACKGROUND=0 # Hide node version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_NODENV_SOURCES=(shell local global) # If set to false, hide node version if it's the same as global: # $(nodenv version-name) == $(nodenv global). typeset -g POWERLEVEL9K_NODENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide node version if it's equal to "system". typeset -g POWERLEVEL9K_NODENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_NODENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ##############[ nvm: node.js version from nvm (https://github.com/nvm-sh/nvm) ]############### # Nvm color. typeset -g POWERLEVEL9K_NVM_FOREGROUND=0 typeset -g POWERLEVEL9K_NVM_BACKGROUND=5 # Custom icon. # typeset -g POWERLEVEL9K_NVM_VISUAL_IDENTIFIER_EXPANSION='⭐' ############[ nodeenv: node.js environment (https://github.com/ekalinin/nodeenv) ]############ # Nodeenv color. typeset -g POWERLEVEL9K_NODEENV_FOREGROUND=2 typeset -g POWERLEVEL9K_NODEENV_BACKGROUND=0 # Don't show Node version next to the environment name. typeset -g POWERLEVEL9K_NODEENV_SHOW_NODE_VERSION=false # Separate environment name from Node version only with a space. typeset -g POWERLEVEL9K_NODEENV_{LEFT,RIGHT}_DELIMITER= # Custom icon. # typeset -g POWERLEVEL9K_NODEENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ##############################[ node_version: node.js version ]############################### # Node version color. typeset -g POWERLEVEL9K_NODE_VERSION_FOREGROUND=7 typeset -g POWERLEVEL9K_NODE_VERSION_BACKGROUND=2 # Show node version only when in a directory tree containing package.json. typeset -g POWERLEVEL9K_NODE_VERSION_PROJECT_ONLY=true # Custom icon. # typeset -g POWERLEVEL9K_NODE_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' #######################[ go_version: go version (https://golang.org) ]######################## # Go version color. typeset -g POWERLEVEL9K_GO_VERSION_FOREGROUND=255 typeset -g POWERLEVEL9K_GO_VERSION_BACKGROUND=2 # Show go version only when in a go project subdirectory. typeset -g POWERLEVEL9K_GO_VERSION_PROJECT_ONLY=true # Custom icon. # typeset -g POWERLEVEL9K_GO_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' #################[ rust_version: rustc version (https://www.rust-lang.org) ]################## # Rust version color. typeset -g POWERLEVEL9K_RUST_VERSION_FOREGROUND=0 typeset -g POWERLEVEL9K_RUST_VERSION_BACKGROUND=208 # Show rust version only when in a rust project subdirectory. typeset -g POWERLEVEL9K_RUST_VERSION_PROJECT_ONLY=true # Custom icon. # typeset -g POWERLEVEL9K_RUST_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' ###############[ dotnet_version: .NET version (https://dotnet.microsoft.com) ]################ # .NET version color. typeset -g POWERLEVEL9K_DOTNET_VERSION_FOREGROUND=7 typeset -g POWERLEVEL9K_DOTNET_VERSION_BACKGROUND=5 # Show .NET version only when in a .NET project subdirectory. typeset -g POWERLEVEL9K_DOTNET_VERSION_PROJECT_ONLY=true # Custom icon. 
# typeset -g POWERLEVEL9K_DOTNET_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' #####################[ php_version: php version (https://www.php.net/) ]###################### # PHP version color. typeset -g POWERLEVEL9K_PHP_VERSION_FOREGROUND=0 typeset -g POWERLEVEL9K_PHP_VERSION_BACKGROUND=5 # Show PHP version only when in a PHP project subdirectory. typeset -g POWERLEVEL9K_PHP_VERSION_PROJECT_ONLY=true # Custom icon. # typeset -g POWERLEVEL9K_PHP_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' ##########[ laravel_version: laravel php framework version (https://laravel.com/) ]########### # Laravel version color. typeset -g POWERLEVEL9K_LARAVEL_VERSION_FOREGROUND=1 typeset -g POWERLEVEL9K_LARAVEL_VERSION_BACKGROUND=7 # Custom icon. # typeset -g POWERLEVEL9K_LARAVEL_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' #############[ rbenv: ruby version from rbenv (https://github.com/rbenv/rbenv) ]############## # Rbenv color. typeset -g POWERLEVEL9K_RBENV_FOREGROUND=0 typeset -g POWERLEVEL9K_RBENV_BACKGROUND=1 # Hide ruby version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_RBENV_SOURCES=(shell local global) # If set to false, hide ruby version if it's the same as global: # $(rbenv version-name) == $(rbenv global). typeset -g POWERLEVEL9K_RBENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide ruby version if it's equal to "system". typeset -g POWERLEVEL9K_RBENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_RBENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ####################[ java_version: java version (https://www.java.com/) ]#################### # Java version color. typeset -g POWERLEVEL9K_JAVA_VERSION_FOREGROUND=1 typeset -g POWERLEVEL9K_JAVA_VERSION_BACKGROUND=7 # Show java version only when in a java project subdirectory. typeset -g POWERLEVEL9K_JAVA_VERSION_PROJECT_ONLY=true # Show brief version. typeset -g POWERLEVEL9K_JAVA_VERSION_FULL=false # Custom icon. # typeset -g POWERLEVEL9K_JAVA_VERSION_VISUAL_IDENTIFIER_EXPANSION='⭐' ###[ package: name@version from package.json (https://docs.npmjs.com/files/package.json) ]#### # Package color. typeset -g POWERLEVEL9K_PACKAGE_FOREGROUND=0 typeset -g POWERLEVEL9K_PACKAGE_BACKGROUND=6 # Package format. The following parameters are available within the expansion. # # - P9K_PACKAGE_NAME The value of `name` field in package.json. # - P9K_PACKAGE_VERSION The value of `version` field in package.json. # # typeset -g POWERLEVEL9K_PACKAGE_CONTENT_EXPANSION='${P9K_PACKAGE_NAME//\%/%%}@${P9K_PACKAGE_VERSION//\%/%%}' # Custom icon. # typeset -g POWERLEVEL9K_PACKAGE_VISUAL_IDENTIFIER_EXPANSION='⭐' #######################[ rvm: ruby version from rvm (https://rvm.io) ]######################## # Rvm color. typeset -g POWERLEVEL9K_RVM_FOREGROUND=0 typeset -g POWERLEVEL9K_RVM_BACKGROUND=240 # Don't show @gemset at the end. typeset -g POWERLEVEL9K_RVM_SHOW_GEMSET=false # Don't show ruby- at the front. typeset -g POWERLEVEL9K_RVM_SHOW_PREFIX=false # Custom icon. # typeset -g POWERLEVEL9K_RVM_VISUAL_IDENTIFIER_EXPANSION='⭐' ###########[ fvm: flutter version management (https://github.com/leoafarias/fvm) ]############ # Fvm color. typeset -g POWERLEVEL9K_FVM_FOREGROUND=0 typeset -g POWERLEVEL9K_FVM_BACKGROUND=4 # Custom icon. # typeset -g POWERLEVEL9K_FVM_VISUAL_IDENTIFIER_EXPANSION='⭐' ##########[ luaenv: lua version from luaenv (https://github.com/cehoffman/luaenv) ]########### # Lua color. typeset -g POWERLEVEL9K_LUAENV_FOREGROUND=0 typeset -g POWERLEVEL9K_LUAENV_BACKGROUND=4 # Hide lua version if it doesn't come from one of these sources. 
typeset -g POWERLEVEL9K_LUAENV_SOURCES=(shell local global) # If set to false, hide lua version if it's the same as global: # $(luaenv version-name) == $(luaenv global). typeset -g POWERLEVEL9K_LUAENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide lua version if it's equal to "system". typeset -g POWERLEVEL9K_LUAENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_LUAENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ###############[ jenv: java version from jenv (https://github.com/jenv/jenv) ]################ # Java color. typeset -g POWERLEVEL9K_JENV_FOREGROUND=1 typeset -g POWERLEVEL9K_JENV_BACKGROUND=7 # Hide java version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_JENV_SOURCES=(shell local global) # If set to false, hide java version if it's the same as global: # $(jenv version-name) == $(jenv global). typeset -g POWERLEVEL9K_JENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide java version if it's equal to "system". typeset -g POWERLEVEL9K_JENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_JENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ###########[ plenv: perl version from plenv (https://github.com/tokuhirom/plenv) ]############ # Perl color. typeset -g POWERLEVEL9K_PLENV_FOREGROUND=0 typeset -g POWERLEVEL9K_PLENV_BACKGROUND=4 # Hide perl version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_PLENV_SOURCES=(shell local global) # If set to false, hide perl version if it's the same as global: # $(plenv version-name) == $(plenv global). typeset -g POWERLEVEL9K_PLENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide perl version if it's equal to "system". typeset -g POWERLEVEL9K_PLENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_PLENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ############[ phpenv: php version from phpenv (https://github.com/phpenv/phpenv) ]############ # PHP color. typeset -g POWERLEVEL9K_PHPENV_FOREGROUND=0 typeset -g POWERLEVEL9K_PHPENV_BACKGROUND=5 # Hide php version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_PHPENV_SOURCES=(shell local global) # If set to false, hide php version if it's the same as global: # $(phpenv version-name) == $(phpenv global). typeset -g POWERLEVEL9K_PHPENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide PHP version if it's equal to "system". typeset -g POWERLEVEL9K_PHPENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_PHPENV_VISUAL_IDENTIFIER_EXPANSION='⭐' #######[ scalaenv: scala version from scalaenv (https://github.com/scalaenv/scalaenv) ]####### # Scala color. typeset -g POWERLEVEL9K_SCALAENV_FOREGROUND=0 typeset -g POWERLEVEL9K_SCALAENV_BACKGROUND=1 # Hide scala version if it doesn't come from one of these sources. typeset -g POWERLEVEL9K_SCALAENV_SOURCES=(shell local global) # If set to false, hide scala version if it's the same as global: # $(scalaenv version-name) == $(scalaenv global). typeset -g POWERLEVEL9K_SCALAENV_PROMPT_ALWAYS_SHOW=false # If set to false, hide scala version if it's equal to "system". typeset -g POWERLEVEL9K_SCALAENV_SHOW_SYSTEM=true # Custom icon. # typeset -g POWERLEVEL9K_SCALAENV_VISUAL_IDENTIFIER_EXPANSION='⭐' ##########[ haskell_stack: haskell version from stack (https://haskellstack.org/) ]########### # Haskell color. typeset -g POWERLEVEL9K_HASKELL_STACK_FOREGROUND=0 typeset -g POWERLEVEL9K_HASKELL_STACK_BACKGROUND=3 # Hide haskell version if it doesn't come from one of these sources. 
  #
  # shell: version is set by STACK_YAML
  # local: version is set by stack.yaml up the directory tree
  # global: version is set by the implicit global project (~/.stack/global-project/stack.yaml)
  typeset -g POWERLEVEL9K_HASKELL_STACK_SOURCES=(shell local)
  # If set to false, hide haskell version if it's the same as in the implicit global project.
  typeset -g POWERLEVEL9K_HASKELL_STACK_ALWAYS_SHOW=true
  # Custom icon.
  # typeset -g POWERLEVEL9K_HASKELL_STACK_VISUAL_IDENTIFIER_EXPANSION='⭐'

  ################[ terraform: terraform workspace (https://www.terraform.io) ]#################
  # Don't show terraform workspace if it's literally "default".
  typeset -g POWERLEVEL9K_TERRAFORM_SHOW_DEFAULT=false
  # POWERLEVEL9K_TERRAFORM_CLASSES is an array with even number of elements. The first element
  # in each pair defines a pattern against which the current terraform workspace gets matched.
  # More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
  # that gets matched. If you unset all POWERLEVEL9K_TERRAFORM_*CONTENT_EXPANSION parameters,
  # you'll see this value in your prompt. The second element of each pair in
  # POWERLEVEL9K_TERRAFORM_CLASSES defines the workspace class. Patterns are tried in order. The
  # first match wins.
  #
  # For example, given these settings:
  #
  #   typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
  #     '*prod*'  PROD
  #     '*test*'  TEST
  #     '*'       OTHER)
  #
  # If your current terraform workspace is "project_test", its class is TEST because "project_test"
  # doesn't match the pattern '*prod*' but does match '*test*'.
  #
  # You can define different colors, icons and content expansions for different classes:
  #
  #   typeset -g POWERLEVEL9K_TERRAFORM_TEST_FOREGROUND=2
  #   typeset -g POWERLEVEL9K_TERRAFORM_TEST_BACKGROUND=0
  #   typeset -g POWERLEVEL9K_TERRAFORM_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
  #   typeset -g POWERLEVEL9K_TERRAFORM_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
  typeset -g POWERLEVEL9K_TERRAFORM_CLASSES=(
      # '*prod*'  PROD    # These values are examples that are unlikely
      # '*test*'  TEST    # to match your needs. Customize them as needed.
      '*'         OTHER)
  typeset -g POWERLEVEL9K_TERRAFORM_OTHER_FOREGROUND=4
  typeset -g POWERLEVEL9K_TERRAFORM_OTHER_BACKGROUND=0
  # typeset -g POWERLEVEL9K_TERRAFORM_OTHER_VISUAL_IDENTIFIER_EXPANSION='⭐'

  #############[ kubecontext: current kubernetes context (https://kubernetes.io/) ]#############
  # Show kubecontext only when the command you are typing invokes one of these tools.
  # Tip: Remove the next line to always show kubecontext.
  typeset -g POWERLEVEL9K_KUBECONTEXT_SHOW_ON_COMMAND='kubectl|helm|kubens|kubectx|oc|istioctl|kogito|k9s|helmfile'

  # Kubernetes context classes for the purpose of using different colors, icons and expansions with
  # different contexts.
  #
  # POWERLEVEL9K_KUBECONTEXT_CLASSES is an array with even number of elements. The first element
  # in each pair defines a pattern against which the current kubernetes context gets matched.
  # More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
  # that gets matched. If you unset all POWERLEVEL9K_KUBECONTEXT_*CONTENT_EXPANSION parameters,
  # you'll see this value in your prompt. The second element of each pair in
  # POWERLEVEL9K_KUBECONTEXT_CLASSES defines the context class. Patterns are tried in order. The
  # first match wins.
# # For example, given these settings: # # typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=( # '*prod*' PROD # '*test*' TEST # '*' DEFAULT) # # If your current kubernetes context is "deathray-testing/default", its class is TEST # because "deathray-testing/default" doesn't match the pattern '*prod*' but does match '*test*'. # # You can define different colors, icons and content expansions for different classes: # # typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_FOREGROUND=0 # typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_BACKGROUND=2 # typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_KUBECONTEXT_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <' typeset -g POWERLEVEL9K_KUBECONTEXT_CLASSES=( # '*prod*' PROD # These values are examples that are unlikely # '*test*' TEST # to match your needs. Customize them as needed. '*' DEFAULT) typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_FOREGROUND=7 typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_BACKGROUND=5 # typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐' # Use POWERLEVEL9K_KUBECONTEXT_CONTENT_EXPANSION to specify the content displayed by kubecontext # segment. Parameter expansions are very flexible and fast, too. See reference: # http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion. # # Within the expansion the following parameters are always available: # # - P9K_CONTENT The content that would've been displayed if there was no content # expansion defined. # - P9K_KUBECONTEXT_NAME The current context's name. Corresponds to column NAME in the # output of `kubectl config get-contexts`. # - P9K_KUBECONTEXT_CLUSTER The current context's cluster. Corresponds to column CLUSTER in the # output of `kubectl config get-contexts`. # - P9K_KUBECONTEXT_NAMESPACE The current context's namespace. Corresponds to column NAMESPACE # in the output of `kubectl config get-contexts`. If there is no # namespace, the parameter is set to "default". # - P9K_KUBECONTEXT_USER The current context's user. Corresponds to column AUTHINFO in the # output of `kubectl config get-contexts`. # # If the context points to Google Kubernetes Engine (GKE) or Elastic Kubernetes Service (EKS), # the following extra parameters are available: # # - P9K_KUBECONTEXT_CLOUD_NAME Either "gke" or "eks". # - P9K_KUBECONTEXT_CLOUD_ACCOUNT Account/project ID. # - P9K_KUBECONTEXT_CLOUD_ZONE Availability zone. # - P9K_KUBECONTEXT_CLOUD_CLUSTER Cluster. # # P9K_KUBECONTEXT_CLOUD_* parameters are derived from P9K_KUBECONTEXT_CLUSTER. For example, # if P9K_KUBECONTEXT_CLUSTER is "gke_my-account_us-east1-a_my-cluster-01": # # - P9K_KUBECONTEXT_CLOUD_NAME=gke # - P9K_KUBECONTEXT_CLOUD_ACCOUNT=my-account # - P9K_KUBECONTEXT_CLOUD_ZONE=us-east1-a # - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01 # # If P9K_KUBECONTEXT_CLUSTER is "arn:aws:eks:us-east-1:123456789012:cluster/my-cluster-01": # # - P9K_KUBECONTEXT_CLOUD_NAME=eks # - P9K_KUBECONTEXT_CLOUD_ACCOUNT=123456789012 # - P9K_KUBECONTEXT_CLOUD_ZONE=us-east-1 # - P9K_KUBECONTEXT_CLOUD_CLUSTER=my-cluster-01 typeset -g POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION= # Show P9K_KUBECONTEXT_CLOUD_CLUSTER if it's not empty and fall back to P9K_KUBECONTEXT_NAME. POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${P9K_KUBECONTEXT_CLOUD_CLUSTER:-${P9K_KUBECONTEXT_NAME}}' # Append the current context's namespace if it's not "default". POWERLEVEL9K_KUBECONTEXT_DEFAULT_CONTENT_EXPANSION+='${${:-/$P9K_KUBECONTEXT_NAMESPACE}:#/default}' # Custom prefix. 
  typeset -g POWERLEVEL9K_KUBECONTEXT_PREFIX='at '

  #[ aws: aws profile (https://docs.aws.amazon.com/cli/latest/userguide/cli-configure-profiles.html) ]#
  # Show aws only when the command you are typing invokes one of these tools.
  # Tip: Remove the next line to always show aws.
  typeset -g POWERLEVEL9K_AWS_SHOW_ON_COMMAND='aws|awless|terraform|pulumi|terragrunt'

  # POWERLEVEL9K_AWS_CLASSES is an array with even number of elements. The first element
  # in each pair defines a pattern against which the current AWS profile gets matched.
  # More specifically, it's P9K_CONTENT prior to the application of context expansion (see below)
  # that gets matched. If you unset all POWERLEVEL9K_AWS_*CONTENT_EXPANSION parameters,
  # you'll see this value in your prompt. The second element of each pair in
  # POWERLEVEL9K_AWS_CLASSES defines the profile class. Patterns are tried in order. The
  # first match wins.
  #
  # For example, given these settings:
  #
  #   typeset -g POWERLEVEL9K_AWS_CLASSES=(
  #     '*prod*'  PROD
  #     '*test*'  TEST
  #     '*'       DEFAULT)
  #
  # If your current AWS profile is "company_test", its class is TEST
  # because "company_test" doesn't match the pattern '*prod*' but does match '*test*'.
  #
  # You can define different colors, icons and content expansions for different classes:
  #
  #   typeset -g POWERLEVEL9K_AWS_TEST_FOREGROUND=28
  #   typeset -g POWERLEVEL9K_AWS_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐'
  #   typeset -g POWERLEVEL9K_AWS_TEST_CONTENT_EXPANSION='> ${P9K_CONTENT} <'
  typeset -g POWERLEVEL9K_AWS_CLASSES=(
      # '*prod*'  PROD    # These values are examples that are unlikely
      # '*test*'  TEST    # to match your needs. Customize them as needed.
      '*'         DEFAULT)
  typeset -g POWERLEVEL9K_AWS_DEFAULT_FOREGROUND=7
  typeset -g POWERLEVEL9K_AWS_DEFAULT_BACKGROUND=1
  # typeset -g POWERLEVEL9K_AWS_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐'

  #[ aws_eb_env: aws elastic beanstalk environment (https://aws.amazon.com/elasticbeanstalk/) ]#
  # AWS Elastic Beanstalk environment color.
  typeset -g POWERLEVEL9K_AWS_EB_ENV_FOREGROUND=2
  typeset -g POWERLEVEL9K_AWS_EB_ENV_BACKGROUND=0
  # Custom icon.
  # typeset -g POWERLEVEL9K_AWS_EB_ENV_VISUAL_IDENTIFIER_EXPANSION='⭐'

  ##########[ azure: azure account name (https://docs.microsoft.com/en-us/cli/azure) ]##########
  # Show azure only when the command you are typing invokes one of these tools.
  # Tip: Remove the next line to always show azure.
  typeset -g POWERLEVEL9K_AZURE_SHOW_ON_COMMAND='az|terraform|pulumi|terragrunt'
  # Azure account name color.
  typeset -g POWERLEVEL9K_AZURE_FOREGROUND=7
  typeset -g POWERLEVEL9K_AZURE_BACKGROUND=4
  # Custom icon.
  # typeset -g POWERLEVEL9K_AZURE_VISUAL_IDENTIFIER_EXPANSION='⭐'

  ##########[ gcloud: google cloud account and project (https://cloud.google.com/) ]###########
  # Show gcloud only when the command you are typing invokes one of these tools.
  # Tip: Remove the next line to always show gcloud.
  typeset -g POWERLEVEL9K_GCLOUD_SHOW_ON_COMMAND='gcloud|gcs'
  # Google cloud color.
  typeset -g POWERLEVEL9K_GCLOUD_FOREGROUND=7
  typeset -g POWERLEVEL9K_GCLOUD_BACKGROUND=4

  # Google cloud format. Change the value of POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION and/or
  # POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION if the default is too verbose or not informative
  # enough. You can use the following parameters in the expansions. Each of them corresponds to the
  # output of `gcloud` tool.
  #
  # Parameter                | Source
  # -------------------------|--------------------------------------------------------------------
  # P9K_GCLOUD_CONFIGURATION | gcloud config configurations list --format='value(name)'
  # P9K_GCLOUD_ACCOUNT       | gcloud config get-value account
  # P9K_GCLOUD_PROJECT_ID    | gcloud config get-value project
  # P9K_GCLOUD_PROJECT_NAME  | gcloud projects describe $P9K_GCLOUD_PROJECT_ID --format='value(name)'
  #
  # Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced with '%%'.
  #
  # Obtaining project name requires sending a request to Google servers. This can take a long time
  # and even fail. When project name is unknown, P9K_GCLOUD_PROJECT_NAME is not set and gcloud
  # prompt segment is in state PARTIAL. When project name gets known, P9K_GCLOUD_PROJECT_NAME gets
  # set and gcloud prompt segment transitions to state COMPLETE.
  #
  # You can customize the format, icon and colors of gcloud segment separately for states PARTIAL
  # and COMPLETE. You can also hide gcloud in state PARTIAL by setting
  # POWERLEVEL9K_GCLOUD_PARTIAL_VISUAL_IDENTIFIER_EXPANSION and
  # POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION to empty.
  typeset -g POWERLEVEL9K_GCLOUD_PARTIAL_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_ID//\%/%%}'
  typeset -g POWERLEVEL9K_GCLOUD_COMPLETE_CONTENT_EXPANSION='${P9K_GCLOUD_PROJECT_NAME//\%/%%}'

  # Send a request to Google (by means of `gcloud projects describe ...`) to obtain project name
  # this often. Negative value disables periodic polling. In this mode project name is retrieved
  # only when the current configuration, account or project id changes.
  typeset -g POWERLEVEL9K_GCLOUD_REFRESH_PROJECT_NAME_SECONDS=60

  # Custom icon.
  # typeset -g POWERLEVEL9K_GCLOUD_VISUAL_IDENTIFIER_EXPANSION='⭐'

  #[ google_app_cred: google application credentials (https://cloud.google.com/docs/authentication/production) ]#
  # Show google_app_cred only when the command you are typing invokes one of these tools.
  # Tip: Remove the next line to always show google_app_cred.
  typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_SHOW_ON_COMMAND='terraform|pulumi|terragrunt'

  # Google application credentials classes for the purpose of using different colors, icons and
  # expansions with different credentials.
  #
  # POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES is an array with even number of elements. The first
  # element in each pair defines a pattern against which the current Google application
  # credentials get matched. More specifically, it's P9K_CONTENT prior to the application of
  # context expansion (see below) that gets matched. If you unset all
  # POWERLEVEL9K_GOOGLE_APP_CRED_*CONTENT_EXPANSION parameters, you'll see this value in your
  # prompt. The second element of each pair in POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES defines the
  # credentials class. Patterns are tried in order. The first match wins.
  #
  # For example, given these settings:
  #
  #   typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=(
  #     '*:*prod*:*'  PROD
  #     '*:*test*:*'  TEST
  #     '*'           DEFAULT)
  #
  # If your current Google application credentials is "service_account deathray-testing x@y.com",
  # its class is TEST because it doesn't match the pattern '* *prod* *' but does match '* *test* *'.
# # You can define different colors, icons and content expansions for different classes: # # typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_FOREGROUND=28 # typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_VISUAL_IDENTIFIER_EXPANSION='⭐' # typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_TEST_CONTENT_EXPANSION='$P9K_GOOGLE_APP_CRED_PROJECT_ID' typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_CLASSES=( # '*:*prod*:*' PROD # These values are examples that are unlikely # '*:*test*:*' TEST # to match your needs. Customize them as needed. '*' DEFAULT) typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_FOREGROUND=7 typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_BACKGROUND=4 # typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_VISUAL_IDENTIFIER_EXPANSION='⭐' # Use POWERLEVEL9K_GOOGLE_APP_CRED_CONTENT_EXPANSION to specify the content displayed by # google_app_cred segment. Parameter expansions are very flexible and fast, too. See reference: # http://zsh.sourceforge.net/Doc/Release/Expansion.html#Parameter-Expansion. # # You can use the following parameters in the expansion. Each of them corresponds to one of the # fields in the JSON file pointed to by GOOGLE_APPLICATION_CREDENTIALS. # # Parameter | JSON key file field # ---------------------------------+--------------- # P9K_GOOGLE_APP_CRED_TYPE | type # P9K_GOOGLE_APP_CRED_PROJECT_ID | project_id # P9K_GOOGLE_APP_CRED_CLIENT_EMAIL | client_email # # Note: ${VARIABLE//\%/%%} expands to ${VARIABLE} with all occurrences of '%' replaced by '%%'. typeset -g POWERLEVEL9K_GOOGLE_APP_CRED_DEFAULT_CONTENT_EXPANSION='${P9K_GOOGLE_APP_CRED_PROJECT_ID//\%/%%}' ###############################[ public_ip: public IP address ]############################### # Public IP color. typeset -g POWERLEVEL9K_PUBLIC_IP_FOREGROUND=7 typeset -g POWERLEVEL9K_PUBLIC_IP_BACKGROUND=0 # Custom icon. # typeset -g POWERLEVEL9K_PUBLIC_IP_VISUAL_IDENTIFIER_EXPANSION='⭐' ########################[ vpn_ip: virtual private network indicator ]######################### # VPN IP color. typeset -g POWERLEVEL9K_VPN_IP_FOREGROUND=0 typeset -g POWERLEVEL9K_VPN_IP_BACKGROUND=6 # When on VPN, show just an icon without the IP address. # Tip: To display the private IP address when on VPN, remove the next line. typeset -g POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION= # Regular expression for the VPN network interface. Run `ifconfig` or `ip -4 a show` while on VPN # to see the name of the interface. typeset -g POWERLEVEL9K_VPN_IP_INTERFACE='(gpd|wg|(.*tun)|tailscale)[0-9]*' # If set to true, show one segment per matching network interface. If set to false, show only # one segment corresponding to the first matching network interface. # Tip: If you set it to true, you'll probably want to unset POWERLEVEL9K_VPN_IP_CONTENT_EXPANSION. typeset -g POWERLEVEL9K_VPN_IP_SHOW_ALL=false # Custom icon. # typeset -g POWERLEVEL9K_VPN_IP_VISUAL_IDENTIFIER_EXPANSION='⭐' ###########[ ip: ip address and bandwidth usage for a specified network interface ]########### # IP color. 
typeset -g POWERLEVEL9K_IP_BACKGROUND=4 typeset -g POWERLEVEL9K_IP_FOREGROUND=0 # The following parameters are accessible within the expansion: # # Parameter | Meaning # ----------------------+--------------- # P9K_IP_IP | IP address # P9K_IP_INTERFACE | network interface # P9K_IP_RX_BYTES | total number of bytes received # P9K_IP_TX_BYTES | total number of bytes sent # P9K_IP_RX_RATE | receive rate (since last prompt) # P9K_IP_TX_RATE | send rate (since last prompt) typeset -g POWERLEVEL9K_IP_CONTENT_EXPANSION='${P9K_IP_RX_RATE:+⇣$P9K_IP_RX_RATE }${P9K_IP_TX_RATE:+⇡$P9K_IP_TX_RATE }$P9K_IP_IP' # Show information for the first network interface whose name matches this regular expression. # Run `ifconfig` or `ip -4 a show` to see the names of all network interfaces. typeset -g POWERLEVEL9K_IP_INTERFACE='[ew].*' # Custom icon. # typeset -g POWERLEVEL9K_IP_VISUAL_IDENTIFIER_EXPANSION='⭐' #########################[ proxy: system-wide http/https/ftp proxy ]########################## # Proxy color. typeset -g POWERLEVEL9K_PROXY_FOREGROUND=4 typeset -g POWERLEVEL9K_PROXY_BACKGROUND=0 # Custom icon. # typeset -g POWERLEVEL9K_PROXY_VISUAL_IDENTIFIER_EXPANSION='⭐' ################################[ battery: internal battery ]################################# # Show battery in red when it's below this level and not connected to power supply. typeset -g POWERLEVEL9K_BATTERY_LOW_THRESHOLD=20 typeset -g POWERLEVEL9K_BATTERY_LOW_FOREGROUND=1 # Show battery in green when it's charging or fully charged. typeset -g POWERLEVEL9K_BATTERY_{CHARGING,CHARGED}_FOREGROUND=2 # Show battery in yellow when it's discharging. typeset -g POWERLEVEL9K_BATTERY_DISCONNECTED_FOREGROUND=3 # Battery pictograms going from low to high level of charge. typeset -g POWERLEVEL9K_BATTERY_STAGES='\uf58d\uf579\uf57a\uf57b\uf57c\uf57d\uf57e\uf57f\uf580\uf581\uf578' # Don't show the remaining time to charge/discharge. typeset -g POWERLEVEL9K_BATTERY_VERBOSE=false typeset -g POWERLEVEL9K_BATTERY_BACKGROUND=0 #####################################[ wifi: wifi speed ]##################################### # WiFi color. typeset -g POWERLEVEL9K_WIFI_FOREGROUND=0 typeset -g POWERLEVEL9K_WIFI_BACKGROUND=4 # Custom icon. # typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='⭐' # Use different colors and icons depending on signal strength ($P9K_WIFI_BARS). # # # Wifi colors and icons for different signal strength levels (low to high). # typeset -g my_wifi_fg=(0 0 0 0 0) # <-- change these values # typeset -g my_wifi_icon=('WiFi' 'WiFi' 'WiFi' 'WiFi' 'WiFi') # <-- change these values # # typeset -g POWERLEVEL9K_WIFI_CONTENT_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}$P9K_WIFI_LAST_TX_RATE Mbps' # typeset -g POWERLEVEL9K_WIFI_VISUAL_IDENTIFIER_EXPANSION='%F{${my_wifi_fg[P9K_WIFI_BARS+1]}}${my_wifi_icon[P9K_WIFI_BARS+1]}' # # The following parameters are accessible within the expansions: # # Parameter | Meaning # ----------------------+--------------- # P9K_WIFI_SSID | service set identifier, a.k.a. network name # P9K_WIFI_LINK_AUTH | authentication protocol such as "wpa2-psk" or "none"; empty if unknown # P9K_WIFI_LAST_TX_RATE | wireless transmit rate in megabits per second # P9K_WIFI_RSSI | signal strength in dBm, from -120 to 0 # P9K_WIFI_NOISE | noise in dBm, from -120 to 0 # P9K_WIFI_BARS | signal strength in bars, from 0 to 4 (derived from P9K_WIFI_RSSI and P9K_WIFI_NOISE) ####################################[ time: current time ]#################################### # Current time color. 
typeset -g POWERLEVEL9K_TIME_FOREGROUND=0 typeset -g POWERLEVEL9K_TIME_BACKGROUND=7 # Format for the current time: 09:51:02. See `man 3 strftime`. typeset -g POWERLEVEL9K_TIME_FORMAT='%D{%I:%M:%S %p}' # If set to true, time will update when you hit enter. This way prompts for the past # commands will contain the start times of their commands as opposed to the default # behavior where they contain the end times of their preceding commands. typeset -g POWERLEVEL9K_TIME_UPDATE_ON_COMMAND=false # Custom icon. # typeset -g POWERLEVEL9K_TIME_VISUAL_IDENTIFIER_EXPANSION='⭐' # Custom prefix. typeset -g POWERLEVEL9K_TIME_PREFIX='at ' # Example of a user-defined prompt segment. Function prompt_example will be called on every # prompt if `example` prompt segment is added to POWERLEVEL9K_LEFT_PROMPT_ELEMENTS or # POWERLEVEL9K_RIGHT_PROMPT_ELEMENTS. It displays an icon and yellow text on red background # greeting the user. # # Type `p10k help segment` for documentation and a more sophisticated example. function prompt_example() { p10k segment -b 1 -f 3 -i '⭐' -t 'hello, %n' } # User-defined prompt segments may optionally provide an instant_prompt_* function. Its job # is to generate the prompt segment for display in instant prompt. See # https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt. # # Powerlevel10k will call instant_prompt_* at the same time as the regular prompt_* function # and will record all `p10k segment` calls it makes. When displaying instant prompt, Powerlevel10k # will replay these calls without actually calling instant_prompt_*. It is imperative that # instant_prompt_* always makes the same `p10k segment` calls regardless of environment. If this # rule is not observed, the content of instant prompt will be incorrect. # # Usually, you should either not define instant_prompt_* or simply call prompt_* from it. If # instant_prompt_* is not defined for a segment, the segment won't be shown in instant prompt. function instant_prompt_example() { # Since prompt_example always makes the same `p10k segment` calls, we can call it from # instant_prompt_example. This will give us the same `example` prompt segment in the instant # and regular prompts. prompt_example } # User-defined prompt segments can be customized the same way as built-in segments. typeset -g POWERLEVEL9K_EXAMPLE_FOREGROUND=3 typeset -g POWERLEVEL9K_EXAMPLE_BACKGROUND=1 # typeset -g POWERLEVEL9K_EXAMPLE_VISUAL_IDENTIFIER_EXPANSION='⭐' # Transient prompt works similarly to the builtin transient_rprompt option. It trims down prompt # when accepting a command line. Supported values: # # - off: Don't change prompt when accepting a command line. # - always: Trim down prompt when accepting a command line. # - same-dir: Trim down prompt when accepting a command line unless this is the first command # typed after changing current working directory. typeset -g POWERLEVEL9K_TRANSIENT_PROMPT=always # Instant prompt mode. # # - off: Disable instant prompt. Choose this if you've tried instant prompt and found # it incompatible with your zsh configuration files. # - quiet: Enable instant prompt and don't print warnings when detecting console output # during zsh initialization. Choose this if you've read and understood # https://github.com/romkatv/powerlevel10k/blob/master/README.md#instant-prompt. # - verbose: Enable instant prompt and print a warning when detecting console output during # zsh initialization. 
Choose this if you've never tried instant prompt, haven't # seen the warning, or if you are unsure what this all means. typeset -g POWERLEVEL9K_INSTANT_PROMPT=verbose # Hot reload allows you to change POWERLEVEL9K options after Powerlevel10k has been initialized. # For example, you can type POWERLEVEL9K_BACKGROUND=red and see your prompt turn red. Hot reload # can slow down prompt by 1-2 milliseconds, so it's better to keep it turned off unless you # really need it. typeset -g POWERLEVEL9K_DISABLE_HOT_RELOAD=true # If p10k is already loaded, reload configuration. # This works even with POWERLEVEL9K_DISABLE_HOT_RELOAD=true. (( ! $+functions[p10k] )) || p10k reload } # Tell `p10k configure` which file it should overwrite. typeset -g POWERLEVEL9K_CONFIG_FILE=${${(%):-%x}:a} (( ${#p10k_config_opts} )) && setopt ${p10k_config_opts[@]} 'builtin' 'unset' 'p10k_config_opts'
import { Component, OnInit, Input, Inject, OnDestroy, ViewChild, TemplateRef } from "@angular/core"; import { TableColumn } from "@swimlane/ngx-datatable"; import { SparqlQueryResult } from "../sparql-models"; import { ISparqlServiceInjectionToken, ISparqlService } from "../sparql.service.contract"; import { Subscription } from "rxjs"; @Component({ selector: "app-result-viewer", templateUrl: "./result-viewer.component.html", styleUrls: ["./result-viewer.component.css"] }) export class ResultViewerComponent implements OnInit, OnDestroy { private _sparqlResult: SparqlQueryResult; private sparqlResultSubscription: Subscription; public constructor(@Inject(ISparqlServiceInjectionToken) private sparqlService: ISparqlService) { } @ViewChild("variableValueCellTemplate", { static: false }) public variableValueCellTemplate: TemplateRef<any>; public rows: any[]; public columns: TableColumn[]; get sparqlResult() { return this._sparqlResult; } set sparqlResult(value: SparqlQueryResult) { value = value || SparqlQueryResult.Empty; this._sparqlResult = value; if (!this._sparqlResult.records) { this.rows = []; this.columns = []; } else { this.columns = value.variables.map((varName): TableColumn => { return { name: varName, prop: "bindings." + varName, cellTemplate: this.variableValueCellTemplate }; }); this.rows = value.records; } } public ngOnInit() { this.sparqlResultSubscription = this.sparqlService.currentResult.subscribe(value => { this.sparqlResult = value; }); } public ngOnDestroy(): void { this.sparqlResultSubscription.unsubscribe(); } }
package au.gov.ga.geodesy.igssitelog.domain.model;

import java.time.Instant;

import javax.persistence.Column;
import javax.persistence.Embeddable;

import org.apache.commons.lang.builder.EqualsBuilder;
import org.apache.commons.lang.builder.HashCodeBuilder;

/**
 * http://sopac.ucsd.edu/ns/geodesy/doc/igsSiteLog/equipment/2004/baseEquipmentLib.xsd:baseSensorEquipmentType.effectiveDates
 *
 * Note: this is an attempt to interpret date values as java dates, rather than as strings as
 * defined in the SOPAC schema (feel free to revert to java strings, if this starts to cause
 * trouble).
 */
@Embeddable
public class EffectiveDates {

    @Column(name = "EFFECTIVE_FROM")
    private Instant from;

    @Column(name = "EFFECTIVE_TO")
    private Instant to;

    public EffectiveDates() {
    }

    public EffectiveDates(Instant from) {
        this(from, null);
    }

    public EffectiveDates(Instant from, Instant to) {
        setFrom(from);
        setTo(to);
    }

    public Instant getFrom() {
        return from;
    }

    public void setFrom(Instant from) {
        this.from = from;
    }

    public Instant getTo() {
        return to;
    }

    public void setTo(Instant to) {
        this.to = to;
    }

    @Override
    public boolean equals(Object o) {
        if (this == o) return true;
        if (o == null || getClass() != o.getClass()) return false;
        EffectiveDates that = (EffectiveDates) o;
        return new EqualsBuilder()
            .append(getFrom(), that.getFrom())
            .append(getTo(), that.getTo())
            .isEquals();
    }

    // hashCode kept consistent with equals, which compares both dates.
    @Override
    public int hashCode() {
        return new HashCodeBuilder()
            .append(getFrom())
            .append(getTo())
            .toHashCode();
    }
}
#!/bin/bash usage() { echo "Usage: $0 [-c <channel name>] [-g <orgs of peers>] [-n <chaincode name>] [-v <chaincode version>]" 1>&2 exit 1 } while getopts ":c:n:v:g:" o; do case "${o}" in c) c=${OPTARG} ;; n) n=${OPTARG} ;; v) v=${OPTARG} ;; g) g=${OPTARG} ;; *) usage ;; esac done shift $((OPTIND - 1)) if [ -z "${c}" ] || [ -z "${n}" ] || [ -z "${v}" ] || [ -z "${g}" ]; then usage fi echo "create channel channelID ${c} chaincodeName ${n} chaincodeVersion ${v}" # init config source $(dirname "$0")/env.sh PEER_ORGS=($g) CHANNEL_NAME=${c} CHANNEL_TX_FILE=$DATA/$CHANNEL_NAME.tx QUERY_TIMEOUT=30 # clone sourecode cd $GOPATH/src/github.com/deevotech rm -rf sc-chaincode.deevo.io git clone https://github.com/deevotech/sc-chaincode.deevo.io # install chaincode on peer0-org1, peer0-org2 for ORG in ${PEER_ORGS[*]}; do initPeerVars $ORG 0 echo "Install for $PEER_HOST ..." echo $ORDERER_CONN_ARGS $GOPATH/src/github.com/hyperledger/fabric/.build/bin/peer chaincode install -n $n -v $v -p github.com/deevotech/sc-chaincode.deevo.io/food-supplychain done $GOPATH/src/github.com/hyperledger/fabric/.build/bin/peer chaincode list --installed -C $CHANNEL_NAME # instantiate chaincode initPeerVars ${PEER_ORGS[0]} 0 echo $ORDERER_CONN_ARGS echo "Instantiating chaincode on $PEER_HOST ..." $GOPATH/src/github.com/hyperledger/fabric/.build/bin/peer chaincode instantiate -C $CHANNEL_NAME -n ${n} -v ${v} -c '{"Args":["init"]}' $ORDERER_CONN_ARGS sleep 10 JSON='{"traceable":[{"objectType":"org","id":"org_1","name":"org 1","content":"address 1","parent":""},{"objectType":"party","id":"party_1","name":"party 1","content":"","parent":"org_1"},{"objectType":"party","id":"party_2","name":"party 2","content":"","parent":"org_1"},{"objectType":"location","id":"location_1","name":"location 1","content":"","parent":"party_1"},{"objectType":"location","id":"location_2","name":"location 2","content":"","parent":"party_2"},{"objectType":"product","id":"product_1","name":"product 1","content":"","parent":"product_1"},{"objectType":"product","id":"product_2","name":"product 2","content":"","parent":"product_2"}],"auditors":[{"objectType":"auditor","id":"Auditor_1","name":"Auditor 1","content":""}]}' $GOPATH/src/github.com/hyperledger/fabric/.build/bin/peer chaincode invoke -C $CHANNEL_NAME -n ${n} -c '{"Args":["initOrgData", "{\"traceable\":[{\"objectType\":\"org\",\"id\":\"org_1\",\"name\":\"org 1\",\"content\":\"address 1\",\"parent\":\"\"},{\"objectType\":\"party\",\"id\":\"party_1\",\"name\":\"party 1\",\"content\":\"\",\"parent\":\"org_1\"},{\"objectType\":\"party\",\"id\":\"party_2\",\"name\":\"party 2\",\"content\":\"\",\"parent\":\"org_1\"},{\"objectType\":\"location\",\"id\":\"location_1\",\"name\":\"location 1\",\"content\":\"\",\"parent\":\"party_1\"},{\"objectType\":\"location\",\"id\":\"location_2\",\"name\":\"location 2\",\"content\":\"\",\"parent\":\"party_2\"},{\"objectType\":\"product\",\"id\":\"product_1\",\"name\":\"product 1\",\"content\":\"\",\"parent\":\"product_1\"},{\"objectType\":\"product\",\"id\":\"product_2\",\"name\":\"product 2\",\"content\":\"\",\"parent\":\"product_2\"}],\"auditors\":[{\"objectType\":\"auditor\",\"id\":\"Auditor_1\",\"name\":\"Auditor 1\",\"content\":\"\"}]}"]}' $ORDERER_CONN_ARGS
package com.wyp.materialqqlite.qqclient.protocol.protocoldata;

public class MessageSender {
    public static final int BUDDY = 0;
    public static final int GROUP = 1;
    public static final int SESS = 2;
    public static final int SYSTEM = 3;

    public int m_nType;          // type of the message sender
    public int m_nGroupCode;     // group code
    public int m_nQQUin;         // buddy Uin
    public String m_strName;     // buddy nickname or group name
    public Object m_objLastMsg;  // last message
    public int m_nUnreadMsgCnt;  // number of unread messages
}
#!/bin/bash # Run MODE within singularity container outdir=$1 mode PYTHON_NUMPY PYTHON_NUMPY ${outdir}/MODEConfig -outdir ${outdir}
#!/bin/bash DIR="$( cd "$( dirname "${BASH_SOURCE[0]}" )" >/dev/null 2>&1 && pwd )" cd $DIR && COMPOSE_DOCKER_CLI_BUILD=1 DOCKER_BUILDKIT=1 docker-compose -p datahub up --build
print('hello world this is my first program !', end='****') print('i am batman') print('this is so amazing !!!', end='______') print('hello !!!!', end='\n') print('this is in next line ')
'use strict'; const webpack = require('webpack'); const path = require('path'); module.exports = { mode: 'production', entry: { Web3Controller: './dist/Web3Controller.js', Web3UITools: './dist/UITools.js', Web3Bundle: './dist/bundle.js' }, output: { path: path.resolve(__dirname, 'dist'), filename: '[name].bundle.js', library: '[name]', libraryTarget: 'umd', libraryExport: 'default' }, optimization: { usedExports: true, sideEffects: true }, resolve: { fallback: { fs: false, path: false, http: require.resolve('stream-http'), url: require.resolve('url/'), https: require.resolve('https-browserify'), zlib: require.resolve('browserify-zlib'), assert: require.resolve('assert/'), stream: require.resolve('stream-browserify'), os: require.resolve('os-browserify'), buffer: require.resolve('buffer'), crypto: require.resolve('crypto-browserify') } }, plugins: [ new webpack.ProvidePlugin({ $: 'jquery', jQuery: 'jquery', 'window.jQuery': 'jquery' }), new webpack.ProvidePlugin({ process: 'process/browser' }), new webpack.ProvidePlugin({ Buffer: ['buffer', 'Buffer'] }) ], module: { rules: [ { test: /\.css$/, use: ['style-loader', 'css-loader'] }, { test: require.resolve('jquery'), loader: 'expose-loader', options: { exposes: ['$', 'jQuery'] } } ] }, target: 'web' };
#!/usr/bin/env bash cp ../README.md docs/index.md cp -r ../_readme docs/ cp ../CONTRIBUTING.md docs/CONTRIBUTING.md cp ../LICENSE docs/LICENSE.md python autogen.py mkdocs serve
DOCKER_CONTENTS = """ #################################### CATKIN #################################### RUN mkdir -p ${HOME}/catkin_ws/src && ln -s ${HOME}/%s ${HOME}/catkin_ws/src/. RUN cd ${HOME}/catkin_ws \\ && apt-get -o Acquire::ForceIPv4=true update \\ && /bin/bash -c "source /opt/ros/${ROS_DISTRO}/setup.bash && rosdep update && rosdep install --as-root apt:false --from-paths src --ignore-src -r -y" \\ && apt-get clean \\ && rm -rf /var/lib/apt/lists/* \\ && /bin/bash -c "source /opt/ros/${ROS_DISTRO}/setup.bash && %s" RUN echo "source ~/catkin_ws/devel/setup.bash" >> ${HOME}/.bashrc """ build_string = {'catkin_make': 'catkin_make', 'catkin_build': 'catkin build'} def write(DOCKER_FILE, name, build_method): with open(DOCKER_FILE, "a") as dockerfile: dockerfile.write(DOCKER_CONTENTS % (name, build_string[build_method])) return
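A minimal usage sketch for the helper above; the file name, repository name, and build method used here are hypothetical and only illustrate the call.

write("Dockerfile", "my_ros_pkg", "catkin_build")
# Appends RUN instructions that symlink ~/my_ros_pkg into ~/catkin_ws/src,
# resolve dependencies with rosdep, and build the workspace with `catkin build`.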
package com.limpoxe.fairy.manager; /** *use PluginStatusChangeListener instead */ @Deprecated public interface PluginCallback { public static final String ACTION_PLUGIN_CHANGED = "com.limpoxe.fairy.action_plugin_changed"; public static final String EXTRA_TYPE = "type"; public static final String EXTRA_ID = "id"; public static final String EXTRA_VERSION = "version"; public static final String EXTRA_RESULT_CODE = "code"; public static final String EXTRA_SRC = "src"; public static final String TYPE_INSTALL = "install"; public static final String TYPE_REMOVE = "remove"; public static final String TYPE_START = "start"; public static final String TYPE_STOP = "stop"; void onInstall(int result, String packageName, String version, String src); void onRemove(String packageName, int code); void onStart(String packageName); void onStop(String packageName); }
import json import logging from copy import deepcopy from urllib3 import Retry from binascii import unhexlify try: from json.decoder import JSONDecodeError except ImportError: JSONDecodeError = ValueError import requests import datetime as dt from dateutil import parser from future.utils import raise_from from requests.adapters import HTTPAdapter from urllib3.exceptions import InsecureRequestWarning from polyswarm_api import settings, exceptions logger = logging.getLogger(__name__) class PolyswarmSession(requests.Session): def __init__(self, key, retries, user_agent=settings.DEFAULT_USER_AGENT, verify=True, **kwargs): super(PolyswarmSession, self).__init__(**kwargs) logger.debug('Creating PolyswarmHTTP instance') self.requests_retry_session(retries=retries) if not verify: logger.warn('Disabling TLS verification for this session.') requests.packages.urllib3.disable_warnings(category=InsecureRequestWarning) self.verify = verify if key: self.set_auth(key) if user_agent: self.set_user_agent(user_agent) def requests_retry_session(self, retries=settings.DEFAULT_RETRIES, backoff_factor=settings.DEFAULT_BACKOFF, status_forcelist=settings.DEFAULT_RETRY_CODES): retry = Retry( total=retries, read=retries, connect=retries, backoff_factor=backoff_factor, status_forcelist=status_forcelist, ) adapter = HTTPAdapter(max_retries=retry) self.mount('http://', adapter) self.mount('https://', adapter) def set_auth(self, key): if key: self.headers.update({'Authorization': key}) else: self.headers.pop('Authorization', None) def set_user_agent(self, ua): if ua: self.headers.update({'User-Agent': ua}) else: self.headers.pop('User-Agent', None) class RequestParamsEncoder(json.JSONEncoder): def default(self, obj): try: return json.JSONEncoder.default(self, obj) except Exception: return str(obj) class PolyswarmRequest(object): """This class holds a requests-compatible dictionary and extra information we need to parse the response.""" def __init__(self, api_instance, request_parameters, key=None, result_parser=None, **kwargs): logger.debug('Creating PolyswarmRequest instance.\nRequest parameters: %s\nResult parser: %s', request_parameters, result_parser.__name__ if result_parser else 'No result parser') self.api_instance = api_instance # we should not access the api_instance session directly, but provide as a # parameter in the constructor, but this will do for the moment self.session = self.api_instance.session or PolyswarmSession(key, retries=settings.DEFAULT_RETRIES) self.timeout = self.api_instance.timeout or settings.DEFAULT_HTTP_TIMEOUT self.request_parameters = request_parameters self.result_parser = result_parser self.raw_result = None self.status_code = None self.status = None self.errors = None self._result = None self._paginated = False self.total = None self.limit = None self.offset = None self.order_by = None self.direction = None self.has_more = None self.parser_kwargs = kwargs def result(self): if self._paginated: return self.consume_results() else: return self._result def execute(self): logger.debug('Executing request.') self.request_parameters.setdefault('timeout', self.timeout) if self.result_parser and not issubclass(self.result_parser, BaseJsonResource): self.request_parameters.setdefault('stream', True) self.raw_result = self.session.request(**self.request_parameters) logger.debug('Request returned code %s', self.raw_result.status_code) self.parse_result(self.raw_result) return self def _bad_status_message(self): request_parameters = json.dumps(self.request_parameters, indent=4, sort_keys=True, 
cls=RequestParamsEncoder) message = "Error when running the request:\n{}\n" \ "Return code: {}\n" \ "Message: {}".format(request_parameters, self.status_code, self._result) if self.errors: message = '{}\nErrors:\n{}'.format(message, '\n'.join(str(error) for error in self.errors)) return message def _extract_json_body(self, result): self.json = result.json() self._result = self.json.get('result') self.status = self.json.get('status') self.errors = self.json.get('errors') def parse_result(self, result): self.status_code = result.status_code if self.request_parameters['method'] == 'HEAD': logger.debug('HEAD method does not return results, setting it to the status code.') self._result = self.status_code if not self.result_parser: logger.debug('Result parser is not defined, skipping parsing results.') return logger.debug('Parsing request results.') try: if self.status_code // 100 != 2: self._extract_json_body(result) if self.status_code == 429: message = '{} This may mean you need to purchase a ' \ 'larger package, or that you have exceeded ' \ 'rate limits. If you continue to have issues, ' \ 'please contact us at <EMAIL>.'.format(self._result) raise exceptions.UsageLimitsExceededException(self, message) elif self.status_code == 404: raise exceptions.NotFoundException(self, self._result) else: raise exceptions.RequestException(self, self._bad_status_message()) elif self.status_code == 204: raise exceptions.NoResultsException(self, 'The request returned no results.') elif issubclass(self.result_parser, BaseJsonResource): self._extract_json_body(result) if 'has_more' in self.json: # has_more will always be present, being either False or True self._paginated = True self.total = self.json.get('total') self.limit = self.json.get('limit') self.offset = self.json.get('offset') self.order_by = self.json.get('order_by') self.direction = self.json.get('direction') self.has_more = self.json.get('has_more') if 'result' in self.json: result = self.json['result'] elif 'results' in self.json: result = self.json['results'] else: raise exceptions.RequestException( self, 'The response standard must contain either the "result" or "results" key.' 
) if isinstance(result, list): self._result = self.result_parser.parse_result_list(self.api_instance, result, **self.parser_kwargs) else: self._result = self.result_parser.parse_result(self.api_instance, result, **self.parser_kwargs) else: self._result = self.result_parser.parse_result(self.api_instance, result, **self.parser_kwargs) except JSONDecodeError as e: if self.status_code == 404: raise raise_from(exceptions.NotFoundException(self, 'The requested endpoint does not exist.'), e) else: err_msg = 'Server returned non-JSON response [{}]: {}'.format(self.status_code, result) raise raise_from(exceptions.RequestException(self, err_msg), e) def __iter__(self): return self.consume_results() def consume_results(self): # StopIteration is deprecated # As per https://www.python.org/dev/peps/pep-0479/ # We simply return upon termination condition request = self while True: # consume items items from list if iterable # of yield the single result if not try: for result in request._result: yield result except TypeError: yield request._result # if the result is not a list, there is not next page return # if the server indicates that there are no more results, return if not request.has_more: return # try to get the next page and execute the request request = request.next_page() def next_page(self): new_parameters = deepcopy(self.request_parameters) params = new_parameters.setdefault('params', {}) if isinstance(params, dict): params['offset'] = self.offset params['limit'] = self.limit else: params = [p for p in params if p[0] != 'offset' and p[0] != 'limit'] params.extend([('offset', self.offset), ('limit', self.limit)]) new_parameters['params'] = params return PolyswarmRequest( self.api_instance, new_parameters, result_parser=self.result_parser, ).execute() class BaseResource(object): def __init__(self, content, *args, **kwargs): # hack to behave as in python 3, signature should be # __init__(self, content, *args, api=None, **kwargs) api = kwargs.pop('api', None) super(BaseResource, self).__init__(*args, **kwargs) self.api = api self._content = content @classmethod def parse_result(cls, api, content, **kwargs): logger.debug('Parsing resource %s', cls.__name__) return cls(content, api=api, **kwargs) class BaseJsonResource(BaseResource): RESOURCE_ENDPOINT = None RESOURCE_ID_KEY = 'id' def __init__(self, content, *args, **kwargs): super(BaseJsonResource, self).__init__(content, *args, **kwargs) self.json = content def __int__(self): id_ = getattr(self, 'id', None) if id_ is None: raise TypeError('Resource {} does not have an id and can not be cast to int'.format(type(self).__name__)) return int(id_) def _get(self, path, default=None, content=None): """ Helper for rendering attributes of child objects in the json that might be None. Returns the default value if some of the items in the path is not present. 
""" previous_attribute = 'resource_json' obj = content or self.json try: for attribute in path.split('.'): if obj is None: raise KeyError('{} is None, can not resolve full path'.format(previous_attribute)) if attribute.endswith(']'): # handling the list case, e.g.: "root.list_attr[2]" attribute, _, index = attribute.rpartition('[') index = int(index.rstrip(']')) obj = obj[attribute] if obj is None: raise KeyError('{} is None, but is it supposed to be a list'.format(attribute)) elif not isinstance(obj, list): raise ValueError('Can not access index for {}, it is not a list.'.format(attribute)) else: obj = obj[index] else: obj = obj[attribute] previous_attribute = attribute return obj except (KeyError, IndexError) as e: logger.debug('Returning default value: %s', e) return default @classmethod def parse_result_list(cls, api_instance, json_data, **kwargs): return [cls.parse_result(api_instance, entry, **kwargs) for entry in json_data] @classmethod def _endpoint(cls, api, **kwargs): if cls.RESOURCE_ENDPOINT is None: raise exceptions.InvalidValueException('RESOURCE_ENDPOINT is not configured for this resource.') return '{api.uri}{endpoint}'.format(api=api, endpoint=cls.RESOURCE_ENDPOINT, **kwargs) @classmethod def _list_endpoint(cls, api, **kwargs): return cls._endpoint(api, **kwargs) + '/list' @classmethod def _create_endpoint(cls, api, **kwargs): return cls._endpoint(api, **kwargs) @classmethod def _get_endpoint(cls, api, **kwargs): return cls._endpoint(api, **kwargs) @classmethod def _head_endpoint(cls, api, **kwargs): return cls._endpoint(api, **kwargs) @classmethod def _update_endpoint(cls, api, **kwargs): return cls._endpoint(api, **kwargs) @classmethod def _delete_endpoint(cls, api, **kwargs): return cls._endpoint(api, **kwargs) @classmethod def _params(cls, method, *param_keys, **kwargs): params = {} json_params = {} for k, v in kwargs.items(): if v is not None: # try to parse "*_id" stuff as integer if k.endswith('_id'): try: parsed_value = str(int(v)) except Exception: # fallback to string parsed_value = str(v) elif isinstance(v, bool): parsed_value = int(v) else: parsed_value = v if method == 'POST': json_params[k] = parsed_value elif method == 'GET' or k in param_keys: params[k] = parsed_value else: json_params[k] = parsed_value params = params if params else None json_params = json_params if json_params else None return params, json_params @classmethod def _list_params(cls, **kwargs): return cls._params('GET', cls.RESOURCE_ID_KEY, **kwargs) @classmethod def _create_params(cls, **kwargs): return cls._params('POST', cls.RESOURCE_ID_KEY, **kwargs) @classmethod def _get_params(cls, **kwargs): return cls._params('GET', cls.RESOURCE_ID_KEY, **kwargs) @classmethod def _head_params(cls, **kwargs): return cls._params('HEAD', cls.RESOURCE_ID_KEY, **kwargs) @classmethod def _update_params(cls, **kwargs): return cls._params('PUT', cls.RESOURCE_ID_KEY, **kwargs) @classmethod def _delete_params(cls, **kwargs): return cls._params('DELETE', cls.RESOURCE_ID_KEY, **kwargs) @classmethod def _list_headers(cls, api): return None @classmethod def _create_headers(cls, api): return None @classmethod def _get_headers(cls, api): return None @classmethod def _head_headers(cls, api): return None @classmethod def _update_headers(cls, api): return None @classmethod def _delete_headers(cls, api): return None @classmethod def _build_request(cls, api, method, url, headers, params, json_params): request_params = {'method': method, 'url': url} if params: request_params['params'] = params if json_params: 
request_params['json'] = json_params if headers: request_params['headers'] = headers return PolyswarmRequest(api, request_params, result_parser=cls) @classmethod def create(cls, api, **kwargs): return cls._build_request(api, 'POST', cls._create_endpoint(api, **kwargs), cls._create_headers(api), *cls._create_params(**kwargs)).execute() @classmethod def get(cls, api, **kwargs): return cls._build_request(api, 'GET', cls._get_endpoint(api, **kwargs), cls._get_headers(api), *cls._get_params(**kwargs)).execute() @classmethod def head(cls, api, **kwargs): return cls._build_request(api, 'HEAD', cls._head_endpoint(api, **kwargs), cls._head_headers(api), *cls._head_params(**kwargs)).execute() @classmethod def update(cls, api, **kwargs): return cls._build_request(api, 'PUT', cls._update_endpoint(api, **kwargs), cls._update_headers(api), *cls._update_params(**kwargs)).execute() @classmethod def delete(cls, api, **kwargs): return cls._build_request(api, 'DELETE', cls._delete_endpoint(api, **kwargs), cls._delete_headers(api), *cls._delete_params(**kwargs)).execute() @classmethod def list(cls, api, **kwargs): return cls._build_request(api, 'GET', cls._list_endpoint(api, **kwargs), cls._list_headers(api), *cls._list_params(**kwargs)).execute() def is_hex(value): try: _ = int(value, 16) return True except ValueError: return False def is_valid_sha1(value): if len(value) != 40: return False return is_hex(value) def is_valid_md5(value): if len(value) != 32: return False return is_hex(value) def is_valid_sha256(value): if len(value) != 64: return False return is_hex(value) class Hashable(object): SUPPORTED_HASH_TYPES = { 'sha1': is_valid_sha1, 'sha256': is_valid_sha256, 'md5': is_valid_md5, } def __init__(self, *args, **kwargs): # hack to behave as in python 3, signature should be # __init__(self, content, *args, hash_value=None, hash_type=None, validate_hash=False, **kwargs) hash_value = kwargs.pop('hash_value', None) hash_type = kwargs.pop('hash_type', None) validate_hash = kwargs.pop('validate_hash', False) super(Hashable, self).__init__(*args, **kwargs) self._hash = hash_value.strip() if hash_value is not None else None if hash_type: if hash_type not in self.SUPPORTED_HASH_TYPES: raise exceptions.InvalidValueException('Hash type provided is not supported.') self._hash_type = hash_type else: self._hash_type = self.resolve_hash_type() if self._hash_type is None: raise exceptions.InvalidValueException('Invalid hash provided: {}'.format(self._hash)) if validate_hash: self.validate() @property def hash(self): return self._hash @hash.setter def hash(self, value): self._hash = value.strip() if value is not None else None @property def hash_type(self): return self._hash_type def validate(self): hash_type = self.resolve_hash_type() if self.hash_type != hash_type: raise exceptions.InvalidValueException('Detected hash type {}, got type {} for hash {}' .format(hash_type, self.hash_type, self.hash)) def resolve_hash_type(self): for hash_type, validator in self.SUPPORTED_HASH_TYPES.items(): if validator(self._hash): return hash_type return None @property def raw(self): return unhexlify(self.hash) def __eq__(self, other): return self.hash == other def parse_isoformat(date_string): """Parses the current date format version """ if isinstance(date_string, (dt.date, dt.datetime)): return date_string elif date_string: return parser.isoparse(date_string) else: return None
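As a small illustrative sketch of the dotted-path helper defined above, the snippet below builds a BaseJsonResource directly from a plain dict (no API instance) and resolves nested values with `_get`; the JSON keys are made up for the example.

resource = BaseJsonResource({'artifact': {'metadata': [{'tool': 'pefile'}, {'tool': 'strings'}]}})
print(resource._get('artifact.metadata[1].tool'))           # -> 'strings'
print(resource._get('artifact.sha256', default='unknown'))  # -> 'unknown' (missing key falls back to the default)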
#!/bin/sh set -e wget -c http://central.maven.org/maven2/io/swagger/swagger-codegen-cli/2.3.1/swagger-codegen-cli-2.3.1.jar -O codegen.jar rm -rf src/app/swagger java -jar codegen.jar generate \ -l typescript-angular \ -o src/app/swagger \ -i swagger.yaml
import { DefineEvent } from "../utils/decorators/DefineEvent"; import { BaseEvent } from "../structures/BaseEvent"; @DefineEvent("ready") export class ReadyEvent extends BaseEvent { public execute(): void { this.client.logger.info( `${this.client.shard ? `[Shard #${this.client.shard.ids[0]}]` : ""} I'm ready to serve ${this.client.guilds.cache.size} guilds ` + `with ${this.client.channels.cache.filter(c => c.type === "GUILD_TEXT").size} text channels and ` + `${this.client.channels.cache.filter(c => c.type === "GUILD_VOICE").size} voice channels` ); this.doPresence(); } private doPresence(): void { this.client.util.updatePresence() .then(() => setInterval(() => this.client.util.updatePresence(), 30 * 1000)) .catch(e => { if (e.message === "Shards are still being spawned.") return this.doPresence(); this.client.logger.error("DO_PRESENCE_ERR:", e); }); return undefined; } }
import UIKit
import Photos

class ViewController: UIViewController {
    private let imagePicker = UIImagePickerController()
    private var timelineView = UIScrollView()

    override func viewDidLoad() {
        super.viewDidLoad()
        setupImagePicker()
        setupTimelineView()
    }

    private func setupImagePicker() {
        imagePicker.sourceType = .photoLibrary
        imagePicker.allowsEditing = false
        imagePicker.delegate = self
    }

    private func setupTimelineView() {
        timelineView.translatesAutoresizingMaskIntoConstraints = false
        view.addSubview(timelineView)
        timelineView.topAnchor.constraint(equalTo: view.topAnchor).isActive = true
        timelineView.leadingAnchor.constraint(equalTo: view.leadingAnchor).isActive = true
        timelineView.trailingAnchor.constraint(equalTo: view.trailingAnchor).isActive = true
        timelineView.bottomAnchor.constraint(equalTo: view.bottomAnchor).isActive = true
        timelineView.backgroundColor = UIColor.white
    }

    @IBAction func addPhoto(_ sender: UIButton) {
        present(imagePicker, animated: true, completion: nil)
    }
}

extension ViewController: UIImagePickerControllerDelegate, UINavigationControllerDelegate {
    func imagePickerController(_ picker: UIImagePickerController, didFinishPickingMediaWithInfo info: [UIImagePickerController.InfoKey : Any]) {
        guard let photo = info[.originalImage] as? UIImage else { return }

        // Capture the previously added image view *before* adding the new one,
        // otherwise `timelineView.subviews.last` would be the view we just added
        // and the new image view would be constrained to itself.
        let previousImageView = timelineView.subviews.last

        let imageView = UIImageView(image: photo)
        imageView.contentMode = .scaleAspectFit
        timelineView.addSubview(imageView)
        imageView.translatesAutoresizingMaskIntoConstraints = false

        let leadingAnchor: NSLayoutAnchor<NSLayoutXAxisAnchor>
        if let previousImageView = previousImageView {
            leadingAnchor = previousImageView.trailingAnchor
        } else {
            leadingAnchor = timelineView.leadingAnchor
        }

        NSLayoutConstraint.activate([
            imageView.topAnchor.constraint(equalTo: timelineView.topAnchor),
            leadingAnchor.constraint(equalTo: imageView.leadingAnchor),
            imageView.bottomAnchor.constraint(equalTo: timelineView.bottomAnchor),
            imageView.widthAnchor.constraint(equalTo: timelineView.widthAnchor, multiplier: 1.0/3.0)
        ])

        UIImageWriteToSavedPhotosAlbum(photo, nil, nil, nil)
        dismiss(animated: true, completion: nil)
    }
}
package org.mnode.jot4j.dynamodb.command;

import com.amazonaws.services.dynamodbv2.datamodeling.DynamoDBMapper;
import net.fortuna.ical4j.vcard.Property;
import net.fortuna.ical4j.vcard.VCard;
import org.mnode.jot4j.dynamodb.mapper.Card;
import org.mnode.jot4j.dynamodb.mapper.CardOrg;

import java.util.ArrayList;
import java.util.List;

public class CreateCard extends AbstractCommand<VCard> {

    public CreateCard(DynamoDBMapper mapper, String ownerId, String groupId) {
        super(mapper, ownerId, groupId);
    }

    @Override
    public void execute(VCard input) {
        List<Object> model = new ArrayList<>();
        Card card = new Card();
        card.setData(input);
        model.add(card);

        input.getProperties(Property.Id.ORG).forEach(org -> {
            CardOrg cardOrg = new CardOrg("CARD");
            cardOrg.setUid(input.getProperty(Property.Id.UID).getValue());
            cardOrg.setName(org.getValue());
            // Add the organisation link to the batch; otherwise it is built but never persisted.
            model.add(cardOrg);
        });

        mapper.batchSave(model.toArray());
    }

    @Override
    public void executeBatch(VCard... input) {
    }
}
def binary_search(arr, target):
    lower_bound = 0
    upper_bound = len(arr) - 1

    while lower_bound <= upper_bound:
        mid = (lower_bound + upper_bound) // 2
        if arr[mid] < target:
            lower_bound = mid + 1
        elif arr[mid] > target:
            upper_bound = mid - 1
        else:
            return mid

    return -1  # Target not found
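A quick usage example for the function above, on a sorted list:

values = [2, 5, 8, 12, 16, 23, 38, 56, 72, 91]
print(binary_search(values, 23))  # -> 5 (index of 23)
print(binary_search(values, 7))   # -> -1 (7 is not in the list)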
#!/bin/bash
# Documentation: https://kubernetes.github.io/ingress-nginx/deploy/#bare-metal
helm repo add ingress-nginx https://kubernetes.github.io/ingress-nginx
helm repo update
helm install ingress-nginx ingress-nginx/ingress-nginx
#!/bin/bash SEED=$1 N=$2 OUTPUT_DIR=$3 EXTRA_ARGS=$4 for i in $(seq 1 "$N") ; do # shellcheck disable=SC2086 python3 -m simulation.utils.machine_learning.data.extract_simulated_images \ --seed "$SEED$i" \ --output_dir "$OUTPUT_DIR" \ --image_topic /camera/image_raw $EXTRA_ARGS; done # Remove duplicates fdupes -dNq "$OUTPUT_DIR"/
<filename>chest/gui/swing/src/main/java/net/community/chest/swing/component/label/JLabelReflectiveProxy.java<gh_stars>1-10 package net.community.chest.swing.component.label; import java.lang.reflect.Method; import javax.swing.JLabel; import net.community.chest.awt.dom.converter.KeyCodeValueInstantiator; import net.community.chest.convert.ValueStringInstantiator; import net.community.chest.swing.HAlignmentValueStringInstantiator; import net.community.chest.swing.VAlignmentValueStringInstantiator; import net.community.chest.swing.component.JComponentReflectiveProxy; /** * <P>Copyright 2008 as per GPLv2</P> * * @param <L> The reflected {@link JLabel} type * @author <NAME>. * @since Mar 24, 2008 11:17:19 AM */ public class JLabelReflectiveProxy<L extends JLabel> extends JComponentReflectiveProxy<L> { public JLabelReflectiveProxy (Class<L> objClass) throws IllegalArgumentException { this(objClass, false); } protected JLabelReflectiveProxy (Class<L> objClass, boolean registerAsDefault) throws IllegalArgumentException, IllegalStateException { super(objClass, registerAsDefault); } /** * Default element name used for labels(s) */ public static final String LABEL_ELEMNAME="label"; // some specialized values handling public static final String HALIGN_ATTR="horizontalAlignment", HTEXTPOS_ATTR="horizontalTextPosition", VALIGN_ATTR="verticalAlignment", VTEXTPOS_ATTR="verticalTextPosition", DISPMNEMONIC_ATTR="displayedMnemonic", ICON_ATTR="icon", DISABLED_ICON_ATTR="disabledIcon"; /* * @see net.community.chest.awt.dom.UIReflectiveAttributesProxy#resolveAttributeInstantiator(java.lang.String, java.lang.Class) */ @SuppressWarnings("unchecked") @Override protected <C> ValueStringInstantiator<C> resolveAttributeInstantiator (String name, Class<C> type) throws Exception { if (HALIGN_ATTR.equalsIgnoreCase(name) || HTEXTPOS_ATTR.equalsIgnoreCase(name)) return (ValueStringInstantiator<C>) HAlignmentValueStringInstantiator.DEFAULT; else if (VALIGN_ATTR.equalsIgnoreCase(name) || VTEXTPOS_ATTR.equalsIgnoreCase(name)) return (ValueStringInstantiator<C>) VAlignmentValueStringInstantiator.DEFAULT; else if (DISPMNEMONIC_ATTR.equalsIgnoreCase(name)) return (ValueStringInstantiator<C>) KeyCodeValueInstantiator.DEFAULT; return super.resolveAttributeInstantiator(name, type); } /* * @see net.community.chest.dom.transform.ReflectiveAttributesProxy#updateObjectAttribute(java.lang.Object, java.lang.String, java.lang.String, java.lang.reflect.Method) */ @Override protected L updateObjectAttribute (L src, String name, String value, Method setter) throws Exception { if (ICON_ATTR.equalsIgnoreCase(name) || DISABLED_ICON_ATTR.equalsIgnoreCase(name)) return updateObjectResourceAttribute(src, name, value, setter); return super.updateObjectAttribute(src, name, value, setter); } public static final JLabelReflectiveProxy<JLabel> LABEL= new JLabelReflectiveProxy<JLabel>(JLabel.class, true); }
public class GraphicalObject { private float x; private float y; private float width; private float height; private Text text; public boolean hasText() { return text != null; } public void setPosition(float x, float y) { this.x = x; this.y = y; if (hasText()) { text.setPosition(this.x + width * 3 / 24f, this.y + height * 39 / 48f); } } }
<gh_stars>0 def solve_9_keypad(steps): code = [] position = 5 for step in steps: for c in step: if c.upper() == "L": if ((position - 1) % 3) != 0: position -= 1 elif c.upper() == "R": if(position % 3) != 0: position += 1 elif c.upper() == "U": if (position - 3) > 0: position -= 3 elif c.upper() == "D": if (position + 3) < 10: position += 3 code.append(position) return code def solve_fancy_keypad(steps): keypad = [["*", "*", "1", "*", "*"], ["*", "2", "3", "4", "*"], ["5", "6", "7", "8", "9"], ["*", "A", "B", "C", "*"], ["*", "*", "D", "*", "*"]] code = [] position = [2,0] for step in steps: for c in step: if c.upper() == "L": if position[1] > 0 and keypad[position[0]][position[1] - 1] != "*": position[1] -= 1 if c.upper() == "R": if position[1] < 4 and keypad[position[0]][position[1] + 1] != "*": position[1] += 1 if c.upper() == "U": if position[0] > 0 and keypad[position[0] - 1][position[1]] != "*": position[0] -= 1 if c.upper() == "D": if position[0] < 4 and keypad[position[0] + 1][position[1]] != "*": position[0] += 1 code.append(keypad[position[0]][position[1]]) return code if __name__ == "__main__": with open("day_02_input.txt") as f: input = f.readlines() print "Part 1 answer: " + str(solve_9_keypad(input)) print "Part 2 answer: " + str(solve_fancy_keypad(input))
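For reference, running the two solvers above on the well-known example instructions from the puzzle statement (instead of the input file) produces the expected codes:

example = ["ULL", "RRDDD", "LURDL", "UUUUD"]
print solve_9_keypad(example)      # -> [1, 9, 8, 5]
print solve_fancy_keypad(example)  # -> ['5', 'D', 'B', '3']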
install_dir=$1

if [ -d "$install_dir" ] ; then
    echo "Cleaning ${install_dir}..."
    # Delete files that we don't want
    # rm -rf "$install_dir/bin"
    rm -rf "$install_dir/share"
    rm -rf "$install_dir/lib/cmake"
    rm -rf "$install_dir/lib/pkgconfig"
fi
def install_recovery_hd(existing_partitions: list) -> list: existing_partitions.append('Recovery HD') return existing_partitions
#!/bin/bash

# ------------------------------------------------------------------------------------
# Tutorial: How if-else works
# ------------------------------------------------------------------------------------
# You can write a condition the same way as in real life.
# For example, if the person in front of me is Sarah, I will tell you "Hi Sarah !"
# else, I will tell you "Hi anonymous !"

# In bash, it will be : if my variable person equals Sarah, I will tell you "Hi Sarah !"
# else, I will tell you "Hi anonymous !"

person="Sarah"

# brackets are needed for the condition
if [ "$person" = "Sarah" ]
then
    echo "Hi Sarah !"
else
    echo "Hi anonymous !"
fi

# if you want to say something when the condition is true but say nothing when it's not,
# you don't put the else part, just like this :
if [ "$person" = "Sarah" ]
then
    echo "Hi Sarah !"
fi

# ------------------------------------------------------------------------------------
# Challenge: Say hello !
# ------------------------------------------------------------------------------------
# Write a condition that says Hi to you when the variable holds your first name,
# and says "No, try again" otherwise.
from django.contrib import admin from .models import Toy class ToyAdmin(admin.ModelAdmin): list_display = ['title', 'location'] admin.site.register(Toy, ToyAdmin)
<filename>src/genie/libs/parser/nxos/tests/test_show_arp.py<gh_stars>0 # Python import unittest from unittest.mock import Mock # ATS from ats.topology import Device # Metaparset from genie.metaparser.util.exceptions import SchemaEmptyParserError, \ SchemaMissingKeyError # Parser from genie.libs.parser.nxos.show_arp import ShowIpArpDetailVrfAll, \ ShowIpArpSummaryVrfAll, \ ShowIpArpstatisticsVrfAll #========================================================= # Unit test for show ip arp detail vrf all #========================================================= class test_show_ip_arp_detail_vrf_all(unittest.TestCase): device = Device(name='aDevice') empty_output = {'execute.return_value': ''} golden_parsed_output = { 'interfaces': { 'Ethernet1/1': { 'ipv4': { 'neighbors': { '10.1.3.5': { 'age': '-', 'ip': '10.1.3.5', 'link_layer_address': 'aaaa.bbbb.cccc', 'origin': 'static', 'physical_interface': 'Ethernet1/1'} } } }, 'Ethernet1/1.1': { 'ipv4': { 'neighbors': { '192.168.4.2': { 'age': '00:01:53', 'ip': '192.168.4.2', 'link_layer_address': '000c.292a.1eaf', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/1.1'} } } }, 'Ethernet1/1.2': { 'ipv4': { 'neighbors': { '192.168.154.2': { 'age': '00:00:47', 'ip': '192.168.154.2', 'link_layer_address': '000c.292a.1eaf', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/1.2'} } } }, 'Ethernet1/1.4': { 'ipv4': { 'neighbors': { '192.168.106.2': { 'age': '00:08:42', 'ip': '192.168.106.2', 'link_layer_address': '000c.292a.1eaf', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/1.4'} } } }, 'Ethernet1/2.1': { 'ipv4': { 'neighbors': { '192.168.154.2': { 'age': '00:18:24', 'ip': '192.168.154.2', 'link_layer_address': '000c.2904.5840', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/2.1'} } } }, 'Ethernet1/2.2': { 'ipv4': { 'neighbors': { '192.168.51.2': { 'age': '00:05:21', 'ip': '192.168.51.2', 'link_layer_address': '000c.2904.5840', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/2.2'} } } }, 'Ethernet1/2.4': { 'ipv4': { 'neighbors': { '192.168.9.2': { 'age': '00:10:51', 'ip': '192.168.9.2', 'link_layer_address': '000c.2904.5840', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/2.4'} } } }, 'Ethernet1/4.100': { 'ipv4': { 'neighbors': { '10.51.1.101': { 'age': '00:01:28', 'ip': '10.51.1.101', 'link_layer_address': '0000.71c7.6e61', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/4.100'} } } }, 'Ethernet1/4.101': { 'ipv4': { 'neighbors': { '10.154.1.101': { 'age': '00:01:28', 'ip': '10.154.1.101', 'link_layer_address': '0000.71c7.75c1', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/4.101'} } } }, 'Ethernet1/4.200': { 'ipv4': { 'neighbors': { '10.76.1.101': { 'age': '00:01:28', 'ip': '10.76.1.101', 'link_layer_address': '0000.0068.ce6f', 'origin': 'dynamic', 'physical_interface': 'Ethernet1/4.200'} } } }, 'mgmt0': { 'ipv4': { 'neighbors': { '10.1.7.1': { 'age': '00:17:15', 'ip': '10.1.7.1', 'link_layer_address': '0012.7f57.ac80', 'origin': 'dynamic', 'physical_interface': 'mgmt0'}, '10.1.7.250': { 'age': '00:14:24', 'ip': '10.1.7.250', 'link_layer_address': '0050.5682.7915', 'origin': 'dynamic', 'physical_interface': 'mgmt0'}, '10.1.7.253': { 'age': '00:10:22', 'ip': '10.1.7.253', 'link_layer_address': '0050.56a4.a9fc', 'origin': 'dynamic', 'physical_interface': 'mgmt0'} } } } } } golden_output = {'execute.return_value': ''' N95_1# show ip arp detail vrf all Flags: * - Adjacencies learnt on non-active FHRP router + - Adjacencies synced via CFSoE # - Adjacencies Throttled for Glean CP - Added via L2RIB, Control plane 
Adjacencies PS - Added via L2RIB, Peer Sync RO - Re-Originated Peer Sync Entry IP ARP Table for all contexts Total number of entries: 12 Address Age MAC Address Interface Physical Interface Flags 10.1.7.1 00:17:15 0012.7f57.ac80 mgmt0 mgmt0 10.1.7.250 00:14:24 0050.5682.7915 mgmt0 mgmt0 10.1.7.253 00:10:22 0050.56a4.a9fc mgmt0 mgmt0 10.1.3.5 - aaaa.bbbb.cccc Ethernet1/1 Ethernet1/1 192.168.4.2 00:01:53 000c.292a.1eaf Ethernet1/1.1 Ethernet1/1.1 192.168.154.2 00:00:47 000c.292a.1eaf Ethernet1/1.2 Ethernet1/1.2 192.168.106.2 00:08:42 000c.292a.1eaf Ethernet1/1.4 Ethernet1/1.4 192.168.154.2 00:18:24 000c.2904.5840 Ethernet1/2.1 Ethernet1/2.1 192.168.51.2 00:05:21 000c.2904.5840 Ethernet1/2.2 Ethernet1/2.2 192.168.9.2 00:10:51 000c.2904.5840 Ethernet1/2.4 Ethernet1/2.4 10.51.1.101 00:01:28 0000.71c7.6e61 Ethernet1/4.100 Ethernet1/4.100 10.154.1.101 00:01:28 0000.71c7.75c1 Ethernet1/4.101 Ethernet1/4.101 10.76.1.101 00:01:28 0000.0068.ce6f Ethernet1/4.200 Ethernet1/4.200 ''' } def test_empty(self): self.device = Mock(**self.empty_output) obj = ShowIpArpDetailVrfAll(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) obj = ShowIpArpDetailVrfAll(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) #========================================================= # Unit test for show ip arp summary vrf all #========================================================= class test_show_ip_arp_summary_vrf_all(unittest.TestCase): device = Device(name='aDevice') empty_output = {'execute.return_value': ''} golden_parsed_output = { 'incomplete': 0, 'throttled': 0, 'resolved': 12, 'total': 12, 'unknown': 0} golden_output = {'execute.return_value': ''' N95_1# show ip arp summary IP ARP Table - Adjacency Summary Resolved : 12 Incomplete : 0 (Throttled : 0) Unknown : 0 Total : 12 ''' } golden_parsed_output_1 = { 'incomplete': 0, 'throttled': 0, 'resolved': 12, 'total': 12, 'unknown': 0} golden_output_1 = {'execute.return_value': ''' N95_1# show ip arp summary vrf all IP ARP Table - Adjacency Summary Resolved : 12 Incomplete : 0 (Throttled : 0) Unknown : 0 Total : 12 ''' } def test_empty(self): self.device = Mock(**self.empty_output) obj = ShowIpArpSummaryVrfAll(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) obj = ShowIpArpSummaryVrfAll(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowIpArpSummaryVrfAll(device=self.device) parsed_output = obj.parse(vrf='all') self.assertEqual(parsed_output, self.golden_parsed_output_1) #========================================================= # Unit test for show ip arp statistics vrf all #========================================================= class test_show_ip_arp_statistics_vrf_all(unittest.TestCase): device = Device(name='aDevice') empty_output = {'execute.return_value': ''} golden_parsed_output = { 'statistics': { 'adjacency': { 'adjacency_adds': 43, 'adjacency_deletes': 12, 'adjacency_timeouts': 12, 'failed_due_to_limits': 0}, 'received': { 'anycast_proxy_arp': 0, 'dropped': 28218, 'dropped_server_port': 0, 'drops_details': { 'appeared_on_a_wrong_interface': 0, 'arp_refresh_requests_received_from_clients': 0, 
'context_not_created': 0, 'directed_broadcast_source': 0, 'dropping_due_to_tunneling_failures': 0, 'glean_requests_recv_count': 71, 'grat_arp_received_on_proxy': 0, 'incorrect_length': 0, 'invalid_context': 0, 'invalid_destination_ip_address': 0, 'invalid_hardwaretype': 0, 'invalid_layer2_address_length': 0, 'invalid_layer3_address_length': 0, 'invalid_protocol_packet': 0, 'invalid_source_ip_address': 28, 'invalid_source_mac_address': 0, 'l2_packet_on_untrusted_l2_port': 0, 'l2fm_query_failed_for_a_l2address': 0, 'no_mem_to_create_per_intf_structure': 0, 'non_active_fhrp_dest_ip': 0, 'non_local_destination_ip_address': 20421, 'number_of_signals_received_from_l2rib': 0, 'packet_with_vip_on_standby_fhrp': 0, 'received_before_arp_initialization': 0, 'requests_came_for_exising_entries': 15, 'requests_came_on_a_l2_interface': 0, 'source_address_mismatch_with_subnet': 0, 'source_mac_address_is_our_own': 0}, 'enhanced_proxy_arp': 0, 'fastpath': 0, 'l2_port_track_proxy_arp': 0, 'l2_replies': 0, 'l2_requests': 0, 'local_proxy_arp': 0, 'proxy_arp': 0, 'replies': 6582, 'requests': 22632, 'snooped': 0, 'total': 0, 'tunneled': 0}, 'sent': { 'dropped': 0, 'drops_details': { 'adjacency_couldnt_be_added': 0, 'arp_refresh_skipped_over_core_and_flooded': 0, 'client_enqueue_failed': 0, 'context_not_created': 0, 'dest_not_reachable_for_proxy_arp': 0, 'dest_unreachable_for_enhanced_proxy': 0, 'destnination_is_our_own_ip': 26, 'destnination_on_l2_port_tracked': 0, 'invalid_context': 0, 'invalid_dest_ip': 0, 'invalid_ifindex': 0, 'invalid_local_proxy_arp': 0, 'invalid_proxy_arp': 0, 'invalid_src_ip': 0, 'mbuf_operation_failed': 0, 'null_source_ip': 0, 'null_source_mac': 0, 'unattached_ip': 0, 'vip_is_not_active': 0}, 'gratuitous': 58, 'l2_replies': 0, 'l2_requests': 0, 'replies': 998, 'requests': 2102, 'total': 3158, 'tunneled': 0} } } golden_output = {'execute.return_value': ''' N95_1# show ip arp statistics vrf all ARP State Machine Stats ARP packet statistics for all contexts Sent: Total 3158, Requests 2102, Replies 998, Requests on L2 0, Replies on L2 0, Gratuitous 58, Tunneled 0, Dropped 0 Send packet drops details: MBUF operation failed : 0 Context not yet created : 0 Invalid context : 0 Invalid ifindex : 0 Invalid SRC IP : 0 Invalid DEST IP : 0 Destination is our own IP : 26 Unattached IP : 0 Adjacency Couldn't be added : 0 Null Source IP : 0 Null Source MAC : 0 Client Enqueue Failed : 0 Dest. not reachable for proxy arp : 0 Dest. unreachable for enhanced proxy : 0 Dest. on L2 port being tracked : 0 Invalid Local proxy arp : 0 Invalid proxy arp : 0 VIP is not active : 0 ARP refresh skipped over core and flooded on server : 0 Received: Total 0, Requests 22632, Replies 6582, Requests on L2 0, Replies on L2 0 Proxy arp 0, Local-Proxy arp 0, Enhanced Proxy arp 0, Anycast proxy Proxy arp 0, L2 Port-track Proxy arp 0, Tunneled 0, Fastpath 0, Snooped 0, Dropped 28218 on Server Port 0 Received packet drops details: Appeared on a wrong interface : 0 Incorrect length : 0 Invalid protocol packet : 0 Invalid Hardware type : 0 Invalid context : 0 Context not yet created : 0 Invalid layer 2 address length : 0 Invalid layer 3 address length : 0 Invalid source IP address : 28 Source IP address is our own : 0 No mem to create per intf structure : 0 Source address mismatch with subnet : 0 Directed broadcast source : 0 Invalid destination IP address : 0 Non-local destination IP address : 20421 Non-active FHRP dest IP address. 
Learn and drop : 0 Invalid source MAC address : 0 Source MAC address is our own : 0 Received before arp initialization : 0 L2 packet on proxy-arp-enabled interface : 0 L2 packet on untrusted L2 port : 0 Packet with VIP on standby FHRP : 0 Grat arp received on proxy-arp-enabled interface : 0 Requests came for exising entries : 15 Requests came on a L2 interface : 0 L2FM query failed for a L2 Address : 0 Dropping due to tunneling failures : 0 Glean requests recv count : 71 ARP refresh requests received from clients: 0 Number of Signals received from L2rib : 0 ARP adjacency statistics Adds 43, Deletes 12, Timeouts 12 Failed due to limits: 0 ''' } golden_parsed_output_1 = {'statistics': {'adjacency': {'adjacency_adds': 5, 'adjacency_deletes': 0, 'adjacency_timeouts': 0}, 'received': {'anycast_proxy_arp': 0, 'dropped': 7, 'dropped_server_port': 0, 'drops_details': {'appeared_on_a_wrong_interface': 0, 'context_not_created': 0, 'directed_broadcast_source': 0, 'dropping_due_to_tunneling_failures': 0, 'grat_arp_received_on_proxy': 0, 'incorrect_length': 0, 'invalid_context': 0, 'invalid_destination_ip_address': 0, 'invalid_hardwaretype': 0, 'invalid_layer2_address_length': 0, 'invalid_layer3_address_length': 0, 'invalid_protocol_packet': 0, 'invalid_source_ip_address': 0, 'invalid_source_mac_address': 0, 'l2_packet_on_untrusted_l2_port': 0, 'l2fm_query_failed_for_a_l2address': 0, 'no_mem_to_create_per_intf_structure': 0, 'non_active_fhrp_dest_ip': 0, 'non_local_destination_ip_address': 7, 'packet_with_vip_on_standby_fhrp': 0, 'received_before_arp_initialization': 0, 'requests_came_for_exising_entries': 0, 'requests_came_on_a_l2_interface': 0, 'source_address_mismatch_with_subnet': 0, 'source_mac_address_is_our_own': 0}, 'enhanced_proxy_arp': 0, 'fastpath': 0, 'l2_port_track_proxy_arp': 0, 'l2_replies': 0, 'l2_requests': 0, 'local_proxy_arp': 0, 'proxy_arp': 0, 'replies': 55, 'requests': 5, 'snooped': 0, 'total': 67, 'tunneled': 0}, 'sent': {'dropped': 0, 'drops_details': {'adjacency_couldnt_be_added': 0, 'client_enqueue_failed': 0, 'context_not_created': 0, 'dest_not_reachable_for_proxy_arp': 0, 'dest_unreachable_for_enhanced_proxy': 0, 'destnination_is_our_own_ip': 0, 'destnination_on_l2_port_tracked': 0, 'invalid_context': 0, 'invalid_dest_ip': 0, 'invalid_ifindex': 0, 'invalid_local_proxy_arp': 0, 'invalid_proxy_arp': 0, 'invalid_src_ip': 0, 'mbuf_operation_failed': 0, 'null_source_ip': 0, 'null_source_mac': 0, 'unattached_ip': 0, 'vip_is_not_active': 0}, 'gratuitous': 2, 'l2_replies': 0, 'l2_requests': 0, 'replies': 5, 'requests': 57, 'total': 64, 'tunneled': 0}}} golden_output_1 = {'execute.return_value': ''' nx-osv9000-1# show ip arp statistics ARP packet statistics for context default Sent: Total 64, Requests 57, Replies 5, Requests on L2 0, Replies on L2 0, Gratuitous 2, Tunneled 0, Dropped 0 Send packet drops details: MBUF operation failed : 0 Context not yet created : 0 Invalid context : 0 Invalid ifindex : 0 Invalid SRC IP : 0 Invalid DEST IP : 0 Destination is our own IP : 0 Unattached IP : 0 Adjacency Couldn't be added : 0 Null Source IP : 0 Null Source MAC : 0 Client Enqueue Failed : 0 Dest. not reachable for proxy arp : 0 Dest. unreachable for enhanced proxy: 0 Dest. 
on L2 port being tracked : 0 Invalid Local proxy arp : 0 Invalid proxy arp : 0 VIP is not active : 0 Received: Total 67, Requests 5, Replies 55, Requests on L2 0, Replies on L2 0 Proxy arp 0, Local-Proxy arp 0, Enhanced Proxy arp 0, Anycast proxy Proxy arp 0, L2 Port-track Proxy arp 0, Tunneled 0, Fastpath 0, Snooped 0, Dropped 7, on Server Port 0 Received packet drops details: Appeared on a wrong interface : 0 Incorrect length : 0 Invalid protocol packet : 0 Invalid Hardware type : 0 Invalid context : 0 Context not yet created : 0 Invalid layer 2 address length : 0 Invalid layer 3 address length : 0 Invalid source IP address : 0 Source IP address is our own : 0 No mem to create per intf structure : 0 Source address mismatch with subnet : 0 Directed broadcast source : 0 Invalid destination IP address : 0 Non-local destination IP address : 7 Non-active FHRP dest IP address. Learn and drop : 0 Invalid source MAC address : 0 Source MAC address is our own : 0 Received before arp initialization : 0 L2 packet on proxy-arp-enabled interface : 0 L2 packet on untrusted L2 port : 0 Packet with VIP on standby FHRP : 0 Grat arp received on proxy-arp-enabled interface : 0 Requests came for exising entries : 0 Requests came on a l2 interface : 0 L2FM query failed for a L2 Address : 0 Dropping due to tunneling failures : 0 ARP adjacency statistics Adds 5, Deletes 0, Timeouts 0 nx-osv9000-1# ''' } def test_empty(self): self.device = Mock(**self.empty_output) obj = ShowIpArpstatisticsVrfAll(device=self.device) with self.assertRaises(SchemaEmptyParserError): parsed_output = obj.parse() def test_golden(self): self.maxDiff = None self.device = Mock(**self.golden_output) obj = ShowIpArpstatisticsVrfAll(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output) def test_golden_1(self): self.maxDiff = None self.device = Mock(**self.golden_output_1) obj = ShowIpArpstatisticsVrfAll(device=self.device) parsed_output = obj.parse() self.assertEqual(parsed_output, self.golden_parsed_output_1) if __name__ == '__main__': unittest.main()
awk -F"|" '{print $7}' log_terminal_CDG.csv > ssh_infected_keys.txt
#!/bin/bash DISABLE_FILE=/home/pi/scripts/github/media_frame/data/disable if [ ! -f $DISABLE_FILE ]; then touch /home/pi/scripts/github/media_frame/data/music/is_active ### mosquitto_pub -h localhost -t "frame/fade_time" -m "3" mosquitto_pub -h localhost -t "frame/time_delay" -m "10800" mosquitto_pub -h localhost -t "frame/music_on" -m "" mosquitto_pub -h localhost -t "frame/subdirectory" -m "music/artwork" # mosquitto_pub -h localhost -t "frame/next" -m "" ### # old_pid=$(cat /home/pi/scripts/github/media_frame/data/pid); # new_pid=$(/home/pi/scripts/github/media_frame/scripts/start_music.sh); # echo $new_pid > /home/pi/scripts/github/media_frame/data/pid; # sleep 15; # if [ -n "$(ps -p $old_pid -o pid=)" ]; then # kill $old_pid; # fi; fi;
<reponame>orz365/http-mpcloud
const Collection = require('./Collection')
const Storage = require('./other/Storage')
const Collections = require('./Collections')
const logger = require('./utils/logger')
const {getToken, getNewToken} = require('./utils/token')
const HttpService = require('./utils/HttpService')
const Img = require('./other/Img')
const Wxacode = require('./other/Wxacode')
const CustomerServiceMessage = require('./other/CustomerServiceMessage')
const Analysis = require('./other/Analysis')
const Security = require('./other/Security')
const Base = require('./common/Base')
const Database = require('./Database')

/**
 * HTTP request class for WeChat Mini Program cloud development
 */
class HttpMpCloud extends Base {
    /**
     * Get the current token
     * @deprecated
     */
    currentToken() {
        return getToken(this.params)
    }

    /**
     * Fetch a fresh token and return {access_token, expires_in}.
     * If you do not want to rely on the built-in cache to manage the access_token,
     * you can take the new token and manage it yourself.
     * @return {Promise<{access_token,expires_in}>}
     */
    async getNewToken() {
        let {access_token, expires_in} = await getNewToken(this.params)
        return {access_token, expires_in}
    }

    /**
     * Manage the access_token yourself: set a new access_token
     * @param access_token
     */
    setAccessToken(access_token){
        this.params.access_token = access_token
    }

    /**
     * Select a database environment
     * @param env
     * @return {Database}
     */
    database(env) {
        if (env) {
            this.env = env
        }
        return new Database(this.params)
    }

    /**
     * Operate on a collection; use hcloud.database().collection
     * @param tableName
     * @return {Collection}
     */
    collection(tableName) {
        this.params['tableName'] = tableName
        return new Collection(this.params)
    }

    /**
     * Operate on collections
     * @return {Collections}
     */
    collections() {
        return new Collections(this.params)
    }

    /**
     * Cloud storage operations
     * @return {Storage}
     */
    storage() {
        return new Storage(this.params)
    }

    /**
     * Trigger a cloud function.
     * Cloud functions triggered through the HTTP API do not carry user information.
     * @param name
     * @param data
     * @return {Promise}
     */
    async callFunction({name, data}) {
        let access_token = await getToken(this.params)
        let url = `https://api.weixin.qq.com/tcb/invokecloudfunction?access_token=${access_token}&name=${name}&env=${this.env}`
        return new Promise((resolve, reject) => {
            HttpService.post(url, data).then(res => {
                if (res.errcode !== 0) {
                    reject(res)
                } else {
                    try {
                        resolve(JSON.parse(res.resp_data))
                    } catch (e) {
                        logger.error(e)
                    }
                }
            }).catch(err => {
                reject(err)
            })
        })
    }

    /**
     * Image operations
     * @return {Img}
     */
    img() {
        return new Img(this.params)
    }

    wxacode() {
        return new Wxacode(this.params)
    }

    customerServiceMessage() {
        return new CustomerServiceMessage(this.params)
    }

    analysis() {
        return new Analysis(this.params)
    }

    security() {
        return new Security(this.params)
    }
}

module.exports = HttpMpCloud
def countComments(code_snippet):
    """Count non-nested /* ... */ block comments in a code snippet."""
    count = 0
    in_comment = False
    i = 0
    while i < len(code_snippet):
        if code_snippet[i:i+2] == "/*" and not in_comment:
            in_comment = True
            count += 1
            i += 1  # Skip the second character of the "/*" delimiter
        elif code_snippet[i:i+2] == "*/" and in_comment:
            in_comment = False
            i += 1  # Skip the second character of the "*/" delimiter
        i += 1
    return count
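A quick sanity check for countComments; the sample string below is made up purely for illustration.

# Hypothetical sample input, used only to exercise countComments above
sample = "int a; /* first comment */ int b; /* second comment */ int c;"
print(countComments(sample))  # prints 2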
from flask_restful import Api from .routes import ( VmachineEvent, VnicEvent, VdiskEvent, VmachineControl, AttachDevice, VmachineEventItem, VmachineCallbackEvent, ) def init_api(api: Api): api.add_resource(VmachineEvent, "/api/v1/vmachine") api.add_resource(VmachineEventItem, "/api/v1/vmachine/<int:vmachine_id>") api.add_resource(VmachineControl, "/api/v1/vmachine/<int:vmachine_id>/power") api.add_resource(AttachDevice, "/api/v1/attach") api.add_resource(VnicEvent, "/api/v1/vnic") api.add_resource(VdiskEvent, "/api/v1/vdisk") api.add_resource(VmachineCallbackEvent, "/api/v1/vmachine/callback")
import SimpleITK as sitk


def perform_rigid_registration(fixed_image_path, moving_image_path):
    # Load the input images
    fixed_image = sitk.ReadImage(fixed_image_path)
    moving_image = sitk.ReadImage(moving_image_path)

    # ElastixImageFilter is available when SimpleITK is built with SimpleElastix
    elastix = sitk.ElastixImageFilter()

    # Set the fixed and moving images
    elastix.SetFixedImage(fixed_image)
    elastix.SetMovingImage(moving_image)

    # Start from the default rigid parameter map and override selected settings
    parameter_map = sitk.GetDefaultParameterMap("rigid")
    parameter_map["Registration"] = ["MultiResolutionRegistration"]
    parameter_map["Metric"] = ["AdvancedMattesMutualInformation"]
    parameter_map["Transform"] = ["EulerTransform"]
    parameter_map["Interpolator"] = ["BSplineInterpolator"]
    parameter_map["Optimizer"] = ["AdaptiveStochasticGradientDescent"]
    parameter_map["ResampleInterpolator"] = ["FinalBSplineInterpolator"]
    parameter_map["Resampler"] = ["DefaultResampler"]
    parameter_map["FixedImagePyramid"] = ["FixedRecursiveImagePyramid"]
    parameter_map["MovingImagePyramid"] = ["MovingRecursiveImagePyramid"]
    parameter_map["NumberOfResolutions"] = ["4"]
    parameter_map["MaximumNumberOfIterations"] = ["2000"]
    parameter_map["NumberOfSpatialSamples"] = ["2048"]
    parameter_map["NewSamplesEveryIteration"] = ["true"]
    parameter_map["ImageSampler"] = ["Random"]
    parameter_map["AutomaticScalesEstimation"] = ["true"]
    parameter_map["AutomaticTransformInitialization"] = ["true"]
    elastix.SetParameterMap(parameter_map)

    # Perform the registration
    elastix.Execute()

    # Get the registered (resampled moving) image
    registered_image = elastix.GetResultImage()

    return registered_image
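A minimal usage sketch for perform_rigid_registration above, assuming a SimpleITK build that includes SimpleElastix; the file names are hypothetical placeholders.

import SimpleITK as sitk

# Hypothetical input/output paths; any volumes readable by SimpleITK will do
registered = perform_rigid_registration("fixed_ct.nii.gz", "moving_ct.nii.gz")
sitk.WriteImage(registered, "moving_ct_registered.nii.gz")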
#!/bin/sh echo "Fixing permissions (just in case)" chown -R xcauth:xcauth /etc/xcauth.conf /var/*/xcauth echo "Stopping xcauth.service, if running" systemctl -q stop xcauth.service echo "(Re-)starting listening sockets" cd /etc/systemd/system && for i in xc*.socket; do systemctl start $i done
<reponame>leongaban/redux-saga-exchange import * as React from 'react'; import block from 'bem-cn'; import { ITab } from 'shared/types/ui'; import './Tabs.scss'; interface IProps { tabs: ITab[]; } const b = block('tabs'); class Tabs extends React.PureComponent<IProps> { public render() { const { tabs } = this.props; return ( <div className={b()}> { tabs.map(({ active, onClick, title, disabled, hidden }, index) => ( <div className={b('element', { active, disabled: !!disabled, hidden: !!hidden })()} key={index} onClick={!disabled ? onClick : void (0)} > {title} </div> ))} </div> ); } } export default Tabs; export { IProps as ITabsProps };
from dateutil.rrule import * from prometheus_client import Histogram from company_jira import jira from util import * from jira_util import * PROJECT_TIME_HISTOGRAM = Histogram( 'project_time_days', 'Lead time in days.', ['project', 'metric'], buckets=[ 0.5, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 12, 14, 16, 18, 20, 25, 30, 35, 40, 50, 60, 70, 80, 90, 100, 365, float("inf") ] ) def get_recent_changelogs(conf, num_issues, all_issues=False): last_issues = jira.search_issues( """project = "%s" and %s status in ("Resolved", "Done", "Closed") order by updatedDate DESC""" % ( conf['project_id'], "" if all_issues else "issuetype = \"%s\" and" % conf['product_granularity'] ), expand='changelog', maxResults=num_issues ) return [ issue_to_changelog(issue)['changelog'] for issue in last_issues ] def calculate_time_metric(conf, metric_name, changelogs): delta_days = None rev_statuses_until_start = [conf['wip_workflow_statuses'][0]] + list( reversed(conf['planning_workflow_statuses']) ) + ['Created'] if metric_name == 'lead-time': delta_days = [ weekdays_between( get_or_else(changelog, ['Created']), get_or_else(changelog, ['Resolved', 'Done', 'Closed']) ) for changelog in changelogs ] elif metric_name == 'cycle-time': delta_days = [ weekdays_between( get_or_else(changelog, rev_statuses_until_start), get_or_else(changelog, ['Resolved', 'Done', 'Closed']) ) for changelog in changelogs ] return delta_days def observe_metric(project_name, project_id, metric, value): PROJECT_TIME_HISTOGRAM.labels( project_name, metric ).observe(value) logger.info('Observed for %s (%s), metric %s: %.02f days' % ( project_name, project_id, metric, value )) def monitor_project_time_metrics(conf): recent_changelogs = get_recent_changelogs(conf, 5) delta_days = calculate_time_metric(conf, 'lead-time', recent_changelogs) for delta in delta_days: observe_metric( conf['project_name'], conf['project_id'], 'lead-time', delta ) delta_days = calculate_time_metric(conf, 'cycle-time', recent_changelogs) for delta in delta_days: observe_metric( conf['project_name'], conf['project_id'], 'cycle-time', delta )
import { GraphQLSchema, GraphQLObjectType, GraphQLString, } from 'graphql'; const PostType = new GraphQLObjectType({ name: 'Post', fields: { id: { type: GraphQLString }, title: { type: GraphQLString }, content: { type: GraphQLString }, } }); const QueryType = new GraphQLObjectType({ name: 'Query', fields: { post: { type: PostType, args: { id: { type: GraphQLString } }, resolve: (parent, args) => { // database call to retrieve blog post based on args.id } } } }); export const schema = new GraphQLSchema({ query: QueryType });
<reponame>opentaps/opentaps-1 /******************************************************************************* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. *******************************************************************************/ /* This file has been modified by Open Source Strategies, Inc. */ package org.ofbiz.minilang.method.ifops; import java.util.List; import java.util.Map; import javolution.util.FastList; import org.ofbiz.base.util.UtilProperties; import org.ofbiz.base.util.UtilValidate; import org.ofbiz.base.util.UtilXml; import org.ofbiz.entity.GenericValue; import org.ofbiz.minilang.SimpleMethod; import org.ofbiz.minilang.method.ContextAccessor; import org.ofbiz.minilang.method.MethodContext; import org.ofbiz.minilang.method.MethodOperation; import org.ofbiz.security.Security; import org.ofbiz.security.authz.Authorization; import org.w3c.dom.Element; /** * If the user does not have the specified permission the fail-message * or fail-property sub-elements are used to add a message to the error-list. */ public class CheckPermission extends MethodOperation { public static final class CheckPermissionFactory implements Factory<CheckPermission> { public CheckPermission createMethodOperation(Element element, SimpleMethod simpleMethod) { return new CheckPermission(element, simpleMethod); } public String getName() { return "check-permission"; } } String message = null; String propertyResource = null; boolean isProperty = false; /** If null no partyId env-name will be checked against the userLogin.partyId and accepted as permission */ ContextAccessor<String> acceptUlPartyIdEnvNameAcsr = null; PermissionInfo permissionInfo; ContextAccessor<List<Object>> errorListAcsr; List<PermissionInfo> altPermissions = null; public CheckPermission(Element element, SimpleMethod simpleMethod) { super(element, simpleMethod); permissionInfo = new PermissionInfo(element); this.errorListAcsr = new ContextAccessor<List<Object>>(element.getAttribute("error-list-name"), "error_list"); Element acceptUserloginPartyElement = UtilXml.firstChildElement(element, "accept-userlogin-party"); if (acceptUserloginPartyElement != null) { acceptUlPartyIdEnvNameAcsr = new ContextAccessor<String>(acceptUserloginPartyElement.getAttribute("party-id-env-name"), "partyId"); } List<? 
extends Element> altPermElements = UtilXml.childElementList(element, "alt-permission"); if (!altPermElements.isEmpty()) { altPermissions = FastList.newInstance(); } for (Element altPermElement: altPermElements) { altPermissions.add(new PermissionInfo(altPermElement)); } Element failMessage = UtilXml.firstChildElement(element, "fail-message"); Element failProperty = UtilXml.firstChildElement(element, "fail-property"); if (failMessage != null) { this.message = failMessage.getAttribute("message"); this.isProperty = false; } else if (failProperty != null) { this.propertyResource = failProperty.getAttribute("resource"); this.message = failProperty.getAttribute("property"); this.isProperty = true; } } @Override public boolean exec(MethodContext methodContext) { boolean hasPermission = false; List<Object> messages = errorListAcsr.get(methodContext); if (messages == null) { messages = FastList.newInstance(); errorListAcsr.put(methodContext, messages); } // if no user is logged in, treat as if the user does not have permission: do not run subops GenericValue userLogin = methodContext.getUserLogin(); if (userLogin != null) { Authorization authz = methodContext.getAuthz(); Security security = methodContext.getSecurity(); if (this.permissionInfo.hasPermission(methodContext, userLogin, authz, security)) { hasPermission = true; } // if failed, check alternate permissions if (!hasPermission && altPermissions != null) { for (PermissionInfo altPermInfo: altPermissions) { if (altPermInfo.hasPermission(methodContext, userLogin, authz, security)) { hasPermission = true; break; } } } } if (!hasPermission && acceptUlPartyIdEnvNameAcsr != null) { String acceptPartyId = (String) acceptUlPartyIdEnvNameAcsr.get(methodContext); if (UtilValidate.isEmpty(acceptPartyId)) { // try the parameters Map Map<String, Object> parameters = methodContext.getEnv("parameters"); if (parameters != null) { acceptPartyId = acceptUlPartyIdEnvNameAcsr.get(parameters, methodContext); } } if (UtilValidate.isNotEmpty(acceptPartyId) && UtilValidate.isNotEmpty(userLogin.getString("partyId")) && acceptPartyId.equals(userLogin.getString("partyId"))) { hasPermission = true; } } if (!hasPermission) { this.addMessage(messages, methodContext); } return true; } public void addMessage(List<Object> messages, MethodContext methodContext) { String message = methodContext.expandString(this.message); String propertyResource = methodContext.expandString(this.propertyResource); if (!isProperty && message != null) { messages.add(message); // if (Debug.infoOn()) Debug.logInfo("[SimpleMapOperation.addMessage] Adding message: " + message, module); } else if (isProperty && propertyResource != null && message != null) { //String propMsg = UtilProperties.getPropertyValue(UtilURL.fromResource(propertyResource, loader), message); String propMsg = UtilProperties.getMessage(propertyResource, message, methodContext.getEnvMap(), methodContext.getLocale()); if (UtilValidate.isEmpty(propMsg)) { messages.add("Simple Method Permission error occurred, but no message was found, sorry."); } else { messages.add(methodContext.expandString(propMsg)); } // if (Debug.infoOn()) Debug.logInfo("[SimpleMapOperation.addMessage] Adding property message: " + propMsg, module); } else { messages.add("Simple Method Permission error occurred, but no message was found, sorry."); // if (Debug.infoOn()) Debug.logInfo("[SimpleMapOperation.addMessage] ERROR: No message found", module); } } public static class PermissionInfo { String permission; String action; public PermissionInfo(Element 
altPermissionElement) { this.permission = altPermissionElement.getAttribute("permission"); this.action = altPermissionElement.getAttribute("action"); } public boolean hasPermission(MethodContext methodContext, GenericValue userLogin, Authorization authz, Security security) { String permission = methodContext.expandString(this.permission); String action = methodContext.expandString(this.action); if (UtilValidate.isNotEmpty(action)) { // run hasEntityPermission return security.hasEntityPermission(permission, action, userLogin); } else { // run hasPermission return authz.hasPermission(userLogin.getString("userLoginId"), permission, methodContext.getEnvMap()); } } } @Override public String rawString() { // TODO: add all attributes and other info return "<check-permission/>"; } @Override public String expandedString(MethodContext methodContext) { // TODO: something more than a stub/dummy return this.rawString(); } }
<reponame>hofman-p/alenvi-api<gh_stars>1-10 const Boom = require('@hapi/boom'); const get = require('lodash/get'); const AttendanceHelper = require('../helpers/attendances'); const translate = require('../helpers/translate'); const { language } = translate; const list = async (req) => { try { const { courseSlotsIds, company } = req.pre.attendancesInfos; const attendances = await AttendanceHelper.list(courseSlotsIds, company); return { message: attendances.length ? translate[language].attendancesFound : translate[language].attendancesNotFound, data: { attendances }, }; } catch (e) { req.log('error', e); return Boom.isBoom(e) ? e : Boom.badImplementation(e); } }; const listUnsubscribed = async (req) => { try { req.log('attendanceController - listUnsubscribed - query', req.query); req.log('attendanceController - listUnsubscribed - company', get(req, 'auth.credentials.company._id')); const { course, company, trainee } = req.query; const unsubscribedAttendances = course ? await AttendanceHelper.listUnsubscribed(course, company) : await AttendanceHelper.getTraineeUnsubscribedAttendances(trainee); return { message: unsubscribedAttendances.length ? translate[language].attendancesFound : translate[language].attendancesNotFound, data: { unsubscribedAttendances }, }; } catch (e) { req.log('error', e); return Boom.isBoom(e) ? e : Boom.badImplementation(e); } }; const create = async (req) => { try { await AttendanceHelper.create(req.payload); return { message: translate[language].attendanceCreated }; } catch (e) { req.log('error', e); return Boom.isBoom(e) ? e : Boom.badImplementation(e); } }; const remove = async (req) => { try { await AttendanceHelper.delete(req.params._id); return { message: translate[language].attendanceDeleted }; } catch (e) { req.log('error', e); return Boom.isBoom(e) ? e : Boom.badImplementation(e); } }; module.exports = { list, listUnsubscribed, create, remove };
# shellcheck shell=bash # OUT/ERR/DBG #UseFD 97 #UseFD 99 Describe "string.lib:" Include "../ammlib" Before "ammLib::Require string" UseFD 98 typeset -a _demoArrayWords=(hello world pouet coin lol rofl mao) Describe "pattern matching:" Describe "ammString::Contains" It "returns success if a simple string is contained in another" When call ammString::Contains "wor" "hello world" The status should be success End It "returns success if a complex string is contained in another" When call ammString::Contains "w*l" "hello world" The status should be success End It "returns failure if the string is not contained in another" When call ammString::Contains "toto" "hello world" The status should be failure End End Describe "ammString::ContainsWord" It "returns success if first arg is also one of other args" When call ammString::ContainsWord "rofl" "${_demoArrayWords[@]}" The status should be success End It "returns failure if first arg is not in one of other args" When call ammString::ContainsWord "ohohoh" "${_demoArrayWords[@]}" The status should be failure End End Describe "ammString::StartsWith" End Describe "ammString::EndsWith" End Describe "ammString::IsNotEmpty" End Describe "ammString::IsEmpty" End End Describe "simple file tests:" Describe "ammString::IsFile" End Describe "ammString::IsDirectory" End End Describe "Format tests:" Describe "ammString::IsInteger" End Describe "ammString::IsHex" End Describe "ammString::IsYes" End Describe "ammString::IsNo" End Describe "ammString::IsYesNo" End Describe "ammString::IsTrue" End Describe "ammString::IsIPv4" It "returns success if arg is an usual IPv4" When call ammString::IsIPv4 "10.20.30.40" The status should be success End It "returns success if arg is a short IPv4" When call ammString::IsIPv4 "1.1" The status should be success End It "returns failure if arg is a bad usual IPv4" When call ammString::IsIPv4 "1.2.4.555" The status should be failure End It "returns failure if arg is a bad short IPv4" When call ammString::IsIPv4 "1.256" The status should be failure End End Describe "ammString::IsIPv6" It "returns success if arg is a good full IPv6" When call ammString::IsIPv6 "9999:FFFF:ABCD:EFF:0000:8a2e:0370:7334" The status should be success End It "returns success if arg is a good short IPv6" When call ammString::IsIPv6 "::1" The status should be success End It "returns success if arg is a good short IPv6" When call ammString::IsIPv6 "1::1" The status should be success End It "returns failure if arg is a not a good full IPv6" When call ammString::IsIPv6 "9999:FFFF:ABCD:EFG:0000:8a2e:0370:7334" The status should be failure End It "returns failure if arg is a not a good short IPv6" When call ammString::IsIPv6 "1::G::1" The status should be failure End End Describe "ammString::IsIP" It "returns success if arg is an IPv4" When call ammString::IsIPv4 "10.20.30.40" The status should be success End It "returns success if arg is an IPv6" When call ammString::IsIPv6 "9999:FFFF:ABCD:EFF:0000:8a2e:0370:7334" The status should be success End End Describe "ammString::IsUri" It "returns success if arg is an http uri" When call ammString::IsUri "https://github.com/Saruspete/ammlib/blob/master/README.md#bash-modular-library" The status should be success End It "returns success if arg is a folder" When call ammString::IsUri "file:///tmp/" The status should be success End It "returns success if arg is a git+ssh uri" When call ammString::IsUri "git+ssh://example.com/git/repo?param" The status should be success End It "returns failure if arg is a host without 
format" When call ammString::IsUri "google.com" The status should be failure End End Describe "ammString::IsDate" End Describe "ammString::IsTime" End Describe "ammString::Type" Parameters "/tmp" "folder" "10.20.30.40" "ipv4" "::1" "ipv6" "2016-11-26" "date" "2019-01-20 19:10:00" "datetime" "hello" "string" End Example "Type of $1" When call ammString::Type "$1" The output should eq "$2" End End End Describe "string modifiers:" Describe "ammString::Trim" It "removes trailing and ending spaces and tabs by default" When call ammString::Trim " toto " The output should eq "toto" End It "removes trailing and ending specified chars" When call ammString::Trim "-- toto --" "[- ]" The output should eq "toto" End End Describe "ammString::ToCapital" It "set a random case to all lower but the first char" When call ammString::ToCapital "heLlOWorLd" The output should eq "Helloworld" End End Describe "ammString::ToLower" It "set all chars to their lower case" When call ammString::ToLower "heLlOWorLd" The output should eq "helloworld" End End Describe "ammString::ToUpper" It "set all chars to their upper case" When call ammString::ToUpper "heLlOWorLd" The output should eq "HELLOWORLD" End End End Describe "multiline extraction:" Describe "ammString::ExtractCmdLine" End Describe "ammString::InputToLines" End End Describe "format conversion:" Describe "ammString::UnitToPow" End Describe "ammString::UnitConvert" End Describe "ammString::BaseConvert" End Describe "ammString::HexToDec" End Describe "ammString::DecToHex" End Describe "ammString::IPv4ToHex" End Describe "ammString::HexToIPv4" End Describe "ammString::IntegerMin" End Describe "ammString::IntegerMax" End Describe "ammString::IntegerAverage" End Describe "ammString::IntegerSum" End End Describe "string match and filtering:" Describe "ammString::Filter" End Describe "ammString::FilterTuples" End Describe "ammString::CountWords" End Describe "ammString::CountLines" End Describe "ammString::SortWords" End End Describe "ammString::ExpandStringBash" It "Expands a string like simple bash expansion" When call ammString::ExpandStringBash '{1..5}' The output should eq "1 2 3 4 5 " End It "Expands a string like simple bash expansion with prefix and suffix" When call ammString::ExpandStringBash 'hello-{1,2}_world' 'hi_{1..3}_jack' 'omg_{1,2-4,6}_wtfbbq' The output should eq "hello-1_world hello-2_world hi_1_jack hi_2_jack hi_3_jack omg_1_wtfbbq omg_2-4_wtfbbq omg_6_wtfbbq " End It "Expands a string like bash expansion with nested groups" When call ammString::ExpandStringBash 'hello_{world,master-{1..2}}' The output should eq "hello_world hello_master-1 hello_master-2 " End It "Expands a string like bash expansion with FQDN" When call ammString::ExpandStringBash 'host-{01..05}.dev.intra' The output should eq "host-01.dev.intra host-02.dev.intra host-03.dev.intra host-04.dev.intra host-05.dev.intra " End It "Expands a string like bash expansion with complex nested groups" When call ammString::ExpandStringBash 'hello_{world-{01..04},master,slave{1..3}}' The output should eq "hello_world-01 hello_world-02 hello_world-03 hello_world-04 hello_master hello_slave1 hello_slave2 hello_slave3 " End End Describe "ammString::ExpandIntegerList" It "Expands a grouped string as an ordered list" When call ammString::ExpandIntegerList "7-10,11,12,5,8,1,11-14" The output should eq "1 5 7 8 9 10 11 12 13 14 " End It "Emits a warning if an element is invalid" When call ammString::ExpandIntegerList "7-10,11,12,a-f,14-16" The fd 98 should include "is not an integer" The output 
should eq "7 8 9 10 11 12 14 15 16 " End End Describe "ammString::UUIDVersionGet" Parameters "00000000-0000-0000-0000-000000000000" "0" "61dc1f09-4c9e-11eb134f-00d86113b7d2" "1" "78e58f3a-aff6-377a-cc5b-da43e9a30794" "3" "fcaf1927-ab1f-4cc1-9f37-1e0d5da137aa" "4" "060470a1-4756-5543-3a00-c7ed9dfcb865" "5" End Example "Extracts version of UUIDv$2" When call ammString::UUIDVersionGet "$1" The output should eq "$2" End End Describe "ammString::UUIDGenerate" checkuuid() { [ $(ammString::UUIDVersionGet "${checkuuid:?}") -eq "$1" ] } Example "Generate UUID v1" When call ammString::UUIDGenerate 1 The output should satisfy checkuuid 1 End Example "Generate UUID v3" When call ammString::UUIDGenerate 3 The output should satisfy checkuuid 3 End Example "Generate UUID v4" When call ammString::UUIDGenerate 4 The output should satisfy checkuuid 4 End End Describe "ammString::HashMD5" Parameters "d41d8cd98f00b204e9800998ecf8427e" "" "7215ee9c7d9dc229d2921a40e899ec5f" " " "5eb63bbbe01eeed093cb22bb8f5acdc3" "hello world" End Example "Checks MD5 of '$2'" When call ammString::HashMD5 "$2" The output should eq "$1" End End Describe "ammString::HashSHA1" Parameters "da39a3ee5e6b4b0d3255bfef95601890afd80709" "" "b858cb282617fb0956d960215c8e84d1ccf909c6" " " "2aae6c35c94fcfb415dbe95f408b9ce91ee846ed" "hello world" End Example "Checks MD5 of '$2'" When call ammString::HashSHA1 "$2" The output should eq "$1" End End Describe "ammString::Repeat" End End
// Java code public int multiply(int a, int b) { int c = a * b; return c; }
package com.yxyhail.rn.baserlib.openapi; import android.app.Application; import android.content.Context; import androidx.multidex.MultiDexApplication; import com.facebook.react.ReactApplication; import com.facebook.react.ReactNativeHost; import com.facebook.react.ReactPackage; import com.yxyhail.rn.baserlib.BuildConfig; import java.util.HashMap; import java.util.List; import java.util.Map; public class RNBaserApplication extends MultiDexApplication implements RNBaserApiApplication { private static Application app; @Override protected void attachBaseContext(Context base) { super.attachBaseContext(base); RNBaser.onAppAttach(this); } @Override public void onCreate() { super.onCreate(); app = this; RNBaser.onAppCreate(this); } @Override public Boolean isDev() { return BuildConfig.DEBUG; } @Override public String getJSBundleFile() { return null; } @Override public List<ReactPackage> getPackageList() { return null; } @Override public Map<String, Object> getConstants() { Map<String, Object> map = new HashMap<>(); map.put(RNBaserApi.ENV, "release"); map.put(RNBaserApi.SOURCE, ""); map.put(RNBaserApi.WxAppId, ""); map.put(RNBaserApi.TOKEN, ""); return map; } @Override public String[] preLoadModule() { return null; } @Override public ReactNativeHost getReactNativeHost() { return RNBaser.getNativeHost(); } public static ReactApplication getReactApp(){ return (ReactApplication)app; } }
#!/bin/bash ./he_base test.eqn 1 ./he_base test.eqn 6 ./he_base test.eqn 11 ./he_base test.eqn 16 ./he_base test.eqn 21 ./he_base test.eqn 26 ./he_base test.eqn 31 ./he_base test.eqn 36 ./he_base test.eqn 41
#!/bin/bash echo $* sdfgfdgfdgfdgdg
import asyncio
import websockets


async def hello(websocket, path):
    greeting = await websocket.recv()
    print(f"< {greeting}")


async def hello_server():
    server = await websockets.serve(hello, '127.0.0.1', 8765)
    # wait_closed() only returns once the server shuts down, so this keeps serving
    await server.wait_closed()


asyncio.get_event_loop().run_until_complete(hello_server())
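A matching client sketch for the server above; the greeting text is arbitrary and the URI assumes the host and port used by the server.

import asyncio
import websockets


async def say_hello():
    # Connect to the server defined above and send one greeting
    async with websockets.connect('ws://127.0.0.1:8765') as websocket:
        await websocket.send("Hello from the client!")


asyncio.get_event_loop().run_until_complete(say_hello())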
package com.atguigu.gmall.pms.service;

import com.atguigu.gmall.pms.vo.AttrVo;
import com.baomidou.mybatisplus.extension.service.IService;
import com.atguigu.gmall.pms.entity.AttrEntity;
import com.atguigu.core.bean.PageVo;
import com.atguigu.core.bean.QueryCondition;

/**
 * Product attributes
 *
 * @author lixianfeng
 * @email <EMAIL>
 * @date 2019-12-31 19:06:41
 */
public interface AttrService extends IService<AttrEntity> {

    PageVo queryPage(QueryCondition params);

    PageVo queryByCidOrTypePage(Integer type, Long cid, QueryCondition queryCondition);

    void saveAttrVO(AttrVo attrVo);
}
import React, { Component } from "react";
import * as styles from "./uploadButton.scss";

// Capitalized class name follows React component naming conventions
export default class UploadButton extends Component {
  render() {
    return <div className={styles.purple}>upload</div>;
  }
}
# Implementation of the post method in the Tornado web handler class
@tornado.web.authenticated
def post(self):
    topic_id = self.get_argument('topic_id')
    hashtag = self.get_argument('hashtag')
    save_type = self.get_argument('save_type')
    save_type = True if save_type == 'true' else False
    topic_hashtag(topic_id, hashtag, save_type)


# Implementation of the get method in the handler class
class HashtagChartHandler(BaseHandler, TemplateRendering):
    @tornado.web.authenticated
    def get(self, argument=None):
        # Retrieve and display statistics related to the usage of hashtags on the platform
        # Add code to fetch and display hashtag statistics
        pass
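A minimal sketch of how a handler like HashtagChartHandler above could be registered in a Tornado application; the route and port are assumptions chosen for illustration.

import tornado.ioloop
import tornado.web


def make_app():
    # Hypothetical URL pattern; adjust to the platform's actual routing table
    return tornado.web.Application([
        (r"/hashtag/chart", HashtagChartHandler),
    ])


if __name__ == "__main__":
    app = make_app()
    app.listen(8888)  # Placeholder port
    tornado.ioloop.IOLoop.current().start()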
var add = function(n1, n2) { return n1 + n2; } var divide = function(n1, n2) { return n1 / n2; } var subtract = function(n1, n2) { return n1 - n2; } var multiply = function(n1, n2) { return n1 * n2; } var PI = 3.1415; export { add, divide, subtract, multiply, PI }
/* * Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one * or more contributor license agreements. Licensed under the Elastic License * 2.0 and the Server Side Public License, v 1; you may not use this file except * in compliance with, at your election, the Elastic License 2.0 or the Server * Side Public License, v 1. */ import React from 'react'; import { render } from 'enzyme'; import { requiredProps } from '../../test/required_props'; import { EuiCommentTimeline } from './comment_timeline'; import { EuiAvatar } from '../avatar'; describe('EuiCommentTimeline', () => { test('is rendered', () => { const component = render(<EuiCommentTimeline {...requiredProps} />); expect(component).toMatchSnapshot(); }); describe('props', () => { describe('type', () => { it('is rendered', () => { const component = render(<EuiCommentTimeline type="update" />); expect(component).toMatchSnapshot(); }); }); describe('timelineIcon', () => { it('is rendered', () => { const component = render( <EuiCommentTimeline timelineIcon={<EuiAvatar size="l" name="Mario" />} /> ); expect(component).toMatchSnapshot(); }); }); }); });
#!/bin/bash kubeadm init \ --apiserver-advertise-address="192.168.50.10" \ --apiserver-cert-extra-sans="192.168.50.10" \ --node-name master \ --pod-network-cidr=192.168.51.0/16 mkdir -p /root/.kube cp -i /etc/kubernetes/admin.conf /root/.kube/config kubectl get nodes #kubectl create -f https://docs.projectcalico.org/v3.4/getting-started/kubernetes/installation/hosted/calico.yaml kubectl apply -f https://raw.githubusercontent.com/coreos/flannel/master/Documentation/kube-flannel.yml kubeadm token create --print-join-command > /vagrant/setup-node-join.sh cp /etc/kubernetes/admin.conf /vagrant/.config kubectl taint node master node-role.kubernetes.io/master:NoSchedule-
cp index.html public cp main.js public cp *.svg public cp style.css public firebase deploy
#!/bin/bash source $ROSWSS_BASE_SCRIPTS/helper/helper.sh if [ "$#" -eq 0 ]; then echo "Usage: broadcast <Command>" exit 1 fi command=$@; if [[ -z "$SINGLE_ROBOT_HOSTNAMES" || -z "$SINGLE_ROBOT_USERS" ]]; then hosts=($ROBOT_HOSTNAMES) users=($ROBOT_USERS) else hosts=($SINGLE_ROBOT_HOSTNAMES) users=($SINGLE_ROBOT_USERS) fi counter=0; for idx in "${!hosts[@]}"; do host="${hosts[$idx]}" user="${users[$idx]}" echo "Executing $command on $host" xterm -T "$host" -geometry 80x25+$counter+0 -e ssh $user@$host -A -t 'bash -l -c -i "'$@'" ; sleep 7' & counter=$(( $counter + 500 )) done
<gh_stars>10-100 src = Split(''' utils_httpc.c utils_epoch_time.c utils_list.c utils_net.c utils_timer.c ''') component = aos_component('libmisc', src) component.add_macro('DEBUG') component.add_includes('.', '../sdk-impl', '../LITE-log', '../LITE-utils') component.add_component_dependencis('utility/iotx-utils/LITE-log') if aos_global_config.board == 'linuxhost': PLATFORM_MISC = 'linux' else: PLATFORM_MISC = 'rhino' comp_names = [comp.name for comp in aos_global_config.components] if 'coap' in comp_names: component.add_sources( '../hal/'+PLATFORM_MISC+'/HAL_TCP_'+PLATFORM_MISC+'.c') CONFIG_COAP_DTLS_SUPPORT=aos_global_config.get_aos_global_config(' CONFIG_COAP_DTLS_SUPPORT') if CONFIG_COAP_DTLS_SUPPORT != 'y': component.add_sources( '../hal/'+PLATFORM_MISC+'/HAL_OS_'+PLATFORM_MISC+'.c') component.add_component_dependencis('utility/iotx-utils/mbedtls-hal')
import pandas as pd def dummy_variable_encoding(data, variable_name): # Convert the specified categorical variable to a pandas Categorical type data[variable_name] = pd.Categorical(data[variable_name]) # Encode the categorical variable into dummy variables dummy_vars = pd.get_dummies(data[variable_name], drop_first=False) # Concatenate the original data with the dummy variables data_with_dummies = pd.concat([data, dummy_vars], axis=1) return data_with_dummies
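A small usage sketch for dummy_variable_encoding above; the DataFrame and column name are made up for illustration.

import pandas as pd

# Hypothetical data used only to demonstrate the helper above
df = pd.DataFrame({"color": ["red", "blue", "red", "green"], "value": [1, 2, 3, 4]})
encoded = dummy_variable_encoding(df, "color")
print(list(encoded.columns))  # ['color', 'value', 'blue', 'green', 'red']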
import { expect } from 'chai'; import { addEventListenersToPuppeteerPage, EventName, loadCachedTestBed, takeScreenshot, TestBed } from '../../../puppeteer-tests/util'; import { ElementHandle } from 'puppeteer'; import { positiveDigest, negativeDigest, untriagedDigest } from '../cluster-page-sk/test_data'; import path from "path"; describe('cluster-digests-sk', () => { let testBed: TestBed; before(async () => { testBed = await loadCachedTestBed( path.join(__dirname, '..', '..', 'webpack.config.ts') ); }); let promiseFactory: <T>(eventName: EventName) => Promise<T>; let clusterDigestsSk: ElementHandle; beforeEach(async () => { promiseFactory = await addEventListenersToPuppeteerPage(testBed.page, ['layout-complete', 'selection-changed']); const loaded = promiseFactory('layout-complete'); // Emitted when layout stabilizes. await testBed.page.goto(`${testBed.baseUrl}/dist/cluster-digests-sk.html`); await loaded; clusterDigestsSk = (await testBed.page.$('#cluster svg'))!; }); it('should render the demo page', async () => { // Smoke test. expect(await testBed.page.$$('cluster-digests-sk')).to.have.length(1); }); it('should take a screenshot', async () => { await takeScreenshot(clusterDigestsSk, 'gold', 'cluster-digests-sk'); }); it('supports single digest selection via clicking', async () => { await clickNodeAndExpectSelectionChangedEvent(positiveDigest, [positiveDigest]); await takeScreenshot(clusterDigestsSk, 'gold', 'cluster-digests-sk_one-positive-selected'); await clickNodeAndExpectSelectionChangedEvent(untriagedDigest, [untriagedDigest]); await takeScreenshot(clusterDigestsSk, 'gold', 'cluster-digests-sk_one-untriaged-selected'); }); it('supports multiple digest selection via shift clicking', async () => { await clickNodeAndExpectSelectionChangedEvent(negativeDigest, [negativeDigest]); await shiftClickNodeAndExpectSelectionChangedEvent(positiveDigest, [negativeDigest, positiveDigest]); await takeScreenshot(clusterDigestsSk, 'gold', 'cluster-digests-sk_two-digests-selected'); await shiftClickNodeAndExpectSelectionChangedEvent(untriagedDigest, [negativeDigest, positiveDigest, untriagedDigest]); await takeScreenshot(clusterDigestsSk, 'gold', 'cluster-digests-sk_three-digests-selected'); }); it('clears selection by clicking anywhere on the svg that is not on a node', async () => { await clickNodeAndExpectSelectionChangedEvent(negativeDigest, [negativeDigest]); const clickEvent = promiseFactory<Array<string>>('selection-changed'); await clusterDigestsSk.click(); const evt = await clickEvent; expect(evt).to.deep.equal([]); }); async function clickNodeAndExpectSelectionChangedEvent(digest: string, expectedSelection: string[]) { const clickEvent = promiseFactory<Array<string>>('selection-changed'); await clickNodeWithDigest(testBed, digest); const evt = await clickEvent; expect(evt).to.deep.equal(expectedSelection); } async function shiftClickNodeAndExpectSelectionChangedEvent(digest: string, expectedSelection: string[]) { const clickEvent = promiseFactory<Array<string>>('selection-changed'); await shiftClickNodeWithDigest(testBed, digest); const evt = await clickEvent; expect(evt).to.deep.equal(expectedSelection); } }); export async function clickNodeWithDigest(testBed: TestBed, digest: string) { await testBed.page.click(`circle.node[data-digest="${digest}"]`); } export async function shiftClickNodeWithDigest(testBed: TestBed, digest: string) { await testBed.page.keyboard.down('Shift'); await clickNodeWithDigest(testBed, digest); await testBed.page.keyboard.up('Shift'); }
#!/bin/sh export CVMFS_PLATFORM_NAME="slc6-i386" export CVMFS_TIMESTAMP=$(date -u +'%Y-%m-%dT%H:%M:%SZ') # source the common platform independent functionality and option parsing script_location=$(cd "$(dirname "$0")"; pwd) . ${script_location}/common_test.sh # reset SELinux context echo -n "restoring SELinux context for /var/lib/cvmfs... " sudo restorecon -R /var/lib/cvmfs || die "fail" echo "done" retval=0 # running unit test suite run_unittests --gtest_shuffle || retval=1 cd ${SOURCE_DIRECTORY}/test echo "running CernVM-FS client test cases..." CVMFS_TEST_CLASS_NAME=ClientIntegrationTests \ ./run.sh $CLIENT_TEST_LOGFILE -o ${CLIENT_TEST_LOGFILE}${XUNIT_OUTPUT_SUFFIX} \ -x src/004-davinci \ src/005-asetup \ src/007-testjobs \ src/024-reload-during-asetup \ src/081-shrinkwrap \ src/082-shrinkwrap-cms \ src/084-premounted \ -- \ src/0* \ || retval=1 echo "running CernVM-FS client migration test cases..." CVMFS_TEST_CLASS_NAME=ClientMigrationTests \ ./run.sh $MIGRATIONTEST_CLIENT_LOGFILE \ -o ${MIGRATIONTEST_CLIENT_LOGFILE}${XUNIT_OUTPUT_SUFFIX} \ migration_tests/0* \ || retval=1 exit $retval
<reponame>dadeke/alexa-skill-onibus-bh<gh_stars>1-10
const speaks = require('../speakStrings');
const { setLastAccess } = require('../util');

/*
 * Generic error handling to catch any syntax or routing errors.
 * If you receive an error stating that the request handler chain was not found,
 * you have not implemented a handler for the intent being invoked,
 * or you have not included it in the skill builder.
 */
const ErrorHandler = {
  canHandle() {
    return true;
  },
  async handle(handlerInput, error) {
    // eslint-disable-next-line no-console
    console.error('Error handled:', JSON.stringify(error));

    await setLastAccess(handlerInput);

    return handlerInput.responseBuilder
      .speak(speaks.PROBLEM)
      .withStandardCard(speaks.SKILL_NAME, speaks.PROBLEM)
      .withShouldEndSession(true)
      .getResponse();
  },
};

module.exports = ErrorHandler;
class IndexOutOfBoundsError(LookupError): def __init__(self, index): super().__init__(f'Index {index} is out of bounds') class CustomList: def __init__(self, size): self.size = size self.data = [None] * size def set_value(self, index, value): if index < 0 or index >= self.size: raise IndexOutOfBoundsError(index) self.data[index] = value def get_value(self, index): if index < 0 or index >= self.size: raise IndexOutOfBoundsError(index) return self.data[index] # Example usage custom_list = CustomList(5) try: custom_list.set_value(6, 10) # Raises IndexOutOfBoundsError except IndexOutOfBoundsError as e: print(e) # Output: Index 6 is out of bounds
<reponame>Sensis/text-field-suggester $(document).ready(function () { describe('TextFieldSuggester', function () { var textField, maxSuggestions = 4, suggester, currentValue = ''; function arrowDown() { textField.trigger(new jQuery.Event('keydown', { keyCode: 40 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 40 })); }; function arrowUp() { textField.trigger(new jQuery.Event('keydown', { keyCode: 38 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 38 })); }; function deleteText() { textField.focus(); textField.select(); // Simulate backspace textField.val(''); textField.trigger(new jQuery.Event('keydown', { keyCode: 8 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 8 })); }; function pressDelete() { textField.trigger(new jQuery.Event('keydown', { keyCode: 46 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 46 })); }; function pressEnter() { textField.trigger(new jQuery.Event('keydown', { keyCode: 13 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 13 })); }; function pressEscape() { textField.trigger(new jQuery.Event('keydown', { keyCode: 27 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 27 })); }; function pressTab() { textField.trigger(new jQuery.Event('keydown', { keyCode: 9 })); textField.trigger(new jQuery.Event('keyup', { keyCode: 9 })); }; function enterText(text) { var code, i; textField.focus(); for (i = 0; i < text.length; ++i) { code = text.charCodeAt(i); textField.val(textField.val() + text.charAt(i)); textField.trigger(new jQuery.Event('keydown', { keyCode: code })); textField.trigger(new jQuery.Event('keyup', { keyCode: code })); textField.trigger(new jQuery.Event('input', { keyCode: code })); } }; function fetchSuggestions(textFieldValue, callback) { var suggestions = [ { label: 'Apple', icon: 'example-images/apple.png' }, { label: 'Apricot', icon: null }, { label: 'Pear', icon: null }, { label: 'Banana', icon: 'example-images/banana.png' }, { label: 'Grape', icon: 'example-images/grape.png' }, { label: 'Grapefruit', icon: null }, { label: 'Orange', icon: null }, { label: 'Pineapple', icon: 'example-images/pineapple.png' }, { label: 'Coconut', icon: null }, { label: 'Lemon', icon: null }, { label: 'Kiwi Fruit', icon: null } ], filteredSuggestions = [], i; for (i = 0; i < suggestions.length; ++i) { if (suggestions[i].label.toLowerCase().indexOf(textFieldValue.toLowerCase()) === 0) filteredSuggestions.push(suggestions[i]); } callback(filteredSuggestions); }; textField = $('#theTextField'); suggester = new Sensis.TextFieldSuggester('theTextField', '#theTextField', maxSuggestions, fetchSuggestions); suggester.valueUpdated = function (value) { currentValue = value; }; it('should invoke the valueUpdated callback when the text value is updated', function () { runs(function () { deleteText(); enterText('blah'); }); waitsFor(function () { return currentValue === 'blah'; }, 1000); }); it('should update its position when reposition() is called', function () { runs(function () { deleteText(); enterText('a'); }); waitsFor(function () { var suggestionList = $('.theTextFieldSuggestions'); return suggestionList.css('display') === 'block'; }, 1000); runs(function () { expect(suggester.completion.offset().left).toEqual(textField.offset().left + (textField.outerWidth() - textField.innerWidth()) / 2); expect(suggester.completion.offset().top).toEqual(textField.offset().top + (textField.outerHeight() - textField.innerHeight()) / 2); expect(suggester.completion.width()).toEqual(textField.width()); 
expect(suggester.completion.height()).toEqual(textField.outerHeight()); expect(suggester.suggestionList.offset().left).toEqual(textField.offset().left); expect(suggester.suggestionList.offset().top).toEqual(textField.offset().top + textField.outerHeight()); expect(suggester.suggestionList.width()).toEqual(textField.width()); textField.css('margin-left', '10px'); textField.css('width', '128px'); textField.css('height', '32px'); suggester.reposition(); expect(suggester.completion.offset().left).toEqual(textField.offset().left + (textField.outerWidth() - textField.innerWidth()) / 2); expect(suggester.completion.offset().top).toEqual(textField.offset().top + (textField.outerHeight() - textField.innerHeight()) / 2); expect(suggester.completion.width()).toEqual(textField.width()); expect(suggester.completion.height()).toEqual(textField.outerHeight()); expect(suggester.suggestionList.offset().left).toEqual(textField.offset().left); expect(suggester.suggestionList.offset().top).toEqual(textField.offset().top + textField.outerHeight()); expect(suggester.suggestionList.width()).toEqual(textField.width()); }); }); describe('Input completion', function () { it('should have an element for displaying the automatically completed text', function () { var completion = $('.theTextFieldCompletion'); expect(completion.length).toEqual(1); expect(completion.find('.prefix').length).toEqual(1); expect(completion.find('.suffix').length).toEqual(1); }); it('should hide the completion prefix', function () { var prefix = $('.theTextFieldCompletion .prefix'); expect(prefix.css('visibility')).toEqual('hidden'); }); it('should set the completion prefix to the entered text', function () { var prefix = $('.theTextFieldCompletion .prefix'); runs(function () { deleteText(); }); waitsFor(function () { return prefix.text() === ''; }, 1000); runs(function () { enterText('gr'); }); waitsFor(function () { return prefix.text() === 'gr'; }, 1000); }); it('should set the completion suffix to the remainder of the first suggestion', function () { var suffix = $('.theTextFieldCompletion .suffix'); runs(function () { deleteText(); }); waitsFor(function () { return suffix.text() === ''; }, 1000); runs(function () { enterText('kiwi'); }); waitsFor(function () { return suffix.text() === ' Fruit'; }, 1000); }); it('should set the text to the full completion when the text field is blurred', function () { var suffix = $('.theTextFieldCompletion .suffix'); runs(function () { deleteText(); enterText('a'); }); waitsFor(function () { return suffix.text() === 'pple'; }); runs(function () { textField.blur(); }); waitsFor(function () { return textField.val() === 'apple'; }, 1000); }); it('should not set the text to the full completion if it isn\'t a prefix when the text field is blurred', function () { var suffix = $('.theTextFieldCompletion .suffix'); runs(function () { deleteText(); enterText('a'); }); waitsFor(function () { return suffix.text() === 'pple'; }); runs(function () { deleteText(); enterText('p'); textField.blur(); }); waitsFor(function () { return textField.val() === 'p'; }, 1000); }); it('should cancel the completion when Enter is pressed in the text field', function () { var suffix = $('.theTextFieldCompletion .suffix'); runs(function () { deleteText(); enterText('ap'); }); waitsFor(function () { return suffix.text() === 'ple'; }); runs(function () { pressEnter(); }); waitsFor(function () { return suffix.text() === ''; }); }); }); describe('Value suggestion', function () { var suggestionList = $('.theTextFieldSuggestions'); 
it('should have an element for displaying the suggestions', function () { expect(suggestionList.length).toEqual(1); }); it('should have an element for each suggestion item according to the max suggestions', function () { var items = suggestionList.find('.suggestion'); expect(items.length).toEqual(maxSuggestions); }); it('should have an icon and a label for each suggestion item', function () { var items = suggestionList.find('.suggestion'), item, i; for (i = 0; i < items.length; ++i) { item = $(items[i]); expect(item.find('.icon').length).toEqual(1); expect(item.find('.label').length).toEqual(1); } }); it('should show the suggestions list when the text field is focused and hide it when blurred', function () { runs(function () { textField.focus() }); waitsFor(function () { return suggestionList.css('display') === 'block' }, 500); runs(function () { $('#anotherTextField').focus() }); waitsFor(function () { return suggestionList.css('display') === 'none' }, 500); }); it('should show the suggestions with a matching prefix when the text field is typed into', function () { var items = suggestionList.find('.suggestion'), item, i; // Type 'a' and expect Apple and Apricot runs(function () { deleteText(); enterText('a'); }); waitsFor(function () { return $(items[2]).css('display') === 'none' && $(items[0]).find('.label').text() === 'Apple'; }, 1000); runs(function () { item = $(items[0]); expect(item.find('.label').text()).toEqual('Apple'); expect(item.find('.icon').attr('src')).toEqual('example-images/apple.png'); expect(item.find('.icon').css('visibility')).toEqual('visible'); item = $(items[1]); expect(item.find('.label').text()).toEqual('Apricot'); expect(item.find('.icon').attr('src')).toBeFalsy(); expect(item.find('.icon').css('visibility')).toEqual('hidden'); }); // Keep typing 'pp' and expect only Apple runs(function () { enterText('pp'); }); waitsFor(function () { return $(items[1]).css('display') === 'none'; }, 1000); runs(function () { item = $(items[0]); expect(item.find('.label').text()).toEqual('Apple'); expect(item.find('.icon').attr('src')).toEqual('example-images/apple.png'); }); }); it('should mark the first suggestion with a CSS class if it matches the entered text', function () { var first = $(suggestionList.find('.suggestion').get(0)); // Type a letter which matches a couple of things runs(function () { deleteText(); enterText('a'); }); waitsFor(function () { return first.hasClass('suggested'); }); runs(function () { expect(suggestionList.find('.suggested').length).toEqual(1); }); // Type another letter which doesn't match anything runs(function () { enterText('z'); }); waitsFor(function () { return !first.hasClass('suggested'); }); }); it('should cycle through suggestions by adding a CSS class when the down arrow key is pressed', function () { var items = suggestionList.find('.suggestion'); runs(function () { deleteText() }); waitsFor(function () { return $(items[3]).css('display') === 'block' && suggestionList.find('.suggestion.selected').length === 0 }, 1000); runs(function () { arrowDown() }); waitsFor(function () { return $(items[0]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowDown() }); waitsFor(function () { return $(items[1]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowDown() }); waitsFor(function () { return $(items[2]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { 
arrowDown() }); waitsFor(function () { return $(items[3]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowDown() }); waitsFor(function () { return suggestionList.find('.suggestion.selected').length === 0 }, 1000); }); it('should cycle through suggestions by adding a CSS class when the up arrow key is pressed', function () { var items = suggestionList.find('.suggestion'); runs(function () { deleteText() }); waitsFor(function () { return $(items[3]).css('display') === 'block' && suggestionList.find('.suggestion.selected').length === 0 }, 1000); runs(function () { arrowUp() }); waitsFor(function () { return $(items[3]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowUp() }); waitsFor(function () { return $(items[2]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowUp() }); waitsFor(function () { return $(items[1]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowUp() }); waitsFor(function () { return $(items[0]).hasClass('selected') && suggestionList.find('.suggestion.selected').length === 1 }, 1000); runs(function () { arrowUp() }); waitsFor(function () { return suggestionList.find('.suggestion.selected').length === 0 }, 1000); }); it('should hide the suggestions when Enter is pressed in the text field', function () { var items = suggestionList.find('.suggestion'); runs(function () { deleteText(); enterText('ap'); }); waitsFor(function () { return $(items[1]).css('display') === 'block' && $(items[2]).css('display') === 'none'; }, 1000); runs(function () { pressEnter(); }); waitsFor(function () { return suggestionList.css('display') === 'none'; }, 1000); }); it('should hide the suggestions when Escape is pressed in the text field', function () { var items = suggestionList.find('.suggestion'); runs(function () { deleteText(); enterText('ap'); }); waitsFor(function () { return $(items[1]).css('display') === 'block' && $(items[2]).css('display') === 'none'; }, 1000); runs(function () { pressEscape(); }); waitsFor(function () { return suggestionList.css('display') === 'none'; }, 1000); }); it('should hide the suggestions when Delete is pressed in the text field', function () { var items = suggestionList.find('.suggestion'); runs(function () { deleteText(); enterText('ap'); }); waitsFor(function () { return $(items[1]).css('display') === 'block' && $(items[2]).css('display') === 'none'; }, 1000); runs(function () { pressDelete(); }); waitsFor(function () { return suggestionList.css('display') === 'none'; }, 1000); }); }); }); });
#!/usr/bin/env bash start_sender(){ /app/sendmsg/sender start & } start_receiver(){ /app/sendmsg/receiver start --addr-jaeger=$JAEGER_ADDR --addr-monitor=$MONITOR_ADDR } start_sender sleep 1 start_receiver
cfg_name=cfg_train_lgcn_fashion config=lgcn/configs/$cfg_name.py export CUDA_VISIBLE_DEVICES=0 export PYTHONPATH=. # train python lgcn/main.py \ --config $config \ --phase 'train' # test load_from=data/work_dir/$cfg_name/latest.pth python lgcn/main.py \ --config $config \ --phase 'test' \ --load_from $load_from \ --save_output \ --force
/** * Created by yanbo.ai on 2014/9/25. * Player scripts */ function Player(options) { // check jquery APIs if (typeof $ === "undefined") { console.error("Jquery can't found.") } // check file APIs support. if (!(window.File && window.FileReader && window.FileList && window.Blob)) { console.error("The File APIs are not fully supported in this browser."); } // check stmd if (typeof commonmark === "undefined") { console.error("Common mark can't found.") } var markdown = ""; var pages = []; var __index__ = -1; var __readOnly__ = false; var onplay = options.onplay; var onreadend = options.onreadend; var writer = new commonmark.HtmlRenderer(); var reader = new commonmark.Parser(); var player = document.querySelector("#player"); var dropZone = document.querySelector("#drop_zone"); var playZone = document.querySelector("#play_zone"); this.play = function (index) { __play__(index); }; this.fullScreen = function (selector) { var element = document.querySelector(selector); if (element.requestFullscreen) { element.requestFullscreen(); } else if (element.webkitRequestFullscreen) { element.webkitRequestFullscreen(); } else if (element.mozRequestFullScreen) { element.mozRequestFullScreen(); } else if (element.msRequestFullscreen) { element.msRequestFullscreen(); } }; this.setSwipeEvent = function (selector) { $(selector).on("swipeleft", handleSwipeLeft).on("swiperight", handleSwipeRight); }; this.setMarkdown = function (md) { markdown = md; __index__ = -1; parseMarkdown(markdown); __play__(0); }; this.sync = function (event) { markdown = event.data; parseMarkdown(markdown); __index__ = -1; __play__(event.index); }; this.getMarkdown = function () { return markdown; }; this.getIndex = function () { return __index__; }; this.readOnly = function () { __readOnly__ = true; }; this.display = function (message) { if ("none" == dropZone.style.display) { dropZone.style.display = "block"; } dropZone.querySelector("#drop_zone_header").innerHTML = message.title; dropZone.querySelector("#drop_zone_content").innerHTML = message.content; }; var __play__ = function (index) { dropZone.style.display = "none"; if (pages.length == 0 || __index__ === index) { return; } __index__ = index; playZone.innerHTML = pages[__index__]; }; var swipePlay = function (next) { var index = -1; if (next) { index = __index__ + 1 } else { index = __index__ - 1 } if (index < 0) { index = 0; return; } else if (index > pages.length - 1) { index = pages.length == 0 ? 0 : pages.length - 1; return; } __play__(index); onplay({"data": index}); }; var parseMarkdown = function (markdown) { pages = writer.render(reader.parse(markdown)).split(/<\s*hr\s*\/*\s*>/); }; var handleDragOver = function (event) { event.stopPropagation(); event.preventDefault(); event.dataTransfer.dropEffect = "copy"; }; var handleFileSelect = function (event) { event.stopPropagation(); event.preventDefault(); var files = event.dataTransfer.files; var reader = new FileReader(); reader.readAsText(files[0],"utf-8"); reader.onloadend = function (event) { if (event.target.readyState == FileReader.DONE) { markdown = event.target.result; parseMarkdown(markdown); __play__(0); onreadend({"data": markdown}); } }; }; var handleKeyUp = function (event) { if (__readOnly__) { return; } var keyCode = (typeof event.which === "number") ? 
event.which : event.keyCode; if (27 == keyCode) { //esc } else if (37 == keyCode) { swipePlay(false); } else if (39 == keyCode) { swipePlay(true); } }; var handleSwipeLeft = function () { if (!__readOnly__) { swipePlay(true); } }; var handleSwipeRight = function () { if (!__readOnly__) { swipePlay(false); } }; player.addEventListener('dragover', handleDragOver, false); player.addEventListener('drop', handleFileSelect, false); window.addEventListener('keyup', handleKeyUp, false); console.log("Player init success."); }
<reponame>Project-XPolaris/YouComuic-Studio<gh_stars>1-10 import React, { useEffect, useState } from 'react'; import { GridContextProvider, GridDropZone, GridItem, swap } from 'react-grid-dnd'; import PageItem from '@/pages/Create/components/PageItem'; import { Page } from '@/pages/Create/model'; interface PageCollectionPropsType { pages?: Page[] onPagesChange: (newPages: Page[]) => void onItemClick: (page: Page) => void onItemSelect: (page: Page) => void selectedPages: Page[] onCrop:(page:Page) => void } function useWindowSize() { const isClient = typeof window === 'object'; function getSize() { return { width: isClient ? window.innerWidth : undefined, height: isClient ? window.innerHeight : undefined, }; } const [windowSize, setWindowSize] = useState(getSize); useEffect(() => { if (!isClient) { return; } function handleResize() { setWindowSize(getSize()); } window.addEventListener('resize', handleResize); return () => window.removeEventListener('resize', handleResize); }, []); // Empty array ensures that effect is only run on mount and unmount return windowSize; } export default function PageCollection({ pages, onPagesChange, onItemClick, onItemSelect, selectedPages,onCrop }: PageCollectionPropsType) { const size = useWindowSize(); // target id will only be set if dragging from one dropzone to another. function onChange(sourceId, sourceIndex, targetIndex, targetId) { const newPages = swap(pages, sourceIndex, targetIndex); onPagesChange(newPages); } return ( <GridContextProvider onChange={onChange}> <GridDropZone id="items" boxesPerRow={Math.ceil(size.width / 120)} rowHeight={240} style={{ height: '400px' }} > {pages.map((item, idx) => ( <div key={item.name}> <GridItem> <PageItem page={item} order={idx + 1} onClick={onItemClick} onSelect={onItemSelect} isSelected={selectedPages.find(selectedPage => selectedPage.name === item.name) !== undefined} onCrop={onCrop} /> </GridItem> </div> ))} </GridDropZone> </GridContextProvider> ); }
#!/bin/bash if [ ! -f /apps/hostpath/hive/.already_setup ]; then ${HADOOP_HOME}/bin/hdfs dfs -mkdir /tmp ${HADOOP_HOME}/bin/hdfs dfs -mkdir -p /user/hive/warehouse echo "HDFS dirs for Hive successfully created." ${HADOOP_HOME}/bin/hdfs dfs -chmod g+w /tmp ${HADOOP_HOME}/bin/hdfs dfs -chmod g+w /user/hive/warehouse echo "HDFS dirs permissions successfully updated." touch /apps/hostpath/hive/.already_setup fi cd ${HIVE_HOME}/bin # Start Hive Server echo "Starting Hive Server ..." ${HIVE_HOME}/bin/hiveserver2 --hiveconf hive.root.logger=INFO,console --hiveconf hive.server2.enable.doAs=false echo "Hive Server successfully started." # metastore.thrift.uris=thrift://hive-metastore:9083 # hive.metastore.uris=thrift://hive-metastore:9083 # hive.metastore.uri.selection=SEQUENTIAL
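# Example (illustrative): the metastore settings commented out above would be passed the same
# way, one --hiveconf flag per property, e.g.
#   ${HIVE_HOME}/bin/hiveserver2 \
#     --hiveconf hive.root.logger=INFO,console \
#     --hiveconf hive.server2.enable.doAs=false \
#     --hiveconf hive.metastore.uris=thrift://hive-metastore:9083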
/** * @author <NAME> * * @module model/User * @description This module contains the Javascript class representation of the User table * in the database. * @requires module:model/database * @requires module:sequelize */ 'use strict'; const Sequelize = require('sequelize'); const { sequelize } = require('./database.js'); const Bookmark = require('./Bookmark.js'); /** * @class * @extends Sequelize.Model * @classdesc This class represents the User table. Its attributes represent columns within * the table. * @property {string} uuid - A unique identifier (primary key) for a user */ class User extends Sequelize.Model {} User.init({ uuid: { type: Sequelize.DataTypes.STRING, primaryKey: true, unique: true, allowNull: false, }, }, { sequelize, modelName: 'user', }); User.hasMany(Bookmark, { as: 'bookmarks', foreignKey: { name: 'userId', type: Sequelize.DataTypes.STRING, allowNull: false, }, }); module.exports = User;
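// Usage sketch (illustrative; assumes the Sequelize connection exported by ./database.js is
// configured and synced, and that Bookmark defines its own columns, which are not shown here):
//
//   const user = await User.create({ uuid: 'some-unique-id' });
//   const bookmarks = await user.getBookmarks(); // accessor generated by the hasMany association
//   await user.destroy();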
<reponame>martijnvandijk/th06<gh_stars>0 var searchData= [ ['debug_20logging_20support',['Debug logging support',['../debug.html',1,'']]], ['display_5fstatistics',['display_statistics',['../class_r_t_o_s.html#ace9fc0f1b35c1383b1050222706f3b70',1,'RTOS']]] ];
#!/bin/bash ##============================================================================== #title :remove-api-gateway.sh #description :This script will execute Ansible playbook to remove API Gateway #author :jakub.marciniak@makemycloud.eu #date :02272019 #version :0.1 #usage :bash remove-api-gateway.sh #notes :Install Ansible v2.7 (at least) to use this script ##============================================================================== #------------------------------------------------------------------------------- # Main flow #------------------------------------------------------------------------------- echo "[INFO] ------------------------------------------------------------------" echo "[INFO] Playbook to remove AWS API Gateway started" echo "[INFO] ------------------------------------------------------------------" ansible-playbook ./base-playbook.yml --tags "remove-api-gateway" echo "[INFO] ------------------------------------------------------------------" echo "[INFO] Playbook to remove AWS API Gateway finished" echo "[INFO] ------------------------------------------------------------------"
<gh_stars>100-1000 import { css, keyframes } from 'styled-components'; export const heartbeat = keyframes` 0%, 100% { opacity: 0.1; } 50% { opacity: 0.25; } `; export default { heartbeat: css` 1.5s ${heartbeat} ease infinite; `, };
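// Usage sketch (illustrative; the import path is assumed):
//
//   import styled from 'styled-components';
//   import animations from './animations';
//
//   const Skeleton = styled.div`
//     animation: ${animations.heartbeat}
//     background: #ccc;
//   `;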
export const SELECT_CITY = 'SELECT_CITY'; export const REQUEST_CITY = 'REQUEST_CITY'; export const RECEIVE_CITY = 'RECEIVE_CITY'; export const REQUEST_CITY_FAIL = 'REQUEST_CITY_FAIL'; export const selectCity = (city) => { return {type: SELECT_CITY, city}; }; export const requestCity = (userId) => { return {type: REQUEST_CITY, userId}; }; export const receiveCity = (cityList) => { return {type: RECEIVE_CITY, cityList}; }; export const requestCityFail = (err) => { return {type: REQUEST_CITY_FAIL, err}; }; export const fetchCity = (userId) => { // const url = './json/city.api.json'; const url = 'http://localhost:3000/city'; return dispatch => { dispatch(requestCity(userId)); return fetch(url).then(res => res.json()).then(data => { const cityList = data.data; dispatch(receiveCity(cityList)); }).catch(err => { dispatch(requestCityFail(err)); }); } };
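// Usage sketch (illustrative; assumes a store created with redux-thunk so the fetchCity thunk
// can be dispatched, and a reducer that handles the four action types above):
//
//   import { createStore, applyMiddleware } from 'redux';
//   import thunk from 'redux-thunk';
//   import { fetchCity } from './cityActions';
//
//   const store = createStore(cityReducer, applyMiddleware(thunk));
//   store.dispatch(fetchCity(42)); // dispatches REQUEST_CITY, then RECEIVE_CITY or REQUEST_CITY_FAIL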
<filename>onecms.sql /* Navicat MySQL Data Transfer Source Server : vagrant Source Server Version : 50546 Source Host : localhost:33066 Source Database : onecms Target Server Type : MYSQL Target Server Version : 50546 File Encoding : 65001 Date: 2015-12-22 18:54:46 */ SET FOREIGN_KEY_CHECKS=0; -- ---------------------------- -- Table structure for `advertising` -- ---------------------------- DROP TABLE IF EXISTS `advertising`; CREATE TABLE `advertising` ( `id` int(11) NOT NULL AUTO_INCREMENT, `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `position` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `start_date` datetime DEFAULT NULL, `end_date` datetime DEFAULT NULL, `image` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `link` varchar(255) COLLATE utf8_unicode_ci DEFAULT '#', `published` tinyint(4) DEFAULT '10', `created_by` int(11) DEFAULT NULL, `updated_by` int(11) DEFAULT NULL, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `idx_start_date` (`start_date`), KEY `idx_end_date` (`end_date`), KEY `idx_start_end_date` (`start_date`,`end_date`), KEY `idx_position` (`position`) ) ENGINE=InnoDB AUTO_INCREMENT=14 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of advertising -- ---------------------------- INSERT INTO `advertising` VALUES ('1', 'sl1', 'slideshow', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '/uploads/slideshow/1.jpg', '#', '10', '1', '1', '1450754123', '1450754123'); INSERT INTO `advertising` VALUES ('2', 'sl2', 'slideshow', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '/uploads/slideshow/2.jpg', '#', '10', '1', '1', '1450754280', '1450754302'); INSERT INTO `advertising` VALUES ('3', 'sl3', 'slideshow', '0000-00-00 00:00:00', '0000-00-00 00:00:00', '/uploads/slideshow/3.jpg', '#', '10', '1', '1', '1450754294', '1450784576'); INSERT INTO `advertising` VALUES ('4', 'Sản phẩm 1', 'home_product', null, null, '/uploads/products/1.jpg', '#', '10', '1', '1', '1450782014', '1450784547'); INSERT INTO `advertising` VALUES ('5', 'Sản phẩm 2', 'home_product', null, null, '/uploads/products/2.jpg', '#', '10', '1', '1', '1450782033', '1450784106'); INSERT INTO `advertising` VALUES ('6', 'Sản phẩm 3', 'home_product', null, null, '/uploads/products/3.jpg', '#', '10', '1', '1', '1450782047', '1450784112'); INSERT INTO `advertising` VALUES ('7', 'Sản phẩm 4', 'home_product', null, null, '/uploads/products/4.jpg', '#', '10', '1', '1', '1450782064', '1450784117'); INSERT INTO `advertising` VALUES ('8', 'Sản phẩm 5', 'home_product', null, null, '/uploads/products/5.jpg', '#', '10', '1', '1', '1450782082', '1450784122'); INSERT INTO `advertising` VALUES ('9', 'Sản phẩm 6', 'home_product', null, null, '/uploads/products/6.jpg', '#', '10', '1', '1', '1450782097', '1450784127'); INSERT INTO `advertising` VALUES ('10', 'Sản phẩm 7', 'home_product', null, null, '/uploads/products/1.jpg', '#', '10', '1', '1', '1450782114', '1450784132'); INSERT INTO `advertising` VALUES ('11', 'Sản phẩm 8', 'home_product', null, null, '/uploads/products/2.jpg', '#', '10', '1', '1', '1450782142', '1450784138'); INSERT INTO `advertising` VALUES ('12', 'Sản phẩm 9', 'home_product', null, null, '/uploads/products/3.jpg', '#', '10', '1', '1', '1450782158', '1450784143'); INSERT INTO `advertising` VALUES ('13', 'Sản phẩm 10', 'home_product', null, null, '/uploads/products/4.jpg', '#', '10', '1', '1', '1450782173', '1450784148'); -- ---------------------------- -- Table structure for `auth_assignment` -- 
---------------------------- DROP TABLE IF EXISTS `auth_assignment`; CREATE TABLE `auth_assignment` ( `item_name` varchar(64) COLLATE utf8_unicode_ci NOT NULL, `user_id` varchar(64) COLLATE utf8_unicode_ci NOT NULL, `created_at` int(11) DEFAULT NULL, PRIMARY KEY (`item_name`,`user_id`), CONSTRAINT `auth_assignment_ibfk_1` FOREIGN KEY (`item_name`) REFERENCES `auth_item` (`name`) ON DELETE CASCADE ON UPDATE CASCADE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of auth_assignment -- ---------------------------- -- ---------------------------- -- Table structure for `auth_item` -- ---------------------------- DROP TABLE IF EXISTS `auth_item`; CREATE TABLE `auth_item` ( `name` varchar(64) COLLATE utf8_unicode_ci NOT NULL, `type` int(11) NOT NULL, `description` text COLLATE utf8_unicode_ci, `rule_name` varchar(64) COLLATE utf8_unicode_ci DEFAULT NULL, `data` text COLLATE utf8_unicode_ci, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`name`), KEY `rule_name` (`rule_name`), KEY `idx-auth_item-type` (`type`), CONSTRAINT `auth_item_ibfk_1` FOREIGN KEY (`rule_name`) REFERENCES `auth_rule` (`name`) ON DELETE SET NULL ON UPDATE CASCADE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of auth_item -- ---------------------------- -- ---------------------------- -- Table structure for `auth_item_child` -- ---------------------------- DROP TABLE IF EXISTS `auth_item_child`; CREATE TABLE `auth_item_child` ( `parent` varchar(64) COLLATE utf8_unicode_ci NOT NULL, `child` varchar(64) COLLATE utf8_unicode_ci NOT NULL, PRIMARY KEY (`parent`,`child`), KEY `child` (`child`), CONSTRAINT `auth_item_child_ibfk_1` FOREIGN KEY (`parent`) REFERENCES `auth_item` (`name`) ON DELETE CASCADE ON UPDATE CASCADE, CONSTRAINT `auth_item_child_ibfk_2` FOREIGN KEY (`child`) REFERENCES `auth_item` (`name`) ON DELETE CASCADE ON UPDATE CASCADE ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of auth_item_child -- ---------------------------- -- ---------------------------- -- Table structure for `auth_rule` -- ---------------------------- DROP TABLE IF EXISTS `auth_rule`; CREATE TABLE `auth_rule` ( `name` varchar(64) COLLATE utf8_unicode_ci NOT NULL, `data` text COLLATE utf8_unicode_ci, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`name`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of auth_rule -- ---------------------------- -- ---------------------------- -- Table structure for `category_post` -- ---------------------------- DROP TABLE IF EXISTS `category_post`; CREATE TABLE `category_post` ( `id` int(11) NOT NULL AUTO_INCREMENT, `parent_id` int(11) DEFAULT NULL, `tree` int(11) NOT NULL, `lft` int(11) NOT NULL, `rgt` int(11) NOT NULL, `depth` int(11) NOT NULL, `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `slug` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `image` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `content` mediumtext COLLATE utf8_unicode_ci, `meta_title` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_keywords` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_description` varchar(160) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_params` text COLLATE utf8_unicode_ci, `published` tinyint(4) DEFAULT NULL, `layouts` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `views` varchar(100) 
COLLATE utf8_unicode_ci DEFAULT NULL, `created_by` int(11) DEFAULT NULL, `updated_by` int(11) DEFAULT NULL, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `idx_title` (`title`) USING BTREE, KEY `idx_slug` (`slug`), KEY `idx_lft` (`lft`), KEY `idx_lft_rgt` (`lft`,`rgt`), KEY `idx_id_lft_rgt` (`id`,`lft`,`rgt`), KEY `idx_parent_id` (`parent_id`), KEY `idx_tree_lft` (`tree`,`lft`) ) ENGINE=InnoDB AUTO_INCREMENT=3 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of category_post -- ---------------------------- INSERT INTO `category_post` VALUES ('1', null, '1', '1', '2', '0', 'Tin tức', 'tin-tuc', '', '', '', '', '', null, '10', null, null, '1', '1', '1450760543', '1450760543'); INSERT INTO `category_post` VALUES ('2', null, '2', '1', '2', '0', 'Sự kiện', 'su-kien', '', '', '', '', '', null, '10', null, null, '1', '1', '1450760603', '1450765458'); -- ---------------------------- -- Table structure for `category_product` -- ---------------------------- DROP TABLE IF EXISTS `category_product`; CREATE TABLE `category_product` ( `id` int(11) NOT NULL AUTO_INCREMENT, `parent_id` int(11) DEFAULT NULL, `tree` int(11) NOT NULL, `lft` int(11) NOT NULL, `rgt` int(11) NOT NULL, `depth` int(11) NOT NULL, `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `slug` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `image` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `content` mediumtext COLLATE utf8_unicode_ci, `meta_title` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_keywords` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_description` varchar(160) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_params` text COLLATE utf8_unicode_ci, `published` tinyint(4) DEFAULT NULL, `layouts` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `views` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `created_by` int(11) DEFAULT NULL, `updated_by` int(11) DEFAULT NULL, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `idx_title` (`title`) USING BTREE, KEY `idx_slug` (`slug`) USING BTREE, KEY `idx_lft` (`lft`) USING BTREE, KEY `idx_lft_rgt` (`lft`,`rgt`) USING BTREE, KEY `idx_id_lft_rgt` (`id`,`lft`,`rgt`) USING BTREE, KEY `idx_parent_id` (`parent_id`) USING BTREE, KEY `idx_tree_lft` (`tree`,`lft`) USING BTREE ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of category_product -- ---------------------------- INSERT INTO `category_product` VALUES ('1', null, '1', '1', '2', '0', 'Sản phẩm', 'san-pham', '', '', '', '', '', null, '10', null, null, '1', '1', '1450765903', '1450768865'); -- ---------------------------- -- Table structure for `migration` -- ---------------------------- DROP TABLE IF EXISTS `migration`; CREATE TABLE `migration` ( `version` varchar(180) NOT NULL, `apply_time` int(11) DEFAULT NULL, PRIMARY KEY (`version`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8; -- ---------------------------- -- Records of migration -- ---------------------------- INSERT INTO `migration` VALUES ('m000000_000000_base', '1450684366'); INSERT INTO `migration` VALUES ('m130524_201442_init', '1450684371'); INSERT INTO `migration` VALUES ('m140506_102106_rbac_init', '1450726671'); -- ---------------------------- -- Table structure for `page` -- ---------------------------- DROP TABLE IF EXISTS `page`; CREATE TABLE `page` ( `id` int(11) NOT NULL AUTO_INCREMENT, `title` varchar(255) COLLATE utf8_unicode_ci NOT 
NULL, `slug` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `content` mediumtext COLLATE utf8_unicode_ci, `meta_title` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_keywords` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_description` varchar(160) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_params` text COLLATE utf8_unicode_ci, `published` tinyint(4) DEFAULT NULL, `layouts` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `views` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `created_by` int(11) DEFAULT NULL, `updated_by` int(11) DEFAULT NULL, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `idx_title` (`title`) USING BTREE, KEY `idx_slug` (`slug`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of page -- ---------------------------- INSERT INTO `page` VALUES ('1', 'Giới thiệu', 'gioi-thieu', '', '', '', '', null, '10', null, null, '1', '1', '1450769510', '1450769510'); -- ---------------------------- -- Table structure for `post` -- ---------------------------- DROP TABLE IF EXISTS `post`; CREATE TABLE `post` ( `id` int(11) NOT NULL AUTO_INCREMENT, `category_id` int(11) NOT NULL, `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `slug` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `image` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `content` mediumtext COLLATE utf8_unicode_ci, `meta_params` text COLLATE utf8_unicode_ci, `meta_title` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_keywords` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_description` varchar(160) COLLATE utf8_unicode_ci DEFAULT NULL, `published` tinyint(4) DEFAULT NULL, `created_by` int(11) DEFAULT NULL, `updated_by` int(11) DEFAULT NULL, `created_at` int(11) DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `idx_category_id` (`category_id`), KEY `idx_title` (`title`) USING BTREE, KEY `idx_slug` (`slug`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of post -- ---------------------------- INSERT INTO `post` VALUES ('1', '1', 'Nội dung test thử', 'noi-dung-test-thu', '/uploads/slideshow/8.jpg', '', null, '', '', '', '10', '1', '1', '1450769617', '1450769617'); -- ---------------------------- -- Table structure for `product` -- ---------------------------- DROP TABLE IF EXISTS `product`; CREATE TABLE `product` ( `id` int(11) NOT NULL AUTO_INCREMENT, `category_id` int(11) NOT NULL, `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `slug` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `image` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `images` tinytext COLLATE utf8_unicode_ci, `sku` varchar(50) COLLATE utf8_unicode_ci NOT NULL, `quantity` int(11) DEFAULT NULL, `price` int(11) DEFAULT NULL, `discount` int(11) DEFAULT NULL, `video` varchar(100) COLLATE utf8_unicode_ci DEFAULT NULL, `download` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `summary` tinytext COLLATE utf8_unicode_ci, `content` mediumtext COLLATE utf8_unicode_ci, `specification` text COLLATE utf8_unicode_ci, `meta_params` text COLLATE utf8_unicode_ci, `meta_title` varchar(70) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_keywords` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_description` varchar(160) COLLATE utf8_unicode_ci DEFAULT NULL, `published` tinyint(4) DEFAULT NULL, `created_by` int(11) DEFAULT NULL, `updated_by` int(11) DEFAULT NULL, `created_at` int(11) 
DEFAULT NULL, `updated_at` int(11) DEFAULT NULL, PRIMARY KEY (`id`), KEY `idx_category_id` (`category_id`), KEY `idx_title` (`title`) USING BTREE, KEY `idx_slug` (`slug`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of product -- ---------------------------- INSERT INTO `product` VALUES ('1', '1', 'Máy tạo ozone làm sạch rau quả Ozonier Fresh MUM-99 30W (Trắng)', 'may-tao-ozone-lam-sach-rau-qua-ozonier-fresh-mum-99-30w-trang', '/uploads/products/1447640871096_5818759.jpg', '/uploads/products/1447640871096_5818759.jpg, /uploads/products/1447640873755_6125840.jpg', 'aaa', null, '1575000', null, '72g4Zkexu3E', null, '<p>- Được làm bằng chất liệu nhựa cao cấp, bền bỉ</p>\r\n\r\n<p>- Sử dụng công nghệ ozone khuếch tán hiện đại</p>\r\n\r\n<p>- Giúp khử độc và làm sạch rau củ, thực phẩm an toàn</p>\r\n', '<p>Máy tạo ozone làm sạch rau quả Ozonier Fresh MUM-99 30W (Trắng)</p>\r\n\r\n<p>-&nbsp;Máy tạo ozone làm sạch rau quả Ozonier Fresh MUM-99 30W (Trắng) được làm bằng chất liệu nhựa cao cấp, bền bỉ, không chứa các hóa chất gây hại, đảm bảo an toàn cho sức khỏe người sử dụng.<br />\r\n- Thiết kế độc đáo và hiện đại với các lỗ khí nhỏ được tích hợp ngay trên máy sẽ giúp ozone khuếch tán vào trong nước, thâm nhập vào sâu bên trong thực phẩm, tiêu diệt mầm bệnh và vi khuẩn, giúp rau, củ, quả và thức ăn luôn được tươi ngon và an toàn nhất.<br />\r\n- Sản phẩm có khả năng phân hủy thuốc trừ sâu, tiêu diệt vi khuẩn liên quan đến bệnh đường ruột, loại bỏ các chất độc hữu cơ và khử mùi hải sản, thịt, cá... Ngoài ra, máy còn có chức năng khử độc bát đĩa và làm sạch không gian xung quanh.<br />\r\n- Thiết kế tiện lợi, máy tạo ozone làm sạch rau quả Ozonier Fresh MUM-99 sẽ là một vật dụng hoàn hảo và cần thiết&nbsp;cho gia đình bạn.​<br />\r\n<br />\r\n<strong>Hướng dẫn sử dụng:</strong></p>\r\n\r\n<ul>\r\n <li><strong>Làm sạch rau quả:</strong>&nbsp;rửa sạch bẩn trên rau quả, đổ đầy nước vào chậu với lượng vừa đủ, thả quả sục vào giữa đáy chậu, bật máy, ấn nút hẹn giờ phù hợp.</li>\r\n <li><strong>Làm sạch thịt rau hải sản:</strong>&nbsp;để thực phẩm vào nồi đến khi ngập nước, thả sục vào đáy nồi, ấn nút hẹn thời gian phù hợp. Ozone khuếch tán trong nước thẩm thấu vào thực phẩm khử sạch vi khuẩn và mần bệnh. Sau khi rửa lại thực phẩm bằng nước sạch, sau 5 - 10 phút mới ướp.</li>\r\n <li><strong>Khử độc bát đĩa:</strong>&nbsp;sau khi rửa bát, cốc xong. Xếp gọn vào chậu nước sạch để sục khí ozon, bát đĩa, ly cốc chén sẽ được diệt trùng hoàn toàn.</li>\r\n <li><strong>Làm sạch nước:</strong>&nbsp;thả quả sục vào đáy bình, chậu sau đó bật công tắc nguồn, ấn nút hẹn giờ phù hợp với dung tích nước cần xử lý. Nước xử lý xong cần lọc lại trước khi sử dụng.</li>\r\n <li><strong>Khử mùi không khí:</strong>&nbsp;tháo quả sục, đặt ống vào vị trí cần khử mùi, bật máy, ấn nút hẹn giờ phù hợp với diện tích cần làm sạch. 
Ozone sẽ khử sạch mùi, không gian được trả lại không khí trong lành tự nhiên.</li>\r\n</ul>\r\n', '<table class=\"table table-bordered table-striped\" id=\"tblGeneralAttribute\" style=\"background-color:transparent; border-collapse:collapse; border-spacing:0px; border:1px solid rgb(221, 221, 221); box-sizing:border-box; margin-bottom:20px; max-width:100%; width:1140px\">\r\n <tbody>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Màu sắc</strong></td>\r\n <td style=\"vertical-align:top\">Trắng</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Chất liệu</strong></td>\r\n <td style=\"vertical-align:top\">Nhựa</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Model</strong></td>\r\n <td style=\"vertical-align:top\">MUM-99</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Kích thước</strong></td>\r\n <td style=\"vertical-align:top\">32 x 12 x 27 (cm)</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Xuất xứ</strong></td>\r\n <td style=\"vertical-align:top\">Trung Quốc</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Điện áp</strong></td>\r\n <td style=\"vertical-align:top\">220 (V)</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Công suất</strong></td>\r\n <td style=\"vertical-align:top\">30 (W)</td>\r\n </tr>\r\n <tr>\r\n <td style=\"vertical-align:top\"><strong>Bảo hành</strong></td>\r\n <td style=\"vertical-align:top\">12 (tháng)</td>\r\n </tr>\r\n </tbody>\r\n</table>\r\n', null, '', '', '', '10', '1', '1', '1450777213', '1450785241'); -- ---------------------------- -- Table structure for `setting` -- ---------------------------- DROP TABLE IF EXISTS `setting`; CREATE TABLE `setting` ( `id` int(11) NOT NULL AUTO_INCREMENT, `title` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `summary` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `meta_key` varchar(200) COLLATE utf8_unicode_ci NOT NULL, `meta_value` mediumtext COLLATE utf8_unicode_ci, PRIMARY KEY (`id`), KEY `idx_meta_key` (`meta_key`) ) ENGINE=InnoDB DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of setting -- ---------------------------- -- ---------------------------- -- Table structure for `user` -- ---------------------------- DROP TABLE IF EXISTS `user`; CREATE TABLE `user` ( `id` int(11) NOT NULL AUTO_INCREMENT, `username` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `auth_key` varchar(32) COLLATE utf8_unicode_ci NOT NULL, `password_hash` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `password_reset_token` varchar(255) COLLATE utf8_unicode_ci DEFAULT NULL, `email` varchar(255) COLLATE utf8_unicode_ci NOT NULL, `status` smallint(6) NOT NULL DEFAULT '10', `created_at` int(11) NOT NULL, `updated_at` int(11) NOT NULL, PRIMARY KEY (`id`), UNIQUE KEY `username` (`username`), UNIQUE KEY `email` (`email`), UNIQUE KEY `password_reset_token` (`password_reset_token`) ) ENGINE=InnoDB AUTO_INCREMENT=2 DEFAULT CHARSET=utf8 COLLATE=utf8_unicode_ci; -- ---------------------------- -- Records of user -- ---------------------------- INSERT INTO `user` VALUES ('1', 'admin', 'RbFi1_U9E69y6QsQJq5A8eL2V7oMCZoe', '$2y$13$VO33A5o6g9zIOlFeu/PXPuS0iZy.ik0ZYSXN5jJ77Nzx33we3T.Wy', null, '<EMAIL>', '10', '1450684500', '1450684500');
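-- Example query against this schema (illustrative only; in the sample rows above, `published` = 10
-- appears to mark published records): list published posts together with their category titles.
--   SELECT p.title, c.title AS category, FROM_UNIXTIME(p.created_at) AS created
--   FROM `post` p
--   JOIN `category_post` c ON c.id = p.category_id
--   WHERE p.published = 10
--   ORDER BY p.created_at DESC;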
<reponame>Slaying-Gitcoin/graphology /** * Graphology Simple Size Unit Tests * ================================== */ var assert = require('assert'); var Graph = require('graphology'); var simpleSize = require('../../graph/simple-size.js'); describe('simpleSize', function () { it('should throw if given wrong arguments.', function () { assert.throws(function () { simpleSize(null); }, /instance/); }); it('should return 0 if the graph is empty.', function () { var graph = new Graph(); graph.addNode(1); assert.strictEqual(simpleSize(graph), 0); }); it('should return the correct size.', function () { var graph = new Graph(); graph.mergeEdge(1, 2); graph.mergeEdge(2, 3); assert.strictEqual(simpleSize(graph), 2); graph = new Graph({multi: true}); graph.mergeEdge(1, 2); graph.mergeEdge(1, 2); graph.mergeEdge(1, 2); graph.mergeEdge(3, 4); graph.mergeUndirectedEdge(4, 5); assert.strictEqual(simpleSize(graph), 3); }); });
package chylex.hee.entity.technical; import java.util.List; import java.util.Random; import net.minecraft.entity.EntityLiving; import net.minecraft.entity.player.EntityPlayer; import net.minecraft.nbt.NBTTagCompound; import net.minecraft.world.World; import chylex.hee.system.abstractions.Vec; import chylex.hee.system.abstractions.entity.EntitySelector; import chylex.hee.system.logging.Log; import chylex.hee.world.util.BoundingBox; import com.google.common.collect.ImmutableList; public class EntityTechnicalSpawner<T extends EntityLiving> extends EntityTechnicalBase{ private IVirtualSpawner<T> spawner; private int timer = -1; public EntityTechnicalSpawner(World world){ super(world); } public EntityTechnicalSpawner(World world, double x, double y, double z, IVirtualSpawner<T> spawner){ super(world); setPosition(x, y, z); this.spawner = spawner; } @Override protected void entityInit(){} @Override public void onUpdate(){ super.onUpdate(); if (worldObj.isRemote)return; if (spawner == null)setDead(); else if (timer == -1){ spawner.init(this); timer = spawner.getCheckTimer(rand); } else if (--timer == 0){ timer = spawner.getCheckTimer(rand); ImmutableList<EntityPlayer> players = ImmutableList.copyOf(spawner.getPlayersInRange(worldObj)); if (!players.isEmpty()){ for(EntityPlayer player:players){ int mobsLeft = spawner.getSpawnLimit(rand); if (mobsLeft == 0)continue; for(int attemptsLeft = spawner.getSpawnAttempts(rand); attemptsLeft > 0 && mobsLeft > 0; attemptsLeft--){ T entity = spawner.createEntity(worldObj); spawner.findSpawnPosition(worldObj, rand, player, entity, spawner.getSpawnRange(rand)); if (spawner.checkSpawnConditions(worldObj, rand, players, player, entity)){ worldObj.spawnEntityInWorld(entity); --mobsLeft; } } } } } } @Override protected void writeEntityToNBT(NBTTagCompound nbt){ if (spawner != null)nbt.setString("spawnerCls", spawner.getClass().getName()); } @Override protected void readEntityFromNBT(NBTTagCompound nbt){ try{ this.spawner = ((Class<? extends IVirtualSpawner>)Class.forName(nbt.getString("spawnerCls"))).newInstance(); }catch(Throwable t){ Log.throwable(t, "Unable to load a spawner entity: $0", nbt.getString("spawnerCls")); setDead(); } } public static interface IVirtualSpawner<T extends EntityLiving>{ void init(EntityTechnicalSpawner owner); T createEntity(World world); int getCheckTimer(Random rand); int getSpawnAttempts(Random rand); int getSpawnLimit(Random rand); double getSpawnRange(Random rand); BoundingBox getCheckBox(); default List<EntityPlayer> getPlayersInRange(World world){ return EntitySelector.players(world, getCheckBox().toAABB()); } default void findSpawnPosition(World world, Random rand, EntityPlayer target, T entity, double range){ Vec vec = Vec.xzRandom(rand); entity.setPositionAndRotation(target.posX+vec.x*range, target.posY+(rand.nextDouble()-0.5D)*range, target.posZ+vec.z*range, rand.nextFloat()*360F-180F, 0F); } default boolean checkSpawnConditions(World world, Random rand, ImmutableList<EntityPlayer> playersInRange, EntityPlayer target, T entity){ return entity.getCanSpawnHere(); } } }
<reponame>tuanalumi/vue-reactive-provide import { mount, createLocalVue } from '@vue/test-utils' import { createGlobalMixin } from '../../lib/mixins/GlobalMixin' const testProd = !!process.env.VUE_APP_TEST_PROD const ReactiveProvide = testProd ? require('../../dist/VueReactiveProvide.common').default : require('../../lib').default function mountWithPlugin(component, options) { const _Vue = createLocalVue() _Vue.use(ReactiveProvide) return mount(component, { ...options, localVue: _Vue, }) } describe('The Plugin', () => { it('shows Version', () => { const pluginVersion = ReactiveProvide.version const { version } = require('../../package.json') expect(pluginVersion).toBe(version) }) it('createGlobalMixin: creates mixin with functional beforeCreate hook', () => { const $options = { computed: {}, provide: {}, inject: {}, reactiveProvide: { name: 'test', include: ['msg'], }, } const $once = jest.fn().mockName('$once') const $on = jest.fn().mockName('$on') const VueStub = { $options, $once, $on, } const mixin = createGlobalMixin() expect(mixin).toMatchObject({ beforeCreate: expect.any(Function), }) mixin.beforeCreate.call(VueStub) expect($once).toHaveBeenCalledWith('hook:created', expect.any(Function)) expect($on).not.toHaveBeenCalled() expect($options).toMatchObject({ computed: { test: expect.any(Function), }, reactiveProvide: expect.any(Object), provide: expect.any(Function), }) expect($options).toMatchSnapshot() }) it('installs global beforeCreate mixin', () => { const mixin = jest.fn() const Vue = { mixin, } ReactiveProvide.install(Vue) expect(mixin).toHaveBeenCalledWith( expect.objectContaining({ beforeCreate: expect.any(Function), }) ) }) it('Basic: provides a working injection to a child component', () => { const wrapper = mountWithPlugin( require('./resources/plugin/Basic.vue').default ) const child = wrapper.find({ name: 'child' }) expect(child.vm.test).toMatchObject({ msg: 'Testmessage', }) }) it('Props/Attrs: provides props & attrs', () => { const wrapper = mountWithPlugin( require('./resources/plugin/Props.vue').default, { propsData: { name: 'Test', msg: 'Hello', }, attrs: { title: 'Title', }, } ) const child = wrapper.find({ name: 'child' }) expect(child.vm.test).toMatchObject({ name: 'Test', msg: 'Hello', title: 'Title', }) }) it('Listeners: provides callbacks', () => { const handler = () => {} const wrapper = mountWithPlugin( require('./resources/plugin/Listeners.vue').default, { listeners: { change: handler, }, } ) const child = wrapper.find({ name: 'child' }) expect(child.vm.test).toMatchObject({ change: expect.any(Function), }) }) it('OptionAsFunction: works when options are defined as a function', () => { const wrapper = mountWithPlugin( require('./resources/plugin/OptionAsFunction.vue').default, { propsData: { injectName: 'test', }, } ) const child = wrapper.find({ name: 'child' }) expect(child.vm.test).toMatchObject({ msg: 'Testmessage', }) }) })
<gh_stars>0 package frc.robot.commands; import edu.wpi.first.wpilibj2.command.CommandBase; import frc.robot.subsystems.DriveSubsystem; public class TurnDegrees extends CommandBase { private final double degrees; private final double speed; private final DriveSubsystem drive; public TurnDegrees(double degrees, double speed, DriveSubsystem drive) { this.degrees = degrees; this.speed = speed; this.drive = drive; addRequirements(drive); } @Override public void initialize() { drive.arcadeDrive(0, 0); // Tell motors to stop drive.resetEncoders(); } @Override public void execute() { drive.arcadeDrive(0, speed); } @Override public void end(boolean interrupted) { drive.arcadeDrive(0, 0); } @Override public boolean isFinished() { /* Need to convert distance travelled to degrees. The Standard Romi Chassis found here, https://www.pololu.com/category/203/romi-chassis-kits, has a wheel placement diameter (149 mm) - width of the wheel (8 mm) = 141 mm or 5.551 inches. We then take into consideration the width of the tires. */ double inchPerDegree = Math.PI * 5.551 / 360; // Compare distance travelled from start to distance based on degree turn return getAverageTurningDistance() >= (inchPerDegree * degrees); } private double getAverageTurningDistance() { double leftDistance = Math.abs(drive.getLeftDistance()); double rightDistance = Math.abs(drive.getRightDistance()); return (leftDistance + rightDistance) / 2.0; } }
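// Worked example of the isFinished() math above (rounded): inchPerDegree = Math.PI * 5.551 / 360
// ≈ 0.0484 inches of average wheel travel per degree, so a 90-degree turn completes once
// getAverageTurningDistance() reaches ≈ 4.36 inches, and a full 360-degree turn needs the whole
// ≈ 17.44-inch circumference of the 5.551-inch turning circle.
// Usage sketch (illustrative; `driveSubsystem` is assumed to exist elsewhere in the robot code):
//   new TurnDegrees(90, 0.5, driveSubsystem).schedule();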
<filename>lib/discovery/client-discovery.js "use strict"; var __awaiter = (this && this.__awaiter) || function (thisArg, _arguments, P, generator) { function adopt(value) { return value instanceof P ? value : new P(function (resolve) { resolve(value); }); } return new (P || (P = Promise))(function (resolve, reject) { function fulfilled(value) { try { step(generator.next(value)); } catch (e) { reject(e); } } function rejected(value) { try { step(generator["throw"](value)); } catch (e) { reject(e); } } function step(result) { result.done ? resolve(result.value) : adopt(result.value).then(fulfilled, rejected); } step((generator = generator.apply(thisArg, _arguments || [])).next()); }); }; var __importDefault = (this && this.__importDefault) || function (mod) { return (mod && mod.__esModule) ? mod : { "default": mod }; }; var __importStar = (this && this.__importStar) || function (mod) { if (mod && mod.__esModule) return mod; var result = {}; if (mod != null) for (var k in mod) if (Object.hasOwnProperty.call(mod, k)) result[k] = mod[k]; result["default"] = mod; return result; }; Object.defineProperty(exports, "__esModule", { value: true }); const axios_1 = __importDefault(require("axios")); const jose_1 = __importStar(require("jose")); const valid_url_1 = __importDefault(require("valid-url")); const oidc_constants_1 = __importDefault(require("../introspection/oidc-constants")); const discovery_document_response_1 = __importDefault(require("./discovery-document-response")); const discovery_error_1 = __importDefault(require("./discovery.error")); class ClientDiscovery { /** * Load discovery document. .well-known */ static loadDiscoveryDocument(authority) { return __awaiter(this, void 0, void 0, function* () { if (!valid_url_1.default.isUri(authority)) throw new discovery_error_1.default(authority, "Invalid uri"); var discoveryDocument = discovery_document_response_1.default.getInstance(); if (discoveryDocument != undefined && discoveryDocument.loaded) return; return axios_1.default.get(this.getDiscoveryUrl(authority)).then(response => { let instance = response.data; let keystore = new jose_1.JWKS.KeyStore(); instance = Object.assign({ loaded: true, KeySet: keystore }, instance); discovery_document_response_1.default.saveInstance(instance); }); }); } /** * Load public keys from jwks_uri */ static loadJsonWebKeySet(authority) { return __awaiter(this, void 0, void 0, function* () { var discoveryDocument = discovery_document_response_1.default.getInstance(); if (discoveryDocument == undefined || !discoveryDocument.loaded) yield this.loadDiscoveryDocument(authority); // Reload from cache discoveryDocument = discovery_document_response_1.default.getInstance(); return axios_1.default.get(discoveryDocument.jwks_uri).then(response => { const keys = response.data.keys.map(key => jose_1.default.JWK.asKey(key)); keys.forEach(k => discoveryDocument.KeySet.add(k)); discovery_document_response_1.default.saveInstance(discoveryDocument); }); }); } static getDiscoveryUrl(authority) { let url = this.removeTrailingSlash(authority); if (url.endsWith(oidc_constants_1.default.DiscoveryEndpoint)) { return authority; } return `${url}/${oidc_constants_1.default.DiscoveryEndpoint}`; } static removeTrailingSlash(term) { if (term != null && term.endsWith("/")) term = term.substring(0, term.length - 1); return term; } } exports.default = ClientDiscovery; //# sourceMappingURL=client-discovery.js.map
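// Usage sketch (illustrative; the issuer URL is a placeholder, not a real endpoint):
//
//   const ClientDiscovery = require('./client-discovery').default;
//
//   await ClientDiscovery.loadDiscoveryDocument('https://issuer.example.com');
//   await ClientDiscovery.loadJsonWebKeySet('https://issuer.example.com');
//   // loadDiscoveryDocument caches the parsed document, so a second call returns early;
//   // loadJsonWebKeySet re-fetches jwks_uri and adds the keys to the cached key store.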
#!/bin/bash current_script_path=$(dirname $0) export N_FOLDS="2" export DATA="$current_script_path/../data/train/" export RESULTS="$current_script_path/../results/config/" python -O $current_script_path/../scripts/generate_experiments.py
def intersection(list1, list2): result = [] for element in list1: if element in list2: result.append(element) return result # Example inputs (assumed for illustration) list1 = [1, 2, 3, 4, 5] list2 = [3, 5, 6] print(intersection(list1, list2)) # Output: [3, 5]
package types type SetPinCommentRequestBody struct { PinComment string `json:"pin_comment"` }
// SPDX-License-Identifier: MIT // // Copyright (C) 2021 <NAME>. All Rights Reserved. // // This source code is licensed under the MIT license found in the // LICENSE file in the root directory of this source tree or at // https://spdx.org/licenses/MIT.html // Package tag provides the static tag strings to OPAQUE. package tag // These strings are the static tags and labels used throughout the protocol. const ( // OPRF tags. // OPRF is a string explicitly stating the version name. OPRF = "VOPRF08-" // OPRFContextPrefix is the DST prefix for the metadata tag. OPRFContextPrefix = "Context-" // OPRFPointPrefix is the DST prefix to use for HashToGroup operations. OPRFPointPrefix = "HashToGroup-" // OPRFScalarPrefix is the DST prefix to use for HashToGroup operations. OPRFScalarPrefix = "HashToScalar-" // OPRFFinalize is the DST suffix used in the client transcript. OPRFFinalize = "Finalize-" // Envelope tags. // AuthKey is the envelope's MAC key's KDF dst. AuthKey = "AuthKey" // ExportKey is the export key's KDF dst. ExportKey = "ExportKey" // MaskingKey is the masking key's creation KDF dst. MaskingKey = "MaskingKey" // DerivePrivateKey is the client's internal mode private key hash-to-scalar dst. DerivePrivateKey = "OPAQUE-DeriveAuthKeyPair" // ExpandPrivateKey is the client's internal mode private key seed KDF dst. ExpandPrivateKey = "PrivateKey" // 3DH tags. // VersionTag indicates the protocol RFC identifier for the AKE transcript prefix. VersionTag = "RFCXXXX" // LabelPrefix is the 3DH secret KDF dst prefix. LabelPrefix = "OPAQUE-" // Handshake is the 3DH HandshakeSecret dst. Handshake = "HandshakeSecret" // SessionKey is the 3DH session secret dst. SessionKey = "SessionKey" // MacServer is 3DH server's MAC key KDF dst. MacServer = "ServerMAC" // MacClient is 3DH server's MAC key KDF dst. MacClient = "ClientMAC" // Client tags. // CredentialResponsePad is the masking keys KDF dst to expand to the input. CredentialResponsePad = "CredentialResponsePad" // Server tags. // ExpandOPRF is the server's OPRF key seed KDF dst. ExpandOPRF = "OprfKey" // DeriveKeyPair is the server's OPRF hash-to-scalar dst. DeriveKeyPair = "OPAQUE-DeriveKeyPair" )
<reponame>xNerhu/class-bot import { config } from 'dotenv'; import * as express from 'express'; import { WolframClient } from 'node-wolfram-alpha'; import { ChatButtons } from 'fb-chat-api-buttons'; config(); import { init as initApi, listen, setOptions } from './api'; import { handleMessage } from './handler'; import Store from './store'; const init = async () => { const { CALLBACK_ENDPOINT, WOLFRAM_ALPHA_APP_ID, EXPRESS_PORT } = process.env; const app = express(); const originalApi = await initApi(); app.listen(EXPRESS_PORT, () => { console.log(`Listening on ${EXPRESS_PORT}!`); }); setOptions({ listenEvents: true, selfListen: true }); listen(handleMessage); Store.buttons = new ChatButtons({ app, api: originalApi, endpoint: CALLBACK_ENDPOINT, }); Store.wolfram = new WolframClient(WOLFRAM_ALPHA_APP_ID); }; init();
<reponame>AndroidKitKat/sneedacity /********************************************************************** Sneedacity: A Digital Audio Editor Benchmark.h <NAME> **********************************************************************/ #ifndef __SNEEDACITY_BENCHMARK__ #define __SNEEDACITY_BENCHMARK__ class wxWindow; class SneedacityProject; SNEEDACITY_DLL_API void RunBenchmark( wxWindow *parent, SneedacityProject &project ); #endif // define __SNEEDACITY_BENCHMARK__