content (string, 66–45k chars) | language (11 classes) | license (14 classes) | path (string, 20–176 chars) | annotation_id (string, 36 chars) | pii (string, 2–19.6k chars) | pii_modified (string, 2–19.6k chars)
---|---|---|---|---|---|---|
// license:BSD-3-Clause
// copyright-holders:Sandro Ronco
/***************************************************************************
Mattel HyperScan
08/17/2013 Skeleton driver by Sandro Ronco
HyperScan TODO:
- Various graphics glitches
- Sound
- X-Men hangs after the first match
- USB
Hyperscan has a hidden test menu that can be accessed with a specific input sequence:
- During boot press and hold Select + Left Shoulder + Green until 'PLEASE WAIT' is shown on the screen
- Press and release Red, Red, Green, Green, Yellow, Blue
****************************************************************************
SPG290 Interrupt:
Vector Source
63 SPU FIQ
62 SPU Beatirq
61 SPU Envirq
60 CD servo
59 ADC gain overflow / ADC recorder / FIFO overflow
58 General purpose ADC
57 Timer base
56 Timer
55 TV vblanking start
54 LCD vblanking start
53 PPU vblanking start
52 TV
51 Sensor frame end
50 Sensor coordinate hit
49 Sensor motion frame end
48 Sensor capture done + sensor debug IRQ
47 TV coordinate hit
46 PPU coordinate hit
45 USB host+device
44 SIO
43 SPI
42 Uart (IrDA)
41 NAND
40 SD
39 I2C master
38 I2S slave
37 APBDMA CH1
36 APBDMA CH2
35 LDM_DMA
34 BLN_DMA
33 APBDMA CH3
32 APBDMA CH4
31 Alarm + HMS
30 MP4
29 C3 (ECC module)
28 GPIO
27 Bufctl (for debug) + TV/PPU vblanking end (for debug)
26 RESERVED1
25 RESERVED2
24 RESERVED3
-------
CPU die markings on Big Buck Hunter "SunplusmM LU9001"
****************************************************************************/
#include "emu.h"
#include "cpu/score/score.h"
#include "machine/spg290_cdservo.h"
#include "machine/spg290_i2c.h"
#include "machine/spg290_ppu.h"
#include "machine/spg290_timer.h"
#include "machine/hyperscan_card.h"
#include "machine/hyperscan_ctrl.h"
#include "screen.h"
#include "softlist_dev.h"
class spg29x_game_state : public driver_device
{
public:
spg29x_game_state(const machine_config &mconfig, device_type type, const char *tag) :
driver_device(mconfig, type, tag),
m_maincpu(*this, "maincpu"),
m_screen(*this, "screen"),
m_ppu(*this, "ppu"),
m_i2c(*this, "i2c"),
m_timers(*this, "timer%u", 0U),
m_hyperscan_card(*this, "card"),
m_hyperscan_ctrl(*this, "ctrl%u", 0U),
m_leds(*this, "led%u", 0U)
{ }
void spg29x(machine_config &config);
void hyperscan(machine_config &config);
protected:
virtual void machine_reset() override;
required_device<score7_cpu_device> m_maincpu;
private:
virtual void machine_start() override;
uint32_t spg290_screen_update(screen_device &screen, bitmap_rgb32 &bitmap, const rectangle &cliprect);
void spg290_mem(address_map &map);
void spg290_bios_mem(address_map &map);
void space_byte_w(offs_t offset, uint8_t data) { m_maincpu->space(AS_PROGRAM).write_byte(offset, data); }
uint32_t space_dword_r(offs_t offset) { return m_maincpu->space(AS_PROGRAM).read_dword(offset); }
uint16_t i2c_r(offs_t offset);
required_device<screen_device> m_screen;
required_device<spg290_ppu_device> m_ppu;
required_device<spg290_i2c_device> m_i2c;
required_device_array<spg290_timer_device, 6> m_timers;
optional_device<hyperscan_card_device> m_hyperscan_card;
optional_device_array<hyperscan_ctrl_device, 2> m_hyperscan_ctrl;
output_finder<8> m_leds;
void tve_control_w(offs_t offset, uint32_t data, uint32_t mem_mask);
void gpio_out_w(offs_t offset, uint32_t data, uint32_t mem_mask);
void timers_clk_sel_w(offs_t offset, uint32_t data, uint32_t mem_mask);
uint16_t m_tve_control;
uint8_t m_tve_fade_offset;
uint16_t m_gpio_out;
};
class spg29x_nand_game_state : public spg29x_game_state
{
public:
spg29x_nand_game_state(const machine_config& mconfig, device_type type, const char* tag) :
spg29x_game_state(mconfig, type, tag)
{ }
void nand_init(int blocksize, int blocksize_stripped);
void nand_jak_bbh();
void nand_jak_bbsf();
protected:
void machine_reset() override;
private:
std::vector<uint8_t> m_strippedrom;
int m_firstvector;
};
class spg29x_zone3d_game_state : public spg29x_game_state
{
public:
spg29x_zone3d_game_state(const machine_config& mconfig, device_type type, const char* tag) :
spg29x_game_state(mconfig, type, tag)
{ }
void init_zone3d();
protected:
void machine_reset() override;
private:
};
void spg29x_game_state::timers_clk_sel_w(offs_t offset, uint32_t data, uint32_t mem_mask)
{
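// the low byte selects a divider for the 27MHz source clock; bits 8-13 switch
// the corresponding timer to the 32.768kHz source instead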
auto clock = 27_MHz_XTAL / ((data & 0xff) + 1);
uint32_t mask = 0x100;
for(int i=0; i<m_timers.size(); i++)
{
if (data & mask)
m_timers[i]->set_clock(32.768_kHz_XTAL);
else
m_timers[i]->set_clock(clock);
mask <<= 1;
}
}
void spg29x_game_state::tve_control_w(offs_t offset, uint32_t data, uint32_t mem_mask)
{
COMBINE_DATA(&m_tve_control);
rectangle visarea;
switch(m_tve_control & 0xc)
{
case 0x0: // QVGA
visarea.set(0, 320-1, 0, 240-1);
break;
case 0x4: // VGA
visarea.set(0, 640-1, 0, 480-1);
break;
case 0x8: // HVGA
visarea.set(0, 640-1, 0, 240-1);
break;
}
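// bit 0 selects interlaced output, bit 1 selects PAL timing (otherwise NTSC)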
int interlaced = m_tve_control & 1;
if (m_tve_control & 2)
m_screen->configure(864, 625, visarea, HZ_TO_ATTOSECONDS(27_MHz_XTAL) * 864 * 625 * (interlaced ? 2 : 1)); // PAL
else
m_screen->configure(858, 525, visarea, HZ_TO_ATTOSECONDS(27_MHz_XTAL) * 858 * 525 * (interlaced ? 2 : 1)); // NTSC
}
void spg29x_game_state::gpio_out_w(offs_t offset, uint32_t data, uint32_t mem_mask)
{
COMBINE_DATA(&m_gpio_out);
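// bit 1 clocks data out to the HyperScan card interface; bits 5-12 drive the eight LED outputs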
if (ACCESSING_BITS_0_7)
m_hyperscan_card->write(BIT(m_gpio_out,1));
for(int i=0; i<8; i++)
m_leds[i] = BIT(m_gpio_out, 5 + i);
}
uint16_t spg29x_game_state::i2c_r(offs_t offset)
{
int port = (offset >> 4) & 0x0f;
if (port < 2)
return m_hyperscan_ctrl[port]->read(offset);
return 0xffff;
}
uint32_t spg29x_game_state::spg290_screen_update(screen_device &screen, bitmap_rgb32 &bitmap, const rectangle &cliprect)
{
m_ppu->screen_update(screen, bitmap, cliprect);
if (m_tve_fade_offset)
{
int fade_offset = 255 - m_tve_fade_offset;
for (int y=0; y <= cliprect.max_y; y++)
for (int x=0; x <= cliprect.max_x; x++)
{
rgb_t pix(bitmap.pix(y, x));
bitmap.pix(y, x) = rgb_t(pix.r() * fade_offset / 255, pix.g() * fade_offset / 255, pix.b() * fade_offset / 255);
}
}
return 0;
}
void spg29x_game_state::spg290_mem(address_map &map)
{
map.global_mask(0x1fffffff);
map(0x00000000, 0x00ffffff).ram().mirror(0x07000000);
map(0x08030000, 0x08030003).w(FUNC(spg29x_game_state::tve_control_w)).lr32(NAME([this](uint32_t data) { return m_tve_control; }));
map(0x0803000c, 0x0803000f).lw32(NAME([this](uint32_t data) { m_tve_fade_offset = data & 0xff; }));
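// the fade level written here is applied per-pixel in spg290_screen_update()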
map(0x0807006c, 0x0807006f).lr32(NAME([]() { return 0x01;})); // MUI Status: SDRAM is in the self-refresh mode
//map(0x08150000, 0x08150000).lw8(NAME([this](uint8_t data) { printf("%c", data); })); // UART
map(0x082100e4, 0x082100e7).w(FUNC(spg29x_game_state::timers_clk_sel_w)); // Timer Source Clock Selection
map(0x08240000, 0x0824000f).noprw();
//map(0x08000000, 0x0800ffff); // CSI
map(0x08010000, 0x0801ffff).m("ppu", FUNC(spg290_ppu_device::map));
//map(0x08020000, 0x0802ffff); // JPG
//map(0x08030000, 0x0803ffff); // TV
//map(0x08040000, 0x0804ffff); // LCD
//map(0x08050000, 0x0805ffff); // SPU
map(0x08060000, 0x0806ffff).rw("cdservo", FUNC(spg290_cdservo_device::read), FUNC(spg290_cdservo_device::write));
//map(0x08070000, 0x0807ffff); // MIU
//map(0x08080000, 0x0808ffff); // APBDMA
//map(0x08090000, 0x0809ffff); // BUFCTL
//map(0x080a0000, 0x080affff); // IRQCTL
//map(0x080b0000, 0x080bffff); // GPUBUF
//map(0x080c0000, 0x080cffff); // LDMDMA
//map(0x080d0000, 0x080dffff); // BLNDMA
//map(0x080e0000, 0x080effff); // TPGBUF
//map(0x080f0000, 0x080fffff); // AHBDEC
//map(0x08100000, 0x0810ffff); // GPIO
//map(0x08110000, 0x0811ffff); // SPI
//map(0x08120000, 0x0812ffff); // SIO
map(0x08130000, 0x0813ffff).rw("i2c", FUNC(spg290_i2c_device::read), FUNC(spg290_i2c_device::write));
//map(0x08140000, 0x0814ffff); // I2S
//map(0x08150000, 0x0815ffff); // UART
map(0x08160000, 0x08160fff).rw(m_timers[0], FUNC(spg290_timer_device::read), FUNC(spg290_timer_device::write));
map(0x08161000, 0x08161fff).rw(m_timers[1], FUNC(spg290_timer_device::read), FUNC(spg290_timer_device::write));
map(0x08162000, 0x08162fff).rw(m_timers[2], FUNC(spg290_timer_device::read), FUNC(spg290_timer_device::write));
map(0x08163000, 0x08163fff).rw(m_timers[3], FUNC(spg290_timer_device::read), FUNC(spg290_timer_device::write));
map(0x08164000, 0x08164fff).rw(m_timers[4], FUNC(spg290_timer_device::read), FUNC(spg290_timer_device::write));
map(0x08165000, 0x08165fff).rw(m_timers[5], FUNC(spg290_timer_device::read), FUNC(spg290_timer_device::write));
//map(0x08166000, 0x08166fff); // RTC
//map(0x08170000, 0x0817ffff); // WDOG
//map(0x08180000, 0x0818ffff); // SD
//map(0x08190000, 0x0819ffff); // FLASH
//map(0x081a0000, 0x081affff); // ADC
//map(0x081b0000, 0x081bffff); // USB device
//map(0x081c0000, 0x081cffff); // USB host
//map(0x081d0000, 0x081dffff); // reserved
//map(0x081e0000, 0x081effff); // Reserved
//map(0x081f0000, 0x081fffff); // reserved
//map(0x08200000, 0x0820ffff); // SFTCFG
//map(0x08210000, 0x0821ffff); // CKG
map(0x0821006c, 0x0821006f).w(m_timers[0], FUNC(spg290_timer_device::control_w));
map(0x08210070, 0x08210073).w(m_timers[1], FUNC(spg290_timer_device::control_w));
map(0x08210074, 0x08210077).w(m_timers[2], FUNC(spg290_timer_device::control_w));
map(0x08210078, 0x0821007b).w(m_timers[3], FUNC(spg290_timer_device::control_w));
map(0x0821007c, 0x0821007f).w(m_timers[4], FUNC(spg290_timer_device::control_w));
map(0x08210080, 0x08210083).w(m_timers[5], FUNC(spg290_timer_device::control_w));
//map(0x08220000, 0x0822ffff); // MP4
//map(0x08230000, 0x0823ffff); // MIU2
//map(0x08240000, 0x0824ffff); // ECC
map(0x0a000000, 0x0a003fff).ram(); // internal SRAM
map(0x0b000000, 0x0b007fff).rom().region("spg290", 0); // internal ROM
}
void spg29x_game_state::spg290_bios_mem(address_map& map)
{
spg290_mem(map);
map(0x08200024, 0x08200027).w(FUNC(spg29x_game_state::gpio_out_w)).lr32(NAME([this]() { return m_gpio_out; }));
map(0x08200068, 0x0820006b).lr32(NAME([this]() { return m_hyperscan_card->read(); }));
map(0x10000000, 0x100fffff).rom().region("bios", 0).mirror(0x0ff00000);
}
/* Input ports */
static INPUT_PORTS_START( hyperscan )
INPUT_PORTS_END
void spg29x_game_state::machine_start()
{
m_leds.resolve();
save_item(NAME(m_tve_control));
save_item(NAME(m_tve_fade_offset));
save_item(NAME(m_gpio_out));
}
void spg29x_game_state::machine_reset()
{
m_tve_control = 0;
m_tve_fade_offset = 0;
m_gpio_out = 0;
// disable JTAG
m_maincpu->set_state_int(SCORE_CR + 29, 0x20000000);
}
void spg29x_nand_game_state::machine_reset()
{
spg29x_game_state::machine_reset();
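// the stripped NAND image stores the bootstrap vectors as little-endian 32-bit values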
uint32_t bootstrap_ram_start = (m_strippedrom[m_firstvector+0] << 0) | (m_strippedrom[m_firstvector+1] << 8) | (m_strippedrom[m_firstvector+2] << 16) | (m_strippedrom[m_firstvector+3] << 24);
uint32_t bootstrap_ram_end = (m_strippedrom[m_firstvector+4] << 0) | (m_strippedrom[m_firstvector+5] << 8) | (m_strippedrom[m_firstvector+6] << 16) | (m_strippedrom[m_firstvector+7] << 24);
uint32_t bootstrap_ram_boot = (m_strippedrom[m_firstvector+8] << 0) | (m_strippedrom[m_firstvector+9] << 8) | (m_strippedrom[m_firstvector+10] << 16) | (m_strippedrom[m_firstvector+11] << 24);
// there is a 0x01 at 0x26, possibly related to source location / block in NAND to copy from?
logerror("NAND Bootstrap RAM start: %08x RAM end: %08x RAM boot: %08x", bootstrap_ram_start, bootstrap_ram_end, bootstrap_ram_boot);
uint32_t sourceaddr = 0x10000;
for (uint32_t addr = bootstrap_ram_start; addr <= bootstrap_ram_end; addr++)
{
address_space& mem = m_maincpu->space(AS_PROGRAM);
uint8_t byte = m_strippedrom[sourceaddr];
mem.write_byte(addr, byte);
sourceaddr++;
}
// probably jumped to from internal ROM?
m_maincpu->set_state_int(SCORE_PC, bootstrap_ram_boot);
}
void spg29x_zone3d_game_state::machine_reset()
{
spg29x_game_state::machine_reset();
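// copy the SPI flash image into RAM and jump to its entry point; on hardware this
// bootstrap is presumably performed by the undumped internal ROM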
uint8_t* rom = memregion("spi")->base();
int size = memregion("spi")->bytes();
uint32_t destaddr = 0x1dc;
for (uint32_t addr = 0; addr < size; addr++)
{
address_space& mem = m_maincpu->space(AS_PROGRAM);
uint8_t byte = rom[addr];
mem.write_byte(addr+destaddr, byte);
}
m_maincpu->set_state_int(SCORE_PC, 0x1000);
}
void spg29x_game_state::spg29x(machine_config &config)
{
/* basic machine hardware */
SCORE7(config, m_maincpu, 27_MHz_XTAL * 4); // 108MHz S+core 7
m_maincpu->set_addrmap(AS_PROGRAM, &spg29x_game_state::spg290_mem);
/* video hardware */
SCREEN(config, m_screen, SCREEN_TYPE_RASTER);
m_screen->set_raw(27_MHz_XTAL, 858, 0, 640, 525, 0, 480);
m_screen->set_screen_update(FUNC(spg29x_game_state::spg290_screen_update));
m_screen->screen_vblank().set(m_ppu, FUNC(spg290_ppu_device::screen_vblank));
for (int i=0; i<6; i++)
{
SPG290_TIMER(config, m_timers[i], 27_MHz_XTAL);
m_timers[i]->irq_cb().set_inputline(m_maincpu, 56);
}
SPG290_PPU(config, m_ppu, 27_MHz_XTAL, m_screen);
m_ppu->vblank_irq_cb().set_inputline(m_maincpu, 53);
m_ppu->space_read_cb().set(FUNC(spg29x_game_state::space_dword_r));
spg290_cdservo_device &cdservo(SPG290_CDSERVO(config, "cdservo", 27_MHz_XTAL, "cdrom"));
cdservo.irq_cb().set_inputline(m_maincpu, 60);
cdservo.space_write_cb().set(FUNC(spg29x_game_state::space_byte_w));
SPG290_I2C(config, m_i2c, 27_MHz_XTAL);
m_i2c->irq_cb().set_inputline(m_maincpu, 39);
}
void spg29x_game_state::hyperscan(machine_config &config)
{
spg29x(config);
m_maincpu->set_addrmap(AS_PROGRAM, &spg29x_game_state::spg290_bios_mem);
m_i2c->i2c_read_cb().set(FUNC(spg29x_game_state::i2c_r));
CDROM(config, "cdrom").set_interface("cdrom");
HYPERSCAN_CTRL(config, m_hyperscan_ctrl[0], 0);
HYPERSCAN_CTRL(config, m_hyperscan_ctrl[1], 0);
HYPERSCAN_CARD(config, m_hyperscan_card, 0);
SOFTWARE_LIST(config, "cd_list").set_original("hyperscan");
SOFTWARE_LIST(config, "card_list").set_original("hyperscan_card");
}
void spg29x_nand_game_state::nand_init(int blocksize, int blocksize_stripped)
{
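// strip the spare/OOB area from each NAND block, keeping only the data payload
// (e.g. 0x210 raw bytes -> 0x200 stripped)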
uint8_t* rom = memregion("nand")->base();
int size = memregion("nand")->bytes();
int numblocks = size / blocksize;
m_strippedrom.resize(numblocks * blocksize_stripped);
for (int i = 0; i < numblocks; i++)
{
const int base = i * blocksize;
const int basestripped = i * blocksize_stripped;
for (int j = 0; j < blocksize_stripped; j++)
{
m_strippedrom[basestripped + j] = rom[base + j];
}
}
// debug to allow for easy use of unidasm.exe
if (0)
{
FILE *fp;
char filename[256];
sprintf(filename,"stripped_%s", machine().system().name);
fp=fopen(filename, "w+b");
if (fp)
{
fwrite(&m_strippedrom[0], blocksize_stripped * numblocks, 1, fp);
fclose(fp);
}
}
}
void spg29x_nand_game_state::nand_jak_bbh()
{
nand_init(0x210, 0x200);
m_firstvector = 0xc;
}
void spg29x_nand_game_state::nand_jak_bbsf()
{
nand_init(0x210, 0x200);
m_firstvector = 0x8;
}
void spg29x_zone3d_game_state::init_zone3d()
{
}
/* ROM definition */
ROM_START( hyprscan )
ROM_REGION( 0x100000, "bios", ROMREGION_32BIT | ROMREGION_LE )
ROM_LOAD32_DWORD("hyperscan.bin", 0x000000, 0x100000, CRC(ce346a14) SHA1(560cb747e7193e6781d4b8b0bd4d7b45d3d28690))
ROM_REGION( 0x008000, "spg290", ROMREGION_32BIT | ROMREGION_LE )
ROM_LOAD32_DWORD("spg290.bin", 0x000000, 0x008000, NO_DUMP) // 256Kbit SPG290 internal ROM
ROM_END
ROM_START( jak_bbh )
ROM_REGION( 0x4200000, "nand", 0 ) // ID returned C25A, read as what appears to be a compatible type.
ROM_LOAD("bigbuckhunterpro_as_hy27us0812a_c25a.bin", 0x000000, 0x4200000, CRC(e2627540) SHA1(c8c6e5fbc4084fa695390bbb4e1e52e671f050da) )
ROM_REGION( 0x008000, "spg290", ROMREGION_32BIT | ROMREGION_LE )
ROM_LOAD32_DWORD("internal.rom", 0x000000, 0x008000, NO_DUMP)
ROM_END
ROM_START( jak_bbsf )
ROM_REGION( 0x4200000, "nand", 0 )
ROM_LOAD("bigbucksafari.bin", 0x000000, 0x4200000, CRC(dc5f9bf1) SHA1(27893c396d62f353ced52ef88fd9ade5c051598f) )
ROM_REGION( 0x008000, "spg290", ROMREGION_32BIT | ROMREGION_LE )
ROM_LOAD32_DWORD("internal.rom", 0x000000, 0x008000, NO_DUMP)
ROM_END
ROM_START( zone3d )
ROM_REGION( 0x100000, "spi", 0 )
ROM_LOAD("zone_25l8006e_c22014.bin", 0x000000, 0x100000, CRC(8c571771) SHA1(cdb46850286d31bf58d45b75ffc396ed774ac4fd) )
/*
model: Lexar SD
revision: LX01
serial number: 00000000XL10
size: 362.00 MiB (741376 sectors * 512 bytes)
unk1: 0000000000000007
unk2: 00000000000000fa
unk3: 01
The SD card has no label, but there's some printing on the back:
MMAGF0380M3085-WY
TC00201106 by Taiwan
--
Dumped with a hardware write blocker, so this image is correct and hasn't been corrupted by Windows.
The image contains a FAT filesystem with a number of (compressed?) programs that presumably get loaded into RAM by
the bootloader in the serial flash ROM.
*/
DISK_REGION( "cfcard" )
DISK_IMAGE( "zone3d", 0, SHA1(77971e2dbfb2ceac12f482d72539c2e042fd9108) )
ROM_REGION( 0x008000, "spg290", ROMREGION_32BIT | ROMREGION_LE )
ROM_LOAD32_DWORD("internal.rom", 0x000000, 0x008000, NO_DUMP)
ROM_END
/* Driver */
// YEAR NAME PARENT COMPAT MACHINE INPUT CLASS INIT COMPANY FULLNAME FLAGS
COMP( 2006, hyprscan, 0, 0, hyperscan, hyperscan, spg29x_game_state, empty_init, "Mattel", "HyperScan", MACHINE_NOT_WORKING | MACHINE_NO_SOUND )
// There were 1-player and 2-player versions of these JAKKS guns. The 2nd gun appears to be simply a controller (no AV connectors), but as they were separate products, with the 2-player versions released up to a year after the originals, the code could differ.
// If they differ, it is currently uncertain which versions these ROMs are from
COMP( 2009, jak_bbh, 0, 0, spg29x, hyperscan, spg29x_nand_game_state, nand_jak_bbh, "JAKKS Pacific Inc", "Big Buck Hunter Pro (JAKKS Pacific TV Game)", MACHINE_NOT_WORKING | MACHINE_NO_SOUND ) //has ISSI 404A (24C04)
COMP( 2011, jak_bbsf, 0, 0, spg29x, hyperscan, spg29x_nand_game_state, nand_jak_bbsf,"JAKKS Pacific Inc", "Big Buck Safari (JAKKS Pacific TV Game)", MACHINE_NOT_WORKING | MACHINE_NO_SOUND ) // has ISSI 416A (24C16)
// ends up doing the following, which causes a jump to 0xbf000024, where we have nothing mapped (internal ROM related, or does it expect loaded code there? This is the area Hyperscan uses as 'BIOS' rather than internal ROM, so it could be RAM here)
// 000011D4: ldis r8, 0xbf00
// 000011D8: ori r8, 0x0024
// 000011DC: br r8
COMP( 201?, zone3d, 0, 0, spg29x, hyperscan, spg29x_zone3d_game_state, init_zone3d,"Zone", "Zone 3D", MACHINE_NOT_WORKING | MACHINE_NO_SOUND )
|
C++
|
BSD-3-Clause
|
Robbbert/messui/src/mame/drivers/spg29x.cpp
|
21c717e1-9a07-4664-9b4e-7f8b690b1622
|
[{"tag": "NAME", "value": "Sandro Ronco", "start": 200, "end": 212, "context": " HyperScan\n\n 08/17/2013 Skeleton driver by Sandro Ronco\n\n HyperScan TODO:\n - Various graphi"}, {"tag": "NAME", "value": "Sandro Ronco", "start": 45, "end": 57, "context": "// license:BSD-3-Clause\n// copyright-holders:Sandro Ronco\n/************************************************"}]
|
[{"tag": "NAME", "value": "Sandro Ronco", "start": 200, "end": 212, "context": " HyperScan\n\n 08/17/2013 Skeleton driver by Sandro Ronco\n\n HyperScan TODO:\n - Various graphi"}, {"tag": "NAME", "value": "Sandro Ronco", "start": 45, "end": 57, "context": "// license:BSD-3-Clause\n// copyright-holders:Sandro Ronco\n/************************************************"}]
|
This directory contains rachsug's project work.
Since the project's completion, others have updated it to work with a new build system. In particular:
(a) moving tflm_overlay to src
(b) updating the old standard cfu_gen.py to the new standard cfu_gen.py.
(c) changes to the Makefile to use the new build system
(d) changed name of binary used by renode
(e) removing basic_cfu directory, as it was unused
Small further changes were required when the cfu_opX() macros were refactored to take 3 parameters rather than 2.
There was one bug fix:
* Commit b2dac48dd09e0707d2aafd8b91f2e3beca2be13d:
The specialized version of the routine unrolls the inner loop by a factor of 8
and only works correctly when the number of iterations is a multiple of 8.
The check for meeting the specialization criteria failed to verify this
requirement; a sketch of the missing check follows.
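As a rough illustration (the names here are hypothetical, not the project's actual API; the real routine lives in the project sources), the selection check has to reject iteration counts that are not multiples of 8:

```c
/* Illustrative sketch only -- hypothetical names, not the project's API. */
static int can_use_unrolled_kernel(int num_iterations) {
    /* The specialized kernel processes 8 iterations per pass, so it is
       only correct when the count divides evenly by 8. */
    return (num_iterations % 8) == 0;
}

void run_conv_loop(int num_iterations) {
    if (can_use_unrolled_kernel(num_iterations)) {
        /* unrolled-by-8 fast path */
    } else {
        /* generic fallback, one iteration at a time */
    }
}
```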
There have been no significant updates to other parts of the directory.
|
Markdown
|
Apache-2.0
|
APIUM/CFU-Playground/proj/proj_accel_1/README.md
|
ab2668f3-e2b0-427b-af09-b71dfdf57172
|
[{"tag": "PASSWORD", "value": "b2dac48dd09e0707d2aafd8b91f2e3beca2be13d", "start": 577, "end": 617, "context": "ather than 2.\n\nThere was one bug fix:\n\n * Commit b2dac48dd09e0707d2aafd8b91f2e3beca2be13d: \n The specialized version of the routine unro"}]
|
[{"tag": "PASSWORD", "value": "b2dac48dd09e0707d2aafd8b91f2e3beca2be13d", "start": 577, "end": 617, "context": "ather than 2.\n\nThere was one bug fix:\n\n * Commit b2dac48dd09e0707d2aafd8b91f2e3beca2be13d: \n The specialized version of the routine unro"}]
|
/*
* Twilio - Api
*
* This is the public Twilio REST API.
*
* API version: 1.24.0
* Contact: support@twilio.com
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package openapi
// ListIncomingPhoneNumberAssignedAddOnResponse struct for ListIncomingPhoneNumberAssignedAddOnResponse
type ListIncomingPhoneNumberAssignedAddOnResponse struct {
AssignedAddOns []ApiV2010IncomingPhoneNumberAssignedAddOn `json:"assigned_add_ons,omitempty"`
End int `json:"end,omitempty"`
FirstPageUri string `json:"first_page_uri,omitempty"`
NextPageUri string `json:"next_page_uri,omitempty"`
Page int `json:"page,omitempty"`
PageSize int `json:"page_size,omitempty"`
PreviousPageUri string `json:"previous_page_uri,omitempty"`
Start int `json:"start,omitempty"`
Uri string `json:"uri,omitempty"`
}
|
GO
|
MIT
|
NellybettIrahola/twilio-go/rest/api/v2010/model_list_incoming_phone_number_assigned_add_on_response.go
|
2c372c63-485b-475a-8809-e36603f44cc0
|
[{"tag": "EMAIL", "value": "support@twilio.com", "start": 99, "end": 117, "context": "o REST API.\n *\n * API version: 1.24.0\n * Contact: support@twilio.com\n */\n\n// Code generated by OpenAPI Generator (http"}]
|
[{"tag": "EMAIL", "value": "support@twilio.com", "start": 99, "end": 117, "context": "o REST API.\n *\n * API version: 1.24.0\n * Contact: support@twilio.com\n */\n\n// Code generated by OpenAPI Generator (http"}]
|
from flask import Flask, request, jsonify
from flask_jsonrpc import JSONRPC
# import json_to_db
import psycopg2
import sys
from obs import *
import config
app = Flask(__name__)
app.config.from_object(config.DevelopmentMaxConfig)
jsonrpc = JSONRPC(app,'/api')
sys.path.insert(0,app.config['SQL_PATH'])
from sql_methods import *
sys.path.insert(0,app.config['SCRIPTS_PATH'])
from file_utils import insert_history_to_file
@app.route('/')
def index():
return "Template to recieve data"
@app.route('/api/get_history', methods=['GET', 'POST'])
def get_history():
content = request.get_json(force=True)
insert_history_to_file(content, HISTORY_PATH)
return jsonify(content)
@app.route('/api/get_content', methods=['GET', 'POST'])
def get_content():
content = ("""{}""".format(request.get_json(force=True))).replace('\'','\"')
if content != "[]" and content:
if content[0] != '[':
content = '[' + content + ']'
content += '\n\n'
# print(content)
client.put(content)
return jsonify(content)
if __name__ == '__main__':
client = Client("127.0.0.1", 8181, app.config['DB'], app.config['USER'], app.config['PASSWORD'], app.config['HOST'], app.config['PORT'])
app.run(host='127.0.0.1', port= 5000)
# json_insert.to_csv('/Users/MaximZubkov/Desktop/Programming/Python/Python_Project/analysis/son.csv')
client.close()
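# Illustrative usage (assumes the server is running locally):
#   curl -X POST http://127.0.0.1:5000/api/get_content -d '{"example_key": "example_value"}'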
|
Python
|
MIT
|
ilvivl/Python_Project/server_data_tmp/app/local_server.py
|
ca8baff9-90b6-4c76-b5fc-213df7a569ec
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1197, "end": 1206, "context": "onfig['HOST'], app.config['PORT'])\n\tapp.run(host='127.0.0.1', port= 5000)\n\t# json_insert.to_csv('/Users/Maxim"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1062, "end": 1071, "context": "nt)\n\nif __name__ == '__main__':\n\tclient = Client(\"127.0.0.1\", 8181, app.config['DB'], app.config['USER'], app"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1197, "end": 1206, "context": "onfig['HOST'], app.config['PORT'])\n\tapp.run(host='127.0.0.1', port= 5000)\n\t# json_insert.to_csv('/Users/Maxim"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1062, "end": 1071, "context": "nt)\n\nif __name__ == '__main__':\n\tclient = Client(\"127.0.0.1\", 8181, app.config['DB'], app.config['USER'], app"}]
|
#!/usr/bin/python
# Copyright (c) 2017, 2021 Oracle and/or its affiliates.
# This software is made available to you under the terms of the GPL 3.0 license or the Apache 2.0 license.
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# Apache License v2.0
# See LICENSE.TXT for details.
# GENERATED FILE - DO NOT EDIT - MANUAL CHANGES WILL BE OVERWRITTEN
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
"metadata_version": "1.1",
"status": ["preview"],
"supported_by": "community",
}
DOCUMENTATION = """
---
module: oci_vault_secret_actions
short_description: Perform actions on a Secret resource in Oracle Cloud Infrastructure
description:
- Perform actions on a Secret resource in Oracle Cloud Infrastructure
- For I(action=cancel_secret_deletion), cancels the pending deletion of the specified secret. Canceling
a scheduled deletion restores the secret's lifecycle state to what
it was before you scheduled the secret for deletion.
- For I(action=schedule_secret_deletion), schedules the deletion of the specified secret. This sets the lifecycle state of the secret
to `PENDING_DELETION` and then deletes it after the specified retention period ends.
version_added: "2.9"
author: Oracle (@oracle)
options:
secret_id:
description:
- The OCID of the secret.
type: str
aliases: ["id"]
required: true
time_of_deletion:
description:
- An optional property indicating when to delete the secret version, expressed in L(RFC 3339,https://tools.ietf.org/html/rfc3339) timestamp format.
- Applicable only for I(action=schedule_secret_deletion).
type: str
action:
description:
- The action to perform on the Secret.
type: str
required: true
choices:
- "cancel_secret_deletion"
- "schedule_secret_deletion"
extends_documentation_fragment: [ oracle.oci.oracle ]
"""
EXAMPLES = """
- name: Perform action cancel_secret_deletion on secret
oci_vault_secret_actions:
secret_id: ocid1.secret.oc1..xxxxxxEXAMPLExxxxxx
action: cancel_secret_deletion
- name: Perform action schedule_secret_deletion on secret
oci_vault_secret_actions:
time_of_deletion: 2018-04-03T21:10:29.600Z
secret_id: ocid1.secret.oc1..xxxxxxEXAMPLExxxxxx
action: schedule_secret_deletion
"""
RETURN = """
secret:
description:
- Details of the Secret resource acted upon by the current operation
returned: on success
type: complex
contains:
compartment_id:
description:
- The OCID of the compartment where you want to create the secret.
returned: on success
type: string
sample: ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx
current_version_number:
description:
- The version number of the secret version that's currently in use.
returned: on success
type: int
sample: 56
defined_tags:
description:
- "Defined tags for this resource. Each key is predefined and scoped to a namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Operations\\": {\\"CostCenter\\": \\"42\\"}}`"
returned: on success
type: dict
sample: {'Operations': {'CostCenter': 'US'}}
description:
description:
- A brief description of the secret. Avoid entering confidential information.
returned: on success
type: string
sample: description_example
freeform_tags:
description:
- "Free-form tags for this resource. Each tag is a simple key-value pair with no predefined name, type, or namespace.
For more information, see L(Resource Tags,https://docs.cloud.oracle.com/Content/General/Concepts/resourcetags.htm).
Example: `{\\"Department\\": \\"Finance\\"}`"
returned: on success
type: dict
sample: {'Department': 'Finance'}
id:
description:
- The OCID of the secret.
returned: on success
type: string
sample: ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx
key_id:
description:
- The OCID of the master encryption key that is used to encrypt the secret.
returned: on success
type: string
sample: ocid1.key.oc1..xxxxxxEXAMPLExxxxxx
lifecycle_details:
description:
- Additional information about the current lifecycle state of the secret.
returned: on success
type: string
sample: lifecycle_details_example
lifecycle_state:
description:
- The current lifecycle state of the secret.
returned: on success
type: string
sample: CREATING
metadata:
description:
- Additional metadata that you can use to provide context about how to use the secret or during rotation or
other administrative tasks. For example, for a secret that you use to connect to a database, the additional
metadata might specify the connection endpoint and the connection string. Provide additional metadata as key-value pairs.
returned: on success
type: dict
sample: {}
secret_name:
description:
- The user-friendly name of the secret. Avoid entering confidential information.
returned: on success
type: string
sample: secret_name_example
secret_rules:
description:
- A list of rules that control how the secret is used and managed.
returned: on success
type: complex
contains:
rule_type:
description:
- The type of rule, which either controls when the secret contents expire or whether they can be reused.
returned: on success
type: string
sample: SECRET_EXPIRY_RULE
secret_version_expiry_interval:
description:
- A property indicating how long the secret contents will be considered valid, expressed in
L(ISO 8601,https://en.wikipedia.org/wiki/ISO_8601#Time_intervals) format. The secret needs to be
updated when the secret content expires. No enforcement mechanism exists at this time, but audit logs
record the expiration on the appropriate date, according to the time interval specified in the rule.
The timer resets after you update the secret contents.
The minimum value is 1 day and the maximum value is 90 days for this property. Currently, only intervals expressed in days are
supported.
For example, pass `P3D` to have the secret version expire every 3 days.
returned: on success
type: string
sample: secret_version_expiry_interval_example
time_of_absolute_expiry:
description:
- "An optional property indicating the absolute time when this secret will expire, expressed in L(RFC
3339,https://tools.ietf.org/html/rfc3339) timestamp format.
The minimum number of days from current time is 1 day and the maximum number of days from current time is 365 days.
Example: `2019-04-03T21:10:29.600Z`"
returned: on success
type: string
sample: 2019-04-03T21:10:29.600Z
is_secret_content_retrieval_blocked_on_expiry:
description:
- A property indicating whether to block retrieval of the secret content, on expiry. The default is false.
If the secret has already expired and you would like to retrieve the secret contents,
you need to edit the secret rule to disable this property, to allow reading the secret content.
returned: on success
type: bool
sample: true
is_enforced_on_deleted_secret_versions:
description:
- A property indicating whether the rule is applied even if the secret version with the content you are trying to reuse was deleted.
returned: on success
type: bool
sample: true
time_created:
description:
- "A property indicating when the secret was created, expressed in L(RFC 3339,https://tools.ietf.org/html/rfc3339) timestamp format.
Example: `2019-04-03T21:10:29.600Z`"
returned: on success
type: string
sample: 2019-04-03T21:10:29.600Z
time_of_current_version_expiry:
description:
- "An optional property indicating when the current secret version will expire, expressed in L(RFC 3339,https://tools.ietf.org/html/rfc3339)
timestamp format.
Example: `2019-04-03T21:10:29.600Z`"
returned: on success
type: string
sample: 2019-04-03T21:10:29.600Z
time_of_deletion:
description:
- "An optional property indicating when to delete the secret, expressed in L(RFC 3339,https://tools.ietf.org/html/rfc3339) timestamp format.
Example: `2019-04-03T21:10:29.600Z`"
returned: on success
type: string
sample: 2019-04-03T21:10:29.600Z
vault_id:
description:
- The OCID of the vault where the secret exists.
returned: on success
type: string
sample: ocid1.vault.oc1..xxxxxxEXAMPLExxxxxx
sample: {
"compartment_id": "ocid1.compartment.oc1..xxxxxxEXAMPLExxxxxx",
"current_version_number": 56,
"defined_tags": {'Operations': {'CostCenter': 'US'}},
"description": "description_example",
"freeform_tags": {'Department': 'Finance'},
"id": "ocid1.resource.oc1..xxxxxxEXAMPLExxxxxx",
"key_id": "ocid1.key.oc1..xxxxxxEXAMPLExxxxxx",
"lifecycle_details": "lifecycle_details_example",
"lifecycle_state": "CREATING",
"metadata": {},
"secret_name": "secret_name_example",
"secret_rules": [{
"rule_type": "SECRET_EXPIRY_RULE",
"secret_version_expiry_interval": "secret_version_expiry_interval_example",
"time_of_absolute_expiry": "2019-04-03T21:10:29.600Z",
"is_secret_content_retrieval_blocked_on_expiry": true,
"is_enforced_on_deleted_secret_versions": true
}],
"time_created": "2019-04-03T21:10:29.600Z",
"time_of_current_version_expiry": "2019-04-03T21:10:29.600Z",
"time_of_deletion": "2019-04-03T21:10:29.600Z",
"vault_id": "ocid1.vault.oc1..xxxxxxEXAMPLExxxxxx"
}
"""
from ansible.module_utils.basic import AnsibleModule
from ansible_collections.oracle.oci.plugins.module_utils import (
oci_common_utils,
oci_wait_utils,
)
from ansible_collections.oracle.oci.plugins.module_utils.oci_resource_utils import (
OCIActionsHelperBase,
get_custom_class,
)
try:
from oci.vault import VaultsClient
from oci.vault.models import ScheduleSecretDeletionDetails
HAS_OCI_PY_SDK = True
except ImportError:
HAS_OCI_PY_SDK = False
class SecretActionsHelperGen(OCIActionsHelperBase):
"""
Supported actions:
cancel_secret_deletion
schedule_secret_deletion
"""
@staticmethod
def get_module_resource_id_param():
return "secret_id"
def get_module_resource_id(self):
return self.module.params.get("secret_id")
def get_get_fn(self):
return self.client.get_secret
def get_resource(self):
return oci_common_utils.call_with_backoff(
self.client.get_secret, secret_id=self.module.params.get("secret_id"),
)
def cancel_secret_deletion(self):
return oci_wait_utils.call_and_wait(
call_fn=self.client.cancel_secret_deletion,
call_fn_args=(),
call_fn_kwargs=dict(secret_id=self.module.params.get("secret_id"),),
waiter_type=oci_wait_utils.NONE_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_action_desired_states(
self.module.params.get("action")
),
)
def schedule_secret_deletion(self):
action_details = oci_common_utils.convert_input_data_to_model_class(
self.module.params, ScheduleSecretDeletionDetails
)
return oci_wait_utils.call_and_wait(
call_fn=self.client.schedule_secret_deletion,
call_fn_args=(),
call_fn_kwargs=dict(
secret_id=self.module.params.get("secret_id"),
schedule_secret_deletion_details=action_details,
),
waiter_type=oci_wait_utils.NONE_WAITER_KEY,
operation="{0}_{1}".format(
self.module.params.get("action").upper(),
oci_common_utils.ACTION_OPERATION_KEY,
),
waiter_client=self.get_waiter_client(),
resource_helper=self,
wait_for_states=self.get_action_desired_states(
self.module.params.get("action")
),
)
SecretActionsHelperCustom = get_custom_class("SecretActionsHelperCustom")
class ResourceHelper(SecretActionsHelperCustom, SecretActionsHelperGen):
pass
def main():
module_args = oci_common_utils.get_common_arg_spec(
supports_create=False, supports_wait=False
)
module_args.update(
dict(
secret_id=dict(aliases=["id"], type="str", required=True),
time_of_deletion=dict(type="str"),
action=dict(
type="str",
required=True,
choices=["cancel_secret_deletion", "schedule_secret_deletion"],
),
)
)
module = AnsibleModule(argument_spec=module_args, supports_check_mode=True)
if not HAS_OCI_PY_SDK:
module.fail_json(msg="oci python sdk required for this module.")
resource_helper = ResourceHelper(
module=module,
resource_type="secret",
service_client_class=VaultsClient,
namespace="vault",
)
result = resource_helper.perform_action(module.params.get("action"))
module.exit_json(**result)
if __name__ == "__main__":
main()
|
Python
|
Apache-2.0
|
hanielburton/oci-ansible-collection/plugins/modules/oci_vault_secret_actions.py
|
28055e48-ec81-4f24-bade-b621fb5908fb
|
[]
|
[]
|
# coding: utf-8
import re
from sqlalchemy import BigInteger
from sqlalchemy import Column
from sqlalchemy import DateTime
from sqlalchemy import DDL
from sqlalchemy import DefaultClause
from sqlalchemy import event
from sqlalchemy import exc
from sqlalchemy import ForeignKey
from sqlalchemy import Index
from sqlalchemy import inspect
from sqlalchemy import Integer
from sqlalchemy import LargeBinary
from sqlalchemy import MetaData
from sqlalchemy import NCHAR
from sqlalchemy import select
from sqlalchemy import SmallInteger
from sqlalchemy import sql
from sqlalchemy import String
from sqlalchemy import Table
from sqlalchemy import testing
from sqlalchemy import Text
from sqlalchemy import TIMESTAMP
from sqlalchemy import Unicode
from sqlalchemy import UnicodeText
from sqlalchemy import UniqueConstraint
from sqlalchemy import util
from sqlalchemy.dialects.mysql import base as mysql
from sqlalchemy.dialects.mysql import reflection as _reflection
from sqlalchemy.schema import CreateIndex
from sqlalchemy.testing import assert_raises_message
from sqlalchemy.testing import AssertsCompiledSQL
from sqlalchemy.testing import eq_
from sqlalchemy.testing import expect_warnings
from sqlalchemy.testing import fixtures
from sqlalchemy.testing import is_
from sqlalchemy.testing import mock
class TypeReflectionTest(fixtures.TestBase):
__only_on__ = "mysql"
__backend__ = True
@testing.provide_metadata
def _run_test(self, specs, attributes):
columns = [Column("c%i" % (i + 1), t[0]) for i, t in enumerate(specs)]
# Early 5.0 releases seem to report more "general" types for columns
# in a view, e.g. char -> varchar, tinyblob -> mediumblob
use_views = testing.db.dialect.server_version_info > (5, 0, 10)
m = self.metadata
Table("mysql_types", m, *columns)
if use_views:
event.listen(
m,
"after_create",
DDL(
"CREATE OR REPLACE VIEW mysql_types_v "
"AS SELECT * from mysql_types"
),
)
event.listen(
m, "before_drop", DDL("DROP VIEW IF EXISTS mysql_types_v")
)
m.create_all()
m2 = MetaData(testing.db)
tables = [Table("mysql_types", m2, autoload=True)]
if use_views:
tables.append(Table("mysql_types_v", m2, autoload=True))
for table in tables:
for i, (reflected_col, spec) in enumerate(zip(table.c, specs)):
expected_spec = spec[1]
reflected_type = reflected_col.type
is_(type(reflected_type), type(expected_spec))
for attr in attributes:
eq_(
getattr(reflected_type, attr),
getattr(expected_spec, attr),
"Column %s: Attribute %s value of %s does not "
"match %s for type %s"
% (
"c%i" % (i + 1),
attr,
getattr(reflected_type, attr),
getattr(expected_spec, attr),
spec[0],
),
)
def test_time_types(self):
specs = []
if testing.requires.mysql_fsp.enabled:
fsps = [None, 0, 5]
else:
fsps = [None]
for type_ in (mysql.TIMESTAMP, mysql.DATETIME, mysql.TIME):
# MySQL defaults fsp to 0, and if 0 does not report it.
# we don't actually render 0 right now in DDL but even if we do,
# it comes back blank
for fsp in fsps:
if fsp:
specs.append((type_(fsp=fsp), type_(fsp=fsp)))
else:
specs.append((type_(), type_()))
specs.extend(
[(TIMESTAMP(), mysql.TIMESTAMP()), (DateTime(), mysql.DATETIME())]
)
# note 'timezone' should always be None on both
self._run_test(specs, ["fsp", "timezone"])
def test_year_types(self):
specs = [
(mysql.YEAR(), mysql.YEAR(display_width=4)),
(mysql.YEAR(display_width=4), mysql.YEAR(display_width=4)),
]
self._run_test(specs, ["display_width"])
def test_string_types(self):
specs = [
(String(1), mysql.MSString(1)),
(String(3), mysql.MSString(3)),
(Text(), mysql.MSText()),
(Unicode(1), mysql.MSString(1)),
(Unicode(3), mysql.MSString(3)),
(UnicodeText(), mysql.MSText()),
(mysql.MSChar(1), mysql.MSChar(1)),
(mysql.MSChar(3), mysql.MSChar(3)),
(NCHAR(2), mysql.MSChar(2)),
(mysql.MSNChar(2), mysql.MSChar(2)),
(mysql.MSNVarChar(22), mysql.MSString(22)),
]
self._run_test(specs, ["length"])
def test_integer_types(self):
specs = []
for type_ in [
mysql.TINYINT,
mysql.SMALLINT,
mysql.MEDIUMINT,
mysql.INTEGER,
mysql.BIGINT,
]:
for display_width in [None, 4, 7]:
for unsigned in [False, True]:
for zerofill in [None, True]:
kw = {}
if display_width:
kw["display_width"] = display_width
if unsigned is not None:
kw["unsigned"] = unsigned
if zerofill is not None:
kw["zerofill"] = zerofill
zerofill = bool(zerofill)
source_type = type_(**kw)
if display_width is None:
display_width = {
mysql.MEDIUMINT: 9,
mysql.SMALLINT: 6,
mysql.TINYINT: 4,
mysql.INTEGER: 11,
mysql.BIGINT: 20,
}[type_]
if zerofill:
unsigned = True
expected_type = type_(
display_width=display_width,
unsigned=unsigned,
zerofill=zerofill,
)
specs.append((source_type, expected_type))
specs.extend(
[
(SmallInteger(), mysql.SMALLINT(display_width=6)),
(Integer(), mysql.INTEGER(display_width=11)),
(BigInteger, mysql.BIGINT(display_width=20)),
]
)
self._run_test(specs, ["display_width", "unsigned", "zerofill"])
def test_binary_types(self):
specs = [
(LargeBinary(3), mysql.TINYBLOB()),
(LargeBinary(), mysql.BLOB()),
(mysql.MSBinary(3), mysql.MSBinary(3)),
(mysql.MSVarBinary(3), mysql.MSVarBinary(3)),
(mysql.MSTinyBlob(), mysql.MSTinyBlob()),
(mysql.MSBlob(), mysql.MSBlob()),
(mysql.MSBlob(1234), mysql.MSBlob()),
(mysql.MSMediumBlob(), mysql.MSMediumBlob()),
(mysql.MSLongBlob(), mysql.MSLongBlob()),
]
self._run_test(specs, [])
@testing.uses_deprecated("Manually quoting ENUM value literals")
def test_legacy_enum_types(self):
specs = [(mysql.ENUM("''", "'fleem'"), mysql.ENUM("''", "'fleem'"))]
self._run_test(specs, ["enums"])
class ReflectionTest(fixtures.TestBase, AssertsCompiledSQL):
__only_on__ = "mysql"
__backend__ = True
def test_default_reflection(self):
"""Test reflection of column defaults."""
from sqlalchemy.dialects.mysql import VARCHAR
def_table = Table(
"mysql_def",
MetaData(testing.db),
Column(
"c1",
VARCHAR(10, collation="utf8_unicode_ci"),
DefaultClause(""),
nullable=False,
),
Column("c2", String(10), DefaultClause("0")),
Column("c3", String(10), DefaultClause("abc")),
Column("c4", TIMESTAMP, DefaultClause("2009-04-05 12:00:00")),
Column("c5", TIMESTAMP),
Column(
"c6",
TIMESTAMP,
DefaultClause(
sql.text(
"CURRENT_TIMESTAMP " "ON UPDATE CURRENT_TIMESTAMP"
)
),
),
)
def_table.create()
try:
reflected = Table("mysql_def", MetaData(testing.db), autoload=True)
finally:
def_table.drop()
assert def_table.c.c1.server_default.arg == ""
assert def_table.c.c2.server_default.arg == "0"
assert def_table.c.c3.server_default.arg == "abc"
assert def_table.c.c4.server_default.arg == "2009-04-05 12:00:00"
assert str(reflected.c.c1.server_default.arg) == "''"
assert str(reflected.c.c2.server_default.arg) == "'0'"
assert str(reflected.c.c3.server_default.arg) == "'abc'"
assert (
str(reflected.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
)
assert reflected.c.c5.default is None
assert reflected.c.c5.server_default is None
assert reflected.c.c6.default is None
assert re.match(
r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
str(reflected.c.c6.server_default.arg).upper(),
)
reflected.create()
try:
reflected2 = Table(
"mysql_def", MetaData(testing.db), autoload=True
)
finally:
reflected.drop()
assert str(reflected2.c.c1.server_default.arg) == "''"
assert str(reflected2.c.c2.server_default.arg) == "'0'"
assert str(reflected2.c.c3.server_default.arg) == "'abc'"
assert (
str(reflected2.c.c4.server_default.arg) == "'2009-04-05 12:00:00'"
)
assert reflected.c.c5.default is None
assert reflected.c.c5.server_default is None
assert reflected.c.c6.default is None
assert re.match(
r"CURRENT_TIMESTAMP(\(\))? ON UPDATE CURRENT_TIMESTAMP(\(\))?",
str(reflected.c.c6.server_default.arg).upper(),
)
def test_reflection_with_table_options(self):
comment = r"""Comment types type speedily ' " \ '' Fun!"""
def_table = Table(
"mysql_def",
MetaData(testing.db),
Column("c1", Integer()),
mysql_engine="MEMORY",
comment=comment,
mysql_default_charset="utf8",
mysql_auto_increment="5",
mysql_avg_row_length="3",
mysql_password="secret",
mysql_connection="fish",
)
def_table.create()
try:
reflected = Table("mysql_def", MetaData(testing.db), autoload=True)
finally:
def_table.drop()
assert def_table.kwargs["mysql_engine"] == "MEMORY"
assert def_table.comment == comment
assert def_table.kwargs["mysql_default_charset"] == "utf8"
assert def_table.kwargs["mysql_auto_increment"] == "5"
assert def_table.kwargs["mysql_avg_row_length"] == "3"
assert def_table.kwargs["mysql_password"] == "secret"
assert def_table.kwargs["mysql_connection"] == "fish"
assert reflected.kwargs["mysql_engine"] == "MEMORY"
assert reflected.comment == comment
assert reflected.kwargs["mysql_comment"] == comment
assert reflected.kwargs["mysql_default charset"] == "utf8"
assert reflected.kwargs["mysql_avg_row_length"] == "3"
assert reflected.kwargs["mysql_connection"] == "fish"
# This field doesn't seem to be returned by mysql itself.
# assert reflected.kwargs['mysql_password'] == 'secret'
# This is explicitly ignored when reflecting schema.
# assert reflected.kwargs['mysql_auto_increment'] == '5'
def test_reflection_on_include_columns(self):
"""Test reflection of include_columns to be sure they respect case."""
case_table = Table(
"mysql_case",
MetaData(testing.db),
Column("c1", String(10)),
Column("C2", String(10)),
Column("C3", String(10)),
)
try:
case_table.create()
reflected = Table(
"mysql_case",
MetaData(testing.db),
autoload=True,
include_columns=["c1", "C2"],
)
for t in case_table, reflected:
assert "c1" in t.c.keys()
assert "C2" in t.c.keys()
reflected2 = Table(
"mysql_case",
MetaData(testing.db),
autoload=True,
include_columns=["c1", "c2"],
)
assert "c1" in reflected2.c.keys()
for c in ["c2", "C2", "C3"]:
assert c not in reflected2.c.keys()
finally:
case_table.drop()
def test_autoincrement(self):
meta = MetaData(testing.db)
try:
Table(
"ai_1",
meta,
Column("int_y", Integer, primary_key=True, autoincrement=True),
Column("int_n", Integer, DefaultClause("0"), primary_key=True),
mysql_engine="MyISAM",
)
Table(
"ai_2",
meta,
Column("int_y", Integer, primary_key=True, autoincrement=True),
Column("int_n", Integer, DefaultClause("0"), primary_key=True),
mysql_engine="MyISAM",
)
Table(
"ai_3",
meta,
Column(
"int_n",
Integer,
DefaultClause("0"),
primary_key=True,
autoincrement=False,
),
Column("int_y", Integer, primary_key=True, autoincrement=True),
mysql_engine="MyISAM",
)
Table(
"ai_4",
meta,
Column(
"int_n",
Integer,
DefaultClause("0"),
primary_key=True,
autoincrement=False,
),
Column(
"int_n2",
Integer,
DefaultClause("0"),
primary_key=True,
autoincrement=False,
),
mysql_engine="MyISAM",
)
Table(
"ai_5",
meta,
Column("int_y", Integer, primary_key=True, autoincrement=True),
Column(
"int_n",
Integer,
DefaultClause("0"),
primary_key=True,
autoincrement=False,
),
mysql_engine="MyISAM",
)
Table(
"ai_6",
meta,
Column("o1", String(1), DefaultClause("x"), primary_key=True),
Column("int_y", Integer, primary_key=True, autoincrement=True),
mysql_engine="MyISAM",
)
Table(
"ai_7",
meta,
Column("o1", String(1), DefaultClause("x"), primary_key=True),
Column("o2", String(1), DefaultClause("x"), primary_key=True),
Column("int_y", Integer, primary_key=True, autoincrement=True),
mysql_engine="MyISAM",
)
Table(
"ai_8",
meta,
Column("o1", String(1), DefaultClause("x"), primary_key=True),
Column("o2", String(1), DefaultClause("x"), primary_key=True),
mysql_engine="MyISAM",
)
meta.create_all()
table_names = [
"ai_1",
"ai_2",
"ai_3",
"ai_4",
"ai_5",
"ai_6",
"ai_7",
"ai_8",
]
mr = MetaData(testing.db)
mr.reflect(only=table_names)
for tbl in [mr.tables[name] for name in table_names]:
for c in tbl.c:
if c.name.startswith("int_y"):
assert c.autoincrement
elif c.name.startswith("int_n"):
assert not c.autoincrement
tbl.insert().execute()
if "int_y" in tbl.c:
assert select([tbl.c.int_y]).scalar() == 1
assert list(tbl.select().execute().first()).count(1) == 1
else:
assert 1 not in list(tbl.select().execute().first())
finally:
meta.drop_all()
@testing.provide_metadata
def test_view_reflection(self):
Table(
"x", self.metadata, Column("a", Integer), Column("b", String(50))
)
self.metadata.create_all()
with testing.db.connect() as conn:
conn.execute("CREATE VIEW v1 AS SELECT * FROM x")
conn.execute("CREATE ALGORITHM=MERGE VIEW v2 AS SELECT * FROM x")
conn.execute(
"CREATE ALGORITHM=UNDEFINED VIEW v3 AS SELECT * FROM x"
)
conn.execute(
"CREATE DEFINER=CURRENT_USER VIEW v4 AS SELECT * FROM x"
)
@event.listens_for(self.metadata, "before_drop")
def cleanup(*arg, **kw):
with testing.db.connect() as conn:
for v in ["v1", "v2", "v3", "v4"]:
conn.execute("DROP VIEW %s" % v)
insp = inspect(testing.db)
for v in ["v1", "v2", "v3", "v4"]:
eq_(
[
(col["name"], col["type"].__class__)
for col in insp.get_columns(v)
],
[("a", mysql.INTEGER), ("b", mysql.VARCHAR)],
)
@testing.provide_metadata
def test_skip_not_describable(self):
@event.listens_for(self.metadata, "before_drop")
def cleanup(*arg, **kw):
with testing.db.connect() as conn:
conn.execute("DROP TABLE IF EXISTS test_t1")
conn.execute("DROP TABLE IF EXISTS test_t2")
conn.execute("DROP VIEW IF EXISTS test_v")
with testing.db.connect() as conn:
conn.execute("CREATE TABLE test_t1 (id INTEGER)")
conn.execute("CREATE TABLE test_t2 (id INTEGER)")
conn.execute("CREATE VIEW test_v AS SELECT id FROM test_t1")
conn.execute("DROP TABLE test_t1")
m = MetaData()
with expect_warnings(
"Skipping .* Table or view named .?test_v.? could not be "
"reflected: .* references invalid table"
):
m.reflect(views=True, bind=conn)
eq_(m.tables["test_t2"].name, "test_t2")
assert_raises_message(
exc.UnreflectableTableError,
"references invalid table",
Table,
"test_v",
MetaData(),
autoload_with=conn,
)
@testing.exclude("mysql", "<", (5, 0, 0), "no information_schema support")
def test_system_views(self):
dialect = testing.db.dialect
connection = testing.db.connect()
view_names = dialect.get_view_names(connection, "information_schema")
self.assert_("TABLES" in view_names)
@testing.provide_metadata
def test_nullable_reflection(self):
"""test reflection of NULL/NOT NULL, in particular with TIMESTAMP
defaults where MySQL is inconsistent in how it reports CREATE TABLE.
"""
meta = self.metadata
# this is ideally one table, but older MySQL versions choke
# on the multiple TIMESTAMP columns
row = testing.db.execute(
"show variables like '%%explicit_defaults_for_timestamp%%'"
).first()
explicit_defaults_for_timestamp = row[1].lower() in ("on", "1", "true")
reflected = []
for idx, cols in enumerate(
[
[
"x INTEGER NULL",
"y INTEGER NOT NULL",
"z INTEGER",
"q TIMESTAMP NULL",
],
["p TIMESTAMP NULL DEFAULT CURRENT_TIMESTAMP"],
["r TIMESTAMP NOT NULL"],
["s TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP"],
["t TIMESTAMP"],
["u TIMESTAMP DEFAULT CURRENT_TIMESTAMP"],
]
):
Table("nn_t%d" % idx, meta) # to allow DROP
testing.db.execute(
"""
CREATE TABLE nn_t%d (
%s
)
"""
% (idx, ", \n".join(cols))
)
reflected.extend(
{
"name": d["name"],
"nullable": d["nullable"],
"default": d["default"],
}
for d in inspect(testing.db).get_columns("nn_t%d" % idx)
)
if testing.db.dialect._is_mariadb_102:
current_timestamp = "current_timestamp()"
else:
current_timestamp = "CURRENT_TIMESTAMP"
eq_(
reflected,
[
{"name": "x", "nullable": True, "default": None},
{"name": "y", "nullable": False, "default": None},
{"name": "z", "nullable": True, "default": None},
{"name": "q", "nullable": True, "default": None},
{"name": "p", "nullable": True, "default": current_timestamp},
{
"name": "r",
"nullable": False,
"default": None
if explicit_defaults_for_timestamp
else (
"%(current_timestamp)s "
"ON UPDATE %(current_timestamp)s"
)
% {"current_timestamp": current_timestamp},
},
{"name": "s", "nullable": False, "default": current_timestamp},
{
"name": "t",
"nullable": True
if explicit_defaults_for_timestamp
else False,
"default": None
if explicit_defaults_for_timestamp
else (
"%(current_timestamp)s "
"ON UPDATE %(current_timestamp)s"
)
% {"current_timestamp": current_timestamp},
},
{
"name": "u",
"nullable": True
if explicit_defaults_for_timestamp
else False,
"default": current_timestamp,
},
],
)
@testing.provide_metadata
def test_reflection_with_unique_constraint(self):
insp = inspect(testing.db)
meta = self.metadata
uc_table = Table(
"mysql_uc",
meta,
Column("a", String(10)),
UniqueConstraint("a", name="uc_a"),
)
uc_table.create()
# MySQL converts unique constraints into unique indexes.
# separately we get both
indexes = dict((i["name"], i) for i in insp.get_indexes("mysql_uc"))
constraints = set(
i["name"] for i in insp.get_unique_constraints("mysql_uc")
)
self.assert_("uc_a" in indexes)
self.assert_(indexes["uc_a"]["unique"])
self.assert_("uc_a" in constraints)
# reflection here favors the unique index, as that's the
# more "official" MySQL construct
reflected = Table("mysql_uc", MetaData(testing.db), autoload=True)
indexes = dict((i.name, i) for i in reflected.indexes)
constraints = set(uc.name for uc in reflected.constraints)
self.assert_("uc_a" in indexes)
self.assert_(indexes["uc_a"].unique)
self.assert_("uc_a" not in constraints)
@testing.provide_metadata
def test_reflect_fulltext(self):
mt = Table(
"mytable",
self.metadata,
Column("id", Integer, primary_key=True),
Column("textdata", String(50)),
mysql_engine="InnoDB",
)
Index("textdata_ix", mt.c.textdata, mysql_prefix="FULLTEXT")
self.metadata.create_all(testing.db)
mt = Table("mytable", MetaData(), autoload_with=testing.db)
idx = list(mt.indexes)[0]
eq_(idx.name, "textdata_ix")
eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
self.assert_compile(
CreateIndex(idx),
"CREATE FULLTEXT INDEX textdata_ix ON mytable (textdata)",
)
@testing.requires.mysql_ngram_fulltext
@testing.provide_metadata
def test_reflect_fulltext_comment(self):
mt = Table(
"mytable",
self.metadata,
Column("id", Integer, primary_key=True),
Column("textdata", String(50)),
mysql_engine="InnoDB",
)
Index(
"textdata_ix",
mt.c.textdata,
mysql_prefix="FULLTEXT",
mysql_with_parser="ngram",
)
self.metadata.create_all(testing.db)
mt = Table("mytable", MetaData(), autoload_with=testing.db)
idx = list(mt.indexes)[0]
eq_(idx.name, "textdata_ix")
eq_(idx.dialect_options["mysql"]["prefix"], "FULLTEXT")
eq_(idx.dialect_options["mysql"]["with_parser"], "ngram")
self.assert_compile(
CreateIndex(idx),
"CREATE FULLTEXT INDEX textdata_ix ON mytable "
"(textdata) WITH PARSER ngram",
)
@testing.provide_metadata
def test_non_column_index(self):
m1 = self.metadata
t1 = Table(
"add_ix", m1, Column("x", String(50)), mysql_engine="InnoDB"
)
Index("foo_idx", t1.c.x.desc())
m1.create_all()
insp = inspect(testing.db)
eq_(
insp.get_indexes("add_ix"),
[{"name": "foo_idx", "column_names": ["x"], "unique": False}],
)
def _bug_88718_casing_0(self):
fkeys_casing_0 = [
{
"name": "FK_PlaylistTTrackId",
"constrained_columns": ["TTrackID"],
"referred_schema": "test_schema",
"referred_table": "Track",
"referred_columns": ["trackid"],
"options": {},
},
{
"name": "FK_PlaylistTrackId",
"constrained_columns": ["TrackID"],
"referred_schema": None,
"referred_table": "Track",
"referred_columns": ["trackid"],
"options": {},
},
]
ischema_casing_0 = [
("test", "Track", "TrackID"),
("test_schema", "Track", "TrackID"),
]
return fkeys_casing_0, ischema_casing_0
def _bug_88718_casing_1(self):
fkeys_casing_1 = [
{
"name": "FK_PlaylistTTrackId",
"constrained_columns": ["TTrackID"],
"referred_schema": "test_schema",
"referred_table": "Track",
"referred_columns": ["trackid"],
"options": {},
},
{
"name": "FK_PlaylistTrackId",
"constrained_columns": ["TrackID"],
"referred_schema": None,
"referred_table": "Track",
"referred_columns": ["trackid"],
"options": {},
},
]
ischema_casing_1 = [
(util.u("test"), util.u("Track"), "TrackID"),
(util.u("test_schema"), util.u("Track"), "TrackID"),
]
return fkeys_casing_1, ischema_casing_1
def _bug_88718_casing_2(self):
fkeys_casing_2 = [
{
"name": "FK_PlaylistTTrackId",
"constrained_columns": ["TTrackID"],
"referred_schema": "test_schema",
"referred_table": "Track",
"referred_columns": ["trackid"],
"options": {},
},
{
"name": "FK_PlaylistTrackId",
"constrained_columns": ["TrackID"],
"referred_schema": None,
"referred_table": "Track",
"referred_columns": ["trackid"],
"options": {},
},
]
ischema_casing_2 = [
("test", "Track", "TrackID"),
("test_schema", "Track", "TrackID"),
]
return fkeys_casing_2, ischema_casing_2
def test_correct_for_mysql_bug_88718(self):
dialect = mysql.dialect()
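# exercise the bug-88718 workaround under each lower_case_table_names
# setting (0, 1, 2); the mocked connection returns canned ischema rows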
for casing, (fkeys, ischema) in [
(0, self._bug_88718_casing_0()),
(1, self._bug_88718_casing_1()),
(2, self._bug_88718_casing_2()),
]:
dialect._casing = casing
dialect.default_schema_name = "test"
connection = mock.Mock(
dialect=dialect, execute=lambda stmt, **params: ischema
)
dialect._correct_for_mysql_bug_88718(fkeys, connection)
eq_(
fkeys,
[
{
"name": "FK_PlaylistTTrackId",
"constrained_columns": ["TTrackID"],
"referred_schema": "test_schema",
"referred_table": "Track",
"referred_columns": ["TrackID"],
"options": {},
},
{
"name": "FK_PlaylistTrackId",
"constrained_columns": ["TrackID"],
"referred_schema": None,
"referred_table": "Track",
"referred_columns": ["TrackID"],
"options": {},
},
],
)
@testing.provide_metadata
def test_case_sensitive_column_constraint_reflection(self):
# test for issue #4344 which works around
# MySQL 8.0 bug https://bugs.mysql.com/bug.php?id=88718
m1 = self.metadata
Table(
"Track",
m1,
Column("TrackID", Integer, primary_key=True),
mysql_engine="InnoDB",
)
Table(
"Track",
m1,
Column("TrackID", Integer, primary_key=True),
schema=testing.config.test_schema,
mysql_engine="InnoDB",
)
Table(
"PlaylistTrack",
m1,
Column("id", Integer, primary_key=True),
Column(
"TrackID",
ForeignKey("Track.TrackID", name="FK_PlaylistTrackId"),
),
Column(
"TTrackID",
ForeignKey(
"%s.Track.TrackID" % (testing.config.test_schema,),
name="FK_PlaylistTTrackId",
),
),
mysql_engine="InnoDB",
)
m1.create_all()
if testing.db.dialect._casing in (1, 2):
eq_(
inspect(testing.db).get_foreign_keys("PlaylistTrack"),
[
{
"name": "FK_PlaylistTTrackId",
"constrained_columns": ["TTrackID"],
"referred_schema": testing.config.test_schema,
"referred_table": "track",
"referred_columns": ["TrackID"],
"options": {},
},
{
"name": "FK_PlaylistTrackId",
"constrained_columns": ["TrackID"],
"referred_schema": None,
"referred_table": "track",
"referred_columns": ["TrackID"],
"options": {},
},
],
)
else:
eq_(
inspect(testing.db).get_foreign_keys("PlaylistTrack"),
[
{
"name": "FK_PlaylistTTrackId",
"constrained_columns": ["TTrackID"],
"referred_schema": testing.config.test_schema,
"referred_table": "Track",
"referred_columns": ["TrackID"],
"options": {},
},
{
"name": "FK_PlaylistTrackId",
"constrained_columns": ["TrackID"],
"referred_schema": None,
"referred_table": "Track",
"referred_columns": ["TrackID"],
"options": {},
},
],
)
@testing.requires.mysql_fully_case_sensitive
@testing.provide_metadata
def test_case_sensitive_reflection_dual_case_references(self):
# this tests that within the fix we do for MySQL bug
# 88718, we don't do case-insensitive logic if the backend
# is case sensitive
m = self.metadata
Table(
"t1",
m,
Column("some_id", Integer, primary_key=True),
mysql_engine="InnoDB",
)
Table(
"T1",
m,
Column("Some_Id", Integer, primary_key=True),
mysql_engine="InnoDB",
)
Table(
"t2",
m,
Column("id", Integer, primary_key=True),
Column("t1id", ForeignKey("t1.some_id", name="t1id_fk")),
Column("cap_t1id", ForeignKey("T1.Some_Id", name="cap_t1id_fk")),
mysql_engine="InnoDB",
)
m.create_all(testing.db)
eq_(
dict(
(rec["name"], rec)
for rec in inspect(testing.db).get_foreign_keys("t2")
),
{
"cap_t1id_fk": {
"name": "cap_t1id_fk",
"constrained_columns": ["cap_t1id"],
"referred_schema": None,
"referred_table": "T1",
"referred_columns": ["Some_Id"],
"options": {},
},
"t1id_fk": {
"name": "t1id_fk",
"constrained_columns": ["t1id"],
"referred_schema": None,
"referred_table": "t1",
"referred_columns": ["some_id"],
"options": {},
},
},
)
class RawReflectionTest(fixtures.TestBase):
__backend__ = True
def setup(self):
dialect = mysql.dialect()
self.parser = _reflection.MySQLTableDefinitionParser(
dialect, dialect.identifier_preparer
)
def test_key_reflection(self):
regex = self.parser._re_key
assert regex.match(" PRIMARY KEY (`id`),")
assert regex.match(" PRIMARY KEY USING BTREE (`id`),")
assert regex.match(" PRIMARY KEY (`id`) USING BTREE,")
assert regex.match(" PRIMARY KEY (`id`)")
assert regex.match(" PRIMARY KEY USING BTREE (`id`)")
assert regex.match(" PRIMARY KEY (`id`) USING BTREE")
assert regex.match(
" PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE 16"
)
assert regex.match(
" PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE=16"
)
assert regex.match(
" PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = 16"
)
assert not regex.match(
" PRIMARY KEY (`id`) USING BTREE KEY_BLOCK_SIZE = = 16"
)
assert regex.match(" KEY (`id`) USING BTREE COMMENT 'comment'")
# `SHOW CREATE TABLE` returns COMMENT '''comment'
# after creating table with COMMENT '\'comment'
assert regex.match(" KEY (`id`) USING BTREE COMMENT '''comment'")
assert regex.match(" KEY (`id`) USING BTREE COMMENT 'comment'''")
assert regex.match(" KEY (`id`) USING BTREE COMMENT 'prefix''suffix'")
assert regex.match(
" KEY (`id`) USING BTREE COMMENT 'prefix''text''suffix'"
)
# https://forums.mysql.com/read.php?20,567102,567111#msg-567111
# "It means if the MySQL version >= 501, execute what's in the comment"
assert regex.match(
" FULLTEXT KEY `ix_fulltext_oi_g_name` (`oi_g_name`) "
"/*!50100 WITH PARSER `ngram` */ "
)
def test_key_reflection_columns(self):
regex = self.parser._re_key
exprs = self.parser._re_keyexprs
m = regex.match(" KEY (`id`) USING BTREE COMMENT '''comment'")
eq_(m.group("columns"), "`id`")
m = regex.match(" KEY (`x`, `y`) USING BTREE")
eq_(m.group("columns"), "`x`, `y`")
eq_(exprs.findall(m.group("columns")), [("x", "", ""), ("y", "", "")])
m = regex.match(" KEY (`x`(25), `y`(15)) USING BTREE")
eq_(m.group("columns"), "`x`(25), `y`(15)")
eq_(
exprs.findall(m.group("columns")),
[("x", "25", ""), ("y", "15", "")],
)
m = regex.match(" KEY (`x`(25) DESC, `y`(15) ASC) USING BTREE")
eq_(m.group("columns"), "`x`(25) DESC, `y`(15) ASC")
eq_(
exprs.findall(m.group("columns")),
[("x", "25", "DESC"), ("y", "15", "ASC")],
)
m = regex.match(" KEY `foo_idx` (`x` DESC)")
eq_(m.group("columns"), "`x` DESC")
eq_(exprs.findall(m.group("columns")), [("x", "", "DESC")])
m = regex.match(" KEY `foo_idx` (`x` DESC, `y` ASC)")
eq_(m.group("columns"), "`x` DESC, `y` ASC")
def test_fk_reflection(self):
regex = self.parser._re_fk_constraint
m = regex.match(
" CONSTRAINT `addresses_user_id_fkey` "
"FOREIGN KEY (`user_id`) "
"REFERENCES `users` (`id`) "
"ON DELETE CASCADE ON UPDATE CASCADE"
)
eq_(
m.groups(),
(
"addresses_user_id_fkey",
"`user_id`",
"`users`",
"`id`",
None,
"CASCADE",
"CASCADE",
),
)
m = regex.match(
" CONSTRAINT `addresses_user_id_fkey` "
"FOREIGN KEY (`user_id`) "
"REFERENCES `users` (`id`) "
"ON DELETE CASCADE ON UPDATE SET NULL"
)
eq_(
m.groups(),
(
"addresses_user_id_fkey",
"`user_id`",
"`users`",
"`id`",
None,
"CASCADE",
"SET NULL",
),
)
|
Python
|
MIT
|
AngelLiang/hacking-sqlalchemy/test/dialect/mysql/test_reflection.py
|
789088fd-6fad-4762-9036-d9f0a324b0fe
|
[]
|
[]
|
"""
Django settings for backend project.
Generated by 'django-admin startproject' using Django 3.1.3.
For more information on this file, see
https://docs.djangoproject.com/en/3.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.1/ref/settings/
"""
from pathlib import Path
from datetime import timedelta
import os
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '2(iwreobf4b(-=h_p=^!obgxdgn3_*s!17=_3wc4dun9_y^q+c'
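# A common hardening sketch (an assumption, not part of this project): read the
# key from the environment so the real value stays out of version control, e.g.
# SECRET_KEY = os.environ.get('DJANGO_SECRET_KEY', SECRET_KEY)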
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'rest_framework',
'backend.core',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'corsheaders.middleware.CorsMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'backend.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'backend.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/3.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
LOGIN_URL = "/api/v1/signin"
SIMPLE_JWT = {
"ACCESS_TOKEN_LIFETIME": timedelta(minutes=60),
"REFRESH_TOKEN_LIFETIME": timedelta(days=2),
}
CORS_ORIGIN_WHITELIST = ["http://localhost:3000", "http://127.0.0.1:3000"]
# Internationalization
# https://docs.djangoproject.com/en/3.1/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, "static/")
REST_FRAMEWORK = {
"DEFAULT_AUTHENTICATION_CLASSES": ["rest_framework_simplejwt.authentication.JWTAuthentication"],
"DEFAULT_RENDERER_CLASSES": ["rest_framework.renderers.JSONRenderer"],
"TEST_REQUEST_DEFAULT_FORMAT": "json",
"DEFAULT_PERMISSION_CLASSES": ("rest_framework.permissions.DjangoModelPermissions",),
}
|
Python
|
MIT
|
jesusmaherrera/django-nuxtjs/backend/settings.py
|
1c412d70-b646-4327-91f4-294510df3ca9
|
[{"tag": "API_KEY", "value": "2(iwreobf4b(-=h_p=^!obgxdgn3_*s!17=_3wc4dun9_y^q+c", "start": 709, "end": 759, "context": "cret key used in production secret!\nSECRET_KEY = '2(iwreobf4b(-=h_p=^!obgxdgn3_*s!17=_3wc4dun9_y^q+c'\n\n# SECURITY WARNING: don't run with debug turned"}]
|
[{"tag": "KEY", "value": "2(iwreobf4b(-=h_p=^!obgxdgn3_*s!17=_3wc4dun9_y^q+c", "start": 709, "end": 759, "context": "cret key used in production secret!\nSECRET_KEY = '2(iwreobf4b(-=h_p=^!obgxdgn3_*s!17=_3wc4dun9_y^q+c'\n\n# SECURITY WARNING: don't run with debug turned"}]
|
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from urllib import parse
from kazoo.client import KazooClient
from kazoo.security import make_digest_acl
from arch.api.utils import file_utils
from arch.api.utils.core_utils import get_lan_ip
class CenterConfig(object):
SERVERS = None
USE_ACL = False
ZK_USERNAME = 'fate'
ZK_PASSWORD = 'fate'
@staticmethod
def get_settings(path, servings_zk_path=None, use_zk=False, hosts=None, server_conf_path=''):
if servings_zk_path and use_zk:
return CenterConfig.get_servings_from_zookeeper(servings_zk_path, hosts)
return CenterConfig.get_settings_from_file(path, server_conf_path)
@staticmethod
def get_settings_from_file(path, server_conf_path):
server_conf = file_utils.load_json_conf(server_conf_path)
data = server_conf
for k in path.split('/')[1:]:
data = data.get(k, None)
return data
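    # Worked example (illustrative): with server_conf loaded as
    # {"servings": {"hosts": ["127.0.0.1:8000"]}} and path "/servings/hosts",
    # the loop walks ["servings", "hosts"] and returns ["127.0.0.1:8000"].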
@staticmethod
def get_zk(hosts):
if CenterConfig.USE_ACL:
default_acl = make_digest_acl(CenterConfig.ZK_USERNAME, CenterConfig.ZK_PASSWORD, all=True)
zk = KazooClient(hosts=hosts, default_acl=[default_acl], auth_data=[("digest", "{}:{}".format(
CenterConfig.ZK_USERNAME, CenterConfig.ZK_PASSWORD))])
else:
zk = KazooClient(hosts=hosts)
return zk
@staticmethod
def get_servings_from_zookeeper(path, hosts):
try:
zk = CenterConfig.get_zk(hosts)
zk.start()
nodes = zk.get_children(path)
CenterConfig.SERVERS = nodes_unquote(nodes)
zk.stop()
return CenterConfig.SERVERS
except Exception as e:
raise Exception('loading servings node failed from zookeeper: {}'.format(e))
@staticmethod
def init(hosts, use_configuation_center, fate_flow_zk_path, fate_flow_port, model_transfer_path):
if use_configuation_center:
zk = CenterConfig.get_zk(hosts)
zk.start()
model_host = 'http://{}:{}{}'.format(get_lan_ip(), fate_flow_port, model_transfer_path)
fate_flow_zk_path = '{}/{}'.format(fate_flow_zk_path, parse.quote(model_host, safe=' '))
try:
zk.create(fate_flow_zk_path, makepath=True)
            except Exception:
                # the node may already exist; that is fine
                pass
zk.stop()
def nodes_unquote(nodes):
urls = [parse.unquote(node) for node in nodes]
servings = []
for url in urls:
try:
servings.append(url.split('/')[2])
        except IndexError:
            # skip malformed registration entries
            pass
return servings
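# Worked example (illustrative): a child node registered as
# 'http%3A%2F%2F192.168.0.1%3A8000%2Ftransfer' unquotes to
# 'http://192.168.0.1:8000/transfer', and split('/')[2] extracts the
# host:port part, '192.168.0.1:8000'.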
|
Python
|
Apache-2.0
|
AustinNeverPee/FedRec/fate_flow/utils/setting_utils.py
|
2d192d82-bccc-4411-90a2-417f604e9154
|
[]
|
[]
|
# You should modify this for your own use.
# In particular, set the FQDN to your domain name, and
# pick and set a secure SECRET_KEY. If you are going
# to run HA, you will want to modify the SQLALCHEMY
# variables to point to your shared server rather than
# SQLite3.
import os
ENV = os.environ.get("ENV", "dev")
SECRET_KEY = 'top-secret'
SQLALCHEMY_DATABASE_URI = 'sqlite:///db.sqlite'
SQLALCHEMY_TRACK_MODIFICATIONS = False
PERMANENT_SESSION_LIFETIME = 60 * 60 * 20
BOOTSTRAP_CDN_FORCE_SSL = True
BOOTSTRAP_SERVE_LOCAL = True
SCHEME = "https"
FQDN = f'fed-{ENV}.bortels.us'
URL = f'{SCHEME}://{FQDN}'
|
Python
|
MIT
|
bortels/awsfed/app/config.py
|
a83df554-bbbe-4db6-a053-b2756988dfcc
|
[]
|
[]
|
"""
Credentials used when making CLIs.
"""
from pathlib import Path
from dcos_e2e.cluster import Cluster
DEFAULT_SUPERUSER_USERNAME = 'bootstrapuser'
DEFAULT_SUPERUSER_PASSWORD = 'deleteme'
def add_authorized_key(cluster: Cluster, public_key_path: Path) -> None:
"""
Add an authorized key to all nodes in the given cluster.
"""
nodes = {
*cluster.masters,
*cluster.agents,
*cluster.public_agents,
}
for node in nodes:
node.run(
args=['echo', '', '>>', '/root/.ssh/authorized_keys'],
shell=True,
)
node.run(
args=[
'echo',
public_key_path.read_text(),
'>>',
'/root/.ssh/authorized_keys',
],
shell=True,
)
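# Illustrative usage (cluster construction elided, since it depends on the
# chosen dcos_e2e backend):
#
#     add_authorized_key(cluster, Path('~/.ssh/id_rsa.pub').expanduser())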
|
Python
|
Apache-2.0
|
dcos/dcos-e2e/src/dcos_e2e_cli/common/credentials.py
|
92dd7d2c-c235-4a24-97f8-4460a31bd1c2
|
[]
|
[]
|
# Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
import os
import re  # used by determine_version below
from spack.package import *
class Visit(CMakePackage):
"""VisIt is an Open Source, interactive, scalable, visualization,
animation and analysis tool. See comments in VisIt's package.py
for tips about building VisIt with spack. Building VisIt with
Spack is still experimental and many standard features are likely
    disabled.
LINUX-------------------------------------------------------------------
spack install visit ^python+shared ^glib@2.56.3 ^py-setuptools@44.1.0
LINUX-W/O-OPENGL--------------------------------------------------------
spack install visit ^python+shared ^glib@2.56.3 ^py-setuptools@44.1.0 \\
^mesa+opengl
MACOS-------------------------------------------------------------------
spack install visit ^python+shared ^glib@2.56.3 ^py-setuptools@44.1.0 \\
^qt~framework
"""
############################
# Suggestions for building:
############################
# cyrush note:
#
# Out of the box, VisIt's python 2 requirement will cause
    # spack spec constraint errors due to Qt + Mesa build
# dependencies.
#
# You can avoid this using:
#
# linux:
# spack install visit ^python+shared ^glib@2.56.3 ^py-setuptools@44.1.0
#
    # linux w/o opengl: (add mesa as the OpenGL provider if the system lacks OpenGL)
#
# spack install visit ^python+shared ^glib@2.56.3 ^py-setuptools@44.1.0 \
# ^mesa+opengl
#
# macOS:
# spack install visit ^python+shared ^glib@2.56.3 ^py-setuptools@44.1.0 \
# ^qt~framework
#
    # Rpath issues undermine qwt (not qt) when it is built as a framework.
# VisIt's osxfixup resolves this for us in other cases,
# but we can't use osxfixup with spack b/c it will undermine other libs.
#
# Even with these changes, VisIt's Python CLI does not work on macOS,
# there is a linking issue related to OpenSSL.
# (dyld: Symbol not found: _GENERAL_NAME_free - which comes from OpenSSL)
#
############################
homepage = "https://wci.llnl.gov/simulation/computer-codes/visit/"
git = "https://github.com/visit-dav/visit.git"
url = "https://github.com/visit-dav/visit/releases/download/v3.2.1/visit3.2.1.tar.gz"
tags = ['radiuss']
maintainers = ['cyrush']
extendable = True
executables = ['^visit$']
version('develop', branch='develop')
version('3.2.2', sha256='d19ac24c622a3bc0a71bc9cd6e5c9860e43f39e3279672129278b6ebce8d0ead')
version('3.2.1', sha256='779d59564c63f31fcbfeff24b14ddd6ac941b3bb7d671d31765a770d193f02e8')
version('3.1.1', sha256='0b60ac52fd00aff3cf212a310e36e32e13ae3ca0ddd1ea3f54f75e4d9b6c6cf0')
version('3.0.1', sha256='a506d4d83b8973829e68787d8d721199523ce7ec73e7594e93333c214c2c12bd')
root_cmakelists_dir = 'src'
generator = "Ninja"
variant('gui', default=True, description='Enable VisIt\'s GUI')
variant('osmesa', default=False, description='Use OSMesa for off-screen CPU rendering')
variant('adios2', default=True, description='Enable ADIOS2 file format')
variant('hdf5', default=True, description='Enable HDF5 file format')
variant('silo', default=True, description='Enable Silo file format')
variant('python', default=True, description='Enable Python support')
variant('mpi', default=True, description='Enable parallel engine')
patch('spack-changes-3.1.patch', when="@3.1.0:,develop")
patch('spack-changes-3.0.1.patch', when="@3.0.1")
patch('nonframework-qwt.patch', when='^qt~framework platform=darwin')
patch('parallel-hdf5.patch', when='+hdf5+mpi')
# Exactly one of 'gui' or 'osmesa' has to be enabled
conflicts('+gui', when='+osmesa')
#############################################
# Full List of dependencies from build_visit
#############################################
# cyrush note:
# I added these here to give folks details
# to help eventually build up to full
# support for visit
#############################################
# =====================================
# core:
# =====================================
# cmake (build)
# vtk
# qt
# qwt
# python
# mpi
#
# =====================================
# rendering (optional):
# =====================================
# icet
# vtk-m
# vtk-h
# llvm
# mesagl
# osmesa
# tbb
# embree
# ispc
# ospray
#
# =====================================
# python modules:
# =====================================
# numpy
# pillow
# mpi4py
# seedme
# sphinx (build, docs)
# sphinx rtd theme (build, docs)
# pyqt (visit support deprecated)
# pyside (note: we want pyside 2)
#
# =====================================
# testing related:
# =====================================
# p7zip (build, test)
#
# =====================================
# io libs:
# =====================================
# adios
# adios2
# advio
# boost
# boxlib
# cfitsio
# cgns
# conduit
# damaris
# fastbit
# fastquery
# gdal
# h5part
# hdf4
# hdf5
# mdsplus
# mfem
# mili
# moab
# mxml
# nektarpp
# netcdf
# openexr
# pidx
# silo
# stripack
# szip
# tbb
# uintah
# xdmf
# xercesc
# xsd
# zlib
#
# =====================================
depends_on('cmake@3.14.7:', type='build')
depends_on('ninja', type='build')
depends_on('mpi', when='+mpi')
# VTK flavors
depends_on('vtk@8.1:8 +opengl2')
depends_on('vtk +osmesa', when='+osmesa')
depends_on('vtk +qt', when='+gui')
depends_on('vtk +python', when='+python')
depends_on('vtk +mpi', when='+mpi')
depends_on('vtk ~mpi', when='~mpi')
# Necessary VTK patches
depends_on('vtk', patches=[patch('vtk_compiler_visibility.patch')])
depends_on('vtk', patches=[patch('vtk_rendering_opengl2_x11.patch')],
when='~osmesa platform=linux')
depends_on('vtk', patches=[patch('vtk_wrapping_python_x11.patch')],
when='+python')
depends_on('glu', when='~osmesa')
depends_on('mesa-glu+osmesa', when='+osmesa')
# VisIt doesn't work with later versions of qt.
depends_on('qt+gui+opengl@5:5.14', when='+gui')
depends_on('qwt', when='+gui')
# python@3.8 doesn't work with VisIt.
depends_on('python@3.2:3.7', when='+python')
extends('python', when='+python')
# VisIt uses the hdf5 1.8 api
# set the API version later on down in setup_build_environment
depends_on('hdf5@1.8:', when='+hdf5')
depends_on('hdf5+mpi', when='+hdf5+mpi')
depends_on('hdf5~mpi', when='+hdf5~mpi')
# VisIt uses Silo's 'ghost zone' data structures, which are only available
# in v4.10+ releases: https://wci.llnl.gov/simulation/computer-codes/silo/releases/release-notes-4.10
depends_on('silo@4.10: +shared', when='+silo')
depends_on('silo+hdf5', when='+silo+hdf5')
depends_on('silo~hdf5', when='+silo~hdf5')
depends_on('silo+mpi', when='+silo+mpi')
depends_on('silo~mpi', when='+silo~mpi')
depends_on('adios2@2.6:', when='+adios2')
depends_on('adios2+hdf5', when='+adios2+hdf5')
depends_on('adios2~hdf5', when='+adios2~hdf5')
depends_on('adios2+mpi', when='+adios2+mpi')
depends_on('adios2~mpi', when='+adios2~mpi')
depends_on('adios2+python', when='+adios2+python')
depends_on('adios2~python', when='+adios2~python')
depends_on('zlib')
@when('@3:,develop')
def patch(self):
# Some of VTK's targets don't create explicit libraries, so there is no
# 'vtktiff'. Instead, replace with the library variable defined from
# VTK's module flies (e.g. lib/cmake/vtk-8.1/Modules/vtktiff.cmake)
for filename in find('src', 'CMakeLists.txt'):
filter_file(r'\bvtk(tiff|jpeg|png)', r'${vtk\1_LIBRARIES}',
filename)
def flag_handler(self, name, flags):
if name in ('cflags', 'cxxflags'):
# NOTE: This is necessary in order to allow VisIt to compile a couple
# of lines of code with 'const char*' to/from 'char*' conversions.
if '@3:%gcc' in self.spec:
flags.append('-fpermissive')
# VisIt still uses the hdf5 1.8 api
if '+hdf5' in self.spec and '@1.10:' in self.spec['hdf5']:
flags.append('-DH5_USE_18_API')
return (flags, None, None)
def cmake_args(self):
spec = self.spec
args = [
self.define('CMAKE_POSITION_INDEPENDENT_CODE', True),
self.define('VTK_MAJOR_VERSION', spec['vtk'].version[0]),
self.define('VTK_MINOR_VERSION', spec['vtk'].version[1]),
self.define('VISIT_VTK_DIR', spec['vtk'].prefix),
self.define('VISIT_ZLIB_DIR', spec['zlib'].prefix),
self.define('VISIT_USE_GLEW', False),
self.define('VISIT_CONFIG_SITE', 'NONE'),
self.define('VISIT_INSTALL_THIRD_PARTY', True),
]
if '@3.1: platform=darwin' in spec:
args.append(self.define('FIXUP_OSX', False))
if '+python' in spec:
args.extend([
self.define('VISIT_PYTHON_FILTERS', True),
self.define('VISIT_PYTHON_SCRIPTING', True),
self.define('PYTHON_DIR', spec['python'].home),
])
else:
args.extend([
self.define('VISIT_PYTHON_FILTERS', False),
self.define('VISIT_PYTHON_SCRIPTING', False),
])
if '+gui' in spec:
qt_bin = spec['qt'].prefix.bin
qmake_exe = os.path.join(qt_bin, 'qmake')
args.extend([
self.define('VISIT_SERVER_COMPONENTS_ONLY', False),
self.define('VISIT_ENGINE_ONLY', False),
self.define('VISIT_LOC_QMAKE_EXE', qmake_exe),
self.define('VISIT_QT_DIR', spec['qt'].prefix),
self.define('VISIT_QWT_DIR', spec['qwt'].prefix),
])
else:
args.extend([
self.define('VISIT_SERVER_COMPONENTS_ONLY', True),
self.define('VISIT_ENGINE_ONLY', True),
])
# No idea why this is actually needed
if '^mesa' in spec:
args.append(self.define('VISIT_MESAGL_DIR', spec['mesa'].prefix))
if '+llvm' in spec['mesa']:
args.append(self.define('VISIT_LLVM_DIR', spec['libllvm'].prefix))
if '+hdf5' in spec:
args.append(self.define('VISIT_HDF5_DIR', spec['hdf5'].prefix))
if '+mpi' in spec and '+mpi' in spec['hdf5']:
args.append(self.define('VISIT_HDF5_MPI_DIR', spec['hdf5'].prefix))
if '+silo' in spec:
args.append(self.define('VISIT_SILO_DIR', spec['silo'].prefix))
if '+mpi' in spec:
args.extend([
self.define('VISIT_PARALLEL', True),
self.define('VISIT_MPI_COMPILER', spec['mpi'].mpicxx),
])
else:
args.append(self.define('VISIT_PARALLEL', False))
return args
# https://spack.readthedocs.io/en/latest/packaging_guide.html?highlight=executables#making-a-package-discoverable-with-spack-external-find
    # Here we are only able to determine the latest version,
    # even though multiple versions of VisIt may be installed
@classmethod
def determine_version(cls, exe):
output = Executable(exe)('-version', output=str, error=str)
match = re.search(r'\s*(\d[\d\.]+)\.', output)
return match.group(1) if match else None
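    # Example (illustrative): if `visit -version` prints 'VisIt 3.2.2.', the
    # regex above captures '3.2.2'; if the output format changes, this
    # method returns None.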
|
Python
|
ECL-2.0
|
jmellorcrummey/spack/var/spack/repos/builtin/packages/visit/package.py
|
8868f637-32c6-4521-886e-5c80c791585e
|
[]
|
[]
|
"""
app.py - Flask-based server.
@author Thomas J. Daley, J.D.
@version: 0.0.1
Copyright (c) 2019 by Thomas J. Daley, J.D.
"""
import argparse
import random
from flask import Flask, render_template, request, flash, redirect, url_for, session, jsonify
from wtforms import Form, StringField, TextAreaField, PasswordField, validators
from functools import wraps
from views.decorators import is_admin_user, is_logged_in, is_case_set
from webservice import WebService
from util.database import Database
from views.admin.admin_routes import admin_routes
from views.cases.case_routes import case_routes
from views.discovery.discovery_routes import discovery_routes
from views.drivers.driver_routes import driver_routes
from views.info.info_routes import info_routes
from views.login.login import login
from views.objections.objection_routes import objection_routes
from views.real_property.real_property_routes import rp_routes
from views.responses.response_routes import response_routes
from views.vehicles.vehicle_routes import vehicle_routes
WEBSERVICE = None
DATABASE = Database()
DATABASE.connect()
app = Flask(__name__)
app.register_blueprint(admin_routes)
app.register_blueprint(case_routes)
app.register_blueprint(discovery_routes)
app.register_blueprint(driver_routes)
app.register_blueprint(info_routes)
app.register_blueprint(login)
app.register_blueprint(objection_routes)
app.register_blueprint(rp_routes)
app.register_blueprint(response_routes)
app.register_blueprint(vehicle_routes)
# Helper to create Public Data credentials from session variables
def pd_credentials(mysession) -> dict:
return {
"username": session["pd_username"],
"password": session["pd_password"]
}
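# The session keys above are assumed to be populated at sign-in by the login
# blueprint (a sketch of intent; the login view itself is not shown here), e.g.
# session['pd_username'] = form.username.data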
@app.route('/', methods=['GET'])
def index():
return render_template('home.html')
@app.route('/attorney/find/<string:bar_number>', methods=['POST'])
@is_logged_in
def find_attorney(bar_number: str):
attorney = DATABASE.attorney(bar_number)
if attorney:
attorney['success'] = True
return jsonify(attorney)
return jsonify(
{
'success': False,
'message': "Unable to find attorney having Bar Number {}"
.format(bar_number)
}
)
if __name__ == "__main__":
parser = argparse.ArgumentParser(description="Webservice for DiscoveryBot")
parser.add_argument(
"--debug",
help="Run server in debug mode",
action='store_true'
)
parser.add_argument(
"--port",
help="TCP port to listen on",
type=int,
default=5001
)
parser.add_argument(
"--zillowid",
"-z",
help="Zillow API credential from https://www.zillow.com/howto/api/APIOverview.htm" # NOQA
)
args = parser.parse_args()
WEBSERVICE = WebService(args.zillowid)
app.secret_key = "SDFIIUWER*HGjdf8*"
app.run(debug=args.debug, port=args.port)
|
Python
|
MIT
|
tjdaley/publicdataws/app/app.py
|
675c6fcc-25c7-4e77-b934-02700eace8a9
|
[{"tag": "NAME", "value": "Thomas J. Daley", "start": 42, "end": 57, "context": "\"\"\"\napp.py - Flask-based server.\n\n@author Thomas J. Daley, J.D.\n@version: 0.0.1\nCopyright (c) 2019 by Thoma"}, {"tag": "NAME", "value": "Thomas J. Daley", "start": 102, "end": 117, "context": "Daley, J.D.\n@version: 0.0.1\nCopyright (c) 2019 by Thomas J. Daley, J.D.\n\"\"\"\nimport argparse\nimport random\nfrom flas"}]
|
[{"tag": "NAME", "value": "Thomas J. Daley", "start": 42, "end": 57, "context": "\"\"\"\napp.py - Flask-based server.\n\n@author Thomas J. Daley, J.D.\n@version: 0.0.1\nCopyright (c) 2019 by Thoma"}, {"tag": "NAME", "value": "Thomas J. Daley", "start": 102, "end": 117, "context": "Daley, J.D.\n@version: 0.0.1\nCopyright (c) 2019 by Thomas J. Daley, J.D.\n\"\"\"\nimport argparse\nimport random\nfrom flas"}]
|
from ewah.hooks.base import EWAHBaseHook
import requests
import time
class EWAHAircallHook(EWAHBaseHook):
_ATTR_RELABEL = {
"api_id": "login",
"api_token": "password",
}
conn_name_attr = "ewah_aircall_conn_id"
default_conn_name = "ewah_aircall_default"
conn_type = "ewah_aircall"
hook_name = "EWAH Aircall Connection"
_RESOURCES = {
"users": {"incremental": True},
"teams": {},
"calls": {"incremental": True},
"numbers": {"incremental": True},
"contacts": {"incremental": True},
"tags": {},
}
_BASE_URL = "https://api.aircall.io/v1/{0}"
@staticmethod
def get_ui_field_behaviour():
return {
"hidden_fields": ["port", "schema", "extra", "host"],
"relabeling": {
"login": "Basic Auth API ID",
"password": "Baisc Auth API Token",
},
}
def get_data_in_batches(
self,
resource,
data_from=None,
data_until=None,
batch_size=10000,
batch_call_pause_seconds=1,
):
_msg = "batch_size param must be a positive integer <= 10k "
assert isinstance(batch_size, int), _msg
assert batch_size > 0, _msg
assert batch_size <= 10000, _msg
page_size = 50 # maximum page size is 50
auth = requests.auth.HTTPBasicAuth(
self.conn.api_id,
self.conn.api_token,
)
url = self._BASE_URL.format(resource)
params = {
"per_page": page_size,
}
if data_from:
params["from"] = int(time.mktime(data_from.timetuple()))
if data_until:
params["to"] = int(time.mktime((data_until).timetuple()))
data = []
while url:
time.sleep(batch_call_pause_seconds)
request = requests.get(url, params=params, auth=auth)
assert request.status_code == 200, request.text
response = request.json()
url = response.get("meta", {}).get("next_page_link")
data += response.get(resource, [])
if (not url) or (len(data) + page_size > batch_size):
yield data
data = []
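    # Illustrative usage (connection setup elided; it depends on how the Airflow
    # connection for this hook is configured):
    #
    #     hook = EWAHAircallHook(ewah_aircall_conn_id='ewah_aircall_default')
    #     for batch in hook.get_data_in_batches('calls', batch_size=5000):
    #         load(batch)  # `load` is a hypothetical callback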
|
Python
|
MIT
|
Gemma-Analytics/ewah/ewah/hooks/aircall.py
|
dc34cd4b-9876-4530-9e76-8399c538c613
|
[]
|
[]
|
/*
* Copyright 2019-2020 Douglas Kaip
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/*
* vkCmdCopyQueryPoolResults.cpp
*
* Created on: Oct 23, 2019
* Author: Douglas Kaip
*/
#include "com_CIMthetics_jvulkan_VulkanCore_NativeProxies.h"
#include "JVulkanHelperFunctions.hh"
#include "slf4j.hh"
/*
* Class: com_CIMthetics_jvulkan_VulkanCore_NativeProxies
* Method: vkCmdCopyQueryPoolResults
* Signature: (Lcom/CIMthetics/jvulkan/VulkanCore/Handles/VkCommandBuffer;Lcom/CIMthetics/jvulkan/VulkanCore/Handles/VkQueryPool;IILcom/CIMthetics/jvulkan/VulkanCore/Handles/VkBuffer;JJLjava/util/EnumSet;)V
*/
JNIEXPORT void JNICALL Java_com_CIMthetics_jvulkan_VulkanCore_NativeProxies_vkCmdCopyQueryPoolResults
(JNIEnv *env, jobject, jobject jVkCommandBuffer, jobject jVkQueryPool, jint firstQuery, jint queryCount, jobject jVkBuffer, jlong dstOffset, jlong stride, jobject jVkQueryResultFlags)
{
VkCommandBuffer_T *commandBufferHandle = (VkCommandBuffer_T *)jvulkan::getHandleValue(env, jVkCommandBuffer);
if (env->ExceptionOccurred())
{
LOGERROR(env, "%s", "Could not retrieve VkCommandBuffer handle");
return;
}
VkQueryPool_T *queryPoolHandle = (VkQueryPool_T *)jvulkan::getHandleValue(env, jVkQueryPool);
if (env->ExceptionOccurred())
{
LOGERROR(env, "%s", "Could not retrieve jVkQueryPool handle");
return;
}
VkBuffer_T *bufferHandle = (VkBuffer_T *)jvulkan::getHandleValue(env, jVkBuffer);
if (env->ExceptionOccurred())
{
LOGERROR(env, "%s", "Could not retrieve VkBuffer handle");
return;
}
VkQueryResultFlags flags = (VkQueryResultFlags)jvulkan::getEnumSetValue(
env,
jVkQueryResultFlags,
"com/CIMthetics/jvulkan/VulkanCore/Enums/VkQueryResultFlagBits");
if (env->ExceptionOccurred())
{
LOGERROR(env, "%s", "Error calling getEnumSetValue");
return;
}
vkCmdCopyQueryPoolResults(
commandBufferHandle,
queryPoolHandle,
(unsigned int)firstQuery,
(unsigned int)queryCount,
bufferHandle,
(unsigned long int)dstOffset,
(unsigned long int)stride,
flags);
}
|
C++
|
Apache-2.0
|
dkaip/jvulkan-natives-Linux-x86_64/VulkanFunctions/vkCmdCopyQueryPoolResults.cpp
|
f49d2631-d2b8-4997-a08b-7e2b7b807a6b
|
[{"tag": "NAME", "value": "Douglas Kaip", "start": 681, "end": 693, "context": "p\n *\n * Created on: Oct 23, 2019\n * Author: Douglas Kaip\n */\n\n#include \"com_CIMthetics_jvulkan_VulkanCore_"}, {"tag": "NAME", "value": "Douglas Kaip", "start": 26, "end": 38, "context": "/*\n * Copyright 2019-2020 Douglas Kaip\n *\n * Licensed under the Apache License, Version "}]
|
[{"tag": "NAME", "value": "Douglas Kaip", "start": 681, "end": 693, "context": "p\n *\n * Created on: Oct 23, 2019\n * Author: Douglas Kaip\n */\n\n#include \"com_CIMthetics_jvulkan_VulkanCore_"}, {"tag": "NAME", "value": "Douglas Kaip", "start": 26, "end": 38, "context": "/*\n * Copyright 2019-2020 Douglas Kaip\n *\n * Licensed under the Apache License, Version "}]
|
/* -*- Mode: C++; tab-width: 2; indent-tabs-mode: nil; c-basic-offset: 2 -*- */
/* vim: set ts=2 et sw=2 tw=80: */
/* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at http://mozilla.org/MPL/2.0/. */
#include "sandboxBroker.h"
#include "sandbox/win/src/sandbox.h"
#include "sandbox/win/src/sandbox_factory.h"
#include "sandbox/win/src/security_level.h"
#include "mozilla/sandboxing/sandboxLogging.h"
namespace mozilla
{
sandbox::BrokerServices *SandboxBroker::sBrokerService = nullptr;
SandboxBroker::SandboxBroker()
{
// XXX: This is not thread-safe! Two threads could simultaneously try
// to set `sBrokerService`
if (!sBrokerService) {
sBrokerService = sandbox::SandboxFactory::GetBrokerServices();
if (sBrokerService) {
sandbox::ResultCode result = sBrokerService->Init();
if (result != sandbox::SBOX_ALL_OK) {
sBrokerService = nullptr;
}
}
}
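  // If initialization failed above, sBrokerService is still null here and the
  // CreatePolicy() call below will dereference it; this code assumes broker
  // initialization succeeded.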
mPolicy = sBrokerService->CreatePolicy();
}
bool
SandboxBroker::LaunchApp(const wchar_t *aPath,
const wchar_t *aArguments,
const bool aEnableLogging,
void **aProcessHandle)
{
if (!sBrokerService || !mPolicy) {
return false;
}
// Set stdout and stderr, to allow inheritance for logging.
mPolicy->SetStdoutHandle(::GetStdHandle(STD_OUTPUT_HANDLE));
mPolicy->SetStderrHandle(::GetStdHandle(STD_ERROR_HANDLE));
// If logging enabled, set up the policy.
if (aEnableLogging) {
mozilla::sandboxing::ApplyLoggingPolicy(*mPolicy);
}
  // Create the sandboxed process
PROCESS_INFORMATION targetInfo;
sandbox::ResultCode result;
result = sBrokerService->SpawnTarget(aPath, aArguments, mPolicy, &targetInfo);
// The sandboxed process is started in a suspended state, resume it now that
// we've set things up.
ResumeThread(targetInfo.hThread);
CloseHandle(targetInfo.hThread);
// Return the process handle to the caller
*aProcessHandle = targetInfo.hProcess;
return true;
}
#if defined(MOZ_CONTENT_SANDBOX)
bool
SandboxBroker::SetSecurityLevelForContentProcess(int32_t aSandboxLevel)
{
if (!mPolicy) {
return false;
}
sandbox::JobLevel jobLevel;
sandbox::TokenLevel accessTokenLevel;
sandbox::IntegrityLevel initialIntegrityLevel;
sandbox::IntegrityLevel delayedIntegrityLevel;
if (aSandboxLevel > 2) {
jobLevel = sandbox::JOB_LOCKDOWN;
accessTokenLevel = sandbox::USER_LOCKDOWN;
initialIntegrityLevel = sandbox::INTEGRITY_LEVEL_LOW;
delayedIntegrityLevel = sandbox::INTEGRITY_LEVEL_UNTRUSTED;
} else if (aSandboxLevel == 2) {
jobLevel = sandbox::JOB_RESTRICTED;
accessTokenLevel = sandbox::USER_LIMITED;
initialIntegrityLevel = sandbox::INTEGRITY_LEVEL_LOW;
delayedIntegrityLevel = sandbox::INTEGRITY_LEVEL_LOW;
} else if (aSandboxLevel == 1) {
jobLevel = sandbox::JOB_NONE;
accessTokenLevel = sandbox::USER_NON_ADMIN;
initialIntegrityLevel = sandbox::INTEGRITY_LEVEL_LOW;
delayedIntegrityLevel = sandbox::INTEGRITY_LEVEL_LOW;
} else {
jobLevel = sandbox::JOB_NONE;
accessTokenLevel = sandbox::USER_NON_ADMIN;
// INTEGRITY_LEVEL_LAST effectively means don't change from the integrity
// level of the broker process.
initialIntegrityLevel = sandbox::INTEGRITY_LEVEL_LAST;
delayedIntegrityLevel = sandbox::INTEGRITY_LEVEL_MEDIUM;
}
sandbox::ResultCode result = mPolicy->SetJobLevel(jobLevel,
0 /* ui_exceptions */);
bool ret = (sandbox::SBOX_ALL_OK == result);
result = mPolicy->SetTokenLevel(sandbox::USER_RESTRICTED_SAME_ACCESS,
accessTokenLevel);
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->SetIntegrityLevel(initialIntegrityLevel);
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->SetDelayedIntegrityLevel(delayedIntegrityLevel);
ret = ret && (sandbox::SBOX_ALL_OK == result);
if (aSandboxLevel > 1) {
result = mPolicy->SetAlternateDesktop(true);
ret = ret && (sandbox::SBOX_ALL_OK == result);
}
// Add the policy for the client side of a pipe. It is just a file
// in the \pipe\ namespace. We restrict it to pipes that start with
// "chrome." so the sandboxed process cannot connect to system services.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
L"\\??\\pipe\\chrome.*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
// Add the policy for the client side of the crash server pipe.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
L"\\??\\pipe\\gecko-crash-server-pipe.*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
// The content process needs to be able to duplicate named pipes back to the
// broker process, which are File type handles.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_HANDLES,
sandbox::TargetPolicy::HANDLES_DUP_BROKER,
L"File");
ret = ret && (sandbox::SBOX_ALL_OK == result);
// The content process needs to be able to duplicate shared memory to the
// broker process, which are Section type handles.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_HANDLES,
sandbox::TargetPolicy::HANDLES_DUP_BROKER,
L"Section");
ret = ret && (sandbox::SBOX_ALL_OK == result);
return ret;
}
#endif
bool
SandboxBroker::SetSecurityLevelForPluginProcess(int32_t aSandboxLevel)
{
if (!mPolicy) {
return false;
}
sandbox::ResultCode result;
bool ret;
if (aSandboxLevel >= 2) {
result = mPolicy->SetJobLevel(sandbox::JOB_UNPROTECTED,
0 /* ui_exceptions */);
ret = (sandbox::SBOX_ALL_OK == result);
sandbox::TokenLevel tokenLevel;
if (aSandboxLevel >= 3) {
tokenLevel = sandbox::USER_LIMITED;
} else {
tokenLevel = sandbox::USER_INTERACTIVE;
}
result = mPolicy->SetTokenLevel(sandbox::USER_RESTRICTED_SAME_ACCESS,
tokenLevel);
ret = ret && (sandbox::SBOX_ALL_OK == result);
sandbox::MitigationFlags mitigations =
sandbox::MITIGATION_BOTTOM_UP_ASLR |
sandbox::MITIGATION_HEAP_TERMINATE |
sandbox::MITIGATION_SEHOP |
sandbox::MITIGATION_DEP_NO_ATL_THUNK |
sandbox::MITIGATION_DEP;
result = mPolicy->SetProcessMitigations(mitigations);
ret = ret && (sandbox::SBOX_ALL_OK == result);
mitigations =
sandbox::MITIGATION_STRICT_HANDLE_CHECKS;
result = mPolicy->SetDelayedProcessMitigations(mitigations);
ret = ret && (sandbox::SBOX_ALL_OK == result);
// The following is required for the Java plugin.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
L"\\??\\pipe\\jpi2_pid*_pipe*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
} else {
result = mPolicy->SetJobLevel(sandbox::JOB_NONE,
0 /* ui_exceptions */);
ret = (sandbox::SBOX_ALL_OK == result);
result = mPolicy->SetTokenLevel(sandbox::USER_RESTRICTED_SAME_ACCESS,
sandbox::USER_NON_ADMIN);
ret = ret && (sandbox::SBOX_ALL_OK == result);
}
result = mPolicy->SetDelayedIntegrityLevel(sandbox::INTEGRITY_LEVEL_MEDIUM);
ret = ret && (sandbox::SBOX_ALL_OK == result);
// Add the policy for the client side of a pipe. It is just a file
// in the \pipe\ namespace. We restrict it to pipes that start with
// "chrome." so the sandboxed process cannot connect to system services.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
L"\\??\\pipe\\chrome.*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
// The NPAPI process needs to be able to duplicate shared memory to the
// content process, which are Section type handles.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_HANDLES,
sandbox::TargetPolicy::HANDLES_DUP_ANY,
L"Section");
ret = ret && (sandbox::SBOX_ALL_OK == result);
return ret;
}
bool
SandboxBroker::SetSecurityLevelForIPDLUnitTestProcess()
{
if (!mPolicy) {
return false;
}
auto result = mPolicy->SetJobLevel(sandbox::JOB_NONE, 0);
bool ret = (sandbox::SBOX_ALL_OK == result);
result =
mPolicy->SetTokenLevel(sandbox::USER_RESTRICTED_SAME_ACCESS,
sandbox::USER_RESTRICTED_SAME_ACCESS);
ret = ret && (sandbox::SBOX_ALL_OK == result);
return ret;
}
bool
SandboxBroker::SetSecurityLevelForGMPlugin()
{
if (!mPolicy) {
return false;
}
auto result = mPolicy->SetJobLevel(sandbox::JOB_LOCKDOWN, 0);
bool ret = (sandbox::SBOX_ALL_OK == result);
result =
mPolicy->SetTokenLevel(sandbox::USER_RESTRICTED_SAME_ACCESS,
sandbox::USER_LOCKDOWN);
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->SetAlternateDesktop(true);
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->SetIntegrityLevel(sandbox::INTEGRITY_LEVEL_LOW);
ret = ret && (sandbox::SBOX_ALL_OK == result);
result =
mPolicy->SetDelayedIntegrityLevel(sandbox::INTEGRITY_LEVEL_UNTRUSTED);
ret = ret && (sandbox::SBOX_ALL_OK == result);
sandbox::MitigationFlags mitigations =
sandbox::MITIGATION_BOTTOM_UP_ASLR |
sandbox::MITIGATION_HEAP_TERMINATE |
sandbox::MITIGATION_SEHOP |
sandbox::MITIGATION_DEP_NO_ATL_THUNK |
sandbox::MITIGATION_DEP;
result = mPolicy->SetProcessMitigations(mitigations);
ret = ret && (sandbox::SBOX_ALL_OK == result);
mitigations =
sandbox::MITIGATION_STRICT_HANDLE_CHECKS |
sandbox::MITIGATION_DLL_SEARCH_ORDER;
result = mPolicy->SetDelayedProcessMitigations(mitigations);
ret = ret && (sandbox::SBOX_ALL_OK == result);
// Add the policy for the client side of a pipe. It is just a file
// in the \pipe\ namespace. We restrict it to pipes that start with
// "chrome." so the sandboxed process cannot connect to system services.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
L"\\??\\pipe\\chrome.*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
// Add the policy for the client side of the crash server pipe.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
L"\\??\\pipe\\gecko-crash-server-pipe.*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
#ifdef DEBUG
// The plugin process can't create named events, but we'll
// make an exception for the events used in logging. Removing
// this will break EME in debug builds.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_SYNC,
sandbox::TargetPolicy::EVENTS_ALLOW_ANY,
L"ChromeIPCLog.*");
ret = ret && (sandbox::SBOX_ALL_OK == result);
#endif
// The following rules were added because, during analysis of an EME
// plugin during development, these registry keys were accessed when
// loading the plugin. Commenting out these policy exceptions caused
// plugin loading to fail, so they are necessary for proper functioning
// of at least one EME plugin.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_CURRENT_USER");
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_CURRENT_USER\\Control Panel\\Desktop");
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_CURRENT_USER\\Control Panel\\Desktop\\LanguageConfiguration");
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\SideBySide");
ret = ret && (sandbox::SBOX_ALL_OK == result);
// The following rules were added because, during analysis of an EME
// plugin during development, these registry keys were accessed when
// loading the plugin. Commenting out these policy exceptions did not
// cause anything to break during initial testing, but might cause
// unforeseen issues down the road.
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_LOCAL_MACHINE\\SOFTWARE\\Policies\\Microsoft\\MUI\\Settings");
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_CURRENT_USER\\Software\\Policies\\Microsoft\\Control Panel\\Desktop");
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_CURRENT_USER\\Control Panel\\Desktop\\PreferredUILanguages");
ret = ret && (sandbox::SBOX_ALL_OK == result);
result = mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_REGISTRY,
sandbox::TargetPolicy::REG_ALLOW_READONLY,
L"HKEY_LOCAL_MACHINE\\SOFTWARE\\Microsoft\\Windows\\CurrentVersion\\SideBySide\\PreferExternalManifest");
ret = ret && (sandbox::SBOX_ALL_OK == result);
return ret;
}
bool
SandboxBroker::AllowReadFile(wchar_t const *file)
{
auto result =
mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_READONLY,
file);
return (sandbox::SBOX_ALL_OK == result);
}
bool
SandboxBroker::AllowReadWriteFile(wchar_t const *file)
{
auto result =
mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_ANY,
file);
return (sandbox::SBOX_ALL_OK == result);
}
bool
SandboxBroker::AllowDirectory(wchar_t const *dir)
{
auto result =
mPolicy->AddRule(sandbox::TargetPolicy::SUBSYS_FILES,
sandbox::TargetPolicy::FILES_ALLOW_DIR_ANY,
dir);
return (sandbox::SBOX_ALL_OK == result);
}
SandboxBroker::~SandboxBroker()
{
if (mPolicy) {
mPolicy->Release();
mPolicy = nullptr;
}
}
}
|
C++
|
MIT
|
fstudio/Phoenix/tools/Packers/deps/sandbox/win/src/sandboxbroker/sandboxBroker.cpp
|
e90c4120-fd1d-4a87-b67d-d9f4a6a52966
|
[]
|
[]
|
// Copyright (c) 2010 Satoshi Nakamoto
// Original Code: Copyright (c) 2009-2014 The Bitcoin Core Developers
// Modified Code: Copyright (c) 2014 Project Bitmark
// Distributed under the MIT/X11 software license, see the accompanying
// file COPYING or http://www.opensource.org/licenses/mit-license.php.
#include "base58.h"
#include "core.h"
#include "init.h"
#include "keystore.h"
#include "main.h"
#include "net.h"
#include "rpcserver.h"
#include "uint256.h"
#ifdef ENABLE_WALLET
#include "wallet.h"
#endif
#include <stdint.h>
#include <boost/assign/list_of.hpp>
#include "json/json_spirit_utils.h"
#include "json/json_spirit_value.h"
using namespace std;
using namespace boost;
using namespace boost::assign;
using namespace json_spirit;
void ScriptPubKeyToJSON(const CScript& scriptPubKey, Object& out, bool fIncludeHex)
{
txnouttype type;
vector<CTxDestination> addresses;
int nRequired;
out.push_back(Pair("asm", scriptPubKey.ToString()));
if (fIncludeHex)
out.push_back(Pair("hex", HexStr(scriptPubKey.begin(), scriptPubKey.end())));
if (!ExtractDestinations(scriptPubKey, type, addresses, nRequired))
{
out.push_back(Pair("type", GetTxnOutputType(type)));
return;
}
out.push_back(Pair("reqSigs", nRequired));
out.push_back(Pair("type", GetTxnOutputType(type)));
Array a;
BOOST_FOREACH(const CTxDestination& addr, addresses)
a.push_back(CBitmarkAddress(addr).ToString());
out.push_back(Pair("addresses", a));
}
void TxToJSON(const CTransaction& tx, const uint256 hashBlock, Object& entry)
{
entry.push_back(Pair("txid", tx.GetHash().GetHex()));
entry.push_back(Pair("version", tx.nVersion));
entry.push_back(Pair("locktime", (int64_t)tx.nLockTime));
Array vin;
BOOST_FOREACH(const CTxIn& txin, tx.vin)
{
Object in;
if (tx.IsCoinBase())
in.push_back(Pair("coinbase", HexStr(txin.scriptSig.begin(), txin.scriptSig.end())));
else
{
in.push_back(Pair("txid", txin.prevout.hash.GetHex()));
in.push_back(Pair("vout", (int64_t)txin.prevout.n));
Object o;
o.push_back(Pair("asm", txin.scriptSig.ToString()));
o.push_back(Pair("hex", HexStr(txin.scriptSig.begin(), txin.scriptSig.end())));
in.push_back(Pair("scriptSig", o));
}
in.push_back(Pair("sequence", (int64_t)txin.nSequence));
vin.push_back(in);
}
entry.push_back(Pair("vin", vin));
Array vout;
for (unsigned int i = 0; i < tx.vout.size(); i++)
{
const CTxOut& txout = tx.vout[i];
Object out;
out.push_back(Pair("value", ValueFromAmount(txout.nValue)));
out.push_back(Pair("n", (int64_t)i));
Object o;
ScriptPubKeyToJSON(txout.scriptPubKey, o, true);
out.push_back(Pair("scriptPubKey", o));
vout.push_back(out);
}
entry.push_back(Pair("vout", vout));
if (hashBlock != 0)
{
entry.push_back(Pair("blockhash", hashBlock.GetHex()));
map<uint256, CBlockIndex*>::iterator mi = mapBlockIndex.find(hashBlock);
if (mi != mapBlockIndex.end() && (*mi).second)
{
CBlockIndex* pindex = (*mi).second;
if (chainActive.Contains(pindex))
{
entry.push_back(Pair("confirmations", 1 + chainActive.Height() - pindex->nHeight));
entry.push_back(Pair("time", (int64_t)pindex->nTime));
entry.push_back(Pair("blocktime", (int64_t)pindex->nTime));
}
else
entry.push_back(Pair("confirmations", 0));
}
}
}
Value getrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"getrawtransaction \"txid\" ( verbose )\n"
"\nReturn the raw transaction data.\n"
"\nIf verbose=0, returns a string that is serialized, hex-encoded data for 'txid'.\n"
"If verbose is non-zero, returns an Object with information about 'txid'.\n"
"\nArguments:\n"
"1. \"txid\" (string, required) The transaction id\n"
"2. verbose (numeric, optional, default=0) If 0, return a string, other return a json object\n"
"\nResult (if verbose is not set or set to 0):\n"
"\"data\" (string) The serialized, hex-encoded data for 'txid'\n"
"\nResult (if verbose > 0):\n"
"{\n"
" \"hex\" : \"data\", (string) The serialized, hex-encoded data for 'txid'\n"
" \"txid\" : \"id\", (string) The transaction id (same as provided)\n"
" \"version\" : n, (numeric) The version\n"
" \"locktime\" : ttt, (numeric) The lock time\n"
" \"vin\" : [ (array of json objects)\n"
" {\n"
" \"txid\": \"id\", (string) The transaction id\n"
" \"vout\": n, (numeric) \n"
" \"scriptSig\": { (json object) The script\n"
" \"asm\": \"asm\", (string) asm\n"
" \"hex\": \"hex\" (string) hex\n"
" },\n"
" \"sequence\": n (numeric) The script sequence number\n"
" }\n"
" ,...\n"
" ],\n"
" \"vout\" : [ (array of json objects)\n"
" {\n"
" \"value\" : x.xxx, (numeric) The value in btm\n"
" \"n\" : n, (numeric) index\n"
" \"scriptPubKey\" : { (json object)\n"
" \"asm\" : \"asm\", (string) the asm\n"
" \"hex\" : \"hex\", (string) the hex\n"
" \"reqSigs\" : n, (numeric) The required sigs\n"
" \"type\" : \"pubkeyhash\", (string) The type, eg 'pubkeyhash'\n"
" \"addresses\" : [ (json array of string)\n"
" \"bitmarkaddress\" (string) bitmark address\n"
" ,...\n"
" ]\n"
" }\n"
" }\n"
" ,...\n"
" ],\n"
" \"blockhash\" : \"hash\", (string) the block hash\n"
" \"confirmations\" : n, (numeric) The confirmations\n"
" \"time\" : ttt, (numeric) The transaction time in seconds since epoch (Jan 1 1970 GMT)\n"
" \"blocktime\" : ttt (numeric) The block time in seconds since epoch (Jan 1 1970 GMT)\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("getrawtransaction", "\"mytxid\"")
+ HelpExampleCli("getrawtransaction", "\"mytxid\" 1")
+ HelpExampleRpc("getrawtransaction", "\"mytxid\", 1")
);
uint256 hash = ParseHashV(params[0], "parameter 1");
bool fVerbose = false;
if (params.size() > 1)
fVerbose = (params[1].get_int() != 0);
CTransaction tx;
uint256 hashBlock = 0;
if (!GetTransaction(hash, tx, hashBlock, true))
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "No information available about transaction");
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << tx;
string strHex = HexStr(ssTx.begin(), ssTx.end());
if (!fVerbose)
return strHex;
Object result;
result.push_back(Pair("hex", strHex));
TxToJSON(tx, hashBlock, result);
return result;
}
#ifdef ENABLE_WALLET
Value listunspent(const Array& params, bool fHelp)
{
if (fHelp || params.size() > 3)
throw runtime_error(
"listunspent ( minconf maxconf [\"address\",...] )\n"
"\nReturns array of unspent transaction outputs\n"
"with between minconf and maxconf (inclusive) confirmations.\n"
"Optionally filter to only include txouts paid to specified addresses.\n"
"Results are an array of Objects, each of which has:\n"
"{txid, vout, scriptPubKey, amount, confirmations}\n"
"\nArguments:\n"
"1. minconf (numeric, optional, default=1) The minimum confirmationsi to filter\n"
"2. maxconf (numeric, optional, default=9999999) The maximum confirmations to filter\n"
"3. \"addresses\" (string) A json array of bitmark addresses to filter\n"
" [\n"
" \"address\" (string) bitmark address\n"
" ,...\n"
" ]\n"
"\nResult\n"
"[ (array of json object)\n"
" {\n"
" \"txid\" : \"txid\", (string) the transaction id \n"
" \"vout\" : n, (numeric) the vout value\n"
" \"address\" : \"address\", (string) the bitmark address\n"
" \"account\" : \"account\", (string) The associated account, or \"\" for the default account\n"
" \"scriptPubKey\" : \"key\", (string) the script key\n"
" \"amount\" : x.xxx, (numeric) the transaction amount in btm\n"
" \"confirmations\" : n (numeric) The number of confirmations\n"
" }\n"
" ,...\n"
"]\n"
"\nExamples\n"
+ HelpExampleCli("listunspent", "")
+ HelpExampleCli("listunspent", "6 9999999 \"[\\\"1PGFqEzfmQch1gKD3ra4k18PNj3tTUUSqg\\\",\\\"1LtvqCaApEdUGFkpKMM4MstjcaL4dKg8SP\\\"]\"")
+ HelpExampleRpc("listunspent", "6, 9999999 \"[\\\"1PGFqEzfmQch1gKD3ra4k18PNj3tTUUSqg\\\",\\\"1LtvqCaApEdUGFkpKMM4MstjcaL4dKg8SP\\\"]\"")
);
RPCTypeCheck(params, list_of(int_type)(int_type)(array_type));
int nMinDepth = 1;
if (params.size() > 0)
nMinDepth = params[0].get_int();
int nMaxDepth = 9999999;
if (params.size() > 1)
nMaxDepth = params[1].get_int();
set<CBitmarkAddress> setAddress;
if (params.size() > 2)
{
Array inputs = params[2].get_array();
BOOST_FOREACH(Value& input, inputs)
{
CBitmarkAddress address(input.get_str());
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Bitmark address: ")+input.get_str());
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+input.get_str());
setAddress.insert(address);
}
}
Array results;
vector<COutput> vecOutputs;
assert(pwalletMain != NULL);
pwalletMain->AvailableCoins(vecOutputs, false);
BOOST_FOREACH(const COutput& out, vecOutputs)
{
if (out.nDepth < nMinDepth || out.nDepth > nMaxDepth)
continue;
if (setAddress.size())
{
CTxDestination address;
if (!ExtractDestination(out.tx->vout[out.i].scriptPubKey, address))
continue;
if (!setAddress.count(address))
continue;
}
int64_t nValue = out.tx->vout[out.i].nValue;
const CScript& pk = out.tx->vout[out.i].scriptPubKey;
Object entry;
entry.push_back(Pair("txid", out.tx->GetHash().GetHex()));
entry.push_back(Pair("vout", out.i));
CTxDestination address;
if (ExtractDestination(out.tx->vout[out.i].scriptPubKey, address))
{
entry.push_back(Pair("address", CBitmarkAddress(address).ToString()));
if (pwalletMain->mapAddressBook.count(address))
entry.push_back(Pair("account", pwalletMain->mapAddressBook[address].name));
}
entry.push_back(Pair("scriptPubKey", HexStr(pk.begin(), pk.end())));
if (pk.IsPayToScriptHash())
{
CTxDestination address;
if (ExtractDestination(pk, address))
{
const CScriptID& hash = boost::get<CScriptID>(address);
CScript redeemScript;
if (pwalletMain->GetCScript(hash, redeemScript))
entry.push_back(Pair("redeemScript", HexStr(redeemScript.begin(), redeemScript.end())));
}
}
entry.push_back(Pair("amount",ValueFromAmount(nValue)));
entry.push_back(Pair("confirmations",out.nDepth));
entry.push_back(Pair("spendable", out.fSpendable));
results.push_back(entry);
}
return results;
}
#endif
Value createrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 2)
throw runtime_error(
"createrawtransaction [{\"txid\":\"id\",\"vout\":n},...] {\"address\":amount,...}\n"
"\nCreate a transaction spending the given inputs and sending to the given addresses.\n"
"Returns hex-encoded raw transaction.\n"
"Note that the transaction's inputs are not signed, and\n"
"it is not stored in the wallet or transmitted to the network.\n"
"\nArguments:\n"
"1. \"transactions\" (string, required) A json array of json objects\n"
" [\n"
" {\n"
" \"txid\":\"id\", (string, required) The transaction id\n"
" \"vout\":n (numeric, required) The output number\n"
" }\n"
" ,...\n"
" ]\n"
"2. \"addresses\" (string, required) a json object with addresses as keys and amounts as values\n"
" {\n"
" \"address\": x.xxx (numeric, required) The key is the bitmark address, the value is the btm amount\n"
" ,...\n"
" }\n"
"\nResult:\n"
"\"transaction\" (string) hex string of the transaction\n"
"\nExamples\n"
+ HelpExampleCli("createrawtransaction", "\"[{\\\"txid\\\":\\\"myid\\\",\\\"vout\\\":0}]\" \"{\\\"address\\\":0.01}\"")
+ HelpExampleRpc("createrawtransaction", "\"[{\\\"txid\\\":\\\"myid\\\",\\\"vout\\\":0}]\", \"{\\\"address\\\":0.01}\"")
);
RPCTypeCheck(params, list_of(array_type)(obj_type));
Array inputs = params[0].get_array();
Object sendTo = params[1].get_obj();
CTransaction rawTx;
BOOST_FOREACH(const Value& input, inputs)
{
const Object& o = input.get_obj();
uint256 txid = ParseHashO(o, "txid");
const Value& vout_v = find_value(o, "vout");
if (vout_v.type() != int_type)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, missing vout key");
int nOutput = vout_v.get_int();
if (nOutput < 0)
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid parameter, vout cannot be negative");
CTxIn in(COutPoint(txid, nOutput));
rawTx.vin.push_back(in);
}
set<CBitmarkAddress> setAddress;
BOOST_FOREACH(const Pair& s, sendTo)
{
CBitmarkAddress address(s.name_);
if (!address.IsValid())
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, string("Invalid Bitmark address: ")+s.name_);
if (setAddress.count(address))
throw JSONRPCError(RPC_INVALID_PARAMETER, string("Invalid parameter, duplicated address: ")+s.name_);
setAddress.insert(address);
CScript scriptPubKey;
scriptPubKey.SetDestination(address.Get());
int64_t nAmount = AmountFromValue(s.value_);
CTxOut out(nAmount, scriptPubKey);
rawTx.vout.push_back(out);
}
CDataStream ss(SER_NETWORK, PROTOCOL_VERSION);
ss << rawTx;
return HexStr(ss.begin(), ss.end());
}
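// Illustrative lifecycle sketch (hypothetical txid/address values; the CLI name
// "bitmark-cli" is assumed here): the hex produced above is typically chained
// through the other raw-transaction calls:
//   bitmark-cli createrawtransaction '[{"txid":"<hex>","vout":0}]' '{"<addr>":0.01}'
//   bitmark-cli signrawtransaction '<unsigned hex>'
//   bitmark-cli sendrawtransaction '<signed hex>'
// Nothing touches the wallet or the network until the final call.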
Value decoderawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"decoderawtransaction \"hexstring\"\n"
"\nReturn a JSON object representing the serialized, hex-encoded transaction.\n"
"\nArguments:\n"
"1. \"hex\" (string, required) The transaction hex string\n"
"\nResult:\n"
"{\n"
" \"txid\" : \"id\", (string) The transaction id\n"
" \"version\" : n, (numeric) The version\n"
" \"locktime\" : ttt, (numeric) The lock time\n"
" \"vin\" : [ (array of json objects)\n"
" {\n"
" \"txid\": \"id\", (string) The transaction id\n"
" \"vout\": n, (numeric) The output number\n"
" \"scriptSig\": { (json object) The script\n"
" \"asm\": \"asm\", (string) asm\n"
" \"hex\": \"hex\" (string) hex\n"
" },\n"
" \"sequence\": n (numeric) The script sequence number\n"
" }\n"
" ,...\n"
" ],\n"
" \"vout\" : [ (array of json objects)\n"
" {\n"
" \"value\" : x.xxx, (numeric) The value in btm\n"
" \"n\" : n, (numeric) index\n"
" \"scriptPubKey\" : { (json object)\n"
" \"asm\" : \"asm\", (string) the asm\n"
" \"hex\" : \"hex\", (string) the hex\n"
" \"reqSigs\" : n, (numeric) The required sigs\n"
" \"type\" : \"pubkeyhash\", (string) The type, eg 'pubkeyhash'\n"
" \"addresses\" : [ (json array of string)\n"
" \"12tvKAXCxZjSmdNbao16dKXC8tRWfcF5oc\" (string) bitmark address\n"
" ,...\n"
" ]\n"
" }\n"
" }\n"
" ,...\n"
" ],\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("decoderawtransaction", "\"hexstring\"")
+ HelpExampleRpc("decoderawtransaction", "\"hexstring\"")
);
vector<unsigned char> txData(ParseHexV(params[0], "argument"));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
CTransaction tx;
try {
ssData >> tx;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
Object result;
TxToJSON(tx, 0, result);
return result;
}
Value decodescript(const Array& params, bool fHelp)
{
if (fHelp || params.size() != 1)
throw runtime_error(
"decodescript \"hex\"\n"
"\nDecode a hex-encoded script.\n"
"\nArguments:\n"
"1. \"hex\" (string) the hex encoded script\n"
"\nResult:\n"
"{\n"
" \"asm\":\"asm\", (string) Script public key\n"
" \"hex\":\"hex\", (string) hex encoded public key\n"
" \"type\":\"type\", (string) The output type\n"
" \"reqSigs\": n, (numeric) The required signatures\n"
" \"addresses\": [ (json array of string)\n"
" \"address\" (string) bitmark address\n"
" ,...\n"
" ],\n"
" \"p2sh\",\"address\" (string) script address\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("decodescript", "\"hexstring\"")
+ HelpExampleRpc("decodescript", "\"hexstring\"")
);
RPCTypeCheck(params, list_of(str_type));
Object r;
CScript script;
if (params[0].get_str().size() > 0){
vector<unsigned char> scriptData(ParseHexV(params[0], "argument"));
script = CScript(scriptData.begin(), scriptData.end());
} else {
// Empty scripts are valid
}
ScriptPubKeyToJSON(script, r, false);
r.push_back(Pair("p2sh", CBitmarkAddress(script.GetID()).ToString()));
return r;
}
Value signrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 4)
throw runtime_error(
"signrawtransaction \"hexstring\" ( [{\"txid\":\"id\",\"vout\":n,\"scriptPubKey\":\"hex\",\"redeemScript\":\"hex\"},...] [\"privatekey1\",...] sighashtype )\n"
"\nSign inputs for raw transaction (serialized, hex-encoded).\n"
"The second optional argument (may be null) is an array of previous transaction outputs that\n"
"this transaction depends on but may not yet be in the block chain.\n"
"The third optional argument (may be null) is an array of base58-encoded private\n"
"keys that, if given, will be the only keys used to sign the transaction.\n"
#ifdef ENABLE_WALLET
+ HelpRequiringPassphrase() + "\n"
#endif
"\nArguments:\n"
"1. \"hexstring\" (string, required) The transaction hex string\n"
"2. \"prevtxs\" (string, optional) An json array of previous dependent transaction outputs\n"
" [ (json array of json objects, or 'null' if none provided)\n"
" {\n"
" \"txid\":\"id\", (string, required) The transaction id\n"
" \"vout\":n, (numeric, required) The output number\n"
" \"scriptPubKey\": \"hex\", (string, required) script key\n"
" \"redeemScript\": \"hex\" (string, required for P2SH) redeem script\n"
" }\n"
" ,...\n"
" ]\n"
"3. \"privatekeys\" (string, optional) A json array of base58-encoded private keys for signing\n"
" [ (json array of strings, or 'null' if none provided)\n"
" \"privatekey\" (string) private key in base58-encoding\n"
" ,...\n"
" ]\n"
"4. \"sighashtype\" (string, optional, default=ALL) The signature hash type. Must be one of\n"
" \"ALL\"\n"
" \"NONE\"\n"
" \"SINGLE\"\n"
" \"ALL|ANYONECANPAY\"\n"
" \"NONE|ANYONECANPAY\"\n"
" \"SINGLE|ANYONECANPAY\"\n"
"\nResult:\n"
"{\n"
" \"hex\": \"value\", (string) The raw transaction with signature(s) (hex-encoded string)\n"
" \"complete\": n (numeric) if transaction has a complete set of signature (0 if not)\n"
"}\n"
"\nExamples:\n"
+ HelpExampleCli("signrawtransaction", "\"myhex\"")
+ HelpExampleRpc("signrawtransaction", "\"myhex\"")
);
RPCTypeCheck(params, list_of(str_type)(array_type)(array_type)(str_type), true);
vector<unsigned char> txData(ParseHexV(params[0], "argument 1"));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
vector<CTransaction> txVariants;
while (!ssData.empty())
{
try {
CTransaction tx;
ssData >> tx;
txVariants.push_back(tx);
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
}
if (txVariants.empty())
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "Missing transaction");
// mergedTx will end up with all the signatures; it
// starts as a clone of the rawtx:
CTransaction mergedTx(txVariants[0]);
bool fComplete = true;
// Fetch previous transactions (inputs):
CCoinsView viewDummy;
CCoinsViewCache view(viewDummy);
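// viewDummy is an empty backing store; the cache in front of it retains every
// coin fetched below, which lets us drop the mempool lock again right away
// while still having the prevouts available for signing.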
{
LOCK(mempool.cs);
CCoinsViewCache &viewChain = *pcoinsTip;
CCoinsViewMemPool viewMempool(viewChain, mempool);
view.SetBackend(viewMempool); // temporarily switch cache backend to db+mempool view
BOOST_FOREACH(const CTxIn& txin, mergedTx.vin) {
const uint256& prevHash = txin.prevout.hash;
CCoins coins;
view.GetCoins(prevHash, coins); // this is certainly allowed to fail
}
view.SetBackend(viewDummy); // switch back to avoid locking mempool for too long
}
bool fGivenKeys = false;
CBasicKeyStore tempKeystore;
if (params.size() > 2 && params[2].type() != null_type)
{
fGivenKeys = true;
Array keys = params[2].get_array();
BOOST_FOREACH(Value k, keys)
{
CBitmarkSecret vchSecret;
bool fGood = vchSecret.SetString(k.get_str());
if (!fGood)
throw JSONRPCError(RPC_INVALID_ADDRESS_OR_KEY, "Invalid private key");
CKey key = vchSecret.GetKey();
tempKeystore.AddKey(key);
}
}
#ifdef ENABLE_WALLET
else
EnsureWalletIsUnlocked();
#endif
// Add previous txouts given in the RPC call:
if (params.size() > 1 && params[1].type() != null_type)
{
Array prevTxs = params[1].get_array();
BOOST_FOREACH(Value& p, prevTxs)
{
if (p.type() != obj_type)
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "expected object with {\"txid'\",\"vout\",\"scriptPubKey\"}");
Object prevOut = p.get_obj();
RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type));
uint256 txid = ParseHashO(prevOut, "txid");
int nOut = find_value(prevOut, "vout").get_int();
if (nOut < 0)
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "vout must be positive");
vector<unsigned char> pkData(ParseHexO(prevOut, "scriptPubKey"));
CScript scriptPubKey(pkData.begin(), pkData.end());
CCoins coins;
if (view.GetCoins(txid, coins)) {
if (coins.IsAvailable(nOut) && coins.vout[nOut].scriptPubKey != scriptPubKey) {
string err("Previous output scriptPubKey mismatch:\n");
err = err + coins.vout[nOut].scriptPubKey.ToString() + "\nvs:\n"+
scriptPubKey.ToString();
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, err);
}
// what to do if the txid is known, but the actual output isn't?
}
if ((unsigned int)nOut >= coins.vout.size())
coins.vout.resize(nOut+1);
coins.vout[nOut].scriptPubKey = scriptPubKey;
coins.vout[nOut].nValue = 0; // we don't know the actual output value
view.SetCoins(txid, coins);
// if redeemScript given and not using the local wallet (private keys
// given), add redeemScript to the tempKeystore so it can be signed:
if (fGivenKeys && scriptPubKey.IsPayToScriptHash())
{
RPCTypeCheck(prevOut, map_list_of("txid", str_type)("vout", int_type)("scriptPubKey", str_type)("redeemScript",str_type));
Value v = find_value(prevOut, "redeemScript");
if (!(v == Value::null))
{
vector<unsigned char> rsData(ParseHexV(v, "redeemScript"));
CScript redeemScript(rsData.begin(), rsData.end());
tempKeystore.AddCScript(redeemScript);
}
}
}
}
#ifdef ENABLE_WALLET
const CKeyStore& keystore = ((fGivenKeys || !pwalletMain) ? tempKeystore : *pwalletMain);
#else
const CKeyStore& keystore = tempKeystore;
#endif
int nHashType = SIGHASH_ALL;
if (params.size() > 3 && params[3].type() != null_type)
{
static map<string, int> mapSigHashValues =
boost::assign::map_list_of
(string("ALL"), int(SIGHASH_ALL))
(string("ALL|ANYONECANPAY"), int(SIGHASH_ALL|SIGHASH_ANYONECANPAY))
(string("NONE"), int(SIGHASH_NONE))
(string("NONE|ANYONECANPAY"), int(SIGHASH_NONE|SIGHASH_ANYONECANPAY))
(string("SINGLE"), int(SIGHASH_SINGLE))
(string("SINGLE|ANYONECANPAY"), int(SIGHASH_SINGLE|SIGHASH_ANYONECANPAY))
;
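// e.g. "SINGLE|ANYONECANPAY" resolves to SIGHASH_SINGLE|SIGHASH_ANYONECANPAY (0x83)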
string strHashType = params[3].get_str();
if (mapSigHashValues.count(strHashType))
nHashType = mapSigHashValues[strHashType];
else
throw JSONRPCError(RPC_INVALID_PARAMETER, "Invalid sighash param");
}
bool fHashSingle = ((nHashType & ~SIGHASH_ANYONECANPAY) == SIGHASH_SINGLE);
// Sign what we can:
for (unsigned int i = 0; i < mergedTx.vin.size(); i++)
{
CTxIn& txin = mergedTx.vin[i];
CCoins coins;
if (!view.GetCoins(txin.prevout.hash, coins) || !coins.IsAvailable(txin.prevout.n))
{
fComplete = false;
continue;
}
const CScript& prevPubKey = coins.vout[txin.prevout.n].scriptPubKey;
txin.scriptSig.clear();
// Only sign SIGHASH_SINGLE if there's a corresponding output:
if (!fHashSingle || (i < mergedTx.vout.size()))
SignSignature(keystore, prevPubKey, mergedTx, i, nHashType);
// ... and merge in other signatures:
BOOST_FOREACH(const CTransaction& txv, txVariants)
{
txin.scriptSig = CombineSignatures(prevPubKey, mergedTx, i, txin.scriptSig, txv.vin[i].scriptSig);
}
if (!VerifyScript(txin.scriptSig, prevPubKey, mergedTx, i, SCRIPT_VERIFY_P2SH | SCRIPT_VERIFY_STRICTENC | SCRIPT_VERIFY_CHECKLOCKTIMEVERIFY, 0))
fComplete = false;
}
Object result;
CDataStream ssTx(SER_NETWORK, PROTOCOL_VERSION);
ssTx << mergedTx;
result.push_back(Pair("hex", HexStr(ssTx.begin(), ssTx.end())));
result.push_back(Pair("complete", fComplete));
return result;
}
Value sendrawtransaction(const Array& params, bool fHelp)
{
if (fHelp || params.size() < 1 || params.size() > 2)
throw runtime_error(
"sendrawtransaction \"hexstring\" ( allowhighfees )\n"
"\nSubmits raw transaction (serialized, hex-encoded) to local node and network.\n"
"\nAlso see createrawtransaction and signrawtransaction calls.\n"
"\nArguments:\n"
"1. \"hexstring\" (string, required) The hex string of the raw transaction)\n"
"2. allowhighfees (boolean, optional, default=false) Allow high fees\n"
"\nResult:\n"
"\"hex\" (string) The transaction hash in hex\n"
"\nExamples:\n"
"\nCreate a transaction\n"
+ HelpExampleCli("createrawtransaction", "\"[{\\\"txid\\\" : \\\"mytxid\\\",\\\"vout\\\":0}]\" \"{\\\"myaddress\\\":0.01}\"") +
"Sign the transaction, and get back the hex\n"
+ HelpExampleCli("signrawtransaction", "\"myhex\"") +
"\nSend the transaction (signed hex)\n"
+ HelpExampleCli("sendrawtransaction", "\"signedhex\"") +
"\nAs a json rpc call\n"
+ HelpExampleRpc("sendrawtransaction", "\"signedhex\"")
);
// parse hex string from parameter
vector<unsigned char> txData(ParseHexV(params[0], "parameter"));
CDataStream ssData(txData, SER_NETWORK, PROTOCOL_VERSION);
CTransaction tx;
bool fOverrideFees = false;
if (params.size() > 1)
fOverrideFees = params[1].get_bool();
// deserialize binary data stream
try {
ssData >> tx;
}
catch (std::exception &e) {
throw JSONRPCError(RPC_DESERIALIZATION_ERROR, "TX decode failed");
}
uint256 hashTx = tx.GetHash();
CCoinsViewCache &view = *pcoinsTip;
CCoins existingCoins;
bool fHaveMempool = mempool.exists(hashTx);
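// nHeight values at or above 1000000000 are sentinel heights (memory-pool
// entries in upstream Bitcoin use MEMPOOL_HEIGHT), so the test below only
// matches coins already recorded in a block.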
bool fHaveChain = view.GetCoins(hashTx, existingCoins) && existingCoins.nHeight < 1000000000;
if (!fHaveMempool && !fHaveChain) {
// push to local node and sync with wallets
CValidationState state;
if (AcceptToMemoryPool(mempool, state, tx, false, NULL, !fOverrideFees))
SyncWithWallets(hashTx, tx, NULL);
else {
if(state.IsInvalid())
throw JSONRPCError(RPC_TRANSACTION_REJECTED, strprintf("%i: %s", state.GetRejectCode(), state.GetRejectReason()));
else
throw JSONRPCError(RPC_TRANSACTION_ERROR, state.GetRejectReason());
}
} else if (fHaveChain) {
throw JSONRPCError(RPC_TRANSACTION_ALREADY_IN_CHAIN, "transaction already in block chain");
}
RelayTransaction(tx, hashTx);
return hashTx.GetHex();
}
|
C++
|
MIT
|
1notchdev/bitmark/src/rpcrawtransaction.cpp
|
e317f0ed-3909-474a-a889-ba144e8af7c4
|
[{"tag": "NAME", "value": "Satoshi Nakamoto", "start": 22, "end": 38, "context": "// Copyright (c) 2010 Satoshi Nakamoto\n// Original Code: Copyright (c) 2009-2014 The Bit"}]
|
[{"tag": "NAME", "value": "Satoshi Nakamoto", "start": 22, "end": 38, "context": "// Copyright (c) 2010 Satoshi Nakamoto\n// Original Code: Copyright (c) 2009-2014 The Bit"}]
|
(function () {
const ERRORS = {
invalidPassword: 'Please enter a valid password.',
invalidEmail: 'Please enter a valid email',
invalidUsername: 'Username is not valid. A special character, an uppercase letter, a lowercase letter, and a number are required.',
invalidFirstname: 'Please provide a valid first name.',
passAndConfirmShouldMatch: 'Password and Confirm password should match.',
existingEmail: 'That email is already taken.',
existingUsername: 'That username is not available.'
};
function isEmail(email) {
const re = /^(([^<>()\[\]\\.,;:\s@"]+(\.[^<>()\[\]\\.,;:\s@"]+)*)|(".+"))@((\[[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\.[0-9]{1,3}\])|(([a-zA-Z\-0-9]+\.)+[a-zA-Z]{2,}))$/;
return re.test(String(email).toLowerCase());
}
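// Quick sanity check of the pattern above (illustrative values):
//   isEmail('user@example.com') -> true
//   isEmail('user@localhost')   -> false (domain needs a dot-separated TLD)
//   isEmail('a b@example.com')  -> false (whitespace in the local part)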
function isValidUsername(name) {
name = String(name);
if (!name.match(/[\$@#!&*%]/)) {
return false;
} else if (!name.match(/[A-Z]/)) {
return false;
} else if (!name.match(/[a-z]/)) {
return false;
} else if (!name.match(/[0-9]/)) {
return false;
}
return true;
}
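// Note: this checks character classes only, not length. Illustrative results:
//   isValidUsername('Ab1$')   -> true
//   isValidUsername('abc123') -> false (no special char or uppercase letter)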
function renderErrors(formElm, errors) {
const $errElm = $('<span class="error"></span>');
const $formElm = $(formElm);
for (const elm in errors) {
const $errField = $formElm.find(`[name=${elm}]`);
const $fieldErr = $errElm.clone();
$fieldErr.text(ERRORS[elm]);
$fieldErr.insertAfter($errField);
}
}
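// For a failing field this leaves markup roughly like (illustrative, assuming
// inputs are direct children of the form so removeErrors() can find the spans):
//   <input name="email"><span class="error">Please enter a valid email</span>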
function removeErrors(e) {
const $formElm = $(e.target).closest('form');
$formElm.children().filter('.error').remove();
}
function onRegSubmit(e) {
e.stopPropagation();
e.preventDefault();
removeErrors(e);
const formData = {};
const errors = {};
let hasError = false;
for (let i of e.target) {
if (i.type !== 'submit') {
formData[i.name] = i.value;
}
}
if (formData.password.length === 0) {
errors.password = 'invalidPassword';
hasError = true;
} else if (formData.password !== formData['confirm-password']) {
errors['confirm-password'] = 'passAndConfirmShouldMatch';
hasError = true;
}
if (!isEmail(formData.email)) {
errors.email = 'invalidEmail';
hasError = true;
}
if (!isValidUsername(formData.username)) {
errors.username = 'invalidUsername';
hasError = true;
}
if (formData.firstname.length < 2) {
errors.firstname = 'invalidFirstname';
hasError = true;
}
if (hasError) {
renderErrors(e.target, errors);
} else {
ET.showSpinner();
console.log("formData =-----> ", formData);
ET_API.createUser(formData).then((logged) => {
localStorage.setItem('loggedIn', true);
localStorage.setItem('isAdmin', logged['is-admin'] === 'on');
ET.navigateTo && ET.navigateTo('Dashboard');
ET.createSiteNav();
ET.hideSpinner();
}).catch(err => {
renderErrors(e.target, err);
localStorage.setItem('loggedIn', false);
ET.createSiteNav();
ET.hideSpinner();
});
}
}
function isAdmin() {
return location.search.indexOf('isAdmin=true') > 0;
}
// Add event listeners
// Reinitialize the listeners
ET.removeListeners();
ET.addListeners();
const $regForm = $('form.user-registration');
$('[data-reset-error]').keydown(removeErrors);
$regForm.submit(onRegSubmit);
if (isAdmin()) {
const $passField = $regForm.find('#confirm-password');
const $checkBox = $(`
<label for="is-admin">Admin:</label>
<input type="checkbox" name="is-admin" id="is-admin" data-reset-error checked>
`);
$checkBox.insertAfter($passField);
}
})();
|
JavaScript
|
MIT
|
blcorps9/TicketReservation-jQuery/pages/Registration/index.js
|
9cfbc2ae-f737-49db-bede-7362cdac3f2b
|
[]
|
[]
|
const express = require('express');
const validate = require('../../middlewares/validate');
const authValidation = require('../../validations/auth.validation');
const authController = require('../../controllers/auth.controller');
const router = express.Router();
router.post('/register', validate(authValidation.register), authController.register);
router.post('/login', validate(authValidation.login), authController.login);
router.post('/logout', validate(authValidation.logout), authController.logout);
router.post('/refresh-tokens', validate(authValidation.refreshTokens), authController.refreshTokens);
router.post('/forgot-password', validate(authValidation.forgotPassword), authController.forgotPassword);
router.post('/reset-password', validate(authValidation.resetPassword), authController.resetPassword);
module.exports = router;
/**
* @swagger
* tags:
* name: Auth
* description: Authentication
*/
/**
* @swagger
* paths:
* /auth/register:
* post:
* summary: Register as user
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - name
* - email
* - password
* properties:
* name:
* type: string
* email:
* type: string
* format: email
* description: must be unique
* password:
* type: string
* format: password
* minLength: 8
* description: At least one number and one letter
* example:
* name: fake name
* email: fake@example.com
* password: password1
* responses:
* "201":
* description: Created
* content:
* application/json:
* schema:
* type: object
* properties:
* user:
* $ref: '#/components/schemas/User'
* tokens:
* $ref: '#/components/schemas/AuthTokens'
* "400":
* $ref: '#/components/responses/DuplicateEmail'
*/
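// Example request matching the schema above (illustrative; the /v1 prefix is
// assumed from this file's location under routes/v1, adjust host/port as needed):
//   curl -X POST http://localhost:3000/v1/auth/register \
//     -H 'Content-Type: application/json' \
//     -d '{"name":"fake name","email":"fake@example.com","password":"password1"}'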
/**
* @swagger
* paths:
* /auth/login:
* post:
* summary: Login
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - email
* - password
* properties:
* email:
* type: string
* format: email
* password:
* type: string
* format: password
* example:
* email: fake@example.com
* password: password1
* responses:
* "200":
* description: OK
* content:
* application/json:
* schema:
* type: object
* properties:
* user:
* $ref: '#/components/schemas/User'
* tokens:
* $ref: '#/components/schemas/AuthTokens'
* "401":
* description: Invalid email or password
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/Error'
* example:
* code: 401
* message: Invalid email or password
*/
/**
* @swagger
* paths:
* /auth/logout:
* post:
* summary: Logout
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - refreshToken
* properties:
* refreshToken:
* type: string
* example:
* refreshToken: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI1ZWJhYzUzNDk1NGI1NDEzOTgwNmMxMTIiLCJpYXQiOjE1ODkyOTg0ODQsImV4cCI6MTU4OTMwMDI4NH0.m1U63blB0MLej_WfB7yC2FTMnCziif9X8yzwDEfJXAg
* responses:
* "204":
* description: No content
* "404":
* $ref: '#/components/responses/NotFound'
*/
/**
* @swagger
* paths:
* /auth/refresh-tokens:
* post:
* summary: Refresh auth tokens
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - refreshToken
* properties:
* refreshToken:
* type: string
* example:
* refreshToken: eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJzdWIiOiI1ZWJhYzUzNDk1NGI1NDEzOTgwNmMxMTIiLCJpYXQiOjE1ODkyOTg0ODQsImV4cCI6MTU4OTMwMDI4NH0.m1U63blB0MLej_WfB7yC2FTMnCziif9X8yzwDEfJXAg
* responses:
* "200":
* description: OK
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/AuthTokens'
* "401":
* $ref: '#/components/responses/Unauthorized'
*/
/**
* @swagger
* paths:
* /auth/forgot-password:
* post:
* summary: Forgot password
*       description: An email will be sent to reset the password.
* tags: [Auth]
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - email
* properties:
* email:
* type: string
* format: email
* example:
* email: fake@example.com
* responses:
* "204":
* description: No content
* "404":
* $ref: '#/components/responses/NotFound'
*/
/**
* @swagger
* paths:
* /auth/reset-password:
* post:
* summary: Reset password
* tags: [Auth]
* parameters:
* - in: query
* name: token
* required: true
* schema:
* type: string
* description: The reset password token
* requestBody:
* required: true
* content:
* application/json:
* schema:
* type: object
* required:
* - password
* properties:
* password:
* type: string
* format: password
* minLength: 8
* description: At least one number and one letter
* example:
* password: password1
* responses:
* "204":
* description: No content
* "401":
* description: Password reset failed
* content:
* application/json:
* schema:
* $ref: '#/components/schemas/Error'
* example:
* code: 401
* message: Password reset failed
*/
|
JavaScript
|
MIT
|
hhp1806/crud-api-node-express/src/routes/v1/auth.route.js
|
8d775999-b664-4b6e-b6f2-b493a961c4e1
|
[]
|
[]
|
import sinon from 'sinon';
import PropTypes from 'prop-types';
import configureStore from 'redux-mock-store';
const mockStore = configureStore();
window.TestStubs = {
// react-router's 'router' context
router: () => ({
push: sinon.spy(),
replace: sinon.spy(),
go: sinon.spy(),
goBack: sinon.spy(),
goForward: sinon.spy(),
setRouteLeaveHook: sinon.spy(),
isActive: sinon.spy(),
createHref: sinon.spy()
}),
location: () => ({
query: {},
pathname: '/mock-pathname/'
}),
routerContext: (location, router) => ({
context: {
location: location || TestStubs.location(),
router: router || TestStubs.router()
},
childContextTypes: {
router: PropTypes.object,
location: PropTypes.object
}
}),
store: state =>
mockStore({
auth: {isAuthenticated: null, user: null},
...state
}),
storeContext: store => ({
context: {
store: store || TestStubs.store()
},
childContextTypes: {
store: PropTypes.object
}
}),
standardContext: () => {
let result = TestStubs.routerContext();
let storeResult = TestStubs.storeContext();
result.context = {...result.context, ...storeResult.context};
result.childContextTypes = {
...result.childContextTypes,
...storeResult.childContextTypes
};
return result;
},
Build: params => ({
created_at: '2018-01-06T16:07:16.830829+00:00',
external_id: '325812408',
finished_at: '2018-01-06T16:11:04.393590+00:00',
id: 'aa7097a2-f2fb-11e7-a565-0a580a28057d',
label: 'fix: Remove break-word behavior on coverage',
number: 650,
provider: 'travis-ci',
result: 'passed',
source: {
author: {
email: 'dcramer@gmail.com',
id: '659dc21c-81db-11e7-988a-0a580a28047a',
name: 'David Cramer'
},
created_at: '2018-01-06T16:07:16.814650+00:00',
id: 'aa6e1f90-f2fb-11e7-a565-0a580a28057d',
revision: {
author: {
email: 'dcramer@gmail.com',
id: '659dc21c-81db-11e7-988a-0a580a28047a',
name: 'David Cramer'
},
committed_at: '2018-01-06T16:06:52+00:00',
created_at: '2018-01-06T16:06:52+00:00',
message: 'fix: Remove break-word behavior on coverage\n',
sha: 'eff634a68a01d081c0bdc51752dfa0709781f0e4'
}
},
started_at: '2018-01-06T16:07:16.957093+00:00',
stats: {
coverage: {
diff_lines_covered: 0,
diff_lines_uncovered: 0,
lines_covered: 6127,
lines_uncovered: 3060
},
style_violations: {
count: 0
},
tests: {
count: 153,
count_unique: 153,
duration: 14673.0,
failures: 0,
failures_unique: 0
},
webpack: {
total_asset_size: 0
}
},
status: 'finished',
url: 'https://travis-ci.org/getsentry/zeus/builds/325812408',
...params
}),
Repository: params => ({
backend: 'git',
created_at: '2017-08-15T17:01:33.206772+00:00',
full_name: 'gh/getsentry/zeus',
id: '63e820d4-81db-11e7-a6df-0a580a28004e',
latest_build: null,
name: 'zeus',
owner_name: 'getsentry',
provider: 'gh',
url: 'git@github.com:getsentry/zeus.git',
permissions: {
admin: true,
read: true,
write: true
},
...params
})
};
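// Minimal usage sketch (assumes enzyme's mount() and a hypothetical component):
//   import {mount} from 'enzyme';
//   const wrapper = mount(<BuildDetails />, TestStubs.standardContext());
//   wrapper.setProps({build: TestStubs.Build({result: 'failed'})});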
|
JavaScript
|
Apache-2.0
|
rob-opsi/zeus/config/jest/testStubs.js
|
b8b4b227-628c-42ff-97ea-4aa83067e65e
|
[{"tag": "PASSWORD", "value": "aa7097a2-f2fb-11e7-a565-0a580a28057d", "start": 1535, "end": 1571, "context": "_at: '2018-01-06T16:11:04.393590+00:00',\n id: 'aa7097a2-f2fb-11e7-a565-0a580a28057d',\n label: 'fix: Remove break-word behavior on "}, {"tag": "SSH_KEY", "value": "eff634a68a01d081c0bdc51752dfa0709781f0e4", "start": 2308, "end": 2348, "context": "break-word behavior on coverage\\n',\n sha: 'eff634a68a01d081c0bdc51752dfa0709781f0e4'\n }\n },\n started_at: '2018-01-06T16:07"}, {"tag": "EMAIL", "value": "dcramer@gmail.com", "start": 2012, "end": 2029, "context": " revision: {\n author: {\n email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a28"}, {"tag": "PASSWORD", "value": "63e820d4-81db-11e7-a6df-0a580a28004e", "start": 3093, "end": 3129, "context": "00',\n full_name: 'gh/getsentry/zeus',\n id: '63e820d4-81db-11e7-a6df-0a580a28004e',\n latest_build: null,\n name: 'zeus',\n o"}, {"tag": "PASSWORD", "value": "aa6e1f90-f2fb-11e7-a565-0a580a28057d", "start": 1919, "end": 1955, "context": "t: '2018-01-06T16:07:16.814650+00:00',\n id: 'aa6e1f90-f2fb-11e7-a565-0a580a28057d',\n revision: {\n author: {\n e"}, {"tag": "NAME", "value": "David Cramer", "start": 2103, "end": 2115, "context": "1c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n committed_at: '2018-01-06T16:"}, {"tag": "PASSWORD", "value": "659dc21c-81db-11e7-988a-0a580a28047a", "start": 2047, "end": 2083, "context": " email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n "}, {"tag": "EMAIL", "value": "dcramer@gmail.com", "start": 1744, "end": 1761, "context": "d',\n source: {\n author: {\n email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a2804"}, {"tag": "NAME", "value": "David Cramer", "start": 1831, "end": 1843, "context": "c21c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n created_at: '2018-01-06T16:07:16."}, {"tag": "PASSWORD", "value": "659dc21c-81db-11e7-988a-0a580a28047a", "start": 1777, "end": 1813, "context": "\n email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n cre"}]
|
[{"tag": "PASSWORD", "value": "aa7097a2-f2fb-11e7-a565-0a580a28057d", "start": 1535, "end": 1571, "context": "_at: '2018-01-06T16:11:04.393590+00:00',\n id: 'aa7097a2-f2fb-11e7-a565-0a580a28057d',\n label: 'fix: Remove break-word behavior on "}, {"tag": "KEY", "value": "eff634a68a01d081c0bdc51752dfa0709781f0e4", "start": 2308, "end": 2348, "context": "break-word behavior on coverage\\n',\n sha: 'eff634a68a01d081c0bdc51752dfa0709781f0e4'\n }\n },\n started_at: '2018-01-06T16:07"}, {"tag": "EMAIL", "value": "dcramer@gmail.com", "start": 2012, "end": 2029, "context": " revision: {\n author: {\n email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a28"}, {"tag": "PASSWORD", "value": "63e820d4-81db-11e7-a6df-0a580a28004e", "start": 3093, "end": 3129, "context": "00',\n full_name: 'gh/getsentry/zeus',\n id: '63e820d4-81db-11e7-a6df-0a580a28004e',\n latest_build: null,\n name: 'zeus',\n o"}, {"tag": "PASSWORD", "value": "aa6e1f90-f2fb-11e7-a565-0a580a28057d", "start": 1919, "end": 1955, "context": "t: '2018-01-06T16:07:16.814650+00:00',\n id: 'aa6e1f90-f2fb-11e7-a565-0a580a28057d',\n revision: {\n author: {\n e"}, {"tag": "NAME", "value": "David Cramer", "start": 2103, "end": 2115, "context": "1c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n committed_at: '2018-01-06T16:"}, {"tag": "PASSWORD", "value": "659dc21c-81db-11e7-988a-0a580a28047a", "start": 2047, "end": 2083, "context": " email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n "}, {"tag": "EMAIL", "value": "dcramer@gmail.com", "start": 1744, "end": 1761, "context": "d',\n source: {\n author: {\n email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a2804"}, {"tag": "NAME", "value": "David Cramer", "start": 1831, "end": 1843, "context": "c21c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n created_at: '2018-01-06T16:07:16."}, {"tag": "PASSWORD", "value": "659dc21c-81db-11e7-988a-0a580a28047a", "start": 1777, "end": 1813, "context": "\n email: 'dcramer@gmail.com',\n id: '659dc21c-81db-11e7-988a-0a580a28047a',\n name: 'David Cramer'\n },\n cre"}]
|
jest.mock('mongodb');
jest.mock('jsonwebtoken');
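// jest.mock() calls are hoisted above the imports by babel-jest, so the manual
// mocks (presumably under __mocks__) are already in place when the resolver
// module and its dependencies are loaded below.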
import { Query, Mutation, User } from '../resolver';
import { User as UserModel } from '../../../model';
const userId = 'admin';
describe('user resolver', () => {
const { connection } = require('mongodb');
const models = { userModel: new UserModel(connection.db(), userId) };
test('Query', async () => {
const result = await Query.user(null, {}, { models });
expect(result).toMatchSnapshot();
});
test('User', () => {
expect(User.username({ _id: userId })).toBe(userId);
});
describe('Mutation', () => {
test('signin should return token for correct credentials', async () => {
const context = { models, secret: 'secret', expiresIn: 10 };
const result = await Mutation.signin(null, { login: userId, password: 'password' }, context);
expect(result).toMatchSnapshot();
});
describe('changePassword', () => {
test('should throw an error when the new password was already used', async () => {
const context = {
models: { userModel: new UserModel(connection.db(), 'user1') },
secret: 'secret',
expiresIn: 10,
userId: 'user1',
};
await expect(Mutation.changePassword(null, { oldPassword: 'password', newPassword: 'passwordUsed' }, context))
.rejects
.toThrowError(/Password already used/);
});
test('should return token when password was changed', async () => {
const context = {
models, secret: 'secret', expiresIn: 10, userId: 'admin',
};
const result = await Mutation.changePassword(null, { oldPassword: 'password', newPassword: 'newPassword' }, context);
expect(result).toMatchSnapshot();
});
});
});
});
|
JavaScript
|
MIT
|
w3tl/yoledger-api/src/graphql/user/__tests__/resolver.test.js
|
06c6c8a4-c969-4c53-9108-51be48dfd119
|
[]
|
[]
|
package mat.shared;
/**
* The Class ConstantMessages.
*/
public class ConstantMessages {
public static final int SERVER_LOG =0;
public static final int DB_LOG =1;
public static final int ID_NOT_UNIQUE = 1;
public static final int REACHED_MAXIMUM_VERSION = 2;
public static final int REACHED_MAXIMUM_MAJOR_VERSION = 3;
public static final int REACHED_MAXIMUM_MINOR_VERSION = 4;
public static final int INVALID_VALUE_SET_DATE = 5;
public static final int INVALID_DATA = 6;
public static final int INVALID_CQL_DATA = 7;
public static final int INVALID_CQL_LIBRARIES = 8;
public static final String FILE_NOT_SELECTED ="Please Select a File.";
public static final String EXCEL_FILE_TYPE = "Please select a file with an Excel file type (.xls or .xlsx)";
public static final String EMPTY_FILE_ERROR = "Import failed. File is empty.";
public static final String INCOMPLETE_ROW_ERROR = "Import failed. One or more rows is missing a Code or a Descriptor.";
public static final String SYSTEM_ERROR = "Import failed due to system error. Please try again.";
public static final String DUPLICATE_ERROR = "Import failed. One or more duplicate codes exist in the file. Please remove them, then try again.";
public static final String INVALID_ROW_INPUT_ERROR = "Import failed. Invalid input was found at row ";
public static final String INVALID_TEMPLATE = "Import failed. Invalid Template.";
public static final String DUPLICATE_CODES_MSG = "code(s) were identified as duplicates to code(s) already in the code list and were ignored upon import.";
public static final String TWO_WHITE_SPACES = "  ";
public static final String CONTINUOUS_VARIABLE_SCORING ="Continuous Variable";
public static final String PROPORTION_SCORING = "Proportion";
public static final String RATIO_SCORING = "Ratio";
public static final String COHORT_SCORING = "Cohort";
public static final String POPULATION_CONTEXT_ID= "initialPopulation";
public static final String NUMERATOR_CONTEXT_ID = "numerator";
public static final String NUMERATOR_EXCLUSIONS_CONTEXT_ID = "numeratorExclusions";
public static final String DENOMINATOR_CONTEXT_ID = "denominator";
public static final String DENOMINATOR_EXCLUSIONS_CONTEXT_ID = "denominatorExclusions";
public static final String DENOMINATOR_EXCEPTIONS_CONTEXT_ID = "denominatorExceptions";
public static final String MEASURE_POPULATION_CONTEXT_ID = "measurePopulation";
public static final String MEASURE_OBSERVATION_CONTEXT_ID = "measureObservation";
public static final String STRATIFICATION_CONTEXT_ID = "stratification";
public static final String STRATIFIER_CONTEXT_ID = "stratum";
public static final String USER_DEFINED_CONTEXT_ID = "10";
public static final String MEASURE_PHRASE_CONTEXT_ID ="11";
public static final int CONTEXT_ID_COUNT = 11;
public static final String POPULATION_CONTEXT_DESC= "Population";
public static final String NUMERATOR_CONTEXT_DESC = "Numerator";
public static final String NUMERATOR_EXCLUSIONS_CONTEXT_DESC = "Numerator Exclusions";
public static final String DENOMINATOR_CONTEXT_DESC = "Denominator";
public static final String DENOMINATOR_EXCLUSIONS_CONTEXT_DESC = "Denominator Exclusions";
public static final String DENOMINATOR_EXCEPTIONS_CONTEXT_DESC = "Denominator Exceptions";
public static final String MEASURE_POPULATION_CONTEXT_DESC = "Measure Population";
public static final String MEASURE_OBSERVATION_CONTEXT_DESC = "Measure Observation";
public static final String STRAT_CONTEXT_DESC = "Stratification";
public static final String USER_DEFINED_CONTEXT_DESC = "User-defined";
public static final String MEASURE_PHRASE_CONTEXT_DESC ="Measure Phrase";
public static final String POP_TAB = "Pop";
public static final String POP_TAB_EXPANDED = "Population";
public static final String NUM_TAB = "Num";
public static final String NUM_TAB_EXPANDED = "Numerator";
public static final String NUM_EX_TAB = "N Excl";
public static final String NUM_EX_TAB_EXPANDED = "Numerator Exclusions";
public static final String DEN_TAB = "Den";
public static final String DEN_TAB_EXPANDED = "Denominator";
public static final String EXCL_TAB = "D Excl";
public static final String EXCL_TAB_EXPANDED = "Denominator Exclusions";
public static final String EXCEP_TAB = "D Excep";
public static final String EXCEP_TAB_EXPANDED = "Denominator Exceptions";
public static final String MEASURE_POP_TAB = "Meas Pop";
public static final String MEASURE_POP_TAB_EXPANDED = "Measure Population";
public static final String MEASURE_OBS_TAB = "Meas Obs";
public static final String MEASURE_OBS_TAB_EXPANDED = "Measure Observation";
public static final String STRAT_TAB = "Strat";
public static final String STRAT_TAB_EXPANDED = "Stratification";
public static final String USER_DEFINED_TAB = "User-Defined";
public static final String USER_DEFINED_TAB_EXPANDED ="User-Defined";
public static final String MEASURE_PHRASE_TAB = "Measure Phrase";
public static final String MEASURE_PHRASE_TAB_EXPANDED = "Measure Phrase";
public static final String ATTRIBUTE = "Attribute";
public static final String MEASUREMENT_PERIOD = "Measurement Period";
public static final String MEASUREMENT_START_DATE = "Measurement Start Date";
public static final String MEASUREMENT_END_DATE = "Measurement End Date";
public static final String MEASUREMENT_TIMING = "Measure Timing";
public static final String TIMING_ELEMENT = "Timing Element";
public static final String PATIENT_CHARACTERISTIC = "Patient Characteristic";
public static final String PATIENT_CHARACTERISTIC_RACE = "Patient Characteristic Race";
public static final String PATIENT_CHARACTERISTIC_ETHNICITY = "Patient Characteristic Ethnicity";
public static final String PATIENT_CHARACTERISTIC_PAYER = "Patient Characteristic Payer";
public static final String PATIENT_CHARACTERISTIC_GENDER = "Patient Characteristic Sex";
public static final String ADMINISTRATIVE_GENDER_MALE = "Administrative Gender Male";
public static final String ADMINISTRATIVE_GENDER_FEMALE = "Administrative Gender Female";
public static final String ADMINISTRATIVE_GENDER_UNDIFFERENTIATED = "Administrative Gender Undifferentiated";
public static final String GENDER_MALE_OID = "2.16.840.1.113883.3.560.100.1";
public static final String GENDER_FEMALE_OID = "2.16.840.1.113883.3.560.100.2";
public static final String GENDER_UNDIFFERENTIATED_OID = "2.16.840.1.113883.3.560.100.3";
public static final String BIRTH_DATE_OID = "2.16.840.1.113883.3.560.100.4";
public static final String GROUPING_CODE_SYSTEM = "Grouping";
public static final String HL7_ADMINGENDER_CODE_SYSTEM = "Administrative Sex";
public static final String LOINC_CODE_SYSTEM = "LOINC";
public static final String CDC_CODE_SYSTEM = "CDC";
public static final String SOURCE_OF_PAYMENT = "Source of Payment Typology";
public static final String MAT_MODULE = "MAT";
public static final String LOGIN_MODULE = "LOGIN";
public static final String BONNIE_MODULE = "BONNIE";
public static final String UNIT_FUNCTION = "Function";
public static final String UNIT_COMPARISON = "Comparison";
public static final String UNIT_TMP_COMPARISON = "TemporalComparison";
public static final String UNIT_ATTRIBUTE = "Attribute";
public static final String COMPARISON_UNIT_DEFAULT = "seconds";
public static final String PASSWORD = "password";
public static final String PASSWORD_EXPIRE_DATE = "passwordExpireDate";
public static final String WEB_LINK = "link";
public static final String LOGINID = "loginId";
public static final String URL = "url";
public static final String ROOT_PATH = "/";
public static final String INSERT = "Insert";
public static final String UPDATE = "Update";
public static final String DELETE = "Delete";
public static final String EXPORT = "Export";
public static final String USER_COMMENT = "User Comment";
public static final String USER_NOT_FOUND = "User Not Found";
public static final String EMAIL_NOT_FOUND = "Email Not Found";
public static final String TOOLTIP_FOR_OCCURRENCE = "Select Specific Occurrence if you need to reference a specific occurrence of your element.";
public static final int CODE_LIST_WS_HISTORY_STATE = 1;
public static final int MAX_PAGE_DISPLAY = 10;
public static final int PAGE_DOTS_DISPLAY = 3;
public static final String GROUPED_CODE_LIST_CS = "Grouping";
public static final String DEFAULT_SELECT = "--Select--";
public static final String CREATE_NEW_MEASURE = "New Measure";
public static final String CREATE_NEW_DRAFT = "Draft of Existing Measure";
public static final String CREATE_NEW_VERSION = "Measure Version of Draft";
public static final String CREATE_NEW_CQL = "New Library";
public static final String CREATE_NEW_CQL_DRAFT = "Draft of Existing Library";
public static final String CREATE_NEW_CQL_VERSION = "Library Version of Draft";
public static final String MAXIMUM_ALLOWED_VERSION = "999.999";
public static final String MAXIMUM_ALLOWED_MAJOR_VERSION = "999";
public static final String MAXIMUM_ALLOWED_MINOR_VERSION = "999";
public static final String CREATE_NEW_GROUPED_VALUE_SET = "New Grouped Value Set";
public static final String CREATE_NEW_VALUE_SET = "New Value Set";
public static final String CREATE_VALUE_SET_DRAFT = "Draft of Value Set";
public static final String OID_CAUTION = "CAUTION: Changing the OID should be avoided unless absolutely necessary. Under most circumstances the OID should only be changed when a correction is needed.";
public static final String CATEGORY_MEASUREEL = "22";
public static final String CATEGORY_PROPEL = "23";
public static final String GENDER = "ONC Administrative Sex";
public static final String RACE = "Race";
public static final String ETHNICITY = "Ethnicity";
public static final String PAYER = "Payer";
public static final String GENDER_OID = "2.16.840.1.113762.1.4.1";
public static final String RACE_OID = "2.16.840.1.114222.4.11.836";
public static final String ETHNICITY_OID = "2.16.840.1.114222.4.11.837";
public static final String PAYER_OID = "2.16.840.1.114222.4.11.3591";
public static final String[] SUPPLEMENTAL_DATA_ELEMENT_OID_ARR = {GENDER_OID,RACE_OID, ETHNICITY_OID, PAYER_OID};
public static final String MAIN_TAB_LAYOUT_ID = "mainTab";
public static final String MEASURE_COMPOSER_TAB = "measureTab";
public static final String CQL_COMPOSER_TAB = "cqlTab";
public static final String USER_DEFINED_QDM_OID="1.1.1.1";
public static final String USER_DEFINED_QDM_NAME="User Defined QDM";
public static final String PATIENT_CHARACTERISTIC_BIRTHDATE = "Patient Characteristic Birthdate";
public static final String PATIENT_CHARACTERISTIC_EXPIRED = "Patient Characteristic Expired";
public static final String DEAD = "Dead";
public static final String EXPIRED = "Expired";
public static final String BIRTHDATE = "Birthdate";
public static final String DEAD_OID = "419099009";
public static final String BIRTHDATE_OID = "21112-8";
public static final String BIRTHDATE_CODE_SYSTEM_OID = "2.16.840.1.113883.6.1";
public static final String DEAD_CODE_SYSTEM_OID = "2.16.840.1.113883.6.96";
public static int PACKAGE_VALIDATION_FAIL = 30;
}
|
Java
|
CC0-1.0
|
casey-erdmann/mat-jdk-test/src/main/java/mat/shared/ConstantMessages.java
|
11fe2d81-19ca-47de-82ae-998b59107e85
|
[]
|
[]
|
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.knox.gateway.config;
import java.net.InetSocketAddress;
import java.net.UnknownHostException;
import java.util.List;
import java.util.Map;
import java.util.Set;
public interface GatewayConfig {
// Used as the basis for any home directory that is not specified.
String GATEWAY_HOME_VAR = "GATEWAY_HOME";
/**
* Variable name for the location of configuration files edited by users
*
* @deprecated use {@link GatewayConfig#KNOX_GATEWAY_CONF_DIR_VAR} instead
*/
String GATEWAY_CONF_HOME_VAR = "GATEWAY_CONF_HOME";
String KNOX_GATEWAY_CONF_DIR_VAR = "KNOX_GATEWAY_CONF_DIR";
/**
* Variable name for the location of data files generated by the gateway at runtime.
*
* @deprecated use {@link GatewayConfig#KNOX_GATEWAY_DATA_DIR} instead
*/
String GATEWAY_DATA_HOME_VAR = "GATEWAY_DATA_HOME";
String KNOX_GATEWAY_DATA_DIR = "KNOX_GATEWAY_DATA_DIR";
String GATEWAY_CONFIG_ATTRIBUTE = "org.apache.knox.gateway.config";
String HADOOP_KERBEROS_SECURED = "gateway.hadoop.kerberos.secured";
String KRB5_CONFIG = "java.security.krb5.conf";
String KRB5_DEBUG = "sun.security.krb5.debug";
String KRB5_LOGIN_CONFIG = "java.security.auth.login.config";
String KRB5_USE_SUBJECT_CREDS_ONLY = "javax.security.auth.useSubjectCredsOnly";
String IDENTITY_KEYSTORE_PASSWORD_ALIAS = "gateway.tls.keystore.password.alias";
String IDENTITY_KEYSTORE_PATH = "gateway.tls.keystore.path";
String IDENTITY_KEYSTORE_TYPE = "gateway.tls.keystore.type";
String IDENTITY_KEY_ALIAS = "gateway.tls.key.alias";
String IDENTITY_KEY_PASSPHRASE_ALIAS = "gateway.tls.key.passphrase.alias";
String DEFAULT_IDENTITY_KEYSTORE_TYPE = "JKS";
String DEFAULT_IDENTITY_KEYSTORE_PASSWORD_ALIAS = "gateway-identity-keystore-password";
String DEFAULT_IDENTITY_KEY_ALIAS = "gateway-identity";
String DEFAULT_IDENTITY_KEY_PASSPHRASE_ALIAS = "gateway-identity-passphrase";
String DEFAULT_GATEWAY_KEYSTORE_NAME = "gateway.jks";
String SIGNING_KEYSTORE_NAME = "gateway.signing.keystore.name";
String SIGNING_KEYSTORE_PASSWORD_ALIAS = "gateway.signing.keystore.password.alias";
String SIGNING_KEYSTORE_TYPE = "gateway.signing.keystore.type";
String SIGNING_KEY_ALIAS = "gateway.signing.key.alias";
String SIGNING_KEY_PASSPHRASE_ALIAS = "gateway.signing.key.passphrase.alias";
String DEFAULT_SIGNING_KEYSTORE_PASSWORD_ALIAS = "signing.keystore.password";
String DEFAULT_SIGNING_KEYSTORE_TYPE = "JKS";
String DEFAULT_SIGNING_KEY_ALIAS = "gateway-identity";
String DEFAULT_SIGNING_KEY_PASSPHRASE_ALIAS = "signing.key.passphrase";
String GATEWAY_TRUSTSTORE_PASSWORD_ALIAS = "gateway.truststore.password.alias";
String GATEWAY_TRUSTSTORE_PATH = "gateway.truststore.path";
String GATEWAY_TRUSTSTORE_TYPE = "gateway.truststore.type";
String DEFAULT_GATEWAY_TRUSTSTORE_TYPE = "JKS";
String DEFAULT_GATEWAY_TRUSTSTORE_PASSWORD_ALIAS = "gateway-truststore-password";
String HTTP_CLIENT_TRUSTSTORE_PASSWORD_ALIAS = "gateway.httpclient.truststore.password.alias";
String HTTP_CLIENT_TRUSTSTORE_PATH = "gateway.httpclient.truststore.path";
String HTTP_CLIENT_TRUSTSTORE_TYPE = "gateway.httpclient.truststore.type";
String DEFAULT_HTTP_CLIENT_TRUSTSTORE_TYPE = "JKS";
String DEFAULT_HTTP_CLIENT_TRUSTSTORE_PASSWORD_ALIAS = "gateway-httpclient-truststore-password";
String REMOTE_CONFIG_REGISTRY_TYPE = "type";
String REMOTE_CONFIG_REGISTRY_ADDRESS = "address";
String REMOTE_CONFIG_REGISTRY_NAMESPACE = "namespace";
String REMOTE_CONFIG_REGISTRY_AUTH_TYPE = "authType";
String REMOTE_CONFIG_REGISTRY_PRINCIPAL = "principal";
String REMOTE_CONFIG_REGISTRY_CREDENTIAL_ALIAS = "credentialAlias";
String REMOTE_CONFIG_REGISTRY_KEYTAB = "keytab";
String REMOTE_CONFIG_REGISTRY_USE_KEYTAB = "useKeytab";
String REMOTE_CONFIG_REGISTRY_USE_TICKET_CACHE = "useTicketCache";
String PROXYUSER_SERVICES_IGNORE_DOAS = "gateway.proxyuser.services.ignore.doas";
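/*
 * Illustration only (not part of this interface): a consumer would typically
 * resolve the TLS identity keystore by pairing these getters with the alias
 * service; the AliasService method name below is assumed, and `config` and
 * `aliasService` are hypothetical local variables.
 *
 *   String path = config.getIdentityKeystorePath();
 *   String type = config.getIdentityKeystoreType();
 *   char[] pass = aliasService.getPasswordFromAliasForGateway(
 *       config.getIdentityKeystorePasswordAlias());
 */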
/**
* The location of the gateway configuration.
* Subdirectories will be: topologies
* @return The location of the gateway configuration.
*/
String getGatewayConfDir();
/**
* The location of the gateway runtime generated data.
* Subdirectories will be security, deployments
* @return The location of the gateway runtime generated data.
*/
String getGatewayDataDir();
/**
* The location of the gateway services definitions' root directory
* @return The location of the gateway services top level directory.
*/
String getGatewayServicesDir();
/**
* The location of the gateway applications' root directory
* @return The location of the gateway applications top level directory.
*/
String getGatewayApplicationsDir();
String getHadoopConfDir();
String getGatewayHost();
int getGatewayPort();
String getGatewayPath();
String getGatewayProvidersConfigDir();
String getGatewayDescriptorsDir();
String getGatewayTopologyDir();
String getGatewaySecurityDir();
/**
* Returns the path to the Gateway's keystore directory
* <p>
* This path is generally calculated to be a subdirectory named "keystores" under the configured
* "security" directory. However, it may be possible for it to be configured as something else.
*
* @return the path to the Gateway's keystore directory
*/
String getGatewayKeystoreDir();
String getGatewayDeploymentDir();
InetSocketAddress getGatewayAddress() throws UnknownHostException;
boolean isSSLEnabled();
List<String> getExcludedSSLProtocols();
List<String> getIncludedSSLCiphers();
List<String> getExcludedSSLCiphers();
boolean isHadoopKerberosSecured();
String getKerberosConfig();
boolean isKerberosDebugEnabled();
String getKerberosLoginConfig();
String getDefaultTopologyName();
String getDefaultAppRedirectPath();
String getFrontendUrl();
boolean isClientAuthNeeded();
boolean isClientAuthWanted();
String getTruststorePath();
boolean getTrustAllCerts();
String getKeystoreType();
String getTruststoreType();
/**
* Returns the configured value for the alias name to use when looking up the Gateway's
* truststore password.
*
* @return an alias name
*/
String getTruststorePasswordAlias();
boolean isXForwardedEnabled();
String getEphemeralDHKeySize();
int getHttpClientMaxConnections();
int getHttpClientConnectionTimeout();
int getHttpClientSocketTimeout();
/**
* Returns the configured value for the path to the truststore to be used by the HTTP client instance
* connecting to a service from the Gateway.
*
* @return a path to the trust file; or <code>null</code> if not set
*/
String getHttpClientTruststorePath();
/**
* Returns the configured value for the type of the truststore specified by {@link #getHttpClientTruststorePath()}.
*
* @return a truststore type
*/
String getHttpClientTruststoreType();
/**
* Returns the configured value for the alias name to use when looking up the HTTP client's
* truststore password.
*
* @return an alias name
*/
String getHttpClientTruststorePasswordAlias();
int getThreadPoolMax();
int getHttpServerRequestBuffer();
int getHttpServerRequestHeaderBuffer();
int getHttpServerResponseBuffer();
int getHttpServerResponseHeaderBuffer();
int getGatewayDeploymentsBackupVersionLimit();
long getGatewayDeploymentsBackupAgeLimit();
long getGatewayIdleTimeout();
/**
* Returns the configured value for the path to the keystore holding the key and certificate for the
* Gateway's TLS identity.
*
* @return a path to the keystore file; or <code>null</code> if not set
*/
String getIdentityKeystorePath();
/**
* Returns the configured value for the type of the keystore holding the Gateway's identity.
*
* @return a keystore type
*/
String getIdentityKeystoreType();
/**
* Returns the configured value for the alias name to use when looking up the Gateway's identity
* keystore's password.
*
* @return an alias name
*/
String getIdentityKeystorePasswordAlias();
/**
* Returns the configured value for the alias name to use when looking up the Gateway's identity
* from the Gateway's identity keystore.
*
* @return an alias name
*/
String getIdentityKeyAlias();
/**
* Returns the configured value for the alias name to use when looking up the Gateway's identity
* key's password.
*
* @return an alias name
*/
String getIdentityKeyPassphraseAlias();
String getSigningKeystoreName();
/**
* Returns the calculated value for the path to the keystore holding the key and certificate for the
* Gateway's signing key.
*
* @return a path to the keystore file; or <code>null</code> if not set
*/
String getSigningKeystorePath();
/**
* Returns the configured value for the type of the keystore holding the Gateway's signing key.
*
* @return a keystore type
*/
String getSigningKeystoreType();
String getSigningKeyAlias();
/**
* Returns the configured value for the alias name to use when looking up the Gateway's signing
* keystore's password.
*
* @return an alias name
*/
String getSigningKeystorePasswordAlias();
/**
* Returns the configured value for the alias name to use when looking up the signing key's
* password.
*
* @return an alias name
*/
String getSigningKeyPassphraseAlias();
List<String> getGlobalRulesServices();
/**
* Returns true if the websocket feature is enabled, else false.
* Default is false.
* @since 0.10
* @return true if websocket feature is enabled
*/
boolean isWebsocketEnabled();
/**
* Websocket connection max text message size.
* @since 0.10
* @return max text message size
*/
int getWebsocketMaxTextMessageSize();
/**
* Websocket connection max binary message size.
* @since 0.10
* @return max binary message size
*/
int getWebsocketMaxBinaryMessageSize();
/**
* Websocket connection max text message buffer size.
* @since 0.10
* @return buffer size
*/
int getWebsocketMaxTextMessageBufferSize();
/**
* Websocket connection max binary message buffer size.
* @since 0.10
* @return buffer size
*/
int getWebsocketMaxBinaryMessageBufferSize();
/**
* Websocket connection input buffer size.
* @since 0.10
* @return buffer size
*/
int getWebsocketInputBufferSize();
/**
* Websocket connection async write timeout.
* @since 0.10
* @return timeout
*/
int getWebsocketAsyncWriteTimeout();
/**
* Websocket connection idle timeout.
* @since 0.10
* @return timeout
*/
int getWebsocketIdleTimeout();
boolean isMetricsEnabled();
boolean isJmxMetricsReportingEnabled();
boolean isGraphiteMetricsReportingEnabled();
String getGraphiteHost();
int getGraphitePort();
int getGraphiteReportingFrequency();
/**
* Enable cookie scoping to gateway path
*
* @return true if cookie scoping to path is enabled
* @since 0.13
*/
boolean isCookieScopingToPathEnabled();
/**
* Configured name of the HTTP Header that is expected
* to be set by a proxy in front of the gateway.
* @return header name
*/
String getHeaderNameForRemoteAddress();
/**
* Configured Algorithm name to be used by the CryptoService
* and MasterService implementations
* @return algorithm
*/
String getAlgorithm();
/**
* Configured Algorithm name to be used by the CryptoService
* for password based encryption
* @return algorithm
*/
String getPBEAlgorithm();
/**
* Configured Transformation name to be used by the CryptoService
* and MasterService implementations
* @return transformation name
*/
String getTransformation();
/**
* Configured SaltSize to be used by the CryptoService
* and MasterService implementations
* @return salt size
*/
String getSaltSize();
/**
* Configured IterationCount to be used by the CryptoService
* and MasterService implementations
* @return iteration count
*/
String getIterationCount();
/**
* Configured KeyLength to be used by the CryptoService
* and MasterService implementations
* @return key length
*/
String getKeyLength();
/**
* Map of Topology names and their ports.
* @return Map of Topology names and their ports.
*/
Map<String, Integer> getGatewayPortMappings();
/**
* Is the Port Mapping feature on
* @return true if port mapping enabled
*/
boolean isGatewayPortMappingEnabled();
/**
* Is the Server header enabled (when disabled, it is suppressed)
* @return true if the Server header is enabled
*/
boolean isGatewayServerHeaderEnabled();
/**
* Determine the default address for discovering service endpoint details.
*
* @return A valid discovery source address, or null (because this property is optional).
*/
String getDefaultDiscoveryAddress();
/**
* Determine the default target cluster for discovering service endpoint details.
*
* @return A valid cluster name, or null (because this property is optional).
*/
String getDefaultDiscoveryCluster();
/**
*
* @param type The type of cluster configuration monitor for which the interval should be returned.
*
* @return The polling interval configuration value, or -1 if it has not been configured.
*/
int getClusterMonitorPollingInterval(String type);
/**
*
* @param type The type of cluster configuration monitor for which the interval should be returned.
*
* @return The enabled status of the specified type of cluster configuration monitor.
*/
boolean isClusterMonitorEnabled(String type);
/**
* @return The list of the names of any remote registry configurations defined herein.
*/
List<String> getRemoteRegistryConfigurationNames();
/**
*
* @param name The name of the remote registry configuration
*
* @return The configuration associated with the specified name.
*/
String getRemoteRegistryConfiguration(String name);
/**
*
* @return The name of a remote configuration registry client
*/
String getRemoteConfigurationMonitorClientName();
/**
* When new remote registry entries must be created, or new ACLs applied to existing entries, this method indicates
* whether unauthenticated connections should be given read access to those entries.
*
* @return true, if unauthenticated clients should be allowed to access remote registry entries.
*/
boolean allowUnauthenticatedRemoteRegistryReadAccess();
/**
* Returns whether the Remote Alias Service is enabled or not.
*
* This value also depends on whether the remote configuration registry is enabled or not.
* If it is enabled, then this option takes effect, else this option has no effect.
*
   * @return true, if the remote alias service is enabled; otherwise, false.
*/
boolean isRemoteAliasServiceEnabled();
/**
* Returns prefix for the remote alias service configuration
*
* @return the prefix for the remote alias service configuration
*/
String getRemoteAliasServiceConfigurationPrefix();
/**
   * Uses the result of getRemoteAliasServiceConfigurationPrefix to look up the corresponding configurations
*
* @return Map of configurations that apply to the remote alias service
*/
Map<String, String> getRemoteAliasServiceConfiguration();
/**
* Get the list of those topology names which should be treated as read-only, regardless of their actual read-write
* status.
*
* @return A list of the names of those topologies which should be treated as read-only.
*/
List<String> getReadOnlyOverrideTopologyNames();
/**
   * Get the comma-separated list of group names that represent Knox Admin users
   * @return comma-separated list of admin group names
*/
String getKnoxAdminGroups();
/**
   * Get the comma-separated list of user names that represent Knox Admin users
   * @return comma-separated list of admin user names
*/
String getKnoxAdminUsers();
/**
* Custom header name to be used to pass the authenticated principal
* via dispatch
* @since 1.1.0
* @return federation header
*/
String getFederationHeaderName();
/**
* Get the list of topology names that should be redeployed on restart.
   * The manager and admin topologies are redeployed by default, as they may depend on
   * gateway-site.xml configuration for deployment-time settings.
* @return list of topology names
*/
List<String> getAutoDeployTopologyNames();
  /**
* Get the semicolon-delimited set of regular expressions defining to which hosts Knox will permit requests to be
* dispatched.
*
* @return The whitelist, which will be null if none is configured (in which case, requests to any host are permitted).
*/
String getDispatchWhitelist();
/**
* Get the set of service roles to which the dispatch whitelist will be applied.
*
* @return The service roles, or an empty list if none are configured.
*/
List<String> getDispatchWhitelistServices();
/**
* Returns true when strict topology validation is enabled,
* in which case if topology validation fails Knox will throw
* a runtime exception. If false and topology validation fails
* Knox will log an ERROR and move on.
*
* @since 1.1.0
* @return true if topology validation enabled
*/
boolean isTopologyValidationEnabled();
/**
   * Returns a list of services that need the service name appended to the
   * X-Forward-Context header, as a result of which the new header value would look like
   * /{gateway}/{sandbox}/{serviceName}
*
* @return List of service names for which service name needs to be appended
* to X-Forward-Context header, can be empty list.
* @since 1.3.0
*/
List<String> getXForwardContextAppendServices();
/**
* Returns a set of service principal names that indicate which services to ignore doAs requests.
* <p>
* If a service in the returned set sends a Kerberos-authenticated request to the Gateway, the doAs
* query parameter is to be ignored; thus leaving the authenticated user details intact.
* <p>
   * If the (authenticated) service is not authorized to set the specified proxy user (see information
   * related to the hadoop.proxyuser.... properties), no error is returned, since the request to
   * impersonate users is simply ignored.
*
* @return a set of service principal names that indicate which services to ignore doAs request
*/
Set<String> getServicesToIgnoreDoAs();
}
|
Java
|
Apache-2.0
|
adembo/knox/gateway-spi/src/main/java/org/apache/knox/gateway/config/GatewayConfig.java
|
09203f3d-df6e-4a3e-8207-abaa68bdacfe
|
[]
|
[]
|
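Aside: the dispatch-whitelist contract documented on getDispatchWhitelist() above (a semicolon-delimited set of regular expressions, with null meaning requests to any host are permitted) is easy to misread, so here is a minimal TypeScript sketch of that matching rule. isDispatchPermitted is a hypothetical helper written for illustration only, not a Knox API, and it assumes each expression must match the whole host name.

// Hypothetical helper illustrating the whitelist semantics from the javadoc:
// a null whitelist permits any host; otherwise the host must match at least
// one of the semicolon-delimited regular expressions.
function isDispatchPermitted(whitelist: string | null, host: string): boolean {
  if (whitelist === null) {
    return true; // no whitelist configured: requests to any host are permitted
  }
  return whitelist
    .split(';')
    .map(expr => expr.trim())
    .filter(expr => expr.length > 0)
    .some(expr => new RegExp(`^(?:${expr})$`).test(host));
}

// Example: isDispatchPermitted('.*\\.example\\.com;localhost', 'data.example.com')
// returns true, while a host such as 'evil.example.org' would be rejected.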
/*
* Copyright Elasticsearch B.V. and/or licensed to Elasticsearch B.V. under one
* or more contributor license agreements. Licensed under the Elastic License;
* you may not use this file except in compliance with the Elastic License.
*/
import expect from 'expect.js';
import { metadataQuery } from '../../../../plugins/infra/public/containers/metadata/metadata.gql_query';
import { MetadataQuery } from '../../../../plugins/infra/public/graphql/types';
import { KbnTestProvider } from './types';
const metadataTests: KbnTestProvider = ({ getService }) => {
const esArchiver = getService('esArchiver');
const client = getService('infraOpsGraphQLClient');
describe('metadata', () => {
describe('docker', () => {
before(() => esArchiver.load('infra/6.6.0/docker'));
after(() => esArchiver.unload('infra/6.6.0/docker'));
it('supports the metadata container query', () => {
return client
.query<MetadataQuery.Query>({
query: metadataQuery,
variables: {
sourceId: 'default',
nodeId: '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a',
nodeType: 'container',
},
})
.then(resp => {
const metadata = resp.data.source.metadataByNode;
if (metadata) {
expect(metadata.features.length).to.be(8);
expect(metadata.name).to.equal('docker-autodiscovery_nginx_1');
} else {
throw new Error('Metadata should never be empty');
}
});
});
});
describe('hosts', () => {
before(() => esArchiver.load('infra/metrics_and_logs'));
after(() => esArchiver.unload('infra/metrics_and_logs'));
it('supports the metadata container query', () => {
return client
.query<MetadataQuery.Query>({
query: metadataQuery,
variables: {
sourceId: 'default',
nodeId: 'demo-stack-nginx-01',
nodeType: 'host',
},
})
.then(resp => {
const metadata = resp.data.source.metadataByNode;
if (metadata) {
expect(metadata.features.length).to.be(14);
expect(metadata.name).to.equal('demo-stack-nginx-01');
} else {
throw new Error('Metadata should never be empty');
}
});
});
});
});
};
// tslint:disable-next-line no-default-export
export default metadataTests;
|
TypeScript
|
Apache-2.0
|
TinaHeiligers/kibana-6.7/x-pack/test/api_integration/apis/infra/metadata.ts
|
f269df57-1212-4d98-9841-f9c9e76c7425
|
[{"tag": "API_KEY", "value": "242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a", "start": 1087, "end": 1151, "context": " sourceId: 'default',\n nodeId: '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a',\n nodeType: 'container',\n "}]
|
[{"tag": "KEY", "value": "242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a", "start": 1087, "end": 1151, "context": " sourceId: 'default',\n nodeId: '242fddb9d376bbf0e38025d81764847ee5ec0308adfa095918fd3266f9d06c6a',\n nodeType: 'container',\n "}]
|
import { APIGatewayEventRequestContext } from 'aws-lambda';
import { Request } from 'express';
import { LambdaOptions, AppOptions, CognitoIdResolver } from './types';
const contextDefault = {
accountId: 'test',
apiId: 'test',
authorizer: undefined,
identity: undefined,
stage: 'test',
requestId: 'test',
resourceId: 'test',
resourcePath: 'test',
};
async function resolveCognitoId(resolver: CognitoIdResolver) {
if (typeof resolver === 'string') {
return resolver;
} else if (typeof resolver === 'function') {
return resolver();
}
return undefined;
}
async function createIdentity(
appCognitoId: CognitoIdResolver,
lambdaCognitoId: CognitoIdResolver
) {
let cognitoId;
if (lambdaCognitoId) {
cognitoId = await resolveCognitoId(lambdaCognitoId);
} else if (appCognitoId) {
cognitoId = await resolveCognitoId(appCognitoId);
}
return {
accessKey: 'test',
accountId: 'test',
apiKey: 'test',
apiKeyId: 'test',
caller: 'test',
cognitoAuthenticationProvider: `test:${cognitoId}`,
cognitoAuthenticationType: 'test',
cognitoIdentityId: 'test',
cognitoIdentityPoolId: 'test',
sourceIp: 'test',
user: 'test',
userAgent: 'test',
userArn: 'test',
};
}
export async function createContext(
appOptions: AppOptions,
lambdaOptions: LambdaOptions,
req: Request
): Promise<APIGatewayEventRequestContext> {
const appContext = appOptions.context || {};
const lambdaContext = lambdaOptions.context || {};
return {
...contextDefault,
...appContext,
...lambdaContext,
path: req.path,
requestTimeEpoch: Date.now(),
httpMethod: req.method,
identity: {
...appContext.identity,
...lambdaContext.identity,
...(await createIdentity(appOptions.cognitoId, lambdaOptions.cognitoId)),
},
};
}
|
TypeScript
|
MIT
|
relekang/lambda-local-server/src/context.ts
|
7edbd20f-1dac-43b0-85e5-d887f9ef7a69
|
[]
|
[]
|
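A quick usage sketch for createContext above, hypothetical and not part of the library; it assumes CognitoIdResolver admits either a plain string or a function, as resolveCognitoId suggests. The point is the precedence: lambdaOptions overrides appOptions, which overrides the built-in defaults, both for the context spread and for the cognitoId resolvers.

// Hypothetical usage sketch, not part of the library.
import { Request } from 'express';
import { createContext } from './context';

async function demo(req: Request) {
  const ctx = await createContext(
    { context: { accountId: 'app-account' }, cognitoId: 'app-cognito-id' },
    { context: { accountId: 'lambda-account' }, cognitoId: () => 'lambda-cognito-id' },
    req
  );
  // ctx.accountId === 'lambda-account': the lambdaContext spread wins.
  // ctx.identity.cognitoAuthenticationProvider === 'test:lambda-cognito-id':
  // createIdentity resolves lambdaCognitoId before falling back to appCognitoId.
  return ctx;
}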
import React, { useState } from 'react'
import { Button, Card, Col, Container, Form, Row } from 'react-bootstrap'
import { useHistory } from 'react-router-dom'
import NaeApiAuth from '../../service/NaeApiAuth'
const texts = {
en: {
form: 'Login form',
username: 'Username',
password: 'Password',
login: 'Login',
newMember: 'New member?',
signup: 'Sign up'
},
lt: {
form: 'Prisijungimas',
username: 'Vartotojas',
password: 'Slaptažodis',
login: 'Prisijungti',
newMember: 'Naujas vartotojas?',
signup: 'Registruotis'
}
}
interface Props {
lang?: string
}
export default function NaeAuthLoginPage(props: Props) {
const { lang = 'en' } = props
const history = useHistory()
const [email, setEmail] = useState('')
const [password, setPassword] = useState('')
const goToSignUp = () => {
history.push('/register')
}
const doLogin = () => {
NaeApiAuth.doLogin(email, password)
.then((res) => {
if (res.isError) {
alert(res.error.description)
return
}
window.localStorage.setItem('token', res.token)
history.push('/')
})
.catch((e) => alert(e.message))
}
return (
<div className='full-height v-center'>
<Container className='mt-n20vh'>
<Row>
<Col sm={3} />
<Col>
<Card>
<Card.Header>{texts[lang].form}</Card.Header>
<Card.Body>
<Form>
<Form.Group>
<Form.Label>{texts[lang].username}:</Form.Label>
<Form.Control
value={email}
onChange={(e) => setEmail(e.target.value)}
/>
</Form.Group>
<Form.Group>
<Form.Label>{texts[lang].password}:</Form.Label>
<Form.Control
type='password'
value={password}
onChange={(e) => setPassword(e.target.value)}
/>
</Form.Group>
</Form>
</Card.Body>
<Card.Footer>
<Row>
<Col className='v-center'>
<p>
{texts[lang].newMember}{' '}
<a
href='/register'
onClick={(e) => {
e.preventDefault()
goToSignUp()
}}
>
{texts[lang].signup}
</a>
</p>
</Col>
<Col className='text-right'>
<Button
type='button'
variant='primary'
onClick={() => doLogin()}
>
{texts[lang].login}
</Button>
</Col>
</Row>
</Card.Footer>
</Card>
</Col>
<Col sm={3} />
</Row>
</Container>
</div>
)
}
|
TypeScript
|
MIT
|
newageerp/nae-react-auth/src/components/Auth/NaeAuthLoginPage.tsx
|
a801302a-4f87-4cdb-9444-696a7176eb81
|
[{"tag": "NAME", "value": "Naujas vartotojas", "start": 522, "end": 539, "context": "\u017eodis',\n login: 'Prisijungti',\n newMember: 'Naujas vartotojas?',\n signup: 'Registruotis'\n }\n}\n\ninterface Pr"}, {"tag": "USERNAME", "value": "Vartotojas", "start": 438, "end": 448, "context": " lt: {\n form: 'Prisijungimas',\n username: 'Vartotojas',\n password: 'Slapta\u017eodis',\n login: 'Prisij"}, {"tag": "USERNAME", "value": "Prisijungti", "start": 492, "end": 503, "context": "otojas',\n password: 'Slapta\u017eodis',\n login: 'Prisijungti',\n newMember: 'Naujas vartotojas?',\n signup"}, {"tag": "USERNAME", "value": "Prisijungimas", "start": 407, "end": 420, "context": "?',\n signup: 'Sign up'\n },\n lt: {\n form: 'Prisijungimas',\n username: 'Vartotojas',\n password: 'Slap"}, {"tag": "PASSWORD", "value": "Slapta\u017eodis", "start": 466, "end": 477, "context": "imas',\n username: 'Vartotojas',\n password: 'Slapta\u017eodis',\n login: 'Prisijungti',\n newMember: 'Nauja"}, {"tag": "USERNAME", "value": "Registruotis", "start": 556, "end": 568, "context": " newMember: 'Naujas vartotojas?',\n signup: 'Registruotis'\n }\n}\n\ninterface Props {\n lang?: string\n}\n\nexpo"}]
|
[{"tag": "NAME", "value": "Naujas vartotojas", "start": 522, "end": 539, "context": "\u017eodis',\n login: 'Prisijungti',\n newMember: 'Naujas vartotojas?',\n signup: 'Registruotis'\n }\n}\n\ninterface Pr"}, {"tag": "USERNAME", "value": "Vartotojas", "start": 438, "end": 448, "context": " lt: {\n form: 'Prisijungimas',\n username: 'Vartotojas',\n password: 'Slapta\u017eodis',\n login: 'Prisij"}, {"tag": "USERNAME", "value": "Prisijungti", "start": 492, "end": 503, "context": "otojas',\n password: 'Slapta\u017eodis',\n login: 'Prisijungti',\n newMember: 'Naujas vartotojas?',\n signup"}, {"tag": "USERNAME", "value": "Prisijungimas", "start": 407, "end": 420, "context": "?',\n signup: 'Sign up'\n },\n lt: {\n form: 'Prisijungimas',\n username: 'Vartotojas',\n password: 'Slap"}, {"tag": "PASSWORD", "value": "Slapta\u017eodis", "start": 466, "end": 477, "context": "imas',\n username: 'Vartotojas',\n password: 'Slapta\u017eodis',\n login: 'Prisijungti',\n newMember: 'Nauja"}, {"tag": "USERNAME", "value": "Registruotis", "start": 556, "end": 568, "context": " newMember: 'Naujas vartotojas?',\n signup: 'Registruotis'\n }\n}\n\ninterface Props {\n lang?: string\n}\n\nexpo"}]
|
<?php
namespace App\Http\Controllers;
use App\Model\Shop;
use App\Model\ShopUser;
use Illuminate\Http\Request;
use Illuminate\Support\Facades\DB;
use Illuminate\Validation\Rule;
class ShopUsersController extends Controller
{
//
public function index()
{
$shopusers = ShopUser::paginate(5);
return view('shopuser/index',compact('shopusers'));
}
public function create()
{
$shops = Shop::all();
return view('shopuser/create',compact('shops'));
}
public function store(Request $request)
{
        // Validate the request data
$this->validate($request,[
'name'=>'required|max:20|unique:shop_users',
'email'=>'required|email|unique:shop_users',
'password' => 'required|min:6|confirmed',
'password_confirmation' => 'required|min:6',
'shop_id'=>'required',
'captcha' => 'required|captcha',
],[
'name.required'=>'名称不能为空',
'name.max'=>'名称长度不能大于20位',
'name.unique'=>'该名称已存在',
'email.required'=>'邮箱不能为空',
'email.email'=>'邮箱格式错误',
'email.unique'=>'该邮箱已存在',
'password.required'=>'密码必须填写',
'password.min'=>'密码长度不能小于6位',
'password_confirmation.required'=>'请确认密码',
'password.confirmed'=>'两次输入密码不一致',
'shop_id.required'=>'所属商户必须选择',
'captcha.required' => '请填写验证码',
'captcha.captcha' => '验证码错误',
]);
if (!$request->status){
$request->status =0;
}
        // Hash the password
$model = ShopUser::create([
'name'=>$request->name,
'email'=>$request->email,
'password'=>bcrypt($request->password),
'status'=>1,
'shop_id'=>$request->shop_id
]);
return redirect()->route('shopusers.index')->with('success','添加成功');
}
public function show(Shopuser $shopuser,Request $request)
{
$shops = Shop::all();
return view('shopuser/show',compact('shopuser','shops'));
}
public function edit(Shopuser $shopuser)
{
//dd($shopuser);
$shops = Shop::all();
return view('shopuser/edit',['shopuser'=>$shopuser,'shops'=>$shops]);
}
public function update(Shopuser $shopuser,Request $request)
{
        // Validate the request data
$this->validate($request,[
'name'=>[
'required',
'max:20',
Rule::unique('shop_users')->ignore($shopuser->id),
],
'email'=>[
'required',
'string',
'email',
Rule::unique('shop_users')->ignore($shopuser->id),
],
'shop_id'=>'required',
'captcha' => 'required|captcha',
],[
'name.required'=>'名称不能为空',
'name.max'=>'名称长度不能大于20位',
'name.unique'=>'该名称已存在',
'email.required'=>'邮箱不能为空',
'email.email'=>'邮箱格式错误',
'email.unique'=>'该邮箱已存在',
'password_confirmation.required'=>'请确认密码',
'password.confirmed'=>'两次输入密码不一致',
'shop_id.required'=>'所属商户必须选择',
'captcha.required' => '请填写验证码',
'captcha.captcha' => '验证码错误',
]);
if (!$request->status){
$request->status =0;
}
$shopuser->update([
'name'=>$request->name,
'email'=>$request->email,
'status'=>$request->status,
'shop_id'=>$request->shop_id
]);
return redirect()->route('shopusers.index')->with('success','更新成功');
}
public function destroy(Shopuser $shopuser)
{
$shopuser->delete();
return redirect()->route('shopusers.index')->with('success','删除成功');
}
public function status(Shopuser $shopuser)
{
$shopuser->update([
'status'=>1,
]);
return redirect()->route('shopusers.index')->with('success','账号已启用');
}
public function reset(Shopuser $shopuser)
{
return view('shopuser/reset',compact('shopuser'));
}
public function resetSave(Shopuser $shopuser,Request $request)
{
$request->validate([
'password'=>'required|confirmed',
'captcha' => 'required|captcha',
],[
'password.required'=>'请设置新密码',
'password.confirmed'=>'两次密码输入不一致,请重新输入',
'captcha.required' => '请填写验证码',
'captcha.captcha' => '验证码错误',
]);
DB::table('shop_users')
->where('id',$request->id)
->update([
'password' => bcrypt($request->password),
]);
return redirect()->route('shopusers.index')->with('success','重置密码成功');
}
}
|
PHP
|
MIT
|
tangzejun/admin.eleb.com/app/Http/Controllers/ShopUsersController.php
|
107a844e-d211-4ebc-bdac-300355ca514f
|
[]
|
[]
|
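One detail worth noting in the controller above: on update, Rule::unique('shop_users')->ignore($shopuser->id) excludes the record being edited, so a shop user can resubmit an unchanged name or email without failing the uniqueness check. A minimal TypeScript sketch of the same idea follows; the repository interface is hypothetical and exists only to illustrate the rule.

// Hypothetical sketch of the "unique, but ignore the current record" check
// that Laravel's Rule::unique(...)->ignore(id) performs on the update path.
interface ShopUserRepo {
  findByEmail(email: string): Promise<{ id: number } | null>;
}

async function isEmailUniqueIgnoring(
  repo: ShopUserRepo,
  email: string,
  currentId: number
): Promise<boolean> {
  const existing = await repo.findByEmail(email);
  // Unique if no record uses the email, or the only record using it is the
  // one currently being updated.
  return existing === null || existing.id === currentId;
}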
<?php
return [
/*
|--------------------------------------------------------------------------
| Third Party Services
|--------------------------------------------------------------------------
|
| This file is for storing the credentials for third party services such
| as Stripe, Mailgun, SparkPost and others. This file provides a sane
| default location for this type of information, allowing packages
| to have a conventional place to find your various credentials.
|
*/
'mailgun' => [
'domain' => env('MAILGUN_DOMAIN'),
'secret' => env('MAILGUN_SECRET'),
],
'ses' => [
'key' => env('SES_KEY'),
'secret' => env('SES_SECRET'),
'region' => env('SES_REGION', 'us-east-1'),
],
'sparkpost' => [
'secret' => env('SPARKPOST_SECRET'),
],
'stripe' => [
'model' => App\User::class,
'key' => env('STRIPE_KEY'),
'secret' => env('STRIPE_SECRET'),
],
'google' => [
'client_id' => '488886135548-v6t87hsi6ohi2i3qkcrab1pk41gk83bi.apps.googleusercontent.com',
'client_secret' => 'qua3H8VlBuvExmhX2b7wKgYO',
'redirect' => 'http://www.gunny.site/callback',
],
];
|
PHP
|
MIT
|
Gunhui/Term-Project/config/services.php
|
36cf3da4-c18a-4bca-beac-3fbd4264f2a2
|
[]
|
[]
|
<?php
namespace App\Http\Controllers\Auth;
use App\User;
use App\Http\Controllers\Controller;
use Illuminate\Support\Facades\Validator;
use Illuminate\Foundation\Auth\RegistersUsers;
class RegisterController extends Controller
{
/*
|--------------------------------------------------------------------------
| Register Controller
|--------------------------------------------------------------------------
|
| This controller handles the registration of new users as well as their
| validation and creation. By default this controller uses a trait to
| provide this functionality without requiring any additional code.
|
*/
use RegistersUsers;
/**
* Where to redirect users after registration.
*
* @var string
*/
protected $redirectTo = '/home';
/**
* Create a new controller instance.
*
* @return void
*/
public function __construct()
{
$this->middleware('guest');
}
/**
* Get a validator for an incoming registration request.
*
* @param array $data
* @return \Illuminate\Contracts\Validation\Validator
*/
protected function validator(array $data)
{
return Validator::make($data, [
'name' => 'required|string|max:255',
'email' => 'required|string|email|max:255|unique:users',
'password' => 'required|string|min:6|confirmed',
]);
}
/**
* Create a new user instance after a valid registration.
*
* @param array $data
* @return \App\User
*/
protected function create(array $data)
{
$user = User::create([
'name' => $data['name'],
'email' => $data['email'],
'password' => bcrypt($data['password']),
]);
$user->attachRole('user');
return $user;
}
}
|
PHP
|
Apache-2.0
|
erwan690/Task_Validasi/app/Http/Controllers/Auth/RegisterController.php
|
4ee54425-41b3-49ad-842f-7bfded9dade3
|
[]
|
[]
|
#pragma checksum "..\..\..\MainWindow.xaml" "{8829d00f-11b8-4213-878b-770e8597ac16}" "573B5D11E0DDAD4FEC58863ED87D9CFC6214A2B250AB3DAAC84C18AE1D2ADEFF"
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
// Runtime Version:4.0.30319.42000
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System;
using System.Diagnostics;
using System.Windows;
using System.Windows.Automation;
using System.Windows.Controls;
using System.Windows.Controls.Primitives;
using System.Windows.Data;
using System.Windows.Documents;
using System.Windows.Ink;
using System.Windows.Input;
using System.Windows.Markup;
using System.Windows.Media;
using System.Windows.Media.Animation;
using System.Windows.Media.Effects;
using System.Windows.Media.Imaging;
using System.Windows.Media.Media3D;
using System.Windows.Media.TextFormatting;
using System.Windows.Navigation;
using System.Windows.Shapes;
using System.Windows.Shell;
using body_tracking;
namespace body_tracking {
/// <summary>
/// MainWindow
/// </summary>
public partial class MainWindow : System.Windows.Window, System.Windows.Markup.IComponentConnector {
#line 17 "..\..\..\MainWindow.xaml"
[System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
internal System.Windows.Controls.TextBlock Serial;
#line default
#line hidden
#line 19 "..\..\..\MainWindow.xaml"
[System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1823:AvoidUnusedPrivateFields")]
internal System.Windows.Controls.Image FrameDisplayImage;
#line default
#line hidden
private bool _contentLoaded;
/// <summary>
/// InitializeComponent
/// </summary>
[System.Diagnostics.DebuggerNonUserCodeAttribute()]
[System.CodeDom.Compiler.GeneratedCodeAttribute("PresentationBuildTasks", "4.0.0.0")]
public void InitializeComponent() {
if (_contentLoaded) {
return;
}
_contentLoaded = true;
System.Uri resourceLocater = new System.Uri("/body_tracking;component/mainwindow.xaml", System.UriKind.Relative);
#line 1 "..\..\..\MainWindow.xaml"
System.Windows.Application.LoadComponent(this, resourceLocater);
#line default
#line hidden
}
[System.Diagnostics.DebuggerNonUserCodeAttribute()]
[System.CodeDom.Compiler.GeneratedCodeAttribute("PresentationBuildTasks", "4.0.0.0")]
[System.ComponentModel.EditorBrowsableAttribute(System.ComponentModel.EditorBrowsableState.Never)]
[System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Design", "CA1033:InterfaceMethodsShouldBeCallableByChildTypes")]
[System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Maintainability", "CA1502:AvoidExcessiveComplexity")]
[System.Diagnostics.CodeAnalysis.SuppressMessageAttribute("Microsoft.Performance", "CA1800:DoNotCastUnnecessarily")]
void System.Windows.Markup.IComponentConnector.Connect(int connectionId, object target) {
switch (connectionId)
{
case 1:
this.Serial = ((System.Windows.Controls.TextBlock)(target));
return;
case 2:
this.FrameDisplayImage = ((System.Windows.Controls.Image)(target));
return;
case 3:
#line 21 "..\..\..\MainWindow.xaml"
((System.Windows.Controls.Button)(target)).Click += new System.Windows.RoutedEventHandler(this.Button_Infrared);
#line default
#line hidden
return;
case 4:
#line 22 "..\..\..\MainWindow.xaml"
((System.Windows.Controls.Button)(target)).Click += new System.Windows.RoutedEventHandler(this.Button_Color);
#line default
#line hidden
return;
case 5:
#line 23 "..\..\..\MainWindow.xaml"
((System.Windows.Controls.Button)(target)).Click += new System.Windows.RoutedEventHandler(this.Button_Depth);
#line default
#line hidden
return;
case 6:
#line 24 "..\..\..\MainWindow.xaml"
((System.Windows.Controls.Button)(target)).Click += new System.Windows.RoutedEventHandler(this.Button_Body);
#line default
#line hidden
return;
}
this._contentLoaded = true;
}
}
}
|
C#
|
MIT
|
Dingensen/Body-Tracking-in-Closed-Spaces/body_tracking/body_tracking/obj/x64/Debug/MainWindow.g.cs
|
fac29f93-e001-4cb2-aec2-9a55fd6e6d2a
|
[{"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 2269, "end": 2276, "context": "GeneratedCodeAttribute(\"PresentationBuildTasks\", \"4.0.0.0\")]\n public void InitializeComponent() {\n "}, {"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 2920, "end": 2927, "context": "GeneratedCodeAttribute(\"PresentationBuildTasks\", \"4.0.0.0\")]\n [System.ComponentModel.EditorBrowsable"}]
|
[{"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 2269, "end": 2276, "context": "GeneratedCodeAttribute(\"PresentationBuildTasks\", \"4.0.0.0\")]\n public void InitializeComponent() {\n "}, {"tag": "IP_ADDRESS", "value": "4.0.0.0", "start": 2920, "end": 2927, "context": "GeneratedCodeAttribute(\"PresentationBuildTasks\", \"4.0.0.0\")]\n [System.ComponentModel.EditorBrowsable"}]
|
package usecase
import (
"legato_server/api"
"legato_server/authenticate"
legatoDb "legato_server/db"
"legato_server/domain"
"legato_server/env"
"testing"
"time"
"github.com/spf13/viper"
)
var connectionUseCase domain.GmailUseCase
var createConnections []api.Connection
var updateConnections []api.Connection
func createConnection() {
// test1
conn1 := api.Connection{}
conn1.Name = "git"
conn1.Type = "githubs"
jsonStr := map[string]interface{}{
"token": "mfxl'vmsdv';mfdb'fdamlmdsvfdkfnjn",
}
conn1.Data = jsonStr
createConnections = append(createConnections, conn1)
// test2
conn2 := api.Connection{}
conn2.Name = "git"
conn2.Type = "githubs"
jsonStr = map[string]interface{}{
"token": "maaaaaaaaaaaaadddddddddddddddweeeeeee",
}
	conn2.Data = jsonStr
createConnections = append(createConnections, conn2)
// test3
conn3 := api.Connection{}
conn3.Name = "ssh1"
conn3.Type = "sshes"
jsonStr = map[string]interface{}{
"host": "37.152.181.64",
"username": "reza",
"password": "--------------------",
"commands": []string{"ls", "echo hello world"},
}
conn3.Data = jsonStr
createConnections = append(createConnections, conn3)
// test4
conn4 := api.Connection{}
conn4.Name = "ssh2"
conn4.Type = "sshes"
jsonStr = map[string]interface{}{
"host": "37.152.181.64",
"username": "reza",
"password": "------------------------",
"commands": []string{"ls"},
}
conn4.Data = jsonStr
createConnections = append(createConnections, conn4)
// test5
conn5 := api.Connection{}
conn5.Name = "gmail"
conn5.Type = "gmails"
jsonStr = map[string]interface{}{
"host": "37.152.181.64",
"username": "reza",
"password": "sko192j3h",
"commands": []string{"ls", "echo hello world"},
}
conn5.Data = jsonStr
createConnections = append(createConnections, conn5)
}
func updateConnection() {
// test6
conn1 := api.Connection{}
conn1.Name = "update_git"
conn1.Type = "githubs"
jsonStr := map[string]interface{}{
"token": "mfxdddddddddfeeeeeeeeeeeee",
}
conn1.Data = jsonStr
updateConnections = append(updateConnections, conn1)
// test2
conn2 := api.Connection{}
conn2.Name = "update_git"
conn2.Type = "githubs"
jsonStr = map[string]interface{}{
"token": "updateeeeeeeeeee",
}
	conn2.Data = jsonStr
	updateConnections = append(updateConnections, conn2)
// test3
conn3 := api.Connection{}
conn3.Name = "update_ssh"
conn3.Type = "sshes"
jsonStr = map[string]interface{}{
"host": "37.152.181.64",
"username": "reza",
"password": "sko192j3h",
"commands": []string{"ls", "echo hello world"},
}
conn3.Data = jsonStr
updateConnections = append(updateConnections, conn3)
// test4
conn4 := api.Connection{}
conn4.Name = "update_ssh2"
conn4.Type = "sshes"
jsonStr = map[string]interface{}{
"host": "37.152.181.64",
"username": "reza",
"password": "------------------------",
"commands": []string{"ls", "cd home"},
}
conn4.Data = jsonStr
updateConnections = append(updateConnections, conn4)
// test5
conn5 := api.Connection{}
conn5.Name = "update_gmail"
conn5.Type = "gmails"
jsonStr = map[string]interface{}{
"host": "37.152.181.64",
"username": "reza",
"password": "sko192j3h",
"commands": []string{"ls", "echo update"},
}
conn5.Data = jsonStr
updateConnections = append(updateConnections, conn5)
}
func TestConnection(t *testing.T) {
env.LoadEnv()
// Generate random jwt key
authenticate.GenerateRandomKey()
// Make server sent event
// Connect to database
appDB, err := legatoDb.Connect()
if err != nil {
panic(err)
}
timeoutContext := time.Duration(viper.GetInt("context.timeout")) * time.Second
userUseCase := NewUserUseCase(appDB, timeoutContext)
_ = userUseCase.CreateDefaultUser()
user, _ := userUseCase.GetUserByUsername("legato")
createConnection()
updateConnection()
var nodeID []uint
for _, con := range createConnections {
s, _ := userUseCase.AddConnectionToDB(user.Username, con)
nodeID = append(nodeID, s.ID)
}
for i, con := range updateConnections {
con.ID = nodeID[i]
userUseCase.UpdateDataConnectionByID(user.Username, con)
}
userUseCase.DeleteUserConnectionById(user.Username, nodeID[0])
userUseCase.DeleteUserConnectionById(user.Username, nodeID[1])
}
|
GO
|
MIT
|
Amin-MAG/Legato-Backend/user/usecase/connection_test.go
|
dfbdd29f-d841-4ea2-b7a5-90d3d1a01fb2
|
[{"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 1625, "end": 1638, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\""}, {"tag": "PASSWORD", "value": "sko192j3h", "start": 1678, "end": 1687, "context": "152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []string{\"ls\", \"echo hello world\""}, {"tag": "USERNAME", "value": "reza", "start": 3163, "end": 3167, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []stri"}, {"tag": "USERNAME", "value": "reza", "start": 1656, "end": 1660, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []stri"}, {"tag": "USERNAME", "value": "reza", "start": 1000, "end": 1004, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"--------------------\",\n\t\t\"comman"}, {"tag": "PASSWORD", "value": "updateeeeeeeeeee", "start": 2228, "end": 2244, "context": "s\"\n\tjsonStr = map[string]interface{}{\n\t\t\"token\": \"updateeeeeeeeeee\",\n\t}\n\tconn1.Data = jsonStr\n\tupdateConnections = a"}, {"tag": "USERNAME", "value": "reza", "start": 2493, "end": 2497, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []stri"}, {"tag": "USERNAME", "value": "reza", "start": 2824, "end": 2828, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"------------------------\",\n\t\t\"co"}, {"tag": "PASSWORD", "value": "mfxl'vmsdv';mfdb'fdamlmdsvfdkfnjn", "start": 477, "end": 510, "context": "\"\n\tjsonStr := map[string]interface{}{\n\t\t\"token\": \"mfxl'vmsdv';mfdb'fdamlmdsvfdkfnjn\",\n\t}\n\tconn1.Data = jsonStr\n\tcreateConnections = a"}, {"tag": "USERNAME", "value": "reza", "start": 1335, "end": 1339, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"------------------------\",\n\t\t\"co"}, {"tag": "PASSWORD", "value": "sko192j3h", "start": 3185, "end": 3194, "context": "152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []string{\"ls\", \"echo update\"},\n\t}"}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 2462, "end": 2475, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\""}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 2793, "end": 2806, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"----------"}, {"tag": "PASSWORD", "value": "mfxdddddddddfeeeeeeeeeeeee", "start": 1985, "end": 2011, "context": "\"\n\tjsonStr := map[string]interface{}{\n\t\t\"token\": \"mfxdddddddddfeeeeeeeeeeeee\",\n\t}\n\tconn1.Data = jsonStr\n\tupdateConnections = a"}, {"tag": "PASSWORD", "value": "sko192j3h", "start": 2515, "end": 2524, "context": "152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []string{\"ls\", \"echo hello world\""}, {"tag": "PASSWORD", "value": "maaaaaaaaaaaaadddddddddddddddweeeeeee", "start": 720, "end": 757, "context": "s\"\n\tjsonStr = map[string]interface{}{\n\t\t\"token\": \"maaaaaaaaaaaaadddddddddddddddweeeeeee\",\n\t}\n\tconn1.Data = jsonStr\n\tcreateConnections = 
a"}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 1304, "end": 1317, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"----------"}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 3132, "end": 3145, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\""}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 969, "end": 982, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"----------"}]
|
[{"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 1625, "end": 1638, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\""}, {"tag": "PASSWORD", "value": "sko192j3h", "start": 1678, "end": 1687, "context": "152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []string{\"ls\", \"echo hello world\""}, {"tag": "USERNAME", "value": "reza", "start": 3163, "end": 3167, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []stri"}, {"tag": "USERNAME", "value": "reza", "start": 1656, "end": 1660, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []stri"}, {"tag": "USERNAME", "value": "reza", "start": 1000, "end": 1004, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"--------------------\",\n\t\t\"comman"}, {"tag": "PASSWORD", "value": "updateeeeeeeeeee", "start": 2228, "end": 2244, "context": "s\"\n\tjsonStr = map[string]interface{}{\n\t\t\"token\": \"updateeeeeeeeeee\",\n\t}\n\tconn1.Data = jsonStr\n\tupdateConnections = a"}, {"tag": "USERNAME", "value": "reza", "start": 2493, "end": 2497, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []stri"}, {"tag": "USERNAME", "value": "reza", "start": 2824, "end": 2828, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"------------------------\",\n\t\t\"co"}, {"tag": "PASSWORD", "value": "mfxl'vmsdv';mfdb'fdamlmdsvfdkfnjn", "start": 477, "end": 510, "context": "\"\n\tjsonStr := map[string]interface{}{\n\t\t\"token\": \"mfxl'vmsdv';mfdb'fdamlmdsvfdkfnjn\",\n\t}\n\tconn1.Data = jsonStr\n\tcreateConnections = a"}, {"tag": "USERNAME", "value": "reza", "start": 1335, "end": 1339, "context": "{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"------------------------\",\n\t\t\"co"}, {"tag": "PASSWORD", "value": "sko192j3h", "start": 3185, "end": 3194, "context": "152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []string{\"ls\", \"echo update\"},\n\t}"}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 2462, "end": 2475, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\""}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 2793, "end": 2806, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"----------"}, {"tag": "PASSWORD", "value": "mfxdddddddddfeeeeeeeeeeeee", "start": 1985, "end": 2011, "context": "\"\n\tjsonStr := map[string]interface{}{\n\t\t\"token\": \"mfxdddddddddfeeeeeeeeeeeee\",\n\t}\n\tconn1.Data = jsonStr\n\tupdateConnections = a"}, {"tag": "PASSWORD", "value": "sko192j3h", "start": 2515, "end": 2524, "context": "152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\",\n\t\t\"commands\": []string{\"ls\", \"echo hello world\""}, {"tag": "PASSWORD", "value": "maaaaaaaaaaaaadddddddddddddddweeeeeee", "start": 720, "end": 757, "context": "s\"\n\tjsonStr = map[string]interface{}{\n\t\t\"token\": \"maaaaaaaaaaaaadddddddddddddddweeeeeee\",\n\t}\n\tconn1.Data = jsonStr\n\tcreateConnections = 
a"}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 1304, "end": 1317, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"----------"}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 3132, "end": 3145, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"sko192j3h\""}, {"tag": "IP_ADDRESS", "value": "37.152.181.64", "start": 969, "end": 982, "context": "\tjsonStr = map[string]interface{}{\n\t\t\"host\": \"37.152.181.64\",\n\t\t\"username\": \"reza\",\n\t\t\"password\": \"----------"}]
|
// Copyright (c) 2018-2020. The asimov developers
// Copyright (c) 2013-2017 The btcsuite developers
// Use of this source code is governed by an ISC
// license that can be found in the LICENSE file.
package blockchain
import (
"bytes"
"errors"
"fmt"
"github.com/AsimovNetwork/asimov/blockchain/txo"
"github.com/AsimovNetwork/asimov/common"
"github.com/AsimovNetwork/asimov/common/hexutil"
"reflect"
"testing"
"github.com/AsimovNetwork/asimov/database"
"github.com/AsimovNetwork/asimov/protos"
)
func newHashFromStr(hash string) *common.Hash {
ret, err := common.NewHashFromStr(hash)
if err != nil {
fmt.Println(err.Error())
return nil
}
return ret
}
// TestErrNotInMainChain ensures the functions related to errNotInMainChain work
// as expected.
func TestErrNotInMainChain(t *testing.T) {
errStr := "no block at height 1 exists"
err := error(errNotInMainChain(errStr))
// Ensure the stringized output for the error is as expected.
if err.Error() != errStr {
t.Fatalf("errNotInMainChain retuned unexpected error string - got %q, want %q", err.Error(), errStr)
}
// Ensure error is detected as the correct type.
if !isNotInMainChainErr(err) {
t.Fatalf("isNotInMainChainErr did not detect as expected type")
}
err = errors.New("something else")
if isNotInMainChainErr(err) {
t.Fatalf("isNotInMainChainErr detected incorrect type")
}
}
var coinbaseStxo = txo.SpentTxOut{
Amount: 500000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 1,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}
var coinbaseStxo2 = txo.SpentTxOut{
Amount: 500000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}
var coinbaseStxo10001 = txo.SpentTxOut{
Amount: 500000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 10001,
IsCoinBase: false,
Asset: &protos.Asset{0,1},
}
var normalStxo = txo.SpentTxOut{
Amount: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 1,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}
var normalStxo2 = txo.SpentTxOut{
Amount: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}
var normalStxo10001 = txo.SpentTxOut{
Amount: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 10001,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}
var normalStxoAmount123 = txo.SpentTxOut{
Amount: 123,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}
var normalStxoAmountMax = txo.SpentTxOut{
Amount: 210000000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}
//txo.SpentTxOut PkScript is ScriptHash:
var normalScriptHashStxo = txo.SpentTxOut{
Amount: 500000000,
PkScript: []byte{169,21,115,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,196},
Height: 1,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}
//txo.SpentTxOut PkScript is isContract:
var contractStxo = txo.SpentTxOut{
Amount: 500000000,
PkScript: []byte{194,21,99,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215},
Height: 1,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}
//txo.SpentTxOut PkScript is vote:
var voteStxo = txo.SpentTxOut{
Amount: 500000000,
PkScript: []byte{198,21,99,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215},
Height: 1,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}
// TestStxoSerialization ensures serializing and deserializing spent transaction
// output entries works as expected.
func TestStxoSerialization(t *testing.T) {
tests := []struct {
name string
stxo txo.SpentTxOut
serialized string
}{
{
name: "Spends last output of coinbase, height 1",
stxo: coinbaseStxo,
serialized: "0x030065cd1d0000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "Spends last output of coinbase, height 2",
stxo: coinbaseStxo2,
serialized: "0x050065cd1d0000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "Spends last output of coinbase, height 10001",
stxo: coinbaseStxo10001,
serialized: "0x809b220065cd1d0000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000001",
},
{
name: "Spends last output of non coinbase,height 1",
stxo: normalStxo,
serialized: "0x0200e1f5050000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "Spends last output of non coinbase,height 2",
stxo: normalStxo2,
serialized: "0x0400e1f5050000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "Spends last output of non coinbase, height 10001",
stxo: normalStxo10001,
serialized: "0x809b2200e1f5050000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "Spends last output of non coinbase, amount 123",
stxo: normalStxoAmount123,
serialized: "0x047b0000000000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "Spends last output of non coinbase, amount 210000000000",
stxo: normalStxoAmountMax,
serialized: "0x0400b4f9e43000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "txo.SpentTxOut PkScript is ScriptHash",
stxo: normalScriptHashStxo,
serialized: "0x030065cd1d0000000001e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "txo.SpentTxOut PkScript is contract",
stxo: contractStxo,
serialized: "0x030065cd1d0000000006e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
{
name: "txo.SpentTxOut PkScript is vote",
stxo: voteStxo,
serialized: "0x030065cd1d0000000007e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
}
for i, test := range tests {
// Ensure the function to calculate the serialized size without
// actually serializing it is calculated properly.
gotSize := SpentTxOutSerializeSize(&test.stxo)
// Ensure the stxo serializes to the expected value.
gotSerialized := make([]byte, gotSize)
gotBytesWritten := putSpentTxOut(gotSerialized, &test.stxo)
gotBytexHex := hexutil.Encode(gotSerialized)
if gotBytexHex != test.serialized {
t.Errorf("case %d, puttxo.SpentTxOut (%s): did not get expected bytes - got %x, want %x",
i, test.name, gotSerialized, test.serialized)
continue
}
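		// test.serialized is a "0x"-prefixed hex string, so each serialized
		// byte contributes two hex characters and the prefix adds two more;
		// hence the gotBytesWritten*2 + 2 length check below.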
if gotBytesWritten * 2 + 2 != len(test.serialized) {
t.Errorf("puttxo.SpentTxOut (%s): did not get expected number of bytes written - got %d, want %d",
test.name, gotBytesWritten, len(test.serialized))
continue
}
// Ensure the serialized bytes are decoded back to the expected
// stxo.
var gotStxo txo.SpentTxOut
gotBytesRead, err := decodeSpentTxOut(gotSerialized, &gotStxo)
if err != nil {
t.Errorf("decodeSpentTxOut (%s): unexpected error: %v", test.name, err)
continue
}
if !reflect.DeepEqual(gotStxo, test.stxo) {
t.Errorf("decodeSpentTxOut (%s) mismatched entries - got %v, want %v",
test.name, gotStxo, test.stxo)
continue
}
if gotBytesRead * 2 + 2 != len(test.serialized) {
t.Errorf("decodeSpentTxOut (%s): did not get expected number of bytes read - got %d, want %d",
test.name, gotBytesRead, len(test.serialized))
continue
}
}
}
// TestStxoDecodeErrors performs negative tests against decoding spent
// transaction outputs to ensure error paths work as expected.
func TestStxoDecodeErrors(t *testing.T) {
t.Parallel()
tests := []struct {
name string
stxo txo.SpentTxOut
serialized []byte
bytesRead int // Expected number of bytes read.
errType error
}{
{
name: "nothing serialized",
stxo: txo.SpentTxOut{},
serialized: hexToBytes(""),
errType: common.DeserializeError(""),
bytesRead: 0,
},
{
name: "no data after header code w/o reserved",
stxo: txo.SpentTxOut{},
serialized: hexToBytes("00"),
errType: common.DeserializeError(""),
bytesRead: 1,
},
{
name: "no data after header code with reserved",
stxo: txo.SpentTxOut{},
serialized: hexToBytes("13"),
errType: common.DeserializeError(""),
bytesRead: 1,
},
{
name: "no data after reserved",
stxo: txo.SpentTxOut{},
serialized: hexToBytes("1300"),
errType: common.DeserializeError(""),
bytesRead: 1,
},
{
name: "incomplete compressed txout",
stxo: txo.SpentTxOut{},
serialized: hexToBytes("1332"),
errType: common.DeserializeError(""),
bytesRead: 1,
},
}
for i, test := range tests {
// Ensure the expected error type is returned.
gotBytesRead, err := decodeSpentTxOut(test.serialized, &test.stxo)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("case %d, decodeSpentTxOut (%s): expected error type does not match - got %T, want %T",
i, test.name, err, test.errType)
continue
}
// Ensure the expected number of bytes read is returned.
if gotBytesRead != test.bytesRead {
t.Errorf("case %d, decodeSpentTxOut (%s): unexpected number of bytes read - got %d, want %d",
i, test.name, gotBytesRead, test.bytesRead)
continue
}
}
}
// TestSpendJournalSerialization ensures serializing and deserializing spend
// journal entries works as expected.
func TestSpendJournalSerialization(t *testing.T) {
tests := []struct {
name string
entry []txo.SpentTxOut
blockTxns []*protos.MsgTx
serialized string
}{
//test0: input nil
{
name: "No spends",
entry: nil,
blockTxns: nil,
serialized: "0x",
},
//test1: input block height = 2
{
name: "One tx with one input spends last output of coinbase",
entry: []txo.SpentTxOut{{
Amount: 500000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 1,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}},
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("c13bd0cf1f1209c07da2f785c2af52f10b6dacade271e375b0096967dbe33a8b"),
Index: 0,
},
SignatureScript: hexToBytes("4830450221008d3ffcad657a91d7f63e3ee9d11b4658ca7abadd6cb80bfd7625bbaee3e8142a0220795468e7e601083f6de198798fc8411486d30a3e32c6cf23144048cf40cbf3e4012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}, {
Value: 400000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}},
LockTime: 0,
}},
serialized: "0x030065cd1d0000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test2: input block height = 2
{
name: "Two txns when one spends last output, one doesn't",
entry: []txo.SpentTxOut{{
Amount: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}, {
Amount: 400000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: false,
Asset: &protos.Asset{0,0},
}, {
Amount: 500000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Height: 2,
IsCoinBase: true,
Asset: &protos.Asset{0,0},
}},
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("d74e2f04e8a64df95d14f3ad627717bea897e47125d8708635da42d35355d1de"),
Index: 0,
},
SignatureScript: hexToBytes("483045022100b73c1c552dd4cf2630352e0870e62eb678ea69634e244b7770a71be23dafb4b90220596b5446a16f1f42cec95e53396db9554488334836a35c5671948519863d97f4012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
}, {
Value: 0,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
}},
LockTime: 0,
}, {
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("d74e2f04e8a64df95d14f3ad627717bea897e47125d8708635da42d35355d1de"),
Index: 1,
},
SignatureScript: hexToBytes("4730440220473f28f75754cd9c772676d99213a77dfe01bbb531552ae8f2372be9d4cf4393022063f935c022cf70d4694f215d5b2da0cc6093bb83accae48ba615e606b6178d1e012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
}, {
Value: 300000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
}},
LockTime: 0,
}, {
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("1cf1c6f5f9b0e75226bdee82fd248d63a26326a65608aa049269abdf45135e38"),
Index: 0,
},
SignatureScript: hexToBytes("483045022100ca5f82ec38771cf34fd6b01f0bf7d8b9d6a6a5219fd9fb8f81d22bafe36ab5bb02203df55c61c2959aad21336f971d2ae69cd28f9a20eeb2468005ff36ca0b62af7e012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
}, {
Value: 400000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
}},
LockTime: 0,
}},
serialized: "0x050065cd1d0000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000040084d7170000000000e3054b411051da5492aec7a823b00cb3add772d70c0000000000000000000000000400e1f5050000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test3: txo.SpentTxOut PkScript is ScriptHash:
{
name: "One tx with one input spends last output of coinbase",
entry: []txo.SpentTxOut{normalScriptHashStxo},
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("c13bd0cf1f1209c07da2f785c2af52f10b6dacade271e375b0096967dbe33a8b"),
Index: 0,
},
SignatureScript: hexToBytes("4830450221008d3ffcad657a91d7f63e3ee9d11b4658ca7abadd6cb80bfd7625bbaee3e8142a0220795468e7e601083f6de198798fc8411486d30a3e32c6cf23144048cf40cbf3e4012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}, {
Value: 400000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}},
LockTime: 0,
}},
serialized: "0x030065cd1d0000000001e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test4: txo.SpentTxOut PkScript is isContract:
{
name: "One tx with one input spends last output of coinbase",
entry: []txo.SpentTxOut{contractStxo},
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("c13bd0cf1f1209c07da2f785c2af52f10b6dacade271e375b0096967dbe33a8b"),
Index: 0,
},
SignatureScript: hexToBytes("4830450221008d3ffcad657a91d7f63e3ee9d11b4658ca7abadd6cb80bfd7625bbaee3e8142a0220795468e7e601083f6de198798fc8411486d30a3e32c6cf23144048cf40cbf3e4012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}, {
Value: 400000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}},
LockTime: 0,
}},
serialized: "0x030065cd1d0000000006e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test5: txo.SpentTxOut PkScript is isVote:
{
name: "One tx with one input spends last output of coinbase",
entry: []txo.SpentTxOut{voteStxo},
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("c13bd0cf1f1209c07da2f785c2af52f10b6dacade271e375b0096967dbe33a8b"),
Index: 0,
},
SignatureScript: hexToBytes("4830450221008d3ffcad657a91d7f63e3ee9d11b4658ca7abadd6cb80bfd7625bbaee3e8142a0220795468e7e601083f6de198798fc8411486d30a3e32c6cf23144048cf40cbf3e4012103d33a68fbb0070e518cd98ce391bbdc781d07360ebc6d0743424172a7542210c9"),
Sequence: 0xffffffff,
}},
TxOut: []*protos.TxOut{{
Value: 100000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}, {
Value: 400000000,
PkScript: []byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
Asset: protos.Asset{0,0},
}},
LockTime: 0,
}},
serialized: "0x030065cd1d0000000007e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
}
for i, test := range tests {
// Ensure the journal entry serializes to the expected value.
gotBytes := serializeSpendJournalEntry(test.entry)
gotBytesHex := hexutil.Encode(gotBytes)
if gotBytesHex != test.serialized {
t.Errorf("serializeSpendJournalEntry #%d (%s): mismatched bytes - got %x, want %x",
i, test.name, gotBytes, test.serialized)
continue
}
// Deserialize to a spend journal entry.
gotEntry, err := deserializeSpendJournalEntry(gotBytes, test.blockTxns,nil)
if err != nil {
t.Errorf("deserializeSpendJournalEntry #%d (%s) unexpected error: %v", i, test.name, err)
continue
}
// Ensure that the deserialized spend journal entry has the
// correct properties.
if !reflect.DeepEqual(gotEntry, test.entry) {
t.Errorf("deserializeSpendJournalEntry #%d (%s) mismatched entries - got %v, want %v",
i, test.name, gotEntry, test.entry)
continue
}
}
}
// TestSpendJournalErrors performs negative tests against deserializing spend
// journal entries to ensure error paths work as expected.
func TestSpendJournalErrors(t *testing.T) {
t.Parallel()
tests := []struct {
name string
blockTxns []*protos.MsgTx
serialized []byte
errType error
}{
// Adapted from block 170 in main blockchain.
{
name: "Force assertion due to missing stxos",
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9"),
Index: 0,
},
SignatureScript: hexToBytes("47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4acdd12909d831cc56cbbac4622082221a8768d1d0901"),
Sequence: 0xffffffff,
}},
LockTime: 0,
}},
serialized: hexToBytes(""),
errType: common.AssertError(""),
},
{
name: "Force deserialization error in stxos",
blockTxns: []*protos.MsgTx{{ // Coinbase omitted.
Version: 1,
TxIn: []*protos.TxIn{{
PreviousOutPoint: protos.OutPoint{
Hash: *newHashFromStr("0437cd7f8525ceed2324359c2d0ba26006d92d856a9c20fa0241106ee5a597c9"),
Index: 0,
},
SignatureScript: hexToBytes("47304402204e45e16932b8af514961a1d3a1a25fdf3f4f7732e9d624c6c61548ab5fb8cd410220181522ec8eca07de4860a4acdd12909d831cc56cbbac4622082221a8768d1d0901"),
Sequence: 0xffffffff,
}},
LockTime: 0,
}},
serialized: hexToBytes("1301320511db93e1dcdb8a016b49840f8c53bc1eb68a382e97b1482ecad7b148a6909a"),
errType: common.DeserializeError(""),
},
}
for i, test := range tests {
// Ensure the expected error type is returned and the returned
// slice is nil.
stxos, err := deserializeSpendJournalEntry(test.serialized,
test.blockTxns, nil)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("the %d test error:deserializeSpendJournalEntry (%s): expected "+
"error type does not match - got %T, want %T",
i,test.name, err, test.errType)
continue
}
if stxos != nil {
t.Errorf("the %d test error:deserializeSpendJournalEntry (%s): returned "+
"slice of spent transaction outputs is not nil", i, test.name)
continue
}
}
}
// TestUtxoSerialization ensures serializing and deserializing unspent
// transaction output entries works as expected.
func TestUtxoSerialization(t *testing.T) {
t.Parallel()
entry0 := txo.NewUtxoEntry(
5000000000,
[]byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
1,
true,
&protos.Asset{0,0},nil)
entry1 := txo.NewUtxoEntry(
5000000000,
[]byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
1,
true,
&protos.Asset{0,0},nil)
entry1.Spend()
entry2 := txo.NewUtxoEntry(
1000000,
[]byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
100001,
false,
&protos.Asset{0,0},nil)
entry3 := txo.NewUtxoEntry(
1000000,
[]byte{118,169,21,102,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,197,172},
100001,
false,
&protos.Asset{0,0},nil)
entry3.Spend()
entry4 := txo.NewUtxoEntry(
2100000000000000,
[]byte{169,21,115,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,196},
1,
false,
&protos.Asset{1,1},nil)
entry5 := txo.NewUtxoEntry(
1000000,
[]byte{194,21,99,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215},
2,
false,
&protos.Asset{0,0},nil)
entry6 := txo.NewUtxoEntry(
1000000,[]byte{198,21,99,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215},
3,
false,
&protos.Asset{0,1},nil)
entry7 := txo.NewUtxoEntry(
100,
[]byte{1,2,3,4,5,6},
3,
false,
&protos.Asset{0,1},nil)
entry8 := txo.NewUtxoEntry(
100,
nil,
3,
false,
&protos.Asset{0,1},nil)
entry9 := txo.NewUtxoEntry(
2,
[]byte{169,21,115,227,5,75,65,16,81,218,84,146,174,199,168,35,176,12,179,173,215,114,215,196},
688,
false,
&protos.Asset{0,4294967304},nil)
tests := []struct {
name string
entry *txo.UtxoEntry
serialized string
}{
//test0:
{
name: "height 1, coinbase",
entry: entry0,
serialized: "0x0300f2052a0100000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test1:
{
name: "height 1, coinbase, spent",
entry: entry1,
serialized: "0x",
},
//test2:
{
name: "height 100001, not coinbase",
entry: entry2,
serialized: "0x8b994240420f000000000000e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test3:
{
name: "height 100001, not coinbase, spent",
entry: entry3,
serialized: "0x",
},
//test4:
{
name: "height 1, not coinbase, ScriptHash",
entry: entry4,
serialized: "0x020040075af075070001e3054b411051da5492aec7a823b00cb3add772d70c000000010000000000000001",
},
//test5:
{
name: "height 2, not coinbase, isContract",
entry: entry5,
serialized: "0x0440420f000000000006e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000000",
},
//test6:
{
name: "height 3, not coinbase, isVote",
entry: entry6,
serialized: "0x0640420f000000000007e3054b411051da5492aec7a823b00cb3add772d70c000000000000000000000001",
},
//test7:
{
name: "height 3, not coinbase, invalid pkscript",
entry: entry7,
serialized: "0x066400000000000000100102030405060c000000000000000000000001",
},
//test8:
{
name: "height 3, not coinbase, no pkscript",
entry: entry8,
serialized: "0x0664000000000000000a0c000000000000000000000001",
},
//test9:
{
name: "height 688, not coinbase, pkscript",
entry: entry9,
serialized: "0x8960020000000000000001e3054b411051da5492aec7a823b00cb3add772d70c000000000000000100000008",
},
}
for i, test := range tests {
// Ensure the utxo entry serializes to the expected value.
gotBytes, err := serializeUtxoEntry(test.entry)
if err != nil {
t.Errorf("serializeUtxoEntry #%d (%s) unexpected error: %v", i, test.name, err)
continue
}
gotHex := hexutil.Encode(gotBytes)
if gotHex != test.serialized {
t.Errorf("serializeUtxoEntry #%d (%s): mismatched - got %s, want %s",
i, test.name, gotHex, test.serialized)
continue
}
// Don't try to deserialize if the test entry was spent since it
// will have a nil serialization.
if test.entry.IsSpent() {
continue
}
serializedBytes, err := hexutil.Decode(test.serialized)
if err != nil {
t.Errorf("hexutil.Decode #%d (%s) unexpected error: %v", i, test.name, err)
continue
}
// Deserialize to a utxo entry.
utxoEntry, err := DeserializeUtxoEntry(serializedBytes)
if err != nil {
t.Errorf("DeserializeUtxoEntry #%d (%s) unexpected error: %v", i, test.name, err)
continue
}
// The deserialized entry must not be marked spent since unspent
// entries are not serialized.
if utxoEntry.IsSpent() {
t.Errorf("DeserializeUtxoEntry #%d (%s) output should not be marked spent", i, test.name)
continue
}
// Ensure the deserialized entry has the same properties as the
// ones in the test entry.
if utxoEntry.Amount() != test.entry.Amount() {
t.Errorf("DeserializeUtxoEntry #%d (%s) mismatched amounts: got %d, want %d",
i, test.name, utxoEntry.Amount(), test.entry.Amount())
continue
}
if !bytes.Equal(utxoEntry.PkScript(), test.entry.PkScript()) {
t.Errorf("DeserializeUtxoEntry #%d (%s) mismatched scripts: got %x, want %x",
i, test.name, utxoEntry.PkScript(), test.entry.PkScript())
continue
}
if utxoEntry.BlockHeight() != test.entry.BlockHeight() {
t.Errorf("DeserializeUtxoEntry #%d (%s) mismatched block height: got %d, want %d",
i, test.name, utxoEntry.BlockHeight(), test.entry.BlockHeight())
continue
}
if utxoEntry.IsCoinBase() != test.entry.IsCoinBase() {
t.Errorf("DeserializeUtxoEntry #%d (%s) mismatched coinbase flag: got %v, want %v",
i, test.name, utxoEntry.IsCoinBase(), test.entry.IsCoinBase())
continue
}
}
}
// TestUtxoEntryHeaderCodeErrors performs negative tests against unspent
// transaction output header codes to ensure error paths work as expected.
func TestUtxoEntryHeaderCodeErrors(t *testing.T) {
t.Parallel()
tests := []struct {
name string
entry *txo.UtxoEntry
code uint64
errType error
}{
{
name: "Force assertion due to spent output",
entry: &txo.UtxoEntry{},
errType: common.AssertError(""),
},
}
tests[0].entry.Spend()
for _, test := range tests {
// Ensure the expected error type is returned and the code is 0.
code, err := utxoEntryHeaderCode(test.entry)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("utxoEntryHeaderCode (%s): expected error type does not match - got %T, want %T",
test.name, err, test.errType)
continue
}
if code != 0 {
t.Errorf("utxoEntryHeaderCode (%s): unexpected code on error - got %d, want 0", test.name, code)
continue
}
}
}
// TestUtxoEntryDeserializeErrors performs negative tests against deserializing
// unspent transaction outputs to ensure error paths work as expected.
func TestUtxoEntryDeserializeErrors(t *testing.T) {
t.Parallel()
tests := []struct {
name string
serialized []byte
errType error
}{
{
name: "no data after header code",
serialized: hexToBytes("02"),
errType: common.DeserializeError(""),
},
{
name: "incomplete compressed txout",
serialized: hexToBytes("0232"),
errType: common.DeserializeError(""),
},
}
for _, test := range tests {
// Ensure the expected error type is returned and the returned
// entry is nil.
entry, err := DeserializeUtxoEntry(test.serialized)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("DeserializeUtxoEntry (%s): expected error type does not match - got %T, want %T",
test.name, err, test.errType)
continue
}
if entry != nil {
t.Errorf("DeserializeUtxoEntry (%s): returned entry is not nil", test.name)
continue
}
}
}
// TestBestChainStateSerialization ensures serializing and deserializing the
// best chain state works as expected.
func TestBestChainStateSerialization(t *testing.T) {
t.Parallel()
tests := []struct {
name string
state bestChainState
serialized []byte
}{
{
name: "genesis",
state: bestChainState{
hash: *newHashFromStr("000000000019d6689c085ae165831e934ff763ae46a2a6c172b3f1b60a8ce26f"),
height: 0,
totalTxns: 1,
},
serialized: hexToBytes("6fe28c0ab6f1b372c1a6a246ae63f74f931e8365e15a089c68d6190000000000000000000100000000000000"),
},
{
name: "block 1",
state: bestChainState{
hash: *newHashFromStr("00000000839a8e6886ab5951d76f411475428afc90947ee320161bbf18eb6048"),
height: 1,
totalTxns: 2,
},
serialized: hexToBytes("4860eb18bf1b1620e37e9490fc8a427514416fd75159ab86688e9a8300000000010000000200000000000000"),
},
}
for i, test := range tests {
// Ensure the state serializes to the expected value.
gotBytes := serializeBestChainState(test.state)
if !bytes.Equal(gotBytes, test.serialized) {
t.Errorf("serializeBestChainState #%d (%s): mismatched bytes - got %x, want %x",
i, test.name, gotBytes, test.serialized)
continue
}
// Ensure the serialized bytes are decoded back to the expected
// state.
state, err := deserializeBestChainState(test.serialized)
if err != nil {
t.Errorf("deserializeBestChainState #%d (%s) unexpected error: %v", i, test.name, err)
continue
}
if !reflect.DeepEqual(state, test.state) {
t.Errorf("deserializeBestChainState #%d (%s) mismatched state - got %v, want %v",
i, test.name, state, test.state)
continue
}
}
}
// TestBestChainStateDeserializeErrors performs negative tests against
// deserializing the chain state to ensure error paths work as expected.
func TestBestChainStateDeserializeErrors(t *testing.T) {
t.Parallel()
tests := []struct {
name string
serialized []byte
errType error
}{
{
name: "nothing serialized",
serialized: hexToBytes(""),
errType: database.Error{ErrorCode: database.ErrCorruption},
},
{
name: "short data in hash",
serialized: hexToBytes("0000"),
errType: database.Error{ErrorCode: database.ErrCorruption},
},
}
for _, test := range tests {
// Ensure the expected error type and code is returned.
_, err := deserializeBestChainState(test.serialized)
if reflect.TypeOf(err) != reflect.TypeOf(test.errType) {
t.Errorf("deserializeBestChainState (%s): expected error type does not match - got %T, want %T",
test.name, err, test.errType)
continue
}
if derr, ok := err.(database.Error); ok {
tderr := test.errType.(database.Error)
if derr.ErrorCode != tderr.ErrorCode {
t.Errorf("deserializeBestChainState (%s): wrong error code got: %v, want: %v",
test.name, derr.ErrorCode, tderr.ErrorCode)
continue
}
}
}
}
func TestDbFetchBalance(t *testing.T) {
privateKeyList := []string{
"0xd0f0461b7b4d26cf370e6c73b58ef7fa26e8e30853a8cee901ed42cf0879cb6e", //privateKey0
"0xd07f68f78fc58e3dc8ea72ff69784aa9542c452a4ee66b2665fa3cccb48441c2", //privateKey1
}
testRoundSize := uint16(10)
accList, netParam, chain, teardownFunc, err := createFakeChainByPrivateKeys(privateKeyList, testRoundSize)
if err != nil {
t.Fatalf("createFakeChainByPrivateKeys err %v", err)
}
defer teardownFunc()
validators, filters, _ := chain.GetValidatorsByNode(1, chain.bestChain.tip())
addrs := make([]common.Address, 0)
for i:=0; i<len(validators); i++ {
find := false
for k:=0;k<len(addrs);k++ {
if *validators[i] == addrs[k] {
find = true
}
}
if !find {
addrs = append(addrs, *validators[i])
}
}
resultEntryPairs := make(map[common.Address][]protos.OutPoint)
curSlot := uint16(0)
curEpoch := uint32(1)
testBlkCnt := int32(3)
for addNum := int32(0); addNum < testBlkCnt; addNum++ {
if addNum != 0 {
if addNum%int32(testRoundSize) == 0 {
curSlot = 0
curEpoch++
validators, filters, _ = chain.GetValidatorsByNode(curEpoch, chain.bestChain.tip())
} else {
curSlot++
}
}
//create block:
block, _, err := createAndSignBlock(netParam, accList, validators, filters, chain, curEpoch,
curSlot, chain.bestChain.height(), protos.Asset{0, 0}, 0,
validators[curSlot], nil, 0, chain.bestChain.tip())
if err != nil {
t.Errorf("create block error %v", err)
}
blkNums := len(block.Transactions())
coinBaseTx := block.Transactions()[blkNums - 1]
idx := 0
if curSlot == 0 {
idx = 1
}
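// Record this block's coinbase output for the signing validator; dbFetchBalance
// is checked against exactly these outpoints per address at the end of the test.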
preOut := protos.OutPoint{
*coinBaseTx.Hash(),
uint32(idx),
}
if _, ok := resultEntryPairs[*validators[curSlot]]; ok {
resultEntryPairs[*validators[curSlot]] = append(resultEntryPairs[*validators[curSlot]], preOut)
} else {
tmpPreOuts := make([]protos.OutPoint, 0)
tmpPreOuts = append(tmpPreOuts, preOut)
resultEntryPairs[*validators[curSlot]] = tmpPreOuts
}
// Insert the block to bestChain:
_, isOrphan, err := chain.ProcessBlock(block, nil, nil, nil, common.BFNone)
if err != nil {
t.Errorf("ProcessBlock err %v", err)
}
log.Infof("isOrphan = %v",isOrphan)
}
for i, addr := range addrs {
log.Infof("=============the %d test start=============", i)
err = chain.db.View(func(dbTx database.Tx) error {
mp, err := dbFetchBalance(dbTx, addr[:])
if err != nil {
return err
}
if len(*mp) > 0 {
if _, ok := resultEntryPairs[addr]; !ok {
t.Errorf("the %d test error: cannot find addr in resultEntryPairs", i)
}
resultPreOuts := resultEntryPairs[addr]
if len(resultPreOuts) != len(*mp) {
t.Errorf("the %d test error: the numbers of resultPreOut do not equal the expect ",i)
}
for _, data := range *mp {
findPreOut := false
for _, preOut := range resultPreOuts {
if preOut == data.Key {
findPreOut = true
}
}
if !findPreOut {
t.Errorf("the %d test error: the preOut do not equal the expect",i)
}
}
}
for _, data := range *mp {
op := data.Key
entity := data.Value
log.Infof("Outpoint hash %x, its index %d, utxo amount %d, height %d, flags %d, asset %v",
op.Hash, op.Index, entity.Amount(), entity.BlockHeight(), entity.PackedFlags(), entity.Asset())
}
return nil
})
if err != nil {
t.Log(err)
}
}
}
|
GO
|
Apache-2.0
|
SSSpin/asimov/blockchain/chainio_test.go
|
7f010f1c-3a0a-49c4-bf05-542d5c21752a
|
[]
|
[]
|
package awin_go
import (
"bytes"
"compress/gzip"
"encoding/csv"
"encoding/json"
"errors"
"fmt"
"github.com/matthiasbruns/awin-go/awin"
"io"
"io/ioutil"
"net/http"
"strings"
"testing"
)
type mockRoundTripper struct {
response *http.Response
requestTestFunc func(r *http.Request) error
}
func (m mockRoundTripper) RoundTrip(request *http.Request) (*http.Response, error) {
if err := m.requestTestFunc(request); err != nil {
return nil, err
}
return m.response, nil
}
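// A minimal usage sketch (all names are the ones defined in this file): swapping
// an *http.Client's Transport for mockRoundTripper serves a canned response and
// lets requestTestFunc assert on the outgoing request, e.g.
//
//	client := &http.Client{Transport: mockRoundTripper{
//		response:        &http.Response{StatusCode: 200, Body: ioutil.NopCloser(strings.NewReader("ok"))},
//		requestTestFunc: func(r *http.Request) error { return nil },
//	}}
//
// awin.NewAwinClient(client) built this way never touches the network.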
func readCSVFileContents(filePath string) (string, error) {
csvContent, err := ioutil.ReadFile(filePath) // just pass the file name
if err != nil {
return "", err
}
return string(csvContent), nil
}
func parseCSVToDataFeedRow(csvContent string) (*[]awin.DataFeedListRow, error) {
reader := csv.NewReader(strings.NewReader(csvContent))
var rows []awin.DataFeedListRow
columnNamesSkipped := false
for {
record, err := reader.Read()
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
// Skip column names from csv
if !columnNamesSkipped {
columnNamesSkipped = true
continue
}
row := awin.DataFeedListRow{
AdvertiserID: record[0],
AdvertiserName: record[1],
PrimaryRegion: record[2],
MembershipStatus: record[3],
FeedID: record[4],
FeedName: record[5],
Language: record[6],
Vertical: record[7],
LastImported: record[8],
LastChecked: record[9],
NoOfProducts: record[10],
URL: record[11],
}
rows = append(rows, row)
}
return &rows, nil
}
func parseCSVToDataFeedEntry(csvContent string) (*[]awin.DataFeedEntry, error) {
reader := csv.NewReader(strings.NewReader(csvContent))
var entries []awin.DataFeedEntry
columnNamesSkipped := false
for {
record, err := reader.Read()
if err == io.EOF {
break
}
if err != nil {
return nil, err
}
// Skip column names from csv
if !columnNamesSkipped {
columnNamesSkipped = true
continue
}
entry := awin.DataFeedEntry{
AwDeepLink: record[0],
ProductName: record[1],
AwProductId: record[2],
MerchantProductId: record[3],
MerchantImageUrl: record[4],
Description: record[5],
MerchantCategory: record[6],
SearchPrice: record[7],
MerchantName: record[8],
MerchantId: record[9],
CategoryName: record[10],
CategoryId: record[11],
AwImageUrl: record[12],
Currency: record[13],
StorePrice: record[14],
DeliveryCost: record[15],
MerchantDeepLink: record[16],
Language: record[17],
LastUpdated: record[18],
DisplayPrice: record[19],
DataFeedId: record[20],
BrandName: record[21],
BrandId: record[22],
Colour: record[23],
ProductShortDescription: record[24],
Specifications: record[25],
Condition: record[26],
ProductModel: record[27],
ModelNumber: record[28],
Dimensions: record[29],
Keywords: record[30],
PromotionalText: record[31],
ProductType: record[32],
CommissionGroup: record[33],
MerchantProductCategoryPath: record[34],
MerchantProductSecondCategory: record[35],
MerchantProductThirdCategory: record[36],
RrpPrice: record[37],
Saving: record[38],
SavingsPercent: record[39],
BasePrice: record[40],
BasePriceAmount: record[41],
BasePriceText: record[42],
ProductPriceOld: record[43],
DeliveryRestrictions: record[44],
DeliveryWeight: record[45],
Warranty: record[46],
TermsOfContract: record[47],
DeliveryTime: record[48],
InStock: record[49],
StockQuantity: record[50],
ValidFrom: record[51],
ValidTo: record[52],
IsForSale: record[53],
WebOffer: record[54],
PreOrder: record[55],
StockStatus: record[56],
SizeStockStatus: record[57],
SizeStockAmount: record[58],
MerchantThumbUrl: record[59],
LargeImage: record[60],
AlternateImage: record[61],
AwThumbUrl: record[62],
AlternateImageTwo: record[63],
AlternateImageThree: record[64],
AlternateImageFour: record[65],
Reviews: record[66],
AverageRating: record[67],
Rating: record[68],
NumberAvailable: record[69],
Custom1: record[70],
Custom2: record[71],
Custom3: record[72],
Custom4: record[73],
Custom5: record[74],
Custom6: record[75],
Custom7: record[76],
Custom8: record[77],
Custom9: record[78],
Ean: record[79],
Isbn: record[80],
Upc: record[81],
Mpn: record[82],
ParentProductId: record[83],
ProductGtin: record[84],
BasketLink: record[85],
}
entries = append(entries, entry)
}
return &entries, nil
}
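// The positional record indexes above follow the /columns/ order requested in
// the tests below; the testdata CSV fixtures are assumed to use the same layout.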
func TestFetchDataFeedList(t *testing.T) {
// Read mock data from CSV
csvContent, err := readCSVFileContents("testdata/data_feed_list.csv")
if err != nil {
t.Fatalf("coult not parse csv file '%v'", err)
}
// Create mock response
response := &http.Response{
StatusCode: 200,
Body: ioutil.NopCloser(bytes.NewBufferString(csvContent)),
}
// Create test client to run tests on
awinClient := awin.NewAwinClient(&http.Client{Transport: mockRoundTripper{response: response, requestTestFunc: func(r *http.Request) error {
expectedUrl := "https://productdata.awin.com/datafeed/list/apikey/apiKey"
if r.URL.String() != expectedUrl {
err := errors.New(fmt.Sprintf("invalid url found in test\nexpected '%s'\nfound '%s'", expectedUrl, r.URL.String()))
t.Error(err)
return err
}
expectedMethod := "GET"
if r.Method != expectedMethod {
err := errors.New(fmt.Sprintf("invalid request method in test\nexpected '%s'\nfound '%s'", expectedMethod, r.Method))
t.Error(err)
return err
}
return nil
}}})
result, err := awinClient.FetchDataFeedList("apiKey")
if err != nil {
t.Fatalf("err is not null '%v'", err)
}
if len(*result) != 10 {
t.Fatalf("Invalid amount of data rows received %d", len(*result))
}
// Check if received rows and expected rows match
expectedRows, _ := parseCSVToDataFeedRow(csvContent)
for i, expectedRow := range *expectedRows {
receivedRow := (*result)[i]
if expectedRow != receivedRow {
t.Fatalf("Invalid row parsed\nexpected '%v'\nreceived '%v'", expectedRow, receivedRow)
}
}
}
func TestFetchDataFeed(t *testing.T) {
// Read mock data from CSV
csvContent, err := readCSVFileContents("testdata/data_feed.csv")
if err != nil {
t.Fatalf("coult not parse csv file '%v'", err)
}
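// The data feed endpoint serves gzip-compressed CSV, so compress the fixture
// before handing it to the mock response body.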
var b bytes.Buffer
gz := gzip.NewWriter(&b)
if _, err := gz.Write([]byte(csvContent)); err != nil {
t.Error(err)
}
if err := gz.Flush(); err != nil {
t.Error(err)
}
if err := gz.Close(); err != nil {
t.Error(err)
}
// Create mock response
response := &http.Response{
StatusCode: 200,
Body: ioutil.NopCloser(bytes.NewBuffer(b.Bytes())),
}
// Create test client to run tests on
awinClient := awin.NewAwinClient(&http.Client{Transport: mockRoundTripper{response: response, requestTestFunc: func(r *http.Request) error {
expectedUrl := "https://productdata.awin.com/datafeed/download/apikey/apiKey/language/en/fid/fid1,fid2/columns/aw_deep_link,product_name,aw_product_id,merchant_product_id,merchant_image_url,description,merchant_category,search_price,merchant_name,merchant_id,category_name,category_id,aw_image_url,currency,store_price,delivery_cost,merchant_deep_link,language,last_updated,display_price,data_feed_id,brand_name,brand_id,colour,product_short_description,specifications,condition,product_model,model_number,dimensions,keywords,promotional_text,product_type,commission_group,merchant_product_category_path,merchant_product_second_category,merchant_product_third_category,rrp_price,saving,savings_percent,base_price,base_price_amount,base_price_text,product_price_old,delivery_restrictions,delivery_weight,warranty,terms_of_contract,delivery_time,in_stock,stock_quantity,valid_from,valid_to,is_for_sale,web_offer,pre_order,stock_status,size_stock_status,size_stock_amount,merchant_thumb_url,large_image,alternate_image,aw_thumb_url,alternate_image_two,alternate_image_three,alternate_image_four,reviews,average_rating,rating,number_available,custom_1,custom_2,custom_3,custom_4,custom_5,custom_6,custom_7,custom_8,custom_9,ean,isbn,upc,mpn,parent_product_id,product_GTIN,basket_link/format/csv/delimiter/,/compression/gzip/adultcontent/1/"
if r.URL.String() != expectedUrl {
err := errors.New(fmt.Sprintf("invalid url found in test\nexpected '%s'\nfound '%s'", expectedUrl, r.URL.String()))
t.Error(err)
return err
}
expectedMethod := "GET"
if r.Method != expectedMethod {
err := errors.New(fmt.Sprintf("invalid request method in test\nexpected '%s'\nfound '%s'", expectedMethod, r.Method))
t.Error(err)
return err
}
return nil
}}})
result, err := awinClient.FetchDataFeed(&awin.DataFeedOptions{
ApiKey: "apiKey",
FeedIds: []string{"fid1", "fid2"},
Language: "en",
ShowAdultContent: true,
})
if err != nil {
t.Fatalf("err is not null '%v'", err)
}
if len(*result) != 10 {
t.Fatalf("Invalid amount of data rows received %d", len(*result))
}
// Check if received rows and expected rows match
expectedRows, _ := parseCSVToDataFeedEntry(csvContent)
for i, expectedRow := range *expectedRows {
receivedRow := (*result)[i]
if expectedRow != receivedRow {
eJson, _ := json.Marshal(expectedRow)
rJson, _ := json.Marshal(receivedRow)
t.Fatalf("Invalid row parsed\nexpected '%v'\nreceived '%v'", string(eJson), string(rJson))
}
}
}
func TestFetchDataFeedFromUrl(t *testing.T) {
// Read mock data from CSV
csvContent, err := readCSVFileContents("testdata/data_feed.csv")
if err != nil {
t.Fatalf("coult not parse csv file '%v'", err)
}
var b bytes.Buffer
gz := gzip.NewWriter(&b)
if _, err := gz.Write([]byte(csvContent)); err != nil {
t.Error(err)
}
if err := gz.Flush(); err != nil {
t.Error(err)
}
if err := gz.Close(); err != nil {
t.Error(err)
}
// Create mock response
response := &http.Response{
StatusCode: 200,
Body: ioutil.NopCloser(bytes.NewBuffer(b.Bytes())),
}
// Create test client to run tests on
awinClient := awin.NewAwinClient(&http.Client{Transport: mockRoundTripper{response: response, requestTestFunc: func(r *http.Request) error {
expectedUrl := "https://productdata.awin.com/datafeed/download/apikey/apiKey/language/en/fid/fid1,fid2/columns/aw_deep_link,product_name,aw_product_id,merchant_product_id,merchant_image_url,description,merchant_category,search_price,merchant_name,merchant_id,category_name,category_id,aw_image_url,currency,store_price,delivery_cost,merchant_deep_link,language,last_updated,display_price,data_feed_id,brand_name,brand_id,colour,product_short_description,specifications,condition,product_model,model_number,dimensions,keywords,promotional_text,product_type,commission_group,merchant_product_category_path,merchant_product_second_category,merchant_product_third_category,rrp_price,saving,savings_percent,base_price,base_price_amount,base_price_text,product_price_old,delivery_restrictions,delivery_weight,warranty,terms_of_contract,delivery_time,in_stock,stock_quantity,valid_from,valid_to,is_for_sale,web_offer,pre_order,stock_status,size_stock_status,size_stock_amount,merchant_thumb_url,large_image,alternate_image,aw_thumb_url,alternate_image_two,alternate_image_three,alternate_image_four,reviews,average_rating,rating,number_available,custom_1,custom_2,custom_3,custom_4,custom_5,custom_6,custom_7,custom_8,custom_9,ean,isbn,upc,mpn,parent_product_id,product_GTIN,basket_link/format/csv/delimiter/,/compression/gzip/adultcontent/1/"
if r.URL.String() != expectedUrl {
err := errors.New(fmt.Sprintf("invalid url found in test\nexpected '%s'\nfound '%s'", expectedUrl, r.URL.String()))
t.Error(err)
return err
}
expectedMethod := "GET"
if r.Method != expectedMethod {
err := errors.New(fmt.Sprintf("invalid request method in test\nexpected '%s'\nfound '%s'", expectedMethod, r.Method))
t.Error(err)
return err
}
return nil
}}})
result, err := awinClient.FetchDataFeedFromUrl("https://productdata.awin.com/datafeed/download/apikey/apiKey/language/en/fid/fid1,fid2/columns/aw_deep_link,product_name,aw_product_id,merchant_product_id,merchant_image_url,description,merchant_category,search_price,merchant_name,merchant_id,category_name,category_id,aw_image_url,currency,store_price,delivery_cost,merchant_deep_link,language,last_updated,display_price,data_feed_id,brand_name,brand_id,colour,product_short_description,specifications,condition,product_model,model_number,dimensions,keywords,promotional_text,product_type,commission_group,merchant_product_category_path,merchant_product_second_category,merchant_product_third_category,rrp_price,saving,savings_percent,base_price,base_price_amount,base_price_text,product_price_old,delivery_restrictions,delivery_weight,warranty,terms_of_contract,delivery_time,in_stock,stock_quantity,valid_from,valid_to,is_for_sale,web_offer,pre_order,stock_status,size_stock_status,size_stock_amount,merchant_thumb_url,large_image,alternate_image,aw_thumb_url,alternate_image_two,alternate_image_three,alternate_image_four,reviews,average_rating,rating,number_available,custom_1,custom_2,custom_3,custom_4,custom_5,custom_6,custom_7,custom_8,custom_9,ean,isbn,upc,mpn,parent_product_id,product_GTIN,basket_link/format/csv/delimiter/,/compression/gzip/adultcontent/1/")
if err != nil {
t.Fatalf("err is not null '%v'", err)
}
if len(*result) != 10 {
t.Fatalf("Invalid amount of data rows received %d", len(*result))
}
// Check if received rows and expected rows match
expectedRows, _ := parseCSVToDataFeedEntry(csvContent)
for i, expectedRow := range *expectedRows {
receivedRow := (*result)[i]
if expectedRow != receivedRow {
t.Fatalf("Invalid row parsed\nexpected '%v'\nreceived '%v'", expectedRow, receivedRow)
}
}
}
|
GO
|
MIT
|
matthiasbruns/awin-go/test/client_test.go
|
9ed488fd-5d3e-46c8-87ed-dd28ed71b4ad
|
[{"tag": "USERNAME", "value": "matthiasbruns", "start": 115, "end": 128, "context": "v\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"github.com/matthiasbruns/awin-go/awin\"\n\t\"io\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"st"}]
|
[{"tag": "USERNAME", "value": "matthiasbruns", "start": 115, "end": 128, "context": "v\"\n\t\"encoding/json\"\n\t\"errors\"\n\t\"fmt\"\n\t\"github.com/matthiasbruns/awin-go/awin\"\n\t\"io\"\n\t\"io/ioutil\"\n\t\"net/http\"\n\t\"st"}]
|
require 'spec_helper'
RSpec.describe Airbrake::AirbrakeLogger do
let(:project_id) { 113743 }
let(:project_key) { 'fd04e13d806a90f96614ad8e529b2822' }
let(:endpoint) do
"https://airbrake.io/api/v3/projects/#{project_id}/notices?key=#{project_key}"
end
let(:airbrake) do
Airbrake::Notifier.new(project_id: project_id, project_key: project_key)
end
let(:logger) { Logger.new('/dev/null') }
subject { described_class.new(logger) }
def wait_for_a_request_with_body(body)
wait_for(a_request(:post, endpoint).with(body: body)).to have_been_made.once
end
before do
stub_request(:post, endpoint).to_return(status: 201, body: '{}')
end
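# Every notice POST is stubbed with a 201 so the notifier treats delivery as
# successful without any real Airbrake traffic.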
describe "#airbrake_notifier" do
it "has the default notifier installed by default" do
expect(subject.airbrake_notifier).to be_an(Airbrake::Notifier)
end
it "installs Airbrake notifier" do
notifier_id = airbrake.object_id
expect(subject.airbrake_notifier.object_id).not_to eq(notifier_id)
subject.airbrake_notifier = airbrake
expect(subject.airbrake_notifier.object_id).to eq(notifier_id)
end
context "when Airbrake is installed explicitly" do
let(:out) { StringIO.new }
let(:logger) { Logger.new(out) }
before do
subject.airbrake_notifier = airbrake
end
it "both logs and notifies" do
msg = 'bingo'
subject.fatal(msg)
wait_for_a_request_with_body(/"message":"#{msg}"/)
expect(out.string).to match(/FATAL -- : #{msg}/)
end
it "sets the correct severity" do
subject.fatal('bango')
wait_for_a_request_with_body(/"context":{.*"severity":"critical".*}/)
end
it "sets the correct component" do
subject.fatal('bingo')
wait_for_a_request_with_body(/"component":"log"/)
end
it "strips out internal logger frames" do
subject.fatal('bongo')
wait_for(
a_request(:post, endpoint).
with(body: %r{"file":".+/logger.rb"})
).not_to have_been_made
wait_for(a_request(:post, endpoint)).to have_been_made.once
end
end
context "when Airbrake is not installed" do
it "only logs, never notifies" do
out = StringIO.new
l = described_class.new(Logger.new(out))
l.airbrake_notifier = nil
msg = 'bango'
l.fatal(msg)
wait_for(a_request(:post, endpoint)).not_to have_been_made
expect(out.string).to match('FATAL -- : bango')
end
end
end
describe "#airbrake_level" do
context "when not set" do
it "defaults to Logger::WARN" do
expect(subject.airbrake_level).to eq(Logger::WARN)
end
end
context "when set" do
before do
subject.airbrake_level = Logger::FATAL
end
it "does not notify below the specified level" do
subject.error('bingo')
wait_for(a_request(:post, endpoint)).not_to have_been_made
end
it "notifies in the current or above level" do
subject.fatal('bingo')
wait_for(a_request(:post, endpoint)).to have_been_made
end
it "raises error when below the allowed level" do
expect do
subject.airbrake_level = Logger::DEBUG
end.to raise_error(/severity level \d is not allowed/)
end
end
end
end
|
Ruby
|
MIT
|
AirHelp/airbrake/spec/unit/logger_spec.rb
|
b48e0b4e-7be6-4727-a4a0-92327b0da012
|
[{"tag": "API_KEY", "value": "fd04e13d806a90f96614ad8e529b2822", "start": 119, "end": 151, "context": "et(:project_id) { 113743 }\n let(:project_key) { 'fd04e13d806a90f96614ad8e529b2822' }\n\n let(:endpoint) do\n \"https://airbrake.io/"}]
|
[{"tag": "KEY", "value": "fd04e13d806a90f96614ad8e529b2822", "start": 119, "end": 151, "context": "et(:project_id) { 113743 }\n let(:project_key) { 'fd04e13d806a90f96614ad8e529b2822' }\n\n let(:endpoint) do\n \"https://airbrake.io/"}]
|
require 'rails_helper'
require 'byebug'
RSpec.describe UsersController, :type => :controller do
let(:user) { User.create!({ username: "jill_bruce", password: "password" }) }
describe "GET #show" do
it "renders the show template" do
get :show, id: user.id
expect(response).to render_template(:show)
end
end
describe "GET #index" do
it "renders the index template" do
get :index
expect(response).to render_template("index")
end
end
describe "POST #create" do
context "with invalid params" do
it "validates the presence of the user's username and password" do
post :create, user: { username: "jill_bruce", password: "" }
expect(response).to render_template("new")
expect(flash[:errors]).to be_present
end
it "validates that the password is at least 6 characters long" do
post :create, user: { username: "jill_bruce", password: "short" }
expect(response).to render_template("new")
expect(flash[:errors]).to be_present
end
end
context "with valid params" do
it "redirects user to user show on success" do
post :create, user: { username: "jill_bruce", password: "password" }
expect(response).to redirect_to(user_url(User.last))
end
end
end
end
|
Ruby
|
MIT
|
skullbaselab/aa-afterdark/NYC-student-lecture-notes-dec-2016-master/w5d2/goal_app/spec/controllers/users_controller_spec.rb
|
84b7b2d0-59ff-4c8d-96e6-330e5d208ebc
|
[{"tag": "USERNAME", "value": "jill_bruce", "start": 1188, "end": 1198, "context": "cess\" do\n post :create, user: { username: \"jill_bruce\", password: \"password\" }\n expect(response)"}, {"tag": "USERNAME", "value": "jill_bruce", "start": 911, "end": 921, "context": "long\" do\n post :create, user: { username: \"jill_bruce\", password: \"short\" }\n expect(response).to"}, {"tag": "PASSWORD", "value": "password", "start": 1212, "end": 1220, "context": "reate, user: { username: \"jill_bruce\", password: \"password\" }\n expect(response).to redirect_to(user_u"}, {"tag": "USERNAME", "value": "jill_bruce", "start": 663, "end": 673, "context": "word\" do\n post :create, user: { username: \"jill_bruce\", password: \"\" }\n expect(response).to rend"}]
|
[{"tag": "USERNAME", "value": "jill_bruce", "start": 1188, "end": 1198, "context": "cess\" do\n post :create, user: { username: \"jill_bruce\", password: \"password\" }\n expect(response)"}, {"tag": "USERNAME", "value": "jill_bruce", "start": 911, "end": 921, "context": "long\" do\n post :create, user: { username: \"jill_bruce\", password: \"short\" }\n expect(response).to"}, {"tag": "PASSWORD", "value": "password", "start": 1212, "end": 1220, "context": "reate, user: { username: \"jill_bruce\", password: \"password\" }\n expect(response).to redirect_to(user_u"}, {"tag": "USERNAME", "value": "jill_bruce", "start": 663, "end": 673, "context": "word\" do\n post :create, user: { username: \"jill_bruce\", password: \"\" }\n expect(response).to rend"}]
|
class Akamai < Formula
desc "CLI toolkit for working with Akamai's APIs"
homepage "https://github.com/akamai/cli"
url "https://github.com/akamai/cli/archive/1.1.5.tar.gz"
sha256 "759c3c3bc59c2623fc8a5f91907f55d870f77aef1839f2ecc703db5c469b852a"
bottle do
cellar :any_skip_relocation
sha256 "a986f3bfc261227cd44447d5ff9cdfb461c50c002118d36caed068f5859432e1" => :catalina
sha256 "ce3ea6b8dba89d48bfec3be3bbf5701e7b1dcdde7a2f76a97dd668752b1e95fb" => :mojave
sha256 "2b6d07c4926858e1be33bef070a925a6746f396fa27566aaa313d5a2673cb25f" => :high_sierra
end
depends_on "dep" => :build
depends_on "go" => :build
def install
ENV["GOPATH"] = buildpath
ENV["GLIDE_HOME"] = HOMEBREW_CACHE/"glide_home/#{name}"
srcpath = buildpath/"src/github.com/akamai/cli"
srcpath.install buildpath.children
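# dep-era layout: sources must live under GOPATH/src/github.com/akamai/cli
# for "dep ensure" to resolve the vendored dependencies.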
cd srcpath do
system "dep", "ensure", "-vendor-only"
system "go", "build", "-tags", "noautoupgrade nofirstrun", "-o", bin/"akamai"
prefix.install_metafiles
end
end
test do
assert_match "Purge", shell_output("#{bin}/akamai install --force purge")
end
end
|
Ruby
|
BSD-2-Clause
|
AdrianSosic/homebrew-core/Formula/akamai.rb
|
528320d1-a413-476b-9704-f4f29dab0879
|
[]
|
[]
|
class Comby < Formula
desc "Tool for changing code across many languages"
homepage "https://comby.dev"
url "https://github.com/comby-tools/comby/archive/0.15.0.tar.gz"
sha256 "6f7304c1644f6212c3b86542f8325e6f76cd268b787fbfbbe3cc0de61ddec14a"
license "Apache-2.0"
bottle do
cellar :any
sha256 "64591a0ced2e6ecd71e43e773ec1ae8dd8f0a1fe3507ecb121e665db02a8f21c" => :catalina
sha256 "7f8b23196d449a572f0e7061d33a509c479c7f526fae9a9754388f645e601bcc" => :mojave
sha256 "87c8b380a96b5f9cfe0ee6de72f837ee5f1b61fc5777a0cb35d538b262f4ba01" => :high_sierra
end
depends_on "gmp" => :build
depends_on "ocaml" => :build
depends_on "opam" => :build
depends_on "pcre"
depends_on "pkg-config"
uses_from_macos "m4"
uses_from_macos "unzip"
uses_from_macos "zlib"
def install
ENV.deparallelize
opamroot = buildpath/".opam"
ENV["OPAMROOT"] = opamroot
ENV["OPAMYES"] = "1"
system "opam", "init", "--no-setup", "--disable-sandboxing"
system "opam", "config", "exec", "--", "opam", "install", ".", "--deps-only", "-y"
ENV.prepend_path "LIBRARY_PATH", opamroot/"default/lib/hack_parallel" # for -lhp
system "opam", "config", "exec", "--", "make", "release"
bin.install "_build/default/src/main.exe" => "comby"
end
test do
expect = <<~EXPECT
--- /dev/null
+++ /dev/null
@@ -1,3 +1,3 @@
int main(void) {
- printf("hello world!");
+ printf("comby, hello!");
}
EXPECT
input = <<~INPUT
EOF
int main(void) {
printf("hello world!");
}
EOF
INPUT
match = 'printf(":[1] :[2]!")'
rewrite = 'printf("comby, :[1]!")'
assert_equal expect, shell_output("#{bin}/comby '#{match}' '#{rewrite}' .c -stdin -diff << #{input}")
end
end
|
Ruby
|
BSD-2-Clause
|
MLH-Fellowship/homebrew-core/Formula/comby.rb
|
921334c2-b30f-46fc-a5b2-2a088e69476e
|
[]
|
[]
|
require 'spec_helper'
require 'yt/models/playlist_item'
describe Yt::PlaylistItem, :device_app do
subject(:item) { Yt::PlaylistItem.new id: id, auth: $account }
context 'given an existing playlist item' do
let(:id) { 'PLjW_GNR5Ir0GMlbJzA-aW0UV8TchJFb8p3uzrLNcZKPY' }
it 'returns valid metadata' do
expect(item.title).to be_a String
expect(item.description).to be_a String
expect(item.thumbnail_url).to be_a String
expect(item.published_at).to be_a Time
expect(item.channel_id).to be_a String
expect(item.channel_title).to be_a String
expect(item.playlist_id).to be_a String
expect(item.position).to be_an Integer
expect(item.video_id).to be_a String
expect(item.video).to be_a Yt::Video
expect(item.privacy_status).to be_a String
end
end
context 'given an unknown playlist item' do
let(:id) { 'not-a-playlist-item-id' }
it { expect{item.snippet}.to raise_error Yt::Errors::RequestError }
end
context 'given one of my own playlist items that I want to update' do
before(:all) do
@my_playlist = $account.create_playlist title: "Yt Test Update Playlist Item #{rand}"
@my_playlist.add_video 'MESycYJytkU'
@my_playlist_item = @my_playlist.add_video 'MESycYJytkU'
end
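# The same video is added twice; only the playlist item returned by the second
# call is kept and exercised below.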
after(:all) { @my_playlist.delete }
let(:id) { @my_playlist_item.id }
let!(:old_title) { @my_playlist_item.title }
let!(:old_privacy_status) { @my_playlist_item.privacy_status }
let(:update) { @my_playlist_item.update attrs }
context 'given I update the position' do
let(:attrs) { {position: 0} }
specify 'only updates the position' do
expect(update).to be true
expect(@my_playlist_item.position).to be 0
expect(@my_playlist_item.title).to eq old_title
expect(@my_playlist_item.privacy_status).to eq old_privacy_status
end
end
end
end
|
Ruby
|
MIT
|
Crowd9/yt/spec/requests/as_account/playlist_item_spec.rb
|
d0c8e61b-807a-4b43-a779-2ce050111b77
|
[]
|
[]
|
cask "curiosity" do
version "0.5.5"
sha256 "cee2a11755b23c55d96a2226750e80ee991ae8bff74bda4ff757717bcbdf9cad"
url "https://github.com/Dimillian/RedditOS/releases/download/#{version}/Curiosity.zip"
name "Curiosity"
desc "SwiftUI Reddit client"
homepage "https://github.com/Dimillian/RedditOS"
livecheck do
url :url
strategy :github_latest
end
depends_on macos: ">= :monterey"
app "Curiosity.app"
zap trash: "~/Library/Containers/com.thomasricouard.curiosity"
end
|
Ruby
|
BSD-2-Clause
|
JounQin/homebrew-cask/Casks/curiosity.rb
|
c68d4142-383b-43de-bf57-9a48e2402e4b
|
[{"tag": "USERNAME", "value": "Dimillian", "start": 141, "end": 150, "context": "bda4ff757717bcbdf9cad\"\n\n url \"https://github.com/Dimillian/RedditOS/releases/download/#{version}/Curiosity.z"}, {"tag": "USERNAME", "value": "Dimillian", "start": 285, "end": 294, "context": "tUI Reddit client\"\n homepage \"https://github.com/Dimillian/RedditOS\"\n\n livecheck do\n url :url\n strate"}]
|
[{"tag": "USERNAME", "value": "Dimillian", "start": 141, "end": 150, "context": "bda4ff757717bcbdf9cad\"\n\n url \"https://github.com/Dimillian/RedditOS/releases/download/#{version}/Curiosity.z"}, {"tag": "USERNAME", "value": "Dimillian", "start": 285, "end": 294, "context": "tUI Reddit client\"\n homepage \"https://github.com/Dimillian/RedditOS\"\n\n livecheck do\n url :url\n strate"}]
|
// Copyright 2013 Martini Authors
// Copyright 2014 The Web Authors
//
// Licensed under the Apache License, Version 2.0 (the "License"): you may
// not use this file except in compliance with the License. You may obtain
// a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
// WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
// License for the specific language governing permissions and limitations
// under the License.
package web
import (
"net/http"
"net/http/httptest"
"os"
"testing"
"time"
. "github.com/smartystreets/goconvey/convey"
)
func Test_New(t *testing.T) {
Convey("Initialize a new instance", t, func() {
So(New(), ShouldNotBeNil)
})
Convey("Just test that Run doesn't bomb", t, func() {
go New().Run()
time.Sleep(1 * time.Second)
os.Setenv("PORT", "4001")
go New().Run("0.0.0.0")
go New().Run(4002)
go New().Run("0.0.0.0", 4003)
})
}
func Test_Web_Before(t *testing.T) {
Convey("Register before handlers", t, func() {
m := New()
m.Before(func(rw http.ResponseWriter, req *http.Request) bool {
return false
})
m.Before(func(rw http.ResponseWriter, req *http.Request) bool {
return true
})
resp := httptest.NewRecorder()
req, err := http.NewRequest("GET", "/", nil)
So(err, ShouldBeNil)
m.ServeHTTP(resp, req)
})
}
func Test_Web_ServeHTTP(t *testing.T) {
Convey("Serve HTTP requests", t, func() {
result := ""
m := New()
m.Use(func(c *Context) {
result += "foo"
c.Next()
result += "ban"
})
m.Use(func(c *Context) {
result += "bar"
c.Next()
result += "baz"
})
m.Get("/", func() {})
m.Action(func(res http.ResponseWriter, req *http.Request) {
result += "bat"
res.WriteHeader(http.StatusBadRequest)
})
resp := httptest.NewRecorder()
req, err := http.NewRequest("GET", "/", nil)
So(err, ShouldBeNil)
m.ServeHTTP(resp, req)
So(result, ShouldEqual, "foobarbatbazban")
So(resp.Code, ShouldEqual, http.StatusBadRequest)
})
}
func Test_Web_Handlers(t *testing.T) {
Convey("Add custom handlers", t, func() {
result := ""
batman := func(c *Context) {
result += "batman!"
}
m := New()
m.Use(func(c *Context) {
result += "foo"
c.Next()
result += "ban"
})
m.Handlers(
batman,
batman,
batman,
)
Convey("Add not callable function", func() {
defer func() {
So(recover(), ShouldNotBeNil)
}()
m.Use("shit")
})
m.Get("/", func() {})
m.Action(func(res http.ResponseWriter, req *http.Request) {
result += "bat"
res.WriteHeader(http.StatusBadRequest)
})
resp := httptest.NewRecorder()
req, err := http.NewRequest("GET", "/", nil)
So(err, ShouldBeNil)
m.ServeHTTP(resp, req)
So(result, ShouldEqual, "batman!batman!batman!bat")
So(resp.Code, ShouldEqual, http.StatusBadRequest)
})
}
func Test_Web_EarlyWrite(t *testing.T) {
Convey("Write early content to response", t, func() {
result := ""
m := New()
m.Use(func(res http.ResponseWriter) {
result += "foobar"
res.Write([]byte("Hello world"))
})
m.Use(func() {
result += "bat"
})
m.Get("/", func() {})
m.Action(func(res http.ResponseWriter) {
result += "baz"
res.WriteHeader(http.StatusBadRequest)
})
resp := httptest.NewRecorder()
req, err := http.NewRequest("GET", "/", nil)
So(err, ShouldBeNil)
m.ServeHTTP(resp, req)
So(result, ShouldEqual, "foobar")
So(resp.Code, ShouldEqual, http.StatusOK)
})
}
func Test_Web_Written(t *testing.T) {
Convey("Written sign", t, func() {
resp := httptest.NewRecorder()
m := New()
m.Handlers(func(res http.ResponseWriter) {
res.WriteHeader(http.StatusOK)
})
ctx := m.createContext(resp, &http.Request{Method: "GET"})
So(ctx.Written(), ShouldBeFalse)
ctx.run()
So(ctx.Written(), ShouldBeTrue)
})
}
func Test_Web_Basic_NoRace(t *testing.T) {
Convey("Make sure no race between requests", t, func() {
m := New()
handlers := []Handler{func() {}, func() {}}
// Ensure append will not realloc to trigger the race condition
m.handlers = handlers[:1]
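// handlers[:1] keeps len 1 but cap 2, so a per-request append reuses the shared
// backing array instead of reallocating; run with -race to surface any write race.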
m.Get("/", func() {})
for i := 0; i < 2; i++ {
go func() {
req, _ := http.NewRequest("GET", "/", nil)
resp := httptest.NewRecorder()
m.ServeHTTP(resp, req)
}()
}
})
}
func Test_SetENV(t *testing.T) {
Convey("Get and save environment variable", t, func() {
tests := []struct {
in string
out string
}{
{"", "development"},
{"not_development", "not_development"},
}
m := New()
for _, test := range tests {
m.SetEnv(test.in)
// auto correct env to 'development', 'production', 'test'
So(m.Env(), ShouldEqual, DEV)
}
})
}
|
GO
|
MIT
|
landzero/x/net/web/web_test.go
|
f743eba9-db1b-40cd-bf9c-5a8ff4e91ebf
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 1062, "end": 1069, "context": "n(\"0.0.0.0\")\n\t\tgo New().Run(4002)\n\t\tgo New().Run(\"0.0.0.0\", 4003)\n\t})\n}\n\nfunc Test_Web_Before(t *testing.T)"}, {"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 1015, "end": 1022, "context": "cond)\n\t\tos.Setenv(\"PORT\", \"4001\")\n\t\tgo New().Run(\"0.0.0.0\")\n\t\tgo New().Run(4002)\n\t\tgo New().Run(\"0.0.0.0\", "}]
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 1062, "end": 1069, "context": "n(\"0.0.0.0\")\n\t\tgo New().Run(4002)\n\t\tgo New().Run(\"0.0.0.0\", 4003)\n\t})\n}\n\nfunc Test_Web_Before(t *testing.T)"}, {"tag": "IP_ADDRESS", "value": "0.0.0.0", "start": 1015, "end": 1022, "context": "cond)\n\t\tos.Setenv(\"PORT\", \"4001\")\n\t\tgo New().Run(\"0.0.0.0\")\n\t\tgo New().Run(4002)\n\t\tgo New().Run(\"0.0.0.0\", "}]
|
/**
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the new BSD license.
*
* @author David Zeller <me@zellerda.com>
* @license http://www.opensource.org/licenses/BSD-3-Clause New BSD license
* @version 2.2.1
*/
(function($, tokenize){
// Keycodes
var KEYS = {
BACKSPACE: 8,
TAB: 9,
ENTER: 13,
ESCAPE: 27,
ARROW_UP: 38,
ARROW_DOWN: 40,
COMMA: 188
};
// Data storage constant
var DATA = 'tokenize';
$.tokenize = function(opts){
if(opts == undefined){
opts = $.fn.tokenize.defaults;
}
this.options = opts;
};
$.extend($.tokenize.prototype, {
init: function(el){
var $this = this;
this.select = el.attr('multiple', 'multiple').css({margin: 0, padding: 0, border: 0}).hide();
this.container = $('<div />')
.attr('class', this.select.attr('class'))
.addClass('Tokenize');
if(this.options.maxElements == 1){
this.container.addClass('OnlyOne');
}
this.dropdown = $('<ul />')
.addClass('Dropdown');
this.tokensContainer = $('<ul />')
.addClass('TokensContainer');
this.searchToken = $('<li />')
.addClass('TokenSearch')
.appendTo(this.tokensContainer);
this.searchInput = $('<input />')
.attr('maxlength', this.options.searchMaxLength)
.appendTo(this.searchToken);
this.container
.append(this.tokensContainer)
.append(this.dropdown)
.insertAfter(this.select);
this.tokensContainer.on('click', function(e){
e.stopImmediatePropagation();
$this.searchInput.get(0).focus();
if($this.dropdown.is(':hidden') && $this.searchInput.val() != ''){
$this.search();
}
});
this.searchInput.on('keydown', function(e){
$this.resizeSearchInput();
$this.keydown(e);
});
this.searchInput.on('keyup', function(e){
$this.keyup(e);
});
this.searchInput.on('paste', function(){
setTimeout(function(){ $this.resizeSearchInput(); }, 10);
setTimeout(function(){
var paste_elements = $this.searchInput.val().split(',');
if(paste_elements.length > 1){
$.each(paste_elements, function(_, value){
$this.tokenAdd(value.trim(), '');
});
}
}, 20);
});
$(document).on('click', function(){
$this.dropdownHide();
if($this.options.maxElements == 1){
if($this.searchInput.val()){
$this.tokenAdd($this.searchInput.val(), '');
}
}
});
this.resizeSearchInput();
$('option:selected', this.select).each(function(){
$this.tokenAdd($(this).attr('value'), $(this).html(), true);
});
},
dropdownShow: function(){
this.dropdown.show();
},
dropdownPrev: function(){
if($('li.Hover', this.dropdown).length > 0){
if(!$('li.Hover', this.dropdown).is('li:first-child')){
$('li.Hover', this.dropdown).removeClass('Hover').prev().addClass('Hover');
} else {
$('li.Hover', this.dropdown).removeClass('Hover');
$('li:last-child', this.dropdown).addClass('Hover');
}
} else {
$('li:first', this.dropdown).addClass('Hover');
}
},
dropdownNext: function(){
if($('li.Hover', this.dropdown).length > 0){
if(!$('li.Hover', this.dropdown).is('li:last-child')){
$('li.Hover', this.dropdown).removeClass('Hover').next().addClass('Hover');
} else {
$('li.Hover', this.dropdown).removeClass('Hover');
$('li:first-child', this.dropdown).addClass('Hover');
}
} else {
$('li:first', this.dropdown).addClass('Hover');
}
},
dropdownAddItem: function(value, text, html){
if(html == undefined){
html = text;
}
if($('li[data-value="' + value + '"]', this.tokensContainer).length){
return false;
}
var $this = this;
var item = $('<li />')
.attr('data-value', value)
.attr('data-text', text)
.html(html)
.on('click', function(e){
e.stopImmediatePropagation();
$this.tokenAdd($(this).attr('data-value'), $(this).attr('data-text'));
}).on('mouseover', function(){
$(this).addClass('Hover');
}).on('mouseout', function(){
$('li', $this.dropdown).removeClass('Hover');
});
this.dropdown.append(item);
return true;
},
dropdownHide: function(){
this.dropdownReset();
this.dropdown.hide();
},
dropdownReset: function(){
this.dropdown.html('');
},
resizeSearchInput: function(){
var measure = $('<div />')
.css({ position: 'absolute', visibility: 'hidden' })
.addClass('TokenizeMeasure')
.html(this.searchInput.val());
$('body').append(measure);
this.searchInput.width(measure.width() + 25);
measure.remove();
},
resetSearchInput: function(){
this.searchInput.val("");
this.resizeSearchInput();
},
resetPendingTokens: function(){
$('li.PendingDelete', this.tokensContainer).removeClass('PendingDelete');
},
keydown: function(e){
if(e.keyCode == KEYS.COMMA){
e.preventDefault();
this.tokenAdd(this.searchInput.val(), '');
} else {
switch(e.keyCode){
case KEYS.BACKSPACE:
if(this.searchInput.val().length == 0){
e.preventDefault();
if($('li.Token.PendingDelete', this.tokensContainer).length){
this.tokenRemove($('li.Token.PendingDelete').attr('data-value'));
} else {
$('li.Token:last', this.tokensContainer).addClass('PendingDelete');
}
this.dropdownHide();
}
break;
case KEYS.TAB:
case KEYS.ENTER:
if($('li.Hover', this.dropdown).length){
var element = $('li.Hover', this.dropdown);
e.preventDefault();
this.tokenAdd(element.attr('data-value'), element.attr('data-text'));
} else {
if(this.searchInput.val()){
e.preventDefault();
this.tokenAdd(this.searchInput.val(), '');
}
}
this.resetPendingTokens();
break;
case KEYS.ESCAPE:
this.resetSearchInput();
this.dropdownHide();
this.resetPendingTokens();
break;
case KEYS.ARROW_UP:
e.preventDefault();
this.dropdownPrev();
break;
case KEYS.ARROW_DOWN:
e.preventDefault();
this.dropdownNext();
break;
default:
this.resetPendingTokens();
break;
}
}
},
keyup: function(e){
if(e.keyCode != this.options.validator){
switch(e.keyCode){
case KEYS.TAB:
case KEYS.ENTER:
case KEYS.ESCAPE:
case KEYS.ARROW_UP:
case KEYS.ARROW_DOWN:
break;
case KEYS.BACKSPACE:
if(this.searchInput.val()){
this.search();
} else {
this.dropdownHide();
}
break;
default:
if(this.searchInput.val()){
this.search();
}
break;
}
}
},
search: function(){
var $this = this;
var count = 1;
if(this.options.maxElements > 0 && $('li.Token', this.tokensContainer).length >= this.options.maxElements){
return false;
}
if(this.options.datas == 'select'){
var found = false, regexp = new RegExp(this.searchInput.val().replace(/[-[\]{}()*+?.,\\^$|#\s]/g, "\\$&"), 'i');
this.dropdownReset();
$('option', this.select).not(':selected').each(function(){
if(count <= $this.options.nbDropdownElements){
if(regexp.test($(this).html())){
$this.dropdownAddItem($(this).attr('value'), $(this).html());
found = true;
count++;
}
} else {
return false;
}
});
if(found){
$('li:first', this.dropdown).addClass('Hover');
this.dropdownShow();
} else {
this.dropdownHide();
}
} else {
$.ajax({
url: this.options.datas,
data: this.options.searchParam + "=" + this.searchInput.val(),
dataType: this.options.dataType,
success: function(data){
if(data){
$this.dropdownReset();
$.each(data, function(key, val){
if(count <= $this.options.nbDropdownElements){
var html = undefined;
if(val[$this.options.htmlField]){
html = val[$this.options.htmlField];
}
$this.dropdownAddItem(val[$this.options.valueField], val[$this.options.textField], html);
count++;
} else {
return false;
}
});
if($('li', $this.dropdown).length){
$('li:first', $this.dropdown).addClass('Hover');
$this.dropdownShow();
return true;
}
}
$this.dropdownHide();
},
error: function(XHR, textStatus) {
console.log("Error : " + textStatus);
}
});
}
},
tokenAdd: function(value, text, first){
if(value == undefined || value == ''){
return false;
}
if(text == undefined || text == ''){
text = value;
}
if(first == undefined){
first = false;
}
if(this.options.maxElements > 0 && $('li.Token', this.tokensContainer).length >= this.options.maxElements){
this.resetSearchInput();
return false;
}
var $this = this;
var close_btn = $('<a />')
.addClass('Close')
.html("×")
.on('click', function(e){
e.stopImmediatePropagation();
$this.tokenRemove(value);
});
if($('option[value="' + value + '"]', this.select).length){
$('option[value="' + value + '"]', this.select).attr('selected', 'selected');
} else if(this.options.newElements) {
var option = $('<option />')
.attr('selected', 'selected')
.attr('value', value)
.attr('data-type', 'custom')
.html(text);
this.select.append(option);
} else {
this.resetSearchInput();
return false;
}
if($('li.Token[data-value="' + value + '"]', this.tokensContainer).length > 0) {
return false;
}
$('<li />')
.addClass('Token')
.attr('data-value', value)
.append('<span>' + text + '</span>')
.prepend(close_btn)
.insertBefore(this.searchToken);
if(!first){
this.options.onAddToken(value, text);
}
this.resetSearchInput();
this.dropdownHide();
return true;
},
tokenRemove: function(value){
var option = $('option[value="' + value + '"]', this.select);
if(option.attr('data-type') == 'custom'){
option.remove();
} else {
option.removeAttr('selected');
}
$('li.Token[data-value="' + value + '"]', this.tokensContainer).remove();
this.options.onRemoveToken(value);
this.resizeSearchInput();
this.dropdownHide();
}
});
$.fn.tokenize = function(options){
if(options == undefined){
options = {};
}
this.each(function(){
var obj = new $.tokenize($.extend({}, $.fn.tokenize.defaults, options));
obj.init($(this));
$(this).data(DATA, obj);
});
return this;
};
$.fn.tokenize.defaults = {
datas: 'select',
searchParam: 'search',
searchMaxLength: 30,
newElements: true,
nbDropdownElements: 10,
maxElements: 0,
dataType: 'json',
valueField: 'value',
textField: 'text',
htmlField: 'html',
onAddToken: function(value, text){},
onRemoveToken: function(value){}
};
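/**
 * A minimal usage sketch (the "#tags" selector is hypothetical; the options and
 * callbacks are the ones declared in the defaults above):
 *
 *   $('#tags').tokenize({
 *       maxElements: 5,
 *       onAddToken: function(value, text){ console.log('added', value); }
 *   });
 *
 * Any option left out falls back to $.fn.tokenize.defaults.
 */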
})(jQuery, 'tokenize');
|
JavaScript
|
Apache-2.0
|
marknotbatman/dttw-old/wp-content/plugins/vfb-pro/public/assets/js/vendors/jquery.tokenize.js
|
0078f653-b224-41df-b017-7aaa4adeb0ca
|
[{"tag": "NAME", "value": "David Zeller", "start": 938, "end": 950, "context": "sed under the new BSD license.\n *\n * @author David Zeller <me@zellerda.com>\n * @license http://www.open"}, {"tag": "EMAIL", "value": "me@zellerda.com", "start": 952, "end": 967, "context": "new BSD license.\n *\n * @author David Zeller <me@zellerda.com>\n * @license http://www.opensource.org/licens"}]
|
[{"tag": "NAME", "value": "David Zeller", "start": 938, "end": 950, "context": "sed under the new BSD license.\n *\n * @author David Zeller <me@zellerda.com>\n * @license http://www.open"}, {"tag": "EMAIL", "value": "me@zellerda.com", "start": 952, "end": 967, "context": "new BSD license.\n *\n * @author David Zeller <me@zellerda.com>\n * @license http://www.opensource.org/licens"}]
|
#!/usr/bin/env python
import json
import os.path
import subprocess
import common
import docker_utils
nginx_sites_available = '/etc/nginx/sites-available'
CERT_DIR = '/root/certs'
def create_certificates(domains):
format_args = {'cert_dir': CERT_DIR}
if not os.path.isfile(os.path.join(CERT_DIR, 'acmeCA.key.deleteme')):
commands = """openssl rsa -in %(cert_dir)s/acmeCA.key -out %(cert_dir)s/acmeCA.key.deleteme""" % format_args
for command in [cmd for cmd in commands.split("\n") if cmd]:
subprocess.call([arg for arg in command.split(" ") if arg])
for domain in domains:
create_certificate(domain)
def create_certificate(domain):
format_args = {'domain': domain,
'cert_dir': CERT_DIR}
if os.path.isfile('%(cert_dir)s/%(domain)s.key' % format_args):
return
commands = """
openssl genrsa -out %(cert_dir)s/%(domain)s.key 2048
openssl req -new -key %(cert_dir)s/%(domain)s.key -out %(cert_dir)s/%(domain)s.csr -subj /C=DE/ST=Niedersachsen/L=Osnabrueck/O=OPS/CN=%(domain)s
openssl x509 -req -in %(cert_dir)s/%(domain)s.csr -CA %(cert_dir)s/acmeCA.pem -CAkey %(cert_dir)s/acmeCA.key.deleteme -CAcreateserial -out %(cert_dir)s/%(domain)s.crt -days 500
rm %(cert_dir)s/%(domain)s.csr
""" % format_args
for command in [cmd for cmd in commands.split("\n") if cmd]:
print(command.split(" "))
subprocess.call([arg for arg in command.split(" ") if arg])
# create_certificates([host.domains[0] for host in common.get_vhost_config()])
def update_vhosts_config(applications):
jsonFile = open('/root/config/nginx_vhosts.json', "r")
data = json.load(jsonFile)
jsonFile.close()
for app in applications:
docker_container_config = docker_utils.get_config(app.docker_container_name)
vhost_config = data[app.vhost_name]
vhost_config['port'] = docker_container_config.port if not app.docker_container_port else app.docker_container_port
vhost_config['ip_addr'] = docker_container_config.ip_addr
jsonFile = open('/root/config/nginx_vhosts.json', "w+")
jsonFile.write(json.dumps(data, indent=4, sort_keys=True))
jsonFile.close()
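# Illustrative shape of /root/config/nginx_vhosts.json, inferred from the lookups
# above (the vhost name and values are made up):
#
#   {
#       "myapp": {"port": 8080, "ip_addr": "172.17.0.2"}
#   }
#
# Only 'port' and 'ip_addr' are rewritten per vhost; any other keys in the file
# are preserved as-is.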
def update_vhosts(vhosts):
for vhost in vhosts:
host = vhost.host
port = vhost.port
ip_addr = vhost.ip_addr
domains = vhost.domains
flags = vhost.flags
location_tmpl = """
location %(path)s {
proxy_pass http://upstream_%(upstream)s%(upstream_path)s;
proxy_http_version 1.1;
%(redirect_rule)s
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
proxy_set_header Host %(host)s;
%(set_script_name)s
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
proxy_set_header X-Forwarded-Port $server_port;
%(misc)s
}
"""
location_tmpl_params = {
'redirect_rule': 'proxy_redirect off;' if flags.get('disableRedirect') else ''
}
def render_location(location_dict):
location_dict['host'] = location_dict.get('host', '$host')
location_dict['set_script_name'] = location_dict.get('set_script_name', '')
location_dict['misc'] = location_dict.get('misc', '')
location_dict['upstream_path'] = location_dict.get('upstream_path', '')
params = dict(location_dict)
params.update(location_tmpl_params)
# print params
return location_tmpl % params
location_parameters = { 'upstream': domains[0], 'path': '/', 'host': flags.get('forceHost', '$host'),
'upstream_path': flags.get('upstream_path', '')}
if 'htpasswd_file' in flags:
location_parameters['misc'] = 'auth_basic "Restricted"; auth_basic_user_file %s;' % (flags['htpasswd_file'])
if 'location_extra' in flags:
location_parameters['misc'] = location_parameters['misc'] if 'misc' in location_parameters else ''
location_parameters['misc'] += flags['location_extra']
location = render_location(location_parameters)
location_ssl = location
upstreams = [{
'local_port': port,
'local_address': ip_addr,
'name': domains[0]
}]
if flags.get('sslToPort'):
upstream_name = "%s_ssl " % domains[0]
location_ssl = render_location({ 'upstream': upstream_name, 'path': '/', 'host': flags.get('forceHost', '$host')})
upstreams.append({
'local_port': flags.get('sslToPort'),
'local_address': ip_addr,
'name': upstream_name
})
if flags.get('httpsToHttpPaths'):
for path in flags.get('httpsToHttpPaths').split(','):
location_ssl += "\n" + render_location({ 'upstream': domains[0], 'path': '/%s' % path, 'host': flags.get('forceHost', '$host') })
other_locations = [{ 'upstream': domains[0], 'path': '@failover', 'host': flags.get('forceHost', '$host')}]
other_locations_https = []
path_idx = 0
for path, path_config in vhost.paths.items():
upstream_name = "%s_%s " % (domains[0], path_idx)
upstreams.append({
'local_port': path_config['port'],
'local_address': vm_map[path_config['host']]['local_address'],
'name': upstream_name
})
if path_config['secure']:
other_locations_https.append({ 'upstream': upstream_name, 'path': '/%s' % path,
'misc': '''
''',
'set_script_name': ('proxy_set_header SCRIPT_NAME /%s;' % path.rstrip('/')) if path_config.get('setScriptName') else '',
'host': flags.get('forceHost', '$host')})
else:
other_locations.append({ 'upstream': upstream_name, 'path': '/%s' % path,
'misc': '''
error_page 500 = @failover;
proxy_intercept_errors on;
''',
'set_script_name': ('proxy_set_header SCRIPT_NAME /%s;' % path.rstrip('/')) if path_config.get('setScriptName') else '',
'host': flags.get('forceHost', '$host')})
path_idx += 1
upstream_tmpl = 'upstream upstream_%(name)s { server %(local_address)s:%(local_port)s; }'
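# upstream_tmpl renders one nginx upstream block per entry, e.g. (values illustrative):
#   upstream upstream_example.com { server 172.17.0.2:8080; }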
rewrites = ''
extra_directives = ''
if flags.get('block_robots'):
extra_directives += '''
location = /robots.txt {
alias /var/www/robots_deny.txt;
}
'''
if flags.get('allow_robots'):
extra_directives += '''
location = /robots.txt {
alias /var/www/robots_allow.txt;
}
'''
if 'server_config_extra' in flags:
extra_directives += flags['server_config_extra']
if flags.get('aliases'):
aliases = flags.get('aliases').split("\n")
for alias in aliases:
extra_directives += '''
location /%s {
alias %s;
}
''' % tuple(alias.strip().split('->'))
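# Each alias line is expected in the form "<location>-><filesystem path>", e.g.
# "static->/var/www/static" (illustrative); the split('->') above relies on
# exactly one arrow per line.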
if vhost.rewrites:
rewrites += vhost.rewrites
location_http = location if flags.get('allow_http') else 'return 301 https://$host$request_uri;'
if flags.get('httpPaths'):
for path in flags.get('httpPaths').split(','):
location_http = "\n" + render_location({ 'upstream': domains[0], 'path': '/%s' % path, 'host': flags.get('forceHost', '$host') }) + "\n" + ''' location / { return 301 https://$host$request_uri; }
'''
format_args = {
'upstreams': "\n".join([upstream_tmpl % up for up in upstreams]),
'public_port': port,
'other_locations': "\n".join([render_location(location_dict) for location_dict in other_locations]),
'other_locations_https': "\n".join([render_location(location_dict) for location_dict in other_locations_https]),
'extra_directives': extra_directives,
'domain': domains[0],
'server_names': ' '.join(domains) if not flags.get('rewriteDomains') else domains[0],
'location': location_ssl,
'rewrites': rewrites,
'upload_limit': flags.get('uploadLimit', '20M'),
'location_http': location_http,
'cert_dir': CERT_DIR}
config = """
%(upstreams)s
server {
listen 80;
server_name %(server_names)s;
client_max_body_size %(upload_limit)s;
%(rewrites)s
%(location_http)s
%(other_locations)s
%(extra_directives)s
}
""" % format_args
if not flags.get('noSsl'):
config += """
server {
listen 443 ssl;
server_name %(server_names)s;
client_max_body_size %(upload_limit)s;
ssl on;
ssl_certificate %(cert_dir)s/%(domain)s.cer;
ssl_certificate_key %(cert_dir)s/%(domain)s.key;
ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:AES128-GCM-SHA256:RC4:HIGH:!MD5:!aNULL:!EDH:!CAMELLIA;
ssl_protocols TLSv1.2 TLSv1.1 TLSv1;
ssl_prefer_server_ciphers on;
%(location)s
%(other_locations_https)s
%(extra_directives)s
}
""" % format_args
if flags.get('rewriteDomains'):
for domain in domains[1:]:
config += """
server {
listen 80;
server_name %(domain1)s;
return 301 http://%(domain2)s$request_uri;
}
""" % {'domain1': domain, 'domain2': domains[0]}
f = open('%s/%s' % (nginx_sites_available, domains[0]), 'w')
f.write(config)
f.close()
'''
proxy_next_upstream error timeout invalid_header http_500 http_502 http_503 http_504;
proxy_set_header Host $host;
proxy_set_header X-Real-IP $remote_addr;
proxy_set_header X-Forwarded-For $proxy_add_x_forwarded_for;
'''
update_vhosts_config(common.get_applications_config())
update_vhosts(common.get_vhost_config())
|
Python
|
Unlicense
|
bcoding/docker-host-scripts/py/update_nginx_vhosts.py
|
84a8ba8b-fb84-406a-8835-8662f655b23e
|
[{"tag": "API_KEY", "value": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:AES128-GCM-SHA256:RC4:HIGH:!MD5:!aNULL:!EDH:!CAMELLIA", "start": 9730, "end": 9850, "context": "(cert_dir)s/%(domain)s.key;\n ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:AES128-GCM-SHA256:RC4:HIGH:!MD5:!aNULL:!EDH:!CAMELLIA;\n ssl_protocols TLSv1.2 TLSv1.1 TLSv1;\n "}]
|
[{"tag": "KEY", "value": "ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:AES128-GCM-SHA256:RC4:HIGH:!MD5:!aNULL:!EDH:!CAMELLIA", "start": 9730, "end": 9850, "context": "(cert_dir)s/%(domain)s.key;\n ssl_ciphers ECDHE-RSA-AES128-GCM-SHA256:ECDHE-RSA-RC4-SHA:ECDHE-RSA-AES128-SHA:AES128-GCM-SHA256:RC4:HIGH:!MD5:!aNULL:!EDH:!CAMELLIA;\n ssl_protocols TLSv1.2 TLSv1.1 TLSv1;\n "}]
|
/**
@file appmodule.cpp
@brief This file is part of Kalinka mediaserver.
@author Ivan Murashko <ivan.murashko@gmail.com>
Copyright (c) 2007-2012 Kalinka Team
Permission is hereby granted, free of charge, to any person obtaining
a copy of this software and associated documentation files (the
"Software"), to deal in the Software without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Software, and to
permit persons to whom the Software is furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
CHANGE HISTORY
@date
- 2009/04/02 created by ipp (Ivan Murashko)
- 2009/08/02 header was changed by header.py script
- 2010/01/06 header was changed by header.py script
- 2011/01/01 header was changed by header.py script
- 2012/02/03 header was changed by header.py script
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include "appmodule.h"
#include "exception.h"
#include "cliapp.h"
#include "db.h"
using namespace klk::app;
//
// Module class
//
// Constructor
Module::Module(klk::IFactory* factory,
const std::string& modid,
const std::string& setmsgid,
const std::string& showmsgid) :
klk::ModuleWithDB(factory, modid), m_appuuid_mutex(), m_appuuid(),
m_setmsgid(setmsgid),
m_showmsgid(showmsgid)
{
BOOST_ASSERT(m_setmsgid.empty() == false);
BOOST_ASSERT(m_showmsgid.empty() == false);
BOOST_ASSERT(m_setmsgid != m_showmsgid);
}
// Retrieves the application uuid
const std::string Module::getAppUUID()
{
using namespace klk;
Locker lock(&m_appuuid_mutex);
if (m_appuuid.empty())
{
// retrieve application id
// `klk_application_uuid_get` (
// IN module VARCHAR(40),
// IN host VARCHAR(40),
// OUT application VARCHAR(40)
db::DB db(getFactory());
db.connect();
db::Parameters params;
params.add("@module", getID());
params.add("@host", db.getHostUUID());
params.add("@application");
db::Result res = db.callSimple("klk_application_uuid_get", params);
if (res["@application"].isNull())
{
throw Exception(__FILE__, __LINE__,
"DB error while retriving application uuid");
}
m_appuuid = res["@application"].toString();
}
return m_appuuid;
}
// Register all processors
void Module::registerProcessors()
{
using namespace klk;
ModuleWithDB::registerProcessors();
registerCLI(cli::ICommandPtr(new cli::AutostartSet(m_setmsgid)));
registerCLI(cli::ICommandPtr(new cli::AutostartShow(m_showmsgid)));
}
|
C++
|
MIT
|
ivanmurashko/kalinka/src/common/appmodule.cpp
|
cc77578d-d7b4-40ca-baf7-e6358f3a9414
|
[{"tag": "NAME", "value": "Ivan Murashko", "start": 90, "end": 103, "context": "s file is part of Kalinka mediaserver.\n @author Ivan Murashko <ivan.murashko@gmail.com>\n\n Copyright (c) 2007-"}, {"tag": "NAME", "value": "Ivan Murashko", "start": 1304, "end": 1317, "context": "HISTORY\n\n @date\n - 2009/04/02 created by ipp (Ivan Murashko)\n - 2009/08/02 header was changed by header.py "}, {"tag": "EMAIL", "value": "ivan.murashko@gmail.com", "start": 105, "end": 128, "context": "of Kalinka mediaserver.\n @author Ivan Murashko <ivan.murashko@gmail.com>\n\n Copyright (c) 2007-2012 Kalinka Team\n\n Per"}]
|
[{"tag": "NAME", "value": "Ivan Murashko", "start": 90, "end": 103, "context": "s file is part of Kalinka mediaserver.\n @author Ivan Murashko <ivan.murashko@gmail.com>\n\n Copyright (c) 2007-"}, {"tag": "NAME", "value": "Ivan Murashko", "start": 1304, "end": 1317, "context": "HISTORY\n\n @date\n - 2009/04/02 created by ipp (Ivan Murashko)\n - 2009/08/02 header was changed by header.py "}, {"tag": "EMAIL", "value": "ivan.murashko@gmail.com", "start": 105, "end": 128, "context": "of Kalinka mediaserver.\n @author Ivan Murashko <ivan.murashko@gmail.com>\n\n Copyright (c) 2007-2012 Kalinka Team\n\n Per"}]
|
// Protocol Buffers - Google's data interchange format
// Copyright 2008 Google Inc.
// http://code.google.com/p/protobuf/
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Author: kenton@google.com (Kenton Varda)
// Based on original Protocol Buffers design by
// Sanjay Ghemawat, Jeff Dean, and others.
// Modified to implement C code by Dave Benson.
#include <google/protobuf/compiler/c/c_enum_field.h>
#include <google/protobuf/compiler/c/c_helpers.h>
#include <google/protobuf/io/printer.h>
#include <google/protobuf/wire_format.h>
namespace google {
namespace protobuf {
namespace compiler {
namespace c {
using internal::WireFormat;
// TODO(kenton): Factor out a "SetCommonFieldVariables()" to get rid of
// repeat code between this and the other field types.
void SetEnumVariables(const FieldDescriptor* descriptor,
map<string, string>* variables) {
(*variables)["name"] = FieldName(descriptor);
(*variables)["type"] = FullNameToC(descriptor->enum_type()->full_name());
if (descriptor->has_default_value()) {
const EnumValueDescriptor* default_value = descriptor->default_value_enum();
(*variables)["default"] = FullNameToUpper(default_value->type()->full_name())
+ "__" + ToUpper(default_value->name());
} else
(*variables)["default"] = "0";
(*variables)["deprecated"] = FieldDeprecated(descriptor);
}
// ===================================================================
EnumFieldGenerator::
EnumFieldGenerator(const FieldDescriptor* descriptor)
: FieldGenerator(descriptor)
{
SetEnumVariables(descriptor, &variables_);
}
EnumFieldGenerator::~EnumFieldGenerator() {}
void EnumFieldGenerator::GenerateStructMembers(io::Printer* printer) const
{
switch (descriptor_->label()) {
case FieldDescriptor::LABEL_REQUIRED:
printer->Print(variables_, "$type$ $name$$deprecated$;\n");
break;
case FieldDescriptor::LABEL_OPTIONAL:
printer->Print(variables_, "protobuf_c_boolean has_$name$$deprecated$;\n");
printer->Print(variables_, "$type$ $name$$deprecated$;\n");
break;
case FieldDescriptor::LABEL_REPEATED:
printer->Print(variables_, "size_t n_$name$$deprecated$;\n");
printer->Print(variables_, "$type$ *$name$$deprecated$;\n");
break;
}
}
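// Illustrative output of GenerateStructMembers() for an optional enum field
// named "mode" of enum type Foo__Mode (field and type names are made up):
//
//   protobuf_c_boolean has_mode;
//   Foo__Mode mode;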
string EnumFieldGenerator::GetDefaultValue(void) const
{
return variables_.find("default")->second;
}
void EnumFieldGenerator::GenerateStaticInit(io::Printer* printer) const
{
switch (descriptor_->label()) {
case FieldDescriptor::LABEL_REQUIRED:
printer->Print(variables_, "$default$");
break;
case FieldDescriptor::LABEL_OPTIONAL:
printer->Print(variables_, "0,$default$");
break;
case FieldDescriptor::LABEL_REPEATED:
// no support for default?
printer->Print("0,NULL");
break;
}
}
void EnumFieldGenerator::GenerateDescriptorInitializer(io::Printer* printer) const
{
string addr = "&" + FullNameToLower(descriptor_->enum_type()->full_name()) + "__descriptor";
GenerateDescriptorInitializerGeneric(printer, true, "ENUM", addr);
}
} // namespace c
} // namespace compiler
} // namespace protobuf
} // namespace google
|
C++
|
BSD-3-Clause
|
Cosmic-OS/platform_external_protobuf-c/src/google/protobuf/compiler/c/c_enum_field.cc
|
c7324f83-828a-4239-a1dd-bd55b6d2977a
|
[{"tag": "NAME", "value": "Sanjay Ghemawat", "start": 780, "end": 795, "context": " Based on original Protocol Buffers design by\n// Sanjay Ghemawat, Jeff Dean, and others.\n\n// Modified to implement"}, {"tag": "EMAIL", "value": "kenton@google.com", "start": 694, "end": 711, "context": "and\n// limitations under the License.\n\n// Author: kenton@google.com (Kenton Varda)\n// Based on original Protocol Buf"}, {"tag": "NAME", "value": "Kenton Varda", "start": 713, "end": 725, "context": "under the License.\n\n// Author: kenton@google.com (Kenton Varda)\n// Based on original Protocol Buffers design by"}, {"tag": "NAME", "value": "Dave Benson", "start": 856, "end": 867, "context": ", and others.\n\n// Modified to implement C code by Dave Benson.\n\n#include <google/protobuf/compiler/c/c_enum_fie"}, {"tag": "NAME", "value": "Jeff Dean", "start": 797, "end": 806, "context": "l Protocol Buffers design by\n// Sanjay Ghemawat, Jeff Dean, and others.\n\n// Modified to implement C code by "}]
|
[{"tag": "NAME", "value": "Sanjay Ghemawat", "start": 780, "end": 795, "context": " Based on original Protocol Buffers design by\n// Sanjay Ghemawat, Jeff Dean, and others.\n\n// Modified to implement"}, {"tag": "EMAIL", "value": "kenton@google.com", "start": 694, "end": 711, "context": "and\n// limitations under the License.\n\n// Author: kenton@google.com (Kenton Varda)\n// Based on original Protocol Buf"}, {"tag": "NAME", "value": "Kenton Varda", "start": 713, "end": 725, "context": "under the License.\n\n// Author: kenton@google.com (Kenton Varda)\n// Based on original Protocol Buffers design by"}, {"tag": "NAME", "value": "Dave Benson", "start": 856, "end": 867, "context": ", and others.\n\n// Modified to implement C code by Dave Benson.\n\n#include <google/protobuf/compiler/c/c_enum_fie"}, {"tag": "NAME", "value": "Jeff Dean", "start": 797, "end": 806, "context": "l Protocol Buffers design by\n// Sanjay Ghemawat, Jeff Dean, and others.\n\n// Modified to implement C code by "}]
|
---
title: Message Released on Plans for the 2020-21 Academic Year
subtitle:
date: 2020-04-28
link: >-
https://news.stonybrook.edu/stony-brook-spotlight/message-release-on-plans-for-the-2020-21-academic-year/
countryCode: us
status: published
instituteSlug: us-stonybrook
---

President-Elect Maurie McInnis and Interim President Michael A. Bernstein have issued a message to the campus community concerning plans for the 2020-2021 academic year. The message reads as follows:
As we approach the final weeks of the Spring Semester, we hope that you and your families are healthy and safe. We write to update you on plans for the months ahead and, in particular, the planning underway for the 2020-2021 academic year.
As you know, our region stands at the current epicenter of the pandemic in the United States. We are proud of how the Stony Brook community has responded. Our Stony Brook Medicine personnel have been heroic in meeting the rapidly escalating healthcare needs of our region. Our faculty and staff facilitated a smooth transition to remote teaching and learning so that our academic mission could continue. Our students have risen to the challenges posed by remote learning.
The question on everyone’s mind is what will happen in the fall. Specifically, will we be able to return to campus?
Answering this question requires careful attention to local conditions. As we consider how to reactivate campus, we will do so with the guidance of our medical, epidemiological, and local public health experts. The well-being of our community must remain our overriding priority.
At the beginning of this crisis, the University established Coronavirus response committees. These teams have served the University well in meeting emergent needs and are now focused on assessment and planning for the months ahead. These committees will consult with campus leadership, employing shared governance as we develop return to campus protocols in light of the new challenges posed by the COVID-19 pandemic.
We are as eager as all of you to return to campus as soon as possible though no definitive decisions have yet been made about the fall term. We plan to provide the campus further updates no later than June 30; sooner if we are able. Along the way, we will update the University Coronavirus website with timely information regarding the fall semester.
Our nation is confronted by a dramatically challenging, disorienting, and disturbing situation — one unique in its entire history. The stresses and strains on us all — both at work and at home — are profound. At the same time, the Stony Brook community has drawn together in powerful and inspiring fashion. Your resilience, skill, courage, and virtuosity are widely acknowledged and celebrated on campus, throughout the SUNY network, and across higher education nationwide.
To be sure, difficult days will continue to confront us. But we will do so together. We want to express our gratitude to everyone on campus for your work in helping us continue to meet the University’s mission and support the Seawolf community.
|
Markdown
|
MIT
|
applysquare/covid19-datahub/data/update/2020-05/2020-05-06-10_51_42-us-stonybrook.md
|
64708ecc-a6e0-4ce8-9c10-2bc55e4f9774
|
[{"tag": "NAME", "value": "Maurie McInnis", "start": 390, "end": 404, "context": "ony-brook-university-campus.jpg)\n\nPresident-Elect Maurie McInnis and Interim President Michael A. Bernstein have i"}, {"tag": "NAME", "value": "Michael A. Bernstein", "start": 427, "end": 447, "context": "sident-Elect Maurie McInnis and Interim President Michael A. Bernstein have issued a message to the campus community con"}]
|
[{"tag": "NAME", "value": "Maurie McInnis", "start": 390, "end": 404, "context": "ony-brook-university-campus.jpg)\n\nPresident-Elect Maurie McInnis and Interim President Michael A. Bernstein have i"}, {"tag": "NAME", "value": "Michael A. Bernstein", "start": 427, "end": 447, "context": "sident-Elect Maurie McInnis and Interim President Michael A. Bernstein have issued a message to the campus community con"}]
|
#include <microhttpd.h>
#include <cstdlib>
#include <cstring>
#include <cstdio>
#include <string>
#include <iostream>
#include <map>
#include <new>
#include "serverdata.h"
#include "serverapi.h"
// we manage the registration process here.
#include "api/register.h"
#define PAGE "<html><head><title>libmicrohttpd demo</title>"\
"</head><body>libmicrohttpd demo</body></html>"
namespace gms {
static int ahc_echo(void * cls, struct MHD_Connection * connection,
const char * url, const char * method, const char * version,
const char * upload_data, size_t * upload_data_size, void ** ptr)
{
static int dummy;
const char * page = (char*) cls;
struct MHD_Response * response;
int ret;
if (0 != strcmp(method, "GET"))
return MHD_NO; /* unexpected method */
if (&dummy != *ptr)
{
/* The first time only the headers are valid,
do not respond in the first round... */
*ptr = &dummy;
return MHD_YES;
}
if (0 != *upload_data_size)
return MHD_NO; /* upload data in a GET!? */
*ptr = NULL; /* clear context pointer */
printf("url: '%s'\n", url);
response = MHD_create_response_from_data(strlen(page), (void*) page, MHD_NO,
MHD_NO);
ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
MHD_destroy_response(response);
return ret;
}
int print_out_key(void *cls, enum MHD_ValueKind kind, const char *key,
const char *value)
{
printf("%s: %s\n", key, value);
return MHD_YES;
}
static int answer_to_connection(void *cls, struct MHD_Connection *connection,
const char *url, const char *method, const char *version,
const char *upload_data, size_t *upload_data_size, void **con_cls)
{
printf("New %s request for %s using version %s\n", method, url, version);
printf("\nMHD_RESPONSE_HEADER_KIND:\n");
MHD_get_connection_values(connection, MHD_RESPONSE_HEADER_KIND,
&print_out_key, NULL);
printf("\nMHD_HEADER_KIND:\n");
MHD_get_connection_values(connection, MHD_HEADER_KIND, &print_out_key,
NULL);
printf("\nMHD_COOKIE_KIND:\n");
MHD_get_connection_values(connection, MHD_COOKIE_KIND, &print_out_key,
NULL);
printf("\nMHD_POSTDATA_KIND:\n");
MHD_get_connection_values(connection, MHD_POSTDATA_KIND, &print_out_key,
NULL);
printf("\nMHD_GET_ARGUMENT_KIND:\n");
MHD_get_connection_values(connection, MHD_GET_ARGUMENT_KIND, &print_out_key,
NULL);
printf("\nMHD_FOOTER_KIND:\n");
MHD_get_connection_values(connection, MHD_FOOTER_KIND, &print_out_key,
NULL);
return MHD_NO;
}
#define ERROR_PAGE "<html><head><title>Error</title></head><body>Bad data</body></html>"
static int send_bad_response(struct MHD_Connection *connection)
{
static char *bad_response = (char *) ERROR_PAGE;
int bad_response_len = strlen(bad_response);
int ret;
struct MHD_Response *response;
response = MHD_create_response_from_buffer(bad_response_len, bad_response,
MHD_RESPMEM_PERSISTENT);
if (response == 0)
{
return MHD_NO;
}
ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
MHD_destroy_response(response);
return ret;
}
static int send_authenticated_response(struct MHD_Connection *connection)
{
static char *authResponse = (char *)("Successfully authenticated");
int response_len = strlen(authResponse);
int ret;
struct MHD_Response *response;
response = MHD_create_response_from_buffer(response_len, authResponse,
MHD_RESPMEM_PERSISTENT);
if (response == 0)
{
return MHD_NO;
}
ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
MHD_destroy_response(response);
return ret;
}
/*
*The Cross-Origin Resource Sharing standard works by adding new HTTP headers
*that allow servers to describe the set of origins that are permitted to read
*that information using a web browser. Additionally, for HTTP request methods
*that can cause side-effects on user data (in particular, for HTTP methods
*other than GET, or for POST usage with certain MIME types), the specification
*mandates that browsers "preflight" the request, soliciting supported methods
*from the server with an HTTP OPTIONS request header, and then, upon
*"approval" from the server, sending the actual request with the actual HTTP
*request method. Servers can also notify clients whether "credentials"
*(including Cookies and HTTP Authentication data) should be sent with
*requests.
*/
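/*
 * Illustrative preflight exchange handled by sendAccessControl() below
 * (the path, origin, and requested header are examples only):
 *
 *   OPTIONS /models HTTP/1.1
 *   Origin: http://example.com
 *   Access-Control-Request-Method: POST
 *   Access-Control-Request-Headers: content-type
 *
 *   HTTP/1.1 200 OK
 *   Access-Control-Allow-Origin: *
 *   Access-Control-Allow-Methods: GET, OPTIONS, POST
 *   Access-Control-Allow-Headers: content-type
 */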
static int sendAccessControl(struct MHD_Connection *connection, const char *url,
const char *method, const char *version)
{
int ret;
struct MHD_Response *response;
std::cout << "Sending CORS accept header for the request: " << std::endl;
/*answer_to_connection(NULL, connection, url, method, version, NULL, NULL,
NULL);*/
response = MHD_create_response_from_buffer(0, NULL, MHD_RESPMEM_PERSISTENT);
if (response == 0)
{
return MHD_NO;
}
// not too fussed with who is trying to use us :)
MHD_add_response_header(response, "Access-Control-Allow-Origin", "*");
// only allow GET (and OPTIONS) requests, no need for PUSH yet...now there is a need for post
MHD_add_response_header(response, "Access-Control-Allow-Methods",
"GET, OPTIONS, POST"); // see http://stackoverflow.com/questions/107390/whats-the-difference-between-a-post-and-a-put-http-request
// we simply 'allow' all requested headers
const char* val = MHD_lookup_connection_value(connection, MHD_HEADER_KIND,
"Access-Control-Request-Headers");
if (val) // header may be absent from the request; don't pass NULL on
MHD_add_response_header(response, "Access-Control-Allow-Headers", val);
// these seem to be needed?
MHD_add_response_header(response, "Access-Control-Expose-Headers",
"Content-Range");
ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
MHD_destroy_response(response);
return ret;
}
static int get_url_args(void *cls, MHD_ValueKind kind, const char *key,
const char* value)
{
std::cout << "Get Arg with key: '" << key << "'" << std::endl;
std::map<std::string, std::string> * url_args = static_cast<std::map<std::string, std::string> *>(cls);
if (url_args->find(key) == url_args->end())
{
if (!value)
(*url_args)[key] = "";
else
(*url_args)[key] = value;
}
else
{
std::cerr << "Duplicate URL argument?\n";
}
return MHD_YES;
}
/*
* Based on information from:
* https://gnunet.org/svn/libmicrohttpd/doc/examples/simplepost.c
* http://www.gnu.org/software/libmicrohttpd/tutorial.html#Processing-POST-data
*
* to handle POST. But libmicrohttpd only has convenience methods for handling "form" style POSTs, so
* here we check for JSON content in the uploaded data and handle that ourselves.
*
*/
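/*
 * Illustrative JSON POST that the connection_info buffer below accumulates
 * across url_handler calls (the URL and body are examples only):
 *
 *   POST /merge HTTP/1.1
 *   Content-type: application/json
 *
 *   {"id": "some-model", "name": "example"}
 */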
struct connection_info
{
std::string url;
char* biomapsId;
int id;
int connectiontype;
char* data;
size_t dataSize;
};
#define JSON_CONTENT_TYPE "application/json"
#define GET 0
#define POST 1
static int handleRegistration(struct MHD_Connection *connection, const char *url,
std::map<std::string, std::string>& url_args, ServerData* data)
{
int ret;
struct MHD_Response *response;
std::cout << "Handling registration URL: " << url << std::endl;
if (data->isAuthenticated())
{
if (url_args.count("test"))
{
std::string testResponse = data->testGet();
std::cout << "Test Get: " << testResponse << std::endl;
}
// already authenticated, so do nothing?
return send_authenticated_response(connection);
}
// first we need to redirect the user to the PMR authorisation page, if we haven't already done so
if (!data->isAuthenticationUrlSet())
{
std::string authorisationUrl = data->getAuthenticationUrl();
response = MHD_create_response_from_buffer(0, NULL, MHD_RESPMEM_PERSISTENT);
if (response == 0)
{
return MHD_NO;
}
MHD_add_response_header(response, "Location", authorisationUrl.c_str());
ret = MHD_queue_response(connection, MHD_HTTP_TEMPORARY_REDIRECT, response);
MHD_destroy_response(response);
return ret;
}
// now if we have the verifier and token set in the URL we can continue the authentication process
if (url_args.count("oauth_verifier"))
{
if (data->authenticate(url_args["oauth_verifier"]))
{
std::cout << "whoo hoo?!" << std::endl;
return send_authenticated_response(connection);
}
else
{
std::cerr << "Error authenticating?" << std::endl;
}
}
return MHD_NO;
}
static void
request_completed (void *cls, struct MHD_Connection *connection,
void **con_cls, enum MHD_RequestTerminationCode toe)
{
ServerData* data = static_cast<ServerData*>(cls);
struct connection_info *con_info = (connection_info*)(*con_cls);
if (NULL == con_info) return;
std::cout << "Request completed, so destroy connection information object: " << con_info->id << std::endl;
free (con_info->data);
delete con_info; // allocated with new in url_handler; the std::string member needs its destructor
*con_cls = NULL;
}
static int url_handler(void *cls, struct MHD_Connection *connection,
const char *url, const char *method, const char *version,
const char *upload_data, size_t *upload_data_size, void **con_cls)
{
ServerData* data = static_cast<ServerData*>(cls);
char *me;
struct MHD_Response *response;
int ret;
std::map<std::string, std::string> url_args;
std::string respdata;
// HTTP access control (CORS)
// https://developer.mozilla.org/en-US/docs/HTTP/Access_control_CORS?redirectlocale=en-US&redirectslug=HTTP_access_control
// some times a preflight check is required which uses the OPTIONS HTTP method to check for permission to
// call cross-domain requests
if (0 == strcmp(method, MHD_HTTP_METHOD_OPTIONS))
return sendAccessControl(connection, url, method, version);
// we only need to deal with GET requests
/* FIXME -- don't need this since the HTTP options/preflight will ensure non-supported methods are rejected?
if (0 != strcmp(method, MHD_HTTP_METHOD_GET))
return MHD_NO;*/
// set up our connection information on the first pass through.
if (NULL == *con_cls)
{
struct connection_info *con_info;
con_info = new (std::nothrow) connection_info(); // not malloc: the std::string member must be constructed
if (NULL == con_info) return MHD_NO;
static int idCounter = 1;
con_info->id = idCounter++;
con_info->url = std::string(url);
con_info->data = NULL;
con_info->biomapsId = NULL;
con_info->dataSize = 0;
if (0 == strcmp (method, MHD_HTTP_METHOD_POST))
{
std::cout << "Setting up con_cls for POST: " << con_info->id << std::endl;
std::cout << " - with url: " << url << std::endl;
std::string contentType(MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "Content-type"));
if (contentType.find(JSON_CONTENT_TYPE) == std::string::npos)
{
std::cerr << "Error creating POST processor?! Unhandled content type: "
<< MHD_lookup_connection_value(connection, MHD_HEADER_KIND, "Content-type")
<< std::endl;
delete con_info;
return MHD_NO;
}
con_info->connectiontype = POST;
}
else con_info->connectiontype = GET;
*con_cls = (void *) con_info;
return MHD_YES;
}
// intercept POST requests for now to test stuff
if (0 == strcmp(method, MHD_HTTP_METHOD_POST))
{
// post received, do stuff.
struct connection_info *con_info = (connection_info*)(*con_cls);
std::cout << "Received a POST for connection: " << con_info->id << std::endl;
if (*upload_data_size != 0)
{
std::cout << "Processed some data: " << *upload_data_size << std::endl;
//std::cout << "Data: " << upload_data << std::endl;
con_info->data = (char*)realloc(con_info->data, con_info->dataSize + *upload_data_size);
memcpy(con_info->data + con_info->dataSize, upload_data, *upload_data_size);
con_info->dataSize += *upload_data_size;
//std::string bob(upload_data, *upload_data_size);
//con_info->data += bob.c_str();
//std::cout << "con_info->data: " << con_info->data << std::endl;
*upload_data_size = 0; // set to 0 to indicate all data considered/handled.
return MHD_YES;
}
else
{
}
}
else if (0 == strcmp(method, MHD_HTTP_METHOD_GET))
{
if (MHD_get_connection_values(connection, MHD_GET_ARGUMENT_KIND,
get_url_args, &url_args) < 0)
{
return MHD_NO;
}
// need to handle registration/authorisation separately
if (Register::CompatiblePath(url))
{
return handleRegistration(connection, url, url_args, data);
}
// otherwise let our API take care of things.
respdata = executeAPI(url, url_args, data);
}
if (respdata == "BAD RESPONSE")
{
return send_bad_response(connection);
}
//val = MHD_lookup_connection_value (connection, MHD_GET_ARGUMENT_KIND, "q");
me = (char *) malloc(respdata.size() + 1);
if (me == 0)
return MHD_NO;
strncpy(me, respdata.c_str(), respdata.size() + 1);
response = MHD_create_response_from_buffer(strlen(me), me,
MHD_RESPMEM_MUST_FREE);
if (response == 0)
{
free(me);
return MHD_NO;
}
/*it = url_args.find("type");
if (it != url_args.end() && strcasecmp(it->second.c_str(), "xml") == 0)
type = typexml;*/
MHD_add_response_header(response, "Content-Type", "application/json");
MHD_add_response_header(response, "Content-Range", "items 0-5/5");
MHD_add_response_header(response, "Access-Control-Allow-Origin", "*");
// need to make sure we always expose the (non-simple) headers that we use
MHD_add_response_header(response, "Access-Control-Expose-Headers",
"Content-Range");
//MHD_add_response_header(response, "OurHeader", type);
ret = MHD_queue_response(connection, MHD_HTTP_OK, response);
MHD_destroy_response(response);
return ret;
}
int startServer(int port, ServerData* data)
{
struct MHD_Daemon * d = 0;
d = MHD_start_daemon(MHD_USE_SELECT_INTERNALLY, port, NULL, NULL,
&url_handler, static_cast<void*>(data),
MHD_OPTION_NOTIFY_COMPLETED, request_completed,
static_cast<void*>(data), MHD_OPTION_END);
if (d == NULL)
return 1;
(void) getchar();
MHD_stop_daemon(d);
return 0;
}
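// Minimal usage sketch (hedged: how ServerData is constructed is defined
// elsewhere; the port is arbitrary):
//
//   gms::ServerData data;
//   gms::startServer(8888, &data); // serves until a character is read from stdin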
} // namespace gms
|
C++
|
Apache-2.0
|
nickerso/gms-v2/src/libhttpd-utils.cpp
|
b086a289-ea91-4d3e-b331-f98659a52377
|
[]
|
[]
|
# Nodes represent a definition of a value in our graph of operators.
from typing import TYPE_CHECKING, Union, Callable, Any, Tuple, List, Optional, Dict, Set
from ._compatibility import compatibility
from .immutable_collections import immutable_dict, immutable_list
import torch
import builtins
import types
from torch.fx.operator_schemas import normalize_function, normalize_module, ArgsKwargsPair
if TYPE_CHECKING:
from .graph import Graph
BaseArgumentTypes = Union[str, int, float, bool, torch.dtype, torch.Tensor, torch.device, torch.memory_format]
base_types = BaseArgumentTypes.__args__ # type: ignore[attr-defined]
Target = Union[Callable[..., Any], str]
Argument = Optional[Union[
Tuple[Any, ...], # actually Argument, but mypy can't represent recursive types
List[Any], # actually Argument
Dict[str, Any], # actually Argument
slice, # Slice[Argument, Argument, Argument], but slice is not a templated type in typing
'Node',
BaseArgumentTypes
]]
_side_effectful_functions: Set[Callable] = {
torch._assert, torch.ops.profiler._record_function_enter,
torch.ops.profiler._record_function_exit}
# this is fixed on master, WAR for 1.5
def _find_module_of_method(orig_method: Callable[..., Any]) -> str:
name = orig_method.__name__
module = orig_method.__module__
if module is not None:
return module
for guess in [torch, torch.nn.functional]:
if getattr(guess, name, None) is orig_method:
return guess.__name__
raise RuntimeError(f'cannot find module for {orig_method}')
# Borrowed from CPython typing module
# https://github.com/python/cpython/blob/f90dc36c15d7fee0efaf6d39e97be0bdf2683e93/Lib/typing.py#L156
def _type_repr(obj):
"""Return the repr() of an object, special-casing types (internal helper).
If obj is a type, we return a shorter version than the default
type.__repr__, based on the module and qualified name, which is
typically enough to uniquely identify a type. For everything
else, we fall back on repr(obj).
"""
# HACK: In Python 3.6, type aliases from ``typing`` are instances of ``type``, but in
# later Python versions, type aliases are not instances of ``type``!! We want
# all type aliases to fall through to ``repr``, so if we have a type that is
# in the module typing, don't go down this path.
if isinstance(obj, type) and obj.__module__ != 'typing':
if obj.__module__ == 'builtins':
return obj.__qualname__
return f'{obj.__module__}.{obj.__qualname__}'
if obj is ...:
return '...'
if isinstance(obj, types.FunctionType):
return obj.__name__
return repr(obj)
def _get_qualified_name(func: Callable[..., Any]) -> str:
# things like getattr just appear in builtins
if getattr(builtins, func.__name__, None) is func:
return func.__name__
name = func.__name__
module = _find_module_of_method(func)
module = module.replace('torch._ops', 'torch.ops') # WAR for bug in how torch.ops assigns module
return f'{module}.{name}'
def _format_arg(arg) -> str:
if isinstance(arg, list):
items = ', '.join(_format_arg(a) for a in arg)
return f'[{items}]'
elif isinstance(arg, tuple):
items = ', '.join(_format_arg(a) for a in arg)
maybe_comma = ',' if len(arg) == 1 else ''
return f'({items}{maybe_comma})'
elif isinstance(arg, dict):
items_str = ', '.join(f'{k}: {_format_arg(v)}' for k, v in arg.items())
return f'{{{items_str}}}'
if isinstance(arg, Node):
return '%' + str(arg)
else:
return str(arg)
@compatibility(is_backward_compatible=True)
class Node:
"""
``Node`` is the data structure that represents individual operations within
a ``Graph``. For the most part, Nodes represent callsites to various entities,
such as operators, methods, and Modules (some exceptions include nodes that
specify function inputs and outputs). Each ``Node`` has a function specified
by its ``op`` property. The ``Node`` semantics for each value of ``op`` are as follows:
- ``placeholder`` represents a function input. The ``name`` attribute specifies the name this value will take on.
``target`` is similarly the name of the argument. ``args`` holds either: 1) nothing, or 2) a single argument
denoting the default parameter of the function input. ``kwargs`` is don't-care. Placeholders correspond to
the function parameters (e.g. ``x``) in the graph printout.
- ``get_attr`` retrieves a parameter from the module hierarchy. ``name`` is similarly the name the result of the
fetch is assigned to. ``target`` is the fully-qualified name of the parameter's position in the module hierarchy.
``args`` and ``kwargs`` are don't-care
- ``call_function`` applies a free function to some values. ``name`` is similarly the name of the value to assign
to. ``target`` is the function to be applied. ``args`` and ``kwargs`` represent the arguments to the function,
following the Python calling convention
- ``call_module`` applies a module in the module hierarchy's ``forward()`` method to given arguments. ``name`` is
as previous. ``target`` is the fully-qualified name of the module in the module hierarchy to call.
``args`` and ``kwargs`` represent the arguments to invoke the module on, *including the self argument*.
- ``call_method`` calls a method on a value. ``name`` is as previous. ``target`` is the string name of the method
to apply to the ``self`` argument. ``args`` and ``kwargs`` represent the arguments to invoke the module on,
*including the self argument*
- ``output`` contains the output of the traced function in its ``args[0]`` attribute. This corresponds to the "return" statement
in the Graph printout.
"""
@compatibility(is_backward_compatible=True)
def __init__(self, graph: 'Graph', name: str, op: str, target: 'Target',
args: Tuple['Argument', ...], kwargs: Dict[str, 'Argument'],
return_type : Optional[Any] = None) -> None:
"""
Instantiate an instance of ``Node``. Note: most often, you want to use the
Graph APIs, i.e. ``Graph.call_module``, ``Graph.call_method``, etc. rather
than instantiating a ``Node`` directly.
Args:
graph (Graph): The ``Graph`` to which this ``Node`` should belong.
name (str): The name to which the output of this ``Node`` should be assigned
op (str): The opcode for this ``Node``. Can be one of 'placeholder',
'call_method', 'call_module', 'call_function', 'get_attr',
'output'
target ('Target'): The target this op should call. See the broader
``Node`` docstring for more details.
args (Tuple['Argument']): The args to be passed to ``target``
kwargs (Dict[str, 'Argument']): The kwargs to be passed to ``target``
return_type (Optional[Any]): The python type expression representing the
type of the output of this node. This field can be used for
annotation of values in the generated code or for other types
of analyses.
"""
self.graph = graph
self.name = name # unique name of value being created
assert op in ['placeholder', 'call_method', 'call_module', 'call_function', 'get_attr', 'output', 'root']
self.op = op # the kind of operation = placeholder|call_method|call_module|call_function|get_attr
if op == 'call_function':
if not callable(target):
raise ValueError(f'Node [graph = {graph}, name = \'{name}\'] target {target} has type {torch.typename(target)} '
'but a Callable is expected')
else:
if not isinstance(target, str):
raise ValueError(f'Node [graph = {graph}, name = \'{name}\'] target {target} has type {torch.typename(target)} '
'but a str is expected')
self.target = target # for method/module/function, the name of the method/module/function/attr
# being invoked, e.g. add, layer1, or torch.add
# All `Node`-valued inputs. Key is the Node, value is don't-care.
# The public API for this is `all_input_nodes`, this private attribute
# should not be accessed directly.
self._input_nodes : Dict[Node, None] = {}
self.__update_args_kwargs(map_arg(args, lambda x: x), map_arg(kwargs, lambda x: x)) # type: ignore[arg-type]
# All of the nodes that use the value produced by this Node
# Note one user may correspond to several uses, e.g. the node for ``x + x``
# would appear once here, but represents two uses.
#
# Is a dict to act as an "ordered set". Keys are significant, values are don't-care
self.users : Dict['Node', None] = {}
# Type expression representing the output value of this node.
# This should contain the same class of Type objects that would appear
# as type annotations for function inputs/outputs.
#
# For placeholder nodes, this value will be used to type-annotate the
# generated function parameters.
# For the return node, this value will be used to type-annotate the
# generated function return type. (Note this is a special case. ``return``
# does not produce a value, it's more of a notation. Thus, this value
# describes the type of args[0] in the ``return`` node.)
self.type : Optional[Any] = return_type
self._prev = self
self._next = self
self._erased = False
# If set, use this fn to print this node
self._repr_fn : Optional[Callable[[Node], str]] = None
self._stack_trace : Optional[str] = None
# Dictionary to store metadata passes need to do their
# transformations. This metadata is preserved across node copies
self.meta : Dict[str, Any] = {}
@property
def next(self) -> 'Node':
"""
Returns the next ``Node`` in the linked list of Nodes.
Returns:
The next ``Node`` in the linked list of Nodes.
"""
return self._next
@property
def prev(self) -> 'Node':
"""
Returns the previous ``Node`` in the linked list of Nodes.
Returns:
The previous ``Node`` in the linked list of Nodes.
"""
return self._prev
@compatibility(is_backward_compatible=True)
def prepend(self, x: 'Node') -> None:
"""
Insert x before this node in the list of nodes in the graph. Example::
Before: p -> self
bx -> x -> ax
After: p -> x -> self
bx -> ax
Args:
x (Node): The node to put before this node. Must be a member of the same graph.
"""
assert self.graph == x.graph, "Attempting to move a Node into a different Graph"
x._remove_from_list()
p = self._prev
p._next, x._prev = x, p
x._next, self._prev = self, x
@compatibility(is_backward_compatible=True)
def append(self, x: 'Node') -> None:
"""
Insert x after this node in the list of nodes in the graph.
Equivalent to ``self.next.prepend(x)``
Args:
x (Node): The node to put after this node. Must be a member of the same graph.
"""
self._next.prepend(x)
def _remove_from_list(self):
p, n = self._prev, self._next
p._next, n._prev = n, p
@property
def args(self) -> Tuple[Argument, ...]:
"""
The tuple of arguments to this ``Node``. The interpretation of arguments
depends on the node's opcode. See the :class:`Node` docstring for more
information.
Assignment to this property is allowed. All accounting of uses and users
is updated automatically on assignment.
"""
return self._args
@args.setter
def args(self, a : Tuple[Argument, ...]):
"""
Set the tuple of arguments to this Node. The interpretation of arguments
depends on the node's opcode. See the ``fx.Graph`` docstring for more
information.
"""
# DO NOT CALL `__update_args_kwargs` directly. The correct way to
# set `args` is via direct assignment, i.e. `node.args = new_args`
self.__update_args_kwargs(map_arg(a, lambda x: x), self._kwargs) # type: ignore[arg-type]
@property
def kwargs(self) -> Dict[str, Argument]:
"""
The dict of keyword arguments to this ``Node``. The interpretation of arguments
depends on the node's opcode. See the :class:`Node` docstring for more
information.
Assignment to this property is allowed. All accounting of uses and users
is updated automatically on assignment.
"""
return self._kwargs
@kwargs.setter
def kwargs(self, k : Dict[str, Argument]):
"""
Set the dict of kwargs to this Node. The interpretation of arguments
depends on the node's opcode. See the ``fx.Graph`` docstring for more
information.
"""
# DO NOT CALL `__update_args_kwargs` directly. The correct way to
# set `args` is via direct assignment, i.e. `node.kwargs = new_kwargs`
self.__update_args_kwargs(self._args, map_arg(k, lambda x: x)) # type: ignore[arg-type]
@property
def all_input_nodes(self) -> List['Node']:
"""
Return all Nodes that are inputs to this Node. This is equivalent to
iterating over ``args`` and ``kwargs`` and only collecting the values that
are Nodes.
Returns:
List of ``Nodes`` that appear in the ``args`` and ``kwargs`` of this
``Node``, in that order.
"""
return list(self._input_nodes.keys())
@compatibility(is_backward_compatible=True)
def update_arg(self, idx : int, arg : Argument) -> None:
"""
Update an existing positional argument to contain the new value
``arg``. After calling, ``self.args[idx] == arg``.
Args:
idx (int): The index into ``self.args`` of the element to update
arg (Argument): The new argument value to write into ``args``
"""
args = list(self.args)
args[idx] = arg
self.args = tuple(args)
@compatibility(is_backward_compatible=True)
def update_kwarg(self, key : str, arg : Argument) -> None:
"""
Update an existing keyword argument to contain the new value
``arg``. After calling, ``self.kwargs[key] == arg``.
Args:
key (str): The key in ``self.kwargs`` of the element to update
arg (Argument): The new argument value to write into ``kwargs``
"""
kwargs = dict(self.kwargs)
kwargs[key] = arg
self.kwargs = kwargs
@property
def stack_trace(self) -> Optional[str]:
"""
Return the Python stack trace that was recorded during tracing, if any.
This property is usually populated by `Tracer.create_proxy`. To record
stack traces during tracing for debug purposes, set
`record_stack_traces = True` on the `Tracer` instance.
"""
return self._stack_trace
@stack_trace.setter
def stack_trace(self, trace : Optional[str]):
self._stack_trace = trace
def __update_args_kwargs(self, new_args : Tuple['Argument', ...], new_kwargs : Dict[str, 'Argument']):
"""
This API is internal. Do *not* call it directly.
"""
self._args = new_args
self._kwargs = new_kwargs
for old_use in self._input_nodes.keys():
old_use.users.pop(self)
self._input_nodes = {}
map_arg(self._args, lambda n: self._input_nodes.setdefault(n))
map_arg(self._kwargs, lambda n: self._input_nodes.setdefault(n))
for new_use in self._input_nodes.keys():
new_use.users.setdefault(self)
def __repr__(self) -> str:
if self._repr_fn:
return self._repr_fn(self)
return self.name
def _pretty_print_target(self, target):
"""
Make target printouts more user-friendly.
1) builtins will be printed as `builtins.xyz`
2) operators will be printed as `operator.xyz`
3) other callables will be printed with qualified name, e.g. torch.add
"""
if isinstance(target, str):
return target
if hasattr(target, '__module__'):
if not hasattr(target, '__name__'):
# Just to be defensive, if we don't have `__name__`, get the
# qualname. Not sure if this happens for any members of `operator`
# or `builtins`. This fallback path is not as good, since e.g.
# things in `operator` have `_operator` as their __module__.
return _get_qualified_name(target)
if target.__module__ == 'builtins':
return f'builtins.{target.__name__}'
elif target.__module__ == '_operator':
return f'operator.{target.__name__}'
return _get_qualified_name(target)
@compatibility(is_backward_compatible=True)
def format_node(self,
placeholder_names: Optional[List[str]] = None,
maybe_return_typename: Optional[List[str]] = None) -> Optional[str]:
"""
Return a descriptive string representation of ``self``.
This method can be used with no arguments as a debugging
utility.
This function is also used internally in the ``__str__`` method
of ``Graph``. Together, the strings in ``placeholder_names``
and ``maybe_return_typename`` make up the signature of the
autogenerated ``forward`` function in this Graph's surrounding
GraphModule. ``placeholder_names`` and ``maybe_return_typename``
should not be used otherwise.
Args:
placeholder_names: A list that will store formatted strings
representing the placeholders in the generated
``forward`` function. Internal use only.
maybe_return_typename: A single-element list that will store
a formatted string representing the output of the
generated ``forward`` function. Internal use only.
Returns:
str: If 1) we're using ``format_node`` as an internal helper
in the ``__str__`` method of ``Graph``, and 2) ``self``
is a placeholder Node, return ``None``. Otherwise,
return a descriptive string representation of the
current Node.
"""
if self.op == 'placeholder':
assert isinstance(self.target, str)
arg_str = self.target
arg_str += f': {_type_repr(self.type)}' if self.type else ''
if placeholder_names:
placeholder_names.append(arg_str)
return None
maybe_typename = f'{_type_repr(self.type)} ' if self.type else ''
default_val = '(default=' + str(self.args[0]) + ')' if self.args else ''
return f'%{self.name} : {maybe_typename}[#users={len(self.users)}] = {self.op}[target={self.target}]{default_val}'
elif self.op == 'get_attr':
maybe_typename = f'{_type_repr(self.type)} ' if self.type is not None else ''
return f'%{self.name} : {maybe_typename}[#users={len(self.users)}] = ' \
f'{self.op}[target={self._pretty_print_target(self.target)}]'
elif self.op == 'output':
if self.type and maybe_return_typename:
maybe_return_typename[0] = f' -> {_type_repr(self.type)}'
return f'return {self.args[0]}'
else:
maybe_typename = f'{_type_repr(self.type)} ' if self.type is not None else ''
return f'%{self.name} : {maybe_typename}[#users={len(self.users)}] = ' \
f'{self.op}[target={self._pretty_print_target(self.target)}](' \
f'args = {_format_arg(self.args)}, kwargs = {_format_arg(self.kwargs)})'
@compatibility(is_backward_compatible=True)
def replace_all_uses_with(self, replace_with : 'Node') -> List['Node']:
"""
Replace all uses of ``self`` in the Graph with the Node ``replace_with``.
Args:
replace_with (Node): The node to replace all uses of ``self`` with.
Returns:
The list of Nodes on which this change was made.
"""
to_process = list(self.users)
for use_node in to_process:
def maybe_replace_node(n : Node) -> Node:
if n == self:
return replace_with
else:
return n
new_args = map_arg(use_node.args, maybe_replace_node)
new_kwargs = map_arg(use_node.kwargs, maybe_replace_node)
assert isinstance(new_args, tuple)
assert isinstance(new_kwargs, dict)
use_node.__update_args_kwargs(new_args, new_kwargs)
assert len(self.users) == 0
return to_process
@compatibility(is_backward_compatible=False)
def is_impure(self):
"""
Returns whether this op is impure, i.e. if its op is a placeholder or
output, or if a call_function or call_module which is impure.
Returns:
bool: If the op is impure or not.
"""
if self.op in {"placeholder", "output"}:
return True
# Check if an impure function.
if self.op == "call_function":
return self.target in _side_effectful_functions
# Check if an impure module.
if self.op == "call_module":
assert (
self.graph.owning_module is not None
), "self.graph.owning_module not set for purity check"
target_mod = self.graph.owning_module.get_submodule(self.target)
assert (
target_mod is not None
), f"Did not find expected submodule target {self.target}"
return getattr(target_mod, "_is_impure", False)
return False
@compatibility(is_backward_compatible=False)
def normalized_arguments(
self, root : torch.nn.Module, arg_types : Optional[Tuple[Any]] = None,
kwarg_types : Optional[Dict[str, Any]] = None,
normalize_to_only_use_kwargs : bool = False) -> Optional[ArgsKwargsPair]:
"""
Returns normalized arguments to Python targets. This means that
`args/kwargs` will be matched up to the module/functional's
signature and return exclusively kwargs in positional order
if `normalize_to_only_use_kwargs` is true.
Also populates default values. Does not support positional-only
parameters or varargs parameters.
Supports module calls.
May require `arg_types` and `kwarg_types` in order to disambiguate overloads.
Args:
root (torch.nn.Module): Module upon which to resolve module targets.
arg_types (Optional[Tuple[Any]]): Tuple of arg types for the args
kwarg_types (Optional[Dict[str, Any]]): Dict of arg types for the kwargs
normalize_to_only_use_kwargs (bool): Whether to normalize to only use kwargs.
Returns:
Returns NamedTuple ArgsKwargsPair, or `None` if not successful.
"""
if self.op == 'call_function':
assert callable(self.target)
return normalize_function(self.target, self.args, self.kwargs, arg_types, kwarg_types) # type: ignore[arg-type]
elif self.op == 'call_module':
assert isinstance(self.target, str)
return normalize_module(root, self.target, self.args, self.kwargs) # type: ignore[arg-type]
return None
@compatibility(is_backward_compatible=True)
def replace_input_with(self, old_input: 'Node', new_input: 'Node'):
"""
Loop through input nodes of ``self``, and replace all instances of
``old_input`` with ``new_input``.
Args:
old_input (Node): The old input node to be replaced.
new_input (Node): The new input node to replace ``old_input``.
"""
def maybe_replace_node(n : Node) -> Node:
return new_input if n == old_input else n
new_args = map_arg(self.args, maybe_replace_node)
new_kwargs = map_arg(self.kwargs, maybe_replace_node)
assert isinstance(new_args, tuple)
assert isinstance(new_kwargs, dict)
self.__update_args_kwargs(new_args, new_kwargs)
@compatibility(is_backward_compatible=True)
def map_arg(a: Argument, fn: Callable[[Node], Argument]) -> Argument:
"""
Apply fn to each Node appearing in ``a``. ``a`` may be a list, tuple, slice, or dict with string keys.
"""
assert callable(fn), "torch.fx.map_arg(a, fn): fn must be a callable"
return map_aggregate(a, lambda x: fn(x) if isinstance(x, Node) else x)
@compatibility(is_backward_compatible=True)
def map_aggregate(a: Argument, fn: Callable[[Argument], Argument]) -> Argument:
"""
Apply fn to every leaf value appearing in ``a``, recursing into lists, tuples, slices, and dicts with string keys.
"""
if isinstance(a, tuple):
return tuple(map_aggregate(elem, fn) for elem in a)
elif isinstance(a, list):
return immutable_list(map_aggregate(elem, fn) for elem in a)
elif isinstance(a, dict):
return immutable_dict((k, map_aggregate(v, fn)) for k, v in a.items())
elif isinstance(a, slice):
return slice(map_aggregate(a.start, fn), map_aggregate(a.stop, fn), map_aggregate(a.step, fn))
else:
return fn(a)
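# --- Illustrative sketch (not part of the original module) ---
# A minimal example of how ``Node.replace_all_uses_with`` is typically used
# when rewriting a traced graph: every ``torch.add`` call is swapped for a
# ``torch.mul`` call. The toy module ``M`` is an assumption for the demo.
#
#   import torch
#   import torch.fx
#
#   class M(torch.nn.Module):
#       def forward(self, x):
#           return torch.add(x, x)
#
#   gm = torch.fx.symbolic_trace(M())
#   for node in list(gm.graph.nodes):
#       if node.op == "call_function" and node.target is torch.add:
#           with gm.graph.inserting_after(node):
#               new_node = gm.graph.call_function(torch.mul, node.args, node.kwargs)
#           node.replace_all_uses_with(new_node)  # rewires every user of ``node``
#           gm.graph.erase_node(node)             # safe now: len(node.users) == 0
#   gm.recompile()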
|
Python
|
MIT
|
Westlanderz/AI-Plat1/venv/Lib/site-packages/torch/fx/node.py
|
45821636-104e-4854-8f17-af7c6dce34a2
|
[]
|
[]
|
# Protocol Buffers - Google's data interchange format
# Copyright 2008 Google Inc. All rights reserved.
# https://developers.google.com/protocol-buffers/
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# * Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# * Redistributions in binary form must reproduce the above
# copyright notice, this list of conditions and the following disclaimer
# in the documentation and/or other materials provided with the
# distribution.
# * Neither the name of Google Inc. nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
# TODO(robinson): We should just make these methods all "pure-virtual" and move
# all implementation out, into reflection.py for now.
"""Contains an abstract base class for protocol messages."""
__author__ = 'robinson@google.com (Will Robinson)'
class Error(Exception):
"""Base error type for this module."""
pass
class DecodeError(Error):
"""Exception raised when deserializing messages."""
pass
class EncodeError(Error):
"""Exception raised when serializing messages."""
pass
class Message(object):
"""Abstract base class for protocol messages.
Protocol message classes are almost always generated by the protocol
compiler. These generated types subclass Message and implement the methods
shown below.
"""
# TODO(robinson): Link to an HTML document here.
# TODO(robinson): Document that instances of this class will also
# have an Extensions attribute with __getitem__ and __setitem__.
# Again, not sure how to best convey this.
# TODO(robinson): Document that the class must also have a static
# RegisterExtension(extension_field) method.
# Not sure how to best express at this point.
# TODO(robinson): Document these fields and methods.
__slots__ = []
#: The :class:`google.protobuf.descriptor.Descriptor` for this message type.
DESCRIPTOR = None
def __deepcopy__(self, memo=None):
clone = type(self)()
clone.MergeFrom(self)
return clone
def __eq__(self, other_msg):
"""Recursively compares two messages by value and structure."""
raise NotImplementedError
def __ne__(self, other_msg):
# Can't just say self != other_msg, since that would infinitely recurse. :)
return not self == other_msg
def __hash__(self):
raise TypeError('unhashable object')
def __str__(self):
"""Outputs a human-readable representation of the message."""
raise NotImplementedError
def __unicode__(self):
"""Outputs a human-readable representation of the message."""
raise NotImplementedError
def MergeFrom(self, other_msg):
"""Merges the contents of the specified message into current message.
This method merges the contents of the specified message into the current
message. Singular fields that are set in the specified message overwrite
the corresponding fields in the current message. Repeated fields are
appended. Singular sub-messages and groups are recursively merged.
Args:
other_msg (Message): A message to merge into the current message.
"""
raise NotImplementedError
def CopyFrom(self, other_msg):
"""Copies the content of the specified message into the current message.
The method clears the current message and then merges the specified
message using MergeFrom.
Args:
other_msg (Message): A message to copy into the current one.
"""
if self is other_msg:
return
self.Clear()
self.MergeFrom(other_msg)
def Clear(self):
"""Clears all data that was set in the message."""
raise NotImplementedError
def SetInParent(self):
"""Mark this as present in the parent.
This normally happens automatically when you assign a field of a
sub-message, but sometimes you want to make the sub-message
present while keeping it empty. If you find yourself using this,
you may want to reconsider your design.
"""
raise NotImplementedError
def IsInitialized(self):
"""Checks if the message is initialized.
Returns:
bool: The method returns True if the message is initialized (i.e. all of
its required fields are set).
"""
raise NotImplementedError
# TODO(robinson): MergeFromString() should probably return None and be
# implemented in terms of a helper that returns the # of bytes read. Our
# deserialization routines would use the helper when recursively
# deserializing, but the end user would almost always just want the no-return
# MergeFromString().
def MergeFromString(self, serialized):
"""Merges serialized protocol buffer data into this message.
When we find a field in `serialized` that is already present
in this message:
- If it's a "repeated" field, we append to the end of our list.
- Else, if it's a scalar, we overwrite our field.
- Else, (it's a nonrepeated composite), we recursively merge
into the existing composite.
Args:
serialized (bytes): Any object that allows us to call
``memoryview(serialized)`` to access a string of bytes using the
buffer interface.
Returns:
int: The number of bytes read from `serialized`.
For non-group messages, this will always be `len(serialized)`,
but for messages which are actually groups, this will
generally be less than `len(serialized)`, since we must
stop when we reach an ``END_GROUP`` tag. Note that if
we *do* stop because of an ``END_GROUP`` tag, the number
of bytes returned does not include the bytes
for the ``END_GROUP`` tag information.
Raises:
DecodeError: if the input cannot be parsed.
"""
# TODO(robinson): Document handling of unknown fields.
# TODO(robinson): When we switch to a helper, this will return None.
raise NotImplementedError
def ParseFromString(self, serialized):
"""Parse serialized protocol buffer data into this message.
Like :func:`MergeFromString()`, except we clear the object first.
"""
self.Clear()
return self.MergeFromString(serialized)
def SerializeToString(self, **kwargs):
"""Serializes the protocol message to a binary string.
Keyword Args:
deterministic (bool): If true, requests deterministic serialization
of the protobuf, with predictable ordering of map keys.
Returns:
A binary string representation of the message if all of the required
fields in the message are set (i.e. the message is initialized).
Raises:
EncodeError: if the message isn't initialized (see :func:`IsInitialized`).
"""
raise NotImplementedError
def SerializePartialToString(self, **kwargs):
"""Serializes the protocol message to a binary string.
This method is similar to SerializeToString but doesn't check if the
message is initialized.
Keyword Args:
deterministic (bool): If true, requests deterministic serialization
of the protobuf, with predictable ordering of map keys.
Returns:
bytes: A serialized representation of the partial message.
"""
raise NotImplementedError
# TODO(robinson): Decide whether we like these better
# than auto-generated has_foo() and clear_foo() methods
# on the instances themselves. This way is less consistent
# with C++, but it makes reflection-type access easier and
# reduces the number of magically autogenerated things.
#
# TODO(robinson): Be sure to document (and test) exactly
# which field names are accepted here. Are we case-sensitive?
# What do we do with fields that share names with Python keywords
# like 'lambda' and 'yield'?
#
# nnorwitz says:
# """
# Typically (in python), an underscore is appended to names that are
# keywords. So they would become lambda_ or yield_.
# """
def ListFields(self):
"""Returns a list of (FieldDescriptor, value) tuples for present fields.
A message field is non-empty if HasField() would return true. A singular
primitive field is non-empty if HasField() would return true in proto2 or it
is nonzero in proto3. A repeated field is non-empty if it contains at least
one element. The fields are ordered by field number.
Returns:
list[tuple(FieldDescriptor, value)]: field descriptors and values
for all fields in the message which are not empty. The values vary by
field type.
"""
raise NotImplementedError
def HasField(self, field_name):
"""Checks if a certain field is set for the message.
For a oneof group, checks if any field inside is set. Note that if the
field_name is not defined in the message descriptor, :exc:`ValueError` will
be raised.
Args:
field_name (str): The name of the field to check for presence.
Returns:
bool: Whether a value has been set for the named field.
Raises:
ValueError: if the `field_name` is not a member of this message.
"""
raise NotImplementedError
def ClearField(self, field_name):
"""Clears the contents of a given field.
Inside a oneof group, clears the field set. If the name refers to neither a
defined field nor a oneof group, :exc:`ValueError` is raised.
Args:
field_name (str): The name of the field to clear.
Raises:
ValueError: if the `field_name` is not a member of this message.
"""
raise NotImplementedError
def WhichOneof(self, oneof_group):
"""Returns the name of the field that is set inside a oneof group.
If no field is set, returns None.
Args:
oneof_group (str): the name of the oneof group to check.
Returns:
str or None: The name of the group that is set, or None.
Raises:
ValueError: no group with the given name exists
"""
raise NotImplementedError
def HasExtension(self, extension_handle):
"""Checks if a certain extension is present for this message.
Extensions are retrieved using the :attr:`Extensions` mapping (if present).
Args:
extension_handle: The handle for the extension to check.
Returns:
bool: Whether the extension is present for this message.
Raises:
KeyError: if the extension is repeated. Similar to repeated fields,
there is no separate notion of presence: a "not present" repeated
extension is an empty list.
"""
raise NotImplementedError
def ClearExtension(self, extension_handle):
"""Clears the contents of a given extension.
Args:
extension_handle: The handle for the extension to clear.
"""
raise NotImplementedError
def UnknownFields(self):
"""Returns the UnknownFieldSet.
Returns:
UnknownFieldSet: The unknown fields stored in this message.
"""
raise NotImplementedError
def DiscardUnknownFields(self):
"""Clears all fields in the :class:`UnknownFieldSet`.
This operation is recursive for nested messages.
"""
raise NotImplementedError
def ByteSize(self):
"""Returns the serialized size of this message.
Recursively calls ByteSize() on all contained messages.
Returns:
int: The number of bytes required to serialize this message.
"""
raise NotImplementedError
@classmethod
def FromString(cls, s):
raise NotImplementedError
@staticmethod
def RegisterExtension(extension_handle):
raise NotImplementedError
def _SetListener(self, message_listener):
"""Internal method used by the protocol message implementation.
Clients should not call this directly.
Sets a listener that this message will call on certain state transitions.
The purpose of this method is to register back-edges from children to
parents at runtime, for the purpose of setting "has" bits and
byte-size-dirty bits in the parent and ancestor objects whenever a child or
descendant object is modified.
If the client wants to disconnect this Message from the object tree, they
explicitly set the callback to None.
If message_listener is None, unregisters any existing listener. Otherwise,
message_listener must implement the MessageListener interface in
internal/message_listener.py, and we discard any listener registered
via a previous _SetListener() call.
"""
raise NotImplementedError
def __getstate__(self):
"""Support the pickle protocol."""
return dict(serialized=self.SerializePartialToString())
def __setstate__(self, state):
"""Support the pickle protocol."""
self.__init__()
serialized = state['serialized']
# On Python 3, using encoding='latin1' is required for unpickling
# protos pickled by Python 2.
if not isinstance(serialized, bytes):
serialized = serialized.encode('latin1')
self.ParseFromString(serialized)
def __reduce__(self):
message_descriptor = self.DESCRIPTOR
if message_descriptor.containing_type is None:
return type(self), (), self.__getstate__()
# the message type must be nested.
# Python does not pickle nested classes; use the symbol_database on the
# receiving end.
container = message_descriptor
return (_InternalConstructMessage, (container.full_name,),
self.__getstate__())
def _InternalConstructMessage(full_name):
"""Constructs a nested message."""
from google.protobuf import symbol_database # pylint:disable=g-import-not-at-top
return symbol_database.Default().GetSymbol(full_name)()
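# --- Illustrative sketch (not part of the original module) ---
# Round-trip through the abstract API above, using a concrete generated
# class that ships with the protobuf runtime (``Timestamp``); a minimal
# demo, not part of this module's public surface.
#
#   from google.protobuf import timestamp_pb2
#
#   ts = timestamp_pb2.Timestamp(seconds=1, nanos=500)
#   data = ts.SerializeToString()        # binary wire format
#   clone = timestamp_pb2.Timestamp()
#   clone.ParseFromString(data)          # Clear(), then MergeFromString()
#   assert clone == ts
#   assert clone.ByteSize() == len(data)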
|
Python
|
MIT
|
JustinACoder/H22-GR3-UnrealAI/Plugins/UnrealEnginePython/Binaries/Win64/Lib/site-packages/google/protobuf/message.py
|
7bc196e8-6f48-46c2-8499-8e87a5d54665
|
[{"tag": "NAME", "value": "robinson", "start": 2857, "end": 2865, "context": "re how to best express at this point.\r\n\r\n # TODO(robinson): Document these fields and methods.\r\n\r\n __slots"}, {"tag": "EMAIL", "value": "robinson@google.com", "start": 1879, "end": 1898, "context": " class for protocol messages.\"\"\"\r\n\r\n__author__ = 'robinson@google.com (Will Robinson)'\r\n\r\nclass Error(Exception):\r\n \"\""}, {"tag": "NAME", "value": "robinson", "start": 8703, "end": 8711, "context": "of magically autogenerated things.\r\n #\r\n # TODO(robinson): Be sure to document (and test) exactly\r\n # whi"}, {"tag": "NAME", "value": "robinson", "start": 2446, "end": 2454, "context": "nt the methods\r\n shown below.\r\n \"\"\"\r\n\r\n # TODO(robinson): Link to an HTML document here.\r\n\r\n # TODO(robi"}, {"tag": "NAME", "value": "robinson", "start": 2500, "end": 2508, "context": "nson): Link to an HTML document here.\r\n\r\n # TODO(robinson): Document that instances of this class will also"}, {"tag": "NAME", "value": "Will Robinson", "start": 1900, "end": 1913, "context": "essages.\"\"\"\r\n\r\n__author__ = 'robinson@google.com (Will Robinson)'\r\n\r\nclass Error(Exception):\r\n \"\"\"Base error typ"}, {"tag": "NAME", "value": "robinson", "start": 5412, "end": 5420, "context": " \"\"\"\r\n raise NotImplementedError\r\n\r\n # TODO(robinson): MergeFromString() should probably return None a"}, {"tag": "NAME", "value": "robinson", "start": 8398, "end": 8406, "context": " \"\"\"\r\n raise NotImplementedError\r\n\r\n # TODO(robinson): Decide whether we like these better\r\n # than a"}, {"tag": "NAME", "value": "robinson", "start": 2685, "end": 2693, "context": "in, not sure how to best convey this.\r\n\r\n # TODO(robinson): Document that the class must also have a static"}]
|
[{"tag": "NAME", "value": "robinson", "start": 2857, "end": 2865, "context": "re how to best express at this point.\r\n\r\n # TODO(robinson): Document these fields and methods.\r\n\r\n __slots"}, {"tag": "EMAIL", "value": "robinson@google.com", "start": 1879, "end": 1898, "context": " class for protocol messages.\"\"\"\r\n\r\n__author__ = 'robinson@google.com (Will Robinson)'\r\n\r\nclass Error(Exception):\r\n \"\""}, {"tag": "NAME", "value": "robinson", "start": 8703, "end": 8711, "context": "of magically autogenerated things.\r\n #\r\n # TODO(robinson): Be sure to document (and test) exactly\r\n # whi"}, {"tag": "NAME", "value": "robinson", "start": 2446, "end": 2454, "context": "nt the methods\r\n shown below.\r\n \"\"\"\r\n\r\n # TODO(robinson): Link to an HTML document here.\r\n\r\n # TODO(robi"}, {"tag": "NAME", "value": "robinson", "start": 2500, "end": 2508, "context": "nson): Link to an HTML document here.\r\n\r\n # TODO(robinson): Document that instances of this class will also"}, {"tag": "NAME", "value": "Will Robinson", "start": 1900, "end": 1913, "context": "essages.\"\"\"\r\n\r\n__author__ = 'robinson@google.com (Will Robinson)'\r\n\r\nclass Error(Exception):\r\n \"\"\"Base error typ"}, {"tag": "NAME", "value": "robinson", "start": 5412, "end": 5420, "context": " \"\"\"\r\n raise NotImplementedError\r\n\r\n # TODO(robinson): MergeFromString() should probably return None a"}, {"tag": "NAME", "value": "robinson", "start": 8398, "end": 8406, "context": " \"\"\"\r\n raise NotImplementedError\r\n\r\n # TODO(robinson): Decide whether we like these better\r\n # than a"}, {"tag": "NAME", "value": "robinson", "start": 2685, "end": 2693, "context": "in, not sure how to best convey this.\r\n\r\n # TODO(robinson): Document that the class must also have a static"}]
|
package de.uni_mannheim.informatik.dws.tnt.match.data;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileInputStream;
import java.io.FileNotFoundException;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.IOException;
import java.io.InputStreamReader;
import java.io.Serializable;
import java.time.LocalDateTime;
import java.util.Arrays;
import java.util.Collection;
import java.util.HashMap;
import java.util.HashSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.List;
import java.util.Map;
import java.util.Map.Entry;
import java.util.Set;
import com.esotericsoftware.kryo.Kryo;
import com.esotericsoftware.kryo.io.Input;
import com.esotericsoftware.kryo.io.Output;
import au.com.bytecode.opencsv.CSVReader;
import de.uni_mannheim.informatik.dws.t2k.index.dbpedia.DBpediaIndexer;
import de.uni_mannheim.informatik.dws.winter.index.IIndex;
import de.uni_mannheim.informatik.dws.winter.model.Correspondence;
import de.uni_mannheim.informatik.dws.winter.model.DataSet;
import de.uni_mannheim.informatik.dws.winter.model.Matchable;
import de.uni_mannheim.informatik.dws.winter.model.ParallelHashedDataSet;
import de.uni_mannheim.informatik.dws.winter.preprocessing.datatypes.DataType;
import de.uni_mannheim.informatik.dws.winter.processing.Processable;
import de.uni_mannheim.informatik.dws.winter.processing.parallel.ParallelProcessableCollection;
import de.uni_mannheim.informatik.dws.winter.utils.MapUtils;
import de.uni_mannheim.informatik.dws.winter.utils.query.Q;
import de.uni_mannheim.informatik.dws.winter.webtables.Table;
import de.uni_mannheim.informatik.dws.winter.webtables.TableColumn;
import de.uni_mannheim.informatik.dws.winter.webtables.TableRow;
import de.uni_mannheim.informatik.dws.winter.webtables.lod.LodTableColumn;
import de.uni_mannheim.informatik.dws.winter.webtables.parsers.LodCsvTableParser;
/**
*
* Model of a knowledge base.
*
* @author Oliver Lehmberg (oli@dwslab.de)
*
*/
public class KnowledgeBase implements Serializable {
private static final long serialVersionUID = 1L;
// data that will be matched: records and schema
private DataSet<MatchableTableRow, MatchableTableColumn> records = new ParallelHashedDataSet<>();
private DataSet<MatchableTableColumn, MatchableTableColumn> schema = new ParallelHashedDataSet<>();
private DataSet<MatchableTable, MatchableTableColumn> tables = new ParallelHashedDataSet<>();
private DataSet<MatchableTableDeterminant, MatchableTableColumn> candidateKeys = new ParallelHashedDataSet<>();
private Processable<Correspondence<MatchableTableDeterminant, Matchable>> inclusionDependencies = new ParallelProcessableCollection<>();
private DataSet<MatchableTableCell, MatchableTableColumn> labels = new ParallelHashedDataSet<>();
// translation for DBpedia property URIs: URI string to integer
private LinkedList<String> properties = new LinkedList<>();
private HashMap<String, Integer> propertyIds = new HashMap<>();
private Map<Integer, LocalDateTime[]> dateRanges = new HashMap<>();
// translation from table file name to table id
private Map<String, Integer> tableIds = new HashMap<>();
//translation from table id to DBpedia class
private Map<Integer, String> classIndices = new HashMap<>();
// translation from class name to table id
private Map<String, Integer> classToId = new HashMap<>();
// rdfs:label
private MatchableLodColumn rdfsLabel;
// number of rows per table, keyed by table id
private HashMap<Integer, Integer> sizePerTable = new HashMap<Integer, Integer>();
// class weights
private HashMap<Integer, Double> classWeight = new HashMap<Integer, Double>();
//class hierarchy
private static HashMap<String, String> classHierarchy = new HashMap<String, String>();
// candidate keys: table id -> list of keys; key = list of property names
private Map<String, Collection<Collection<String>>> candidateKeyDefinitions;
public static final String RDFS_LABEL = "rdf-schema#label";
private static boolean doSerialise = true;
public static void setDoSerialise(boolean serialise) {
doSerialise = serialise;
}
public static KnowledgeBase loadKnowledgeBase(File location, IIndex index, SurfaceForms sf, boolean convertTypes, boolean parseLists) throws FileNotFoundException {
// look for serialised version
File ser;
if(sf==null) {
ser = new File(location.getParentFile(), location.getName() + ".bin");
} else {
ser = new File(location.getParentFile(), location.getName() + "_surfaceforms.bin");
}
// index from serialised version is not implemented, so only load serialised if we did not get an index to fill
if(index==null) {
if(ser.exists()) {
return KnowledgeBase.deserialise(ser);
} else if(location.getName().endsWith(".bin")) {
return KnowledgeBase.deserialise(location);
}
}
// load KB from location
KnowledgeBase kb = new KnowledgeBase();
kb.load(location, index, sf, convertTypes, parseLists);
// serialise
if(doSerialise) {
kb.serialise(ser);
}
return kb;
}
public void load(File location, IIndex index, SurfaceForms sForms, boolean convertTypes, boolean parseLists) {
/***********************************************
* Load DBpedia
***********************************************/
LodCsvTableParser lodParser = new LodCsvTableParser();
lodParser.setConvertValues(convertTypes);
lodParser.setParseLists(parseLists);
DBpediaIndexer indexer = new DBpediaIndexer();
List<File> dbpFiles = null;
if(location.isDirectory()) {
dbpFiles = Arrays.asList(location.listFiles());
} else {
dbpFiles = Arrays.asList(new File[] { location});
}
int tblIdx = 0;
for(File f : dbpFiles) {
System.out.println("Loading Knowledge Base Table " + f.getName());
Table tDBp = lodParser.parseTable(f);
tDBp.setTableId(tblIdx);
String className = tDBp.getPath().replace(".csv", "").replace(".gz", "");
MatchableTable mt = new MatchableTable(tblIdx, className);
tables.add(mt);
tableIds.put(className, tblIdx);
if(tDBp.getSchema().getSize()>1 && "rdf-schema#label".equals(tDBp.getSchema().get(1).getHeader())) {
tDBp.setSubjectColumnIndex(1);
}
if(dbpFiles.size()==1) {
for(TableColumn tc : tDBp.getSchema().getRecords()) {
System.out.println(String.format("{%s} [%d] %s (%s): %s", tDBp.getPath(), tc.getColumnIndex(), tc.getHeader(), tc.getDataType(), tc.getUri()));
}
}
LodTableColumn[] cols = tDBp.getColumns().toArray(new LodTableColumn[tDBp.getSchema().getSize()]);
// assign the class to the range of the rdfs:label property
for(LodTableColumn c : cols) {
if("rdf-schema#label".equals(c.getHeader())) {
c.setRange(className);
}
}
// assign the range of the original property to the label column
// remove object properties and keep only "_label" columns (otherwise we will have duplicate property URLs)
List<TableColumn> removedColumns = new LinkedList<>();
for(LodTableColumn tc : cols) {
if(tc.isReferenceLabel()) {
Iterator<TableColumn> it = tDBp.getSchema().getRecords().iterator();
while(it.hasNext()) {
LodTableColumn ltc = (LodTableColumn)it.next();
if(!ltc.isReferenceLabel() && ltc.getUri().equals(tc.getUri())) {
tc.setRange(ltc.getRange());
// it.remove();
// removedColumns.add(ltc.getColumnIndex());
removedColumns.add(ltc);
}
}
}
}
// remove the columns
for(TableColumn c : removedColumns) {
tDBp.removeColumn(c);
}
// re-create value arrays
// for(TableRow r : tDBp.getRows()) {
// Object[] values = new Object[tDBp.getSchema().getSize()];
// int newIndex = 0;
// for(int i=0; i < r.getValueArray().length; i++) {
// if(!removedColumns.contains(i)) {
// values[newIndex++] = r.getValueArray()[i];
// }
// }
// r.set(values);
// }
// create the schema
MatchableTableColumn[] matchableColumns = new MatchableLodColumn[tDBp.getSchema().getSize()];
int colIdx=0;
MatchableTableColumn rdfsLabelColumn = null;
for(TableColumn tc : tDBp.getSchema().getRecords()) {
MatchableLodColumn mc = new MatchableLodColumn(tblIdx, tc);
schema.add(mc);
matchableColumns[colIdx++] = mc;
if("rdf-schema#label".equals(mc.getHeader())) {
rdfsLabelColumn=mc;
}
}
// create the records
for(TableRow r : tDBp.getRows()) {
// make sure only the instance with the most specific class remains in the final dataset for each URI
MatchableTableRow mr = records.getRecord(r.getIdentifier());
if(mr==null) {
// mr = new MatchableTableRow(r, tblIdx, matchableColumns);
mr = new MatchableLodRow(r, tblIdx, matchableColumns);
} else {
String clsOfPreviousRecord = classIndices.get(mr.getTableId());
String clsOfCurrentRecord = tDBp.getPath().replace(".csv", "").replace(".gz", "");
if(classHierarchy.get(clsOfPreviousRecord)==null){
continue;
}else {
String cls;
boolean flag = false;
while((cls = classHierarchy.get(clsOfPreviousRecord)) != null){
if(cls.equals(clsOfCurrentRecord)){
flag = true;
break;
}else{
clsOfPreviousRecord = cls;
}
}
if(!flag){
// mr = new MatchableTableRow(r, tblIdx, matchableColumns);
mr = new MatchableLodRow(r, tblIdx, matchableColumns);
}
}
}
// MatchableLodRow mr = new MatchableLodRow(r, tblIdx, matchableColumns);
if(sForms!=null && rdfsLabelColumn!=null) {
// add all surface forms to the rdfs:label property
Set<String> names = new HashSet<>();
if(mr.get(rdfsLabelColumn.getColumnIndex()) instanceof String[]) {
names.addAll(Arrays.asList((String[])mr.get(rdfsLabelColumn.getColumnIndex())));
} else {
names.add((String)mr.get(rdfsLabelColumn.getColumnIndex()));
}
Set<String> surfaceForms = new HashSet<>();
for(String name : names) {
if(name!=null) {
surfaceForms.addAll(sForms.getSurfaceForms(name.toLowerCase()));
}
}
surfaceForms.addAll(names);
if(surfaceForms.size()>1) {
mr.set(rdfsLabelColumn.getColumnIndex(), surfaceForms.toArray(new String[surfaceForms.size()]));
}
}
if(rdfsLabelColumn!=null) {
labels.add(new MatchableTableCell(mr, rdfsLabelColumn));
}
records.add(mr);
}
sizePerTable.put(tblIdx, tDBp.getSize());
classIndices.put(tblIdx, className);
classToId.put(className, tblIdx);
// we don't need the table rows anymore (MatchableTableRow is used from now on)
tDBp.clear();
tblIdx++;
// } else {
// System.out.println(" -> no key!");
// }
}
// add classes from the class hierarchy which have not been loaded (but can be mapped via the hierarchy)
for(String cls : new HashSet<>(classToId.keySet())) {
String superClass = classHierarchy.get(cls);
while(superClass!=null) {
if(!classToId.containsKey(superClass)) {
// create a URI column for the class
MatchableLodColumn mc = new MatchableLodColumn(
superClass + ".csv",
"URI",
"http://www.w3.org/2002/07/owl#Thing",
tblIdx,
0,
"URI",
DataType.link);
schema.add(mc);
// create a label column for the class
mc = new MatchableLodColumn(
superClass + ".csv",
"http://www.w3.org/2000/01/rdf-schema#label",
"http://www.w3.org/2000/01/rdf-schema#Literal",
tblIdx,
1,
"rdf-schema#label",
DataType.string);
schema.add(mc);
MatchableTable mt = new MatchableTable(tblIdx, superClass);
tables.add(mt);
classToId.put(superClass, tblIdx);
classIndices.put(tblIdx, superClass);
tblIdx++;
}
superClass = classHierarchy.get(superClass);
}
}
LodCsvTableParser.endLoadData();
// calculate class weights
calculateClassWeight();
determineDateRanges();
if(index!=null) {
System.out.println("Indexing ...");
indexer.indexInstances(index, records.get(), classIndices, sForms);
}
System.out.println(String.format("%,d DBpedia Instances loaded from CSV", records.size()));
System.out.println(String.format("%,d DBpedia Properties / %,d Property IDs", schema.size(), propertyIds.size()));
}
public static void loadClassHierarchy(String location) throws IOException{
System.out.println("Loading Class Hierarchy...");
BufferedReader tsvReader = new BufferedReader(new InputStreamReader(new FileInputStream(new File(location))));
String values;
while((values = tsvReader.readLine()) != null){
String[] cls = values.split("\t")[0].split("/");
String[] superCls = values.split("\t")[1].split("/");
classHierarchy.put(cls[cls.length-1].replaceAll("\"", ""), superCls[superCls.length-1].replaceAll("\"", ""));
}
tsvReader.close();
System.out.println("Loaded Class Hierarchy for " + classHierarchy.size() + " Resources.");
}
public void loadCandidateKeys(File location) throws IOException {
candidateKeyDefinitions = new HashMap<>();
for(File f : location.listFiles()) {
String className = f.getName().replace(".csv", "").replace(".gz", "");
LinkedList<Collection<String>> keys = new LinkedList<>();
CSVReader r = new CSVReader(new FileReader(f));
String[] values = null;
while((values = r.readNext())!=null) {
keys.add(Arrays.asList(values));
}
r.close();
candidateKeyDefinitions.put(className, keys);
}
for(String cls : candidateKeyDefinitions.keySet()) {
Collection<Collection<String>> keys = candidateKeyDefinitions.get(cls);
int classId = getClassIds().get(cls);
Map<String, MatchableTableColumn> attributeToColumn = new HashMap<>();
for(MatchableTableColumn c : getSchema().where((c)->c.getTableId()==classId).get()) {
attributeToColumn.put(c.getHeader(), c);
}
for(Collection<String> key : keys) {
Set<MatchableTableColumn> columns = new HashSet<>();
for(String attribute : key) {
columns.add(attributeToColumn.get(attribute));
}
MatchableTableDeterminant det = new MatchableTableDeterminant(classId, columns);
candidateKeys.add(det);
}
}
}
public void loadInclusionDependencies(File location) throws IOException {
BufferedReader r = new BufferedReader(new FileReader(location));
String line = null;
int lineNo = 1;
while((line = r.readLine())!=null) {
String[] values = line.split("\t");
if(values!=null && values.length==5) {
String cls1 = values[0];
String[] attributes1 = values[1].split(",");
String cls2 = values[2];
String[] attributes2 = values[3].split(",");
Double score = Double.parseDouble(values[4]);
int cls1Id = getClassIds().get(cls1);
Map<String, MatchableTableColumn> attributeToColumn1 = new HashMap<>();
for(MatchableTableColumn c : getSchema().where((c)->c.getTableId()==cls1Id).get()) {
attributeToColumn1.put(c.getHeader(), c);
}
int cls2Id = getClassIds().get(cls2);
Map<String, MatchableTableColumn> attributeToColumn2 = new HashMap<>();
for(MatchableTableColumn c : getSchema().where((c)->c.getTableId()==cls2Id).get()) {
attributeToColumn2.put(c.getHeader(), c);
}
Set<MatchableTableColumn> cols1 = new HashSet<>();
for(String attribute : attributes1) {
cols1.add(attributeToColumn1.get(attribute));
}
Set<MatchableTableColumn> cols2 = new HashSet<>();
for(String attribute : attributes2) {
cols2.add(attributeToColumn2.get(attribute));
}
Correspondence<MatchableTableDeterminant, Matchable> id = new Correspondence<>(
new MatchableTableDeterminant(cls1Id, cols1),
new MatchableTableDeterminant(cls2Id, cols2),
score
);
inclusionDependencies.add(id);
} else {
System.err.println(String.format("[KnowledgeBase] Malformed inclusion dependency in file %s line %d: %s", location.getAbsolutePath(), lineNo, line));
}
lineNo++;
}
r.close();
}
public static KnowledgeBase deserialise(File location) throws FileNotFoundException {
System.out.println("Deserialising Knowledge Base");
Kryo kryo = new Kryo();
// kryo.setRegistrationRequired(false);
Input input = new Input(new FileInputStream(location));
KnowledgeBase kb = kryo.readObject(input, KnowledgeBase.class);
input.close();
return kb;
}
public void serialise(File location) throws FileNotFoundException {
System.out.println("Serialising Knowledge Base");
Kryo kryo = new Kryo();
// kryo.setRegistrationRequired(false);
Output output = new Output(new FileOutputStream(location));
kryo.writeObject(output, this);
output.close();
}
public void calculateClassWeight(){
double max = -1;
for (Entry<Integer, Integer> tableSize : getTablesSize().entrySet()) {
if (tableSize.getValue() < 1) {
continue;
}
if (tableSize.getValue() > max) {
max = tableSize.getValue();
}
}
for(Entry<Integer, Integer> tableSize : getTablesSize().entrySet()){
double value;
if (tableSize.getValue() < 1) {
// empty classes get the maximum weight
value = 1;
} else {
value = tableSize.getValue()/max;
value = 1-value;
}
classWeight.put(tableSize.getKey(), value);
}
}
public void determineDateRanges() {
for(MatchableTableRow row : records.get()) {
for(MatchableTableColumn col : row.getSchema()) {
if(col.getType()==DataType.date) {
LocalDateTime[] range = MapUtils.get(dateRanges, col.getColumnIndex(), new LocalDateTime[2]);
// Map<Integer, Integer> indexTranslation = getPropertyIndices().get(row.getTableId());
// if(indexTranslation==null) {
// System.err.println("Missing property index translation for table " + row.getTableId());
// }
// 'secondColumnIndex' ('globalId' of dbpedia property) is used to get 'columnIndex' of dbpedia property in a respective table
// Integer translatedIndex = indexTranslation.get(col.getColumnIndex());
// if(translatedIndex!=null) {
Object obj = row.get(col.getColumnIndex());
Object[] values = null;
if(obj!=null && obj.getClass().isArray()) {
values = (Object[])obj;
} else {
values = new Object[] { obj };
}
for(Object o : values) {
if(o!=null && o instanceof LocalDateTime) {
LocalDateTime value = (LocalDateTime)o;
if(range[0]==null || value.compareTo(range[0]) < 0) {
range[0] = value;
}
if(range[1]==null || value.compareTo(range[1]) > 0) {
range[1] = value;
}
} else {
if(o!=null && !(o instanceof LocalDateTime)) {
System.err.println(String.format("{%s} row %d property %s has value of invalid type: '%s' (%s)", this.classIndices.get(row.getTableId()), row.getRowNumber(), col.getIdentifier(), o, o.getClass()));
}
}
}
// }
}
}
}
}
public DataSet<MatchableTableRow, MatchableTableColumn> getRecords() {
return records;
}
public DataSet<MatchableTableColumn, MatchableTableColumn> getSchema() {
return schema;
}
/**
* @return the labels
*/
public DataSet<MatchableTableCell, MatchableTableColumn> getLabels() {
return labels;
}
public DataSet<MatchableTable, MatchableTableColumn> getTables() {
return tables;
}
public LinkedList<String> getProperties() {
return properties;
}
public HashMap<String, Integer> getPropertyIds() {
return propertyIds;
}
public MatchableLodColumn getRdfsLabel() {
return rdfsLabel;
}
public HashMap<Integer, Double> getClassWeight(){
return classWeight;
}
/**
* @return the classIndices
*/
public Map<Integer, String> getClassIndices() {
return classIndices;
}
/**
* @return the tableIds
*/
public Map<String, Integer> getClassIds() {
return classToId;
}
/**
* @return the tables
*/
public HashMap<Integer, Integer> getTablesSize() {
return sizePerTable;
}
/**
* @return the classHierarchy mapping class -> super class
*/
public static HashMap<String, String> getClassHierarchy() {
return classHierarchy;
}
/**
* @return the candidateKeys
*/
public Map<String, Collection<Collection<String>>> getCandidateKeyDefinitions() {
return candidateKeyDefinitions;
}
public DataSet<MatchableTableDeterminant, MatchableTableColumn> getCandidateKeys() {
return candidateKeys;
}
/**
* @return the inclusionDependencies
*/
public Processable<Correspondence<MatchableTableDeterminant, Matchable>> getInclusionDependencies() {
return inclusionDependencies;
}
}
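// --- Illustrative usage sketch (not part of the original file) ---
// Typical loading sequence; the file and directory names below are
// assumptions for the demo, not conventions fixed by the project:
//
//   KnowledgeBase.loadClassHierarchy("dbpedia/class_hierarchy.tsv");
//   KnowledgeBase kb = KnowledgeBase.loadKnowledgeBase(
//       new File("dbpedia/instances/"), null, null, true, false);
//   System.out.println(String.format("%,d records", kb.getRecords().size()));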
|
Java
|
Apache-2.0
|
olehmberg/snow/src/main/java/de/uni_mannheim/informatik/dws/tnt/match/data/KnowledgeBase.java
|
1081ed56-711e-40e5-a837-e52015e6c5ba
|
[]
|
[]
|
from datetime import timedelta
from random import randint
from ichnaea.data.tasks import (
monitor_api_key_limits,
monitor_api_users,
monitor_queue_size,
)
from ichnaea import util
class TestMonitor(object):
def test_monitor_api_keys_empty(self, celery, stats):
monitor_api_key_limits.delay().get()
stats.check(gauge=[('api.limit', 0)])
def test_monitor_api_keys_one(self, celery, redis, stats):
today = util.utcnow().strftime('%Y%m%d')
rate_key = 'apilimit:no_key_1:v1.geolocate:' + today
redis.incr(rate_key, 13)
monitor_api_key_limits.delay().get()
stats.check(gauge=[
('api.limit', ['key:no_key_1', 'path:v1.geolocate']),
])
def test_monitor_api_keys_multiple(self, celery, redis, stats):
now = util.utcnow()
today = now.strftime('%Y%m%d')
yesterday = (now - timedelta(hours=24)).strftime('%Y%m%d')
data = {
'test': {'v1.search': 11, 'v1.geolocate': 13},
'no_key_1': {'v1.search': 12},
'no_key_2': {'v1.geolocate': 15},
}
for key, paths in data.items():
for path, value in paths.items():
rate_key = 'apilimit:%s:%s:%s' % (key, path, today)
redis.incr(rate_key, value)
rate_key = 'apilimit:%s:%s:%s' % (key, path, yesterday)
redis.incr(rate_key, value - 10)
# add some other items into Redis
redis.lpush('default', 1, 2)
redis.set('cache_something', '{}')
monitor_api_key_limits.delay().get()
stats.check(gauge=[
('api.limit', ['key:test', 'path:v1.geolocate']),
('api.limit', ['key:test', 'path:v1.search']),
('api.limit', ['key:no_key_1', 'path:v1.search']),
('api.limit', ['key:no_key_2', 'path:v1.geolocate']),
])
def test_monitor_queue_size(self, celery, redis, stats):
data = {
'export_queue_internal': 3,
'export_queue_backup:abcd-ef-1234': 7,
}
for name in celery.all_queues:
data[name] = randint(1, 10)
for k, v in data.items():
redis.lpush(k, *range(v))
monitor_queue_size.delay().get()
stats.check(
gauge=[('queue', 1, v, ['queue:' + k]) for k, v in data.items()])
class TestMonitorAPIUsers(object):
@property
def today(self):
return util.utcnow().date()
@property
def today_str(self):
return self.today.strftime('%Y-%m-%d')
def test_empty(self, celery, stats):
monitor_api_users.delay().get()
stats.check(gauge=[('submit.user', 0), ('locate.user', 0)])
def test_one_day(self, celery, geoip_data, redis, stats):
bhutan_ip = geoip_data['Bhutan']['ip']
london_ip = geoip_data['London']['ip']
redis.pfadd(
'apiuser:submit:test:' + self.today_str, bhutan_ip, london_ip)
redis.pfadd(
'apiuser:submit:valid_key:' + self.today_str, bhutan_ip)
redis.pfadd(
'apiuser:locate:valid_key:' + self.today_str, bhutan_ip)
monitor_api_users.delay().get()
stats.check(gauge=[
('submit.user', 1, 2, ['key:test', 'interval:1d']),
('submit.user', 1, 2, ['key:test', 'interval:7d']),
('submit.user', 1, 1, ['key:valid_key', 'interval:1d']),
('submit.user', 1, 1, ['key:valid_key', 'interval:7d']),
('locate.user', 1, 1, ['key:valid_key', 'interval:1d']),
('locate.user', 1, 1, ['key:valid_key', 'interval:7d']),
])
def test_many_days(self, celery, geoip_data, redis, stats):
bhutan_ip = geoip_data['Bhutan']['ip']
london_ip = geoip_data['London']['ip']
days_6 = (self.today - timedelta(days=6)).strftime('%Y-%m-%d')
days_7 = (self.today - timedelta(days=7)).strftime('%Y-%m-%d')
redis.pfadd(
'apiuser:submit:test:' + self.today_str, '127.0.0.1', bhutan_ip)
# add the same IPs + one new one again
redis.pfadd(
'apiuser:submit:test:' + days_6, '127.0.0.1', bhutan_ip, london_ip)
# add one entry which is too old
redis.pfadd(
'apiuser:submit:test:' + days_7, bhutan_ip)
monitor_api_users.delay().get()
stats.check(gauge=[
('submit.user', 1, 2, ['key:test', 'interval:1d']),
# we count unique IPs over the entire 7 day period,
# so it's just 3 uniques
('submit.user', 1, 3, ['key:test', 'interval:7d']),
])
# the too old key was deleted manually
assert not redis.exists('apiuser:submit:test:' + days_7)
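# --- Illustrative note (not part of the original tests) ---
# The Redis key layouts exercised above, spelled out (an inference from the
# fixtures, not an authoritative spec):
#
#   apilimit:<api_key>:<path>:<YYYYMMDD>             plain counter (INCR)
#   apiuser:<submit|locate>:<api_key>:<YYYY-MM-DD>   HyperLogLog (PFADD)
#
# This is why the 7-day interval in test_many_days reports 3 uniques: the
# daily HyperLogLogs are combined, de-duplicating IPs seen on several days.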
|
Python
|
Apache-2.0
|
BBOXX/ichnaea/ichnaea/data/tests/test_monitor.py
|
a234d1e9-3dff-4f0a-bcfc-c0b12f90e5d9
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 4135, "end": 4144, "context": "dd(\n 'apiuser:submit:test:' + days_6, '127.0.0.1', bhutan_ip, london_ip)\n # add one entry w"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 3998, "end": 4007, "context": " 'apiuser:submit:test:' + self.today_str, '127.0.0.1', bhutan_ip)\n # add the same IPs + one new"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 4135, "end": 4144, "context": "dd(\n 'apiuser:submit:test:' + days_6, '127.0.0.1', bhutan_ip, london_ip)\n # add one entry w"}, {"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 3998, "end": 4007, "context": " 'apiuser:submit:test:' + self.today_str, '127.0.0.1', bhutan_ip)\n # add the same IPs + one new"}]
|
/* -*- mode: C++; c-basic-offset: 2; indent-tabs-mode: nil -*- */
/*
* Main authors:
* Christopher Mears <Chris.Mears@monash.edu>
*
* Contributing authors:
* Christian Schulte <schulte@gecode.org>
*
* Copyright:
* Christopher Mears, 2011
* Christian Schulte, 2011
*
* Last modified:
* $Date: 2016-08-17 14:21:02 +0200 (Wed, 17 Aug 2016) $ by $Author: schulte $
* $Revision: 15151 $
*
* This file is part of Gecode, the generic constraint
* development environment:
* http://www.gecode.org
*
* Permission is hereby granted, free of charge, to any person obtaining
* a copy of this software and associated documentation files (the
* "Software"), to deal in the Software without restriction, including
* without limitation the rights to use, copy, modify, merge, publish,
* distribute, sublicense, and/or sell copies of the Software, and to
* permit persons to whom the Software is furnished to do so, subject to
* the following conditions:
*
* The above copyright notice and this permission notice shall be
* included in all copies or substantial portions of the Software.
*
* THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
* EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
* MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
* NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
* LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
* OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
* WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
*
*/
namespace Gecode { namespace Int { namespace Precede {
/// Whether \a x is assigned to value \a v
template<class View>
forceinline bool
assigned(View x, int v) {
return x.assigned() && (x.val() == v);
}
template<class View>
forceinline
Single<View>::Index::Index(Space& home, Propagator& p,
Council<Index>& c, int i0)
: Advisor(home,p,c), i(i0) {}
template<class View>
forceinline
Single<View>::Index::Index(Space& home, bool share, Index& a)
: Advisor(home,share,a), i(a.i) {}
template<class View>
forceinline ExecStatus
Single<View>::updateAlpha(Space& home) {
int n = x.size();
while ((alpha < n) && !x[alpha].in(s))
GECODE_ME_CHECK(x[alpha++].nq(home, t));
if (alpha < n)
GECODE_ME_CHECK(x[alpha].nq(home, t));
return ES_OK;
}
template<class View>
forceinline ExecStatus
Single<View>::updateBeta(Space& home) {
int n = x.size();
do {
beta++;
} while ((beta < n) && !x[beta].in(s));
if (beta > gamma)
GECODE_ME_CHECK(x[alpha].eq(home, s));
return ES_OK;
}
template<class View>
forceinline
Single<View>::Single(Home home, ViewArray<View>& x0,
int s0, int t0, int b, int g)
: NaryPropagator<View, PC_INT_NONE>(home,x0),
c(home), s(s0), t(t0), alpha(0), beta(b), gamma(g) {
for (int i=x.size(); i--; )
if (!x[i].assigned())
x[i].subscribe(home,*new (home) Index(home,*this,c,i));
View::schedule(home, *this, ME_INT_BND);
}
template<class View>
inline ExecStatus
Single<View>::post(Home home, ViewArray<View>& x, int s, int t) {
{
int alpha = 0;
while ((alpha < x.size()) && !x[alpha].in(s))
GECODE_ME_CHECK(x[alpha++].nq(home,t));
x.drop_fst(alpha);
if (x.size() == 0)
return ES_OK;
}
// alpha has been normalized to 0
int beta = 0, gamma = 0;
GECODE_ME_CHECK(x[0].nq(home,t));
do {
gamma++;
} while ((gamma < x.size()) && !assigned(x[gamma],t));
do {
beta++;
} while ((beta < x.size()) && !x[beta].in(s));
if (beta > gamma) {
GECODE_ME_CHECK(x[0].eq(home, s));
return ES_OK;
}
if (gamma < x.size())
x.drop_lst(gamma);
(void) new (home) Single<View>(home, x, s, t, beta, gamma);
return ES_OK;
}
template<class View>
forceinline
Single<View>::Single(Space& home, bool share, Single& p)
: NaryPropagator<View,PC_INT_NONE>(home, share, p),
s(p.s), t(p.t),
alpha(p.alpha), beta(p.beta), gamma(p.gamma) {
c.update(home, share, p.c);
}
template<class View>
Propagator*
Single<View>::copy(Space& home, bool share) {
// Try to eliminate assigned views at the beginning
if (alpha > 0) {
int i = 0;
while ((i < alpha) && x[i].assigned())
i++;
x.drop_fst(i);
for (Advisors<Index> as(c); as(); ++as)
as.advisor().i -= i;
alpha -= i; beta -= i; gamma -= i;
}
// Try to eliminate assigned views at the end
if (gamma < x.size()) {
int i = x.size()-1;
while ((i > gamma) && x[i].assigned())
i--;
x.drop_lst(i);
}
return new (home) Single<View>(home, share, *this);
}
template<class View>
inline size_t
Single<View>::dispose(Space& home) {
// Cancel remaining advisors
for (Advisors<Index> as(c); as(); ++as)
x[as.advisor().i].cancel(home,as.advisor());
c.dispose(home);
(void) NaryPropagator<View,PC_INT_NONE>::dispose(home);
return sizeof(*this);
}
template<class View>
PropCost
Single<View>::cost(const Space&, const ModEventDelta&) const {
return PropCost::linear(PropCost::LO, x.size());
}
template<class View>
void
Single<View>::reschedule(Space& home) {
View::schedule(home, *this, ME_INT_BND);
}
template<class View>
ExecStatus
Single<View>::advise(Space& home, Advisor& a0, const Delta& d) {
Index& a(static_cast<Index&>(a0));
int i = a.i;
// Check for gamma
if ((beta <= gamma) && (i < gamma) && assigned(x[i],t))
gamma = i;
if (x[i].assigned()) {
a.dispose(home,c);
if (c.empty())
return ES_NOFIX;
} else if ((i < alpha) || (i > gamma)) {
x[i].cancel(home,a);
a.dispose(home,c);
return (c.empty()) ? ES_NOFIX : ES_FIX;
}
if (beta > gamma)
return ES_NOFIX;
if ((alpha == i) || (beta == i)) {
if (x[i].any(d) && !x[i].in(s))
return ES_NOFIX;
if ((x[i].min(d) <= s) && (s <= x[i].max(d)))
return ES_NOFIX;
}
return ES_FIX;
}
template<class View>
ExecStatus
Single<View>::propagate(Space& home, const ModEventDelta&) {
int n = x.size();
if (beta > gamma) {
GECODE_ME_CHECK(x[alpha].eq(home, s));
return home.ES_SUBSUMED(*this);
}
if ((alpha < n) && !x[alpha].in(s)) {
alpha++;
while (alpha < beta)
GECODE_ME_CHECK(x[alpha++].nq(home, t));
GECODE_ES_CHECK(updateAlpha(home));
beta = alpha;
if (alpha < n)
GECODE_ES_CHECK(updateBeta(home));
} else if ((beta < n) && !x[beta].in(s)) {
GECODE_ES_CHECK(updateBeta(home));
}
return (c.empty()) ? home.ES_SUBSUMED(*this) : ES_FIX;
}
}}}
// STATISTICS: int-prop
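// --- Illustrative usage sketch (not part of the original header) ---
// The propagator above implements single value precedence: value s must
// occur before value t in the array x. It is normally reached through
// Gecode's user-level post function (names as in the Gecode reference):
//
//   IntVarArgs x = ...;
//   precede(home, x, s, t);   // enforce: s precedes t in x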
|
C++
|
MIT-feh
|
SaGagnon/gecode-5-cbs/gecode/int/precede/single.hpp
|
6e3812d9-5876-4282-a51b-d614774e89e7
|
[{"tag": "EMAIL", "value": "schulte@gecode.org", "start": 192, "end": 210, "context": " Contributing authors:\n * Christian Schulte <schulte@gecode.org>\n *\n * Copyright:\n * Christopher Mears, 2011"}, {"tag": "NAME", "value": "Christian Schulte", "start": 268, "end": 285, "context": " Copyright:\n * Christopher Mears, 2011\n * Christian Schulte, 2011\n *\n * Last modified:\n * $Date: 2016-08"}, {"tag": "NAME", "value": "Christian Schulte", "start": 173, "end": 190, "context": "s@monash.edu>\n *\n * Contributing authors:\n * Christian Schulte <schulte@gecode.org>\n *\n * Copyright:\n * Chr"}, {"tag": "EMAIL", "value": "Chris.Mears@monash.edu", "start": 113, "end": 135, "context": "*/\n/*\n * Main authors:\n * Christopher Mears <Chris.Mears@monash.edu>\n *\n * Contributing authors:\n * Christian Sc"}, {"tag": "NAME", "value": "Christopher Mears", "start": 94, "end": 111, "context": "tabs-mode: nil -*- */\n/*\n * Main authors:\n * Christopher Mears <Chris.Mears@monash.edu>\n *\n * Contributing auth"}, {"tag": "NAME", "value": "Christopher Mears", "start": 237, "end": 254, "context": "lte <schulte@gecode.org>\n *\n * Copyright:\n * Christopher Mears, 2011\n * Christian Schulte, 2011\n *\n * Last "}]
|
[{"tag": "EMAIL", "value": "schulte@gecode.org", "start": 192, "end": 210, "context": " Contributing authors:\n * Christian Schulte <schulte@gecode.org>\n *\n * Copyright:\n * Christopher Mears, 2011"}, {"tag": "NAME", "value": "Christian Schulte", "start": 268, "end": 285, "context": " Copyright:\n * Christopher Mears, 2011\n * Christian Schulte, 2011\n *\n * Last modified:\n * $Date: 2016-08"}, {"tag": "NAME", "value": "Christian Schulte", "start": 173, "end": 190, "context": "s@monash.edu>\n *\n * Contributing authors:\n * Christian Schulte <schulte@gecode.org>\n *\n * Copyright:\n * Chr"}, {"tag": "EMAIL", "value": "Chris.Mears@monash.edu", "start": 113, "end": 135, "context": "*/\n/*\n * Main authors:\n * Christopher Mears <Chris.Mears@monash.edu>\n *\n * Contributing authors:\n * Christian Sc"}, {"tag": "NAME", "value": "Christopher Mears", "start": 94, "end": 111, "context": "tabs-mode: nil -*- */\n/*\n * Main authors:\n * Christopher Mears <Chris.Mears@monash.edu>\n *\n * Contributing auth"}, {"tag": "NAME", "value": "Christopher Mears", "start": 237, "end": 254, "context": "lte <schulte@gecode.org>\n *\n * Copyright:\n * Christopher Mears, 2011\n * Christian Schulte, 2011\n *\n * Last "}]
|
import React from "react"
// TODO: get these from json
const PHONE_NO = "+49 (0)172 1008 431"
const EMAIL = "sacha@sachahoechstetter.com"
export const Phone = () => {
return PHONE_NO
}
export const Email = () => {
return (
<a href = { `mailto:${ EMAIL }` }>{ EMAIL }</a>
)
}
|
JavaScript
|
MIT
|
bimbambenjamin/react-portfolio/src/partials/contactElements.js
|
7ee37124-5202-42bb-a783-ae9b12334881
|
[{"tag": "EMAIL", "value": "sacha@sachahoechstetter.com", "start": 110, "end": 137, "context": "t PHONE_NO = \"+49 (0)172 1008 431\"\nconst EMAIL = \"sacha@sachahoechstetter.com\"\n\n\n\nexport const Phone = () => {\n\treturn PHONE_NO"}]
|
[{"tag": "EMAIL", "value": "sacha@sachahoechstetter.com", "start": 110, "end": 137, "context": "t PHONE_NO = \"+49 (0)172 1008 431\"\nconst EMAIL = \"sacha@sachahoechstetter.com\"\n\n\n\nexport const Phone = () => {\n\treturn PHONE_NO"}]
|
/*
* =====================================================================================
*
* Filename: task2.cpp
*
* Description:
*
* Version: 1.0
* Created: 07.12.2019
*
* Author: Michał Zagórski (zagura), <mzagorsk@student.agh.edu.pl>
* Organization: AGH University of Science and Technology, Kraków
*
* =====================================================================================
*/
#include <iostream>
#include <sstream>
#include <vector>
#include <string>
#include <array>
#include <algorithm>
using std::getline;
using std::istringstream;
using std::stringstream;
using std::string;
using std::vector;
using std::cin;
using std::cout;
using std::endl;
using std::array;
using std::next_permutation;
using std::pair;
using std::make_pair;
// modes
// 0 - position mode
// 1 - immediate mode
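// Example decoding (illustrative): the instruction 1002 splits into
// opcode = 1002 % 100 = 2 (multiply) and modes = 1002 / 100 = 10, i.e.
// parameter 1 in position mode (0) and parameter 2 in immediate mode (1),
// exactly as process_code() computes below.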
void add(vector<int>& vals, int begin_index, int modes) {
int mode1 = modes % 10;
int mode2 = (modes / 10) % 10;
int val1 = vals.at(begin_index + 1);
int val2 = vals.at(begin_index + 2);
int val3 = vals.at(begin_index + 3);
if (not mode1) {
val1 = vals.at(val1);
}
if (not mode2) {
val2 = vals.at(val2);
}
int result = val1 + val2;
vals[val3] = result;
}
void multiply(vector<int>& vals, int begin_index, int modes) {
int mode1 = modes % 10;
int mode2 = (modes / 10) % 10;
int val1 = vals.at(begin_index + 1);
int val2 = vals.at(begin_index + 2);
int val3 = vals.at(begin_index + 3);
if (not mode1) {
val1 = vals.at(val1);
}
if (not mode2) {
val2 = vals.at(val2);
}
int result = val1 * val2;
vals[val3] = result;
}
void less_than(vector<int>& vals, int i, int modes) {
int mode1 = modes % 10;
int mode2 = (modes / 10) % 10;
int v1, v2, v3;
v1 = vals.at(i+1);
v2 = vals.at(i+2);
v3 = vals.at(i+3);
if (not mode1) {
v1 = vals.at(v1);
}
if (not mode2) {
v2 = vals.at(v2);
}
if (v1 < v2) {
vals[v3] = 1;
} else {
vals[v3] = 0;
}
}
void equal(vector<int>& vals, int i, int modes) {
int mode1 = modes % 10;
int mode2 = (modes / 10) % 10;
int v1, v2, v3;
v1 = vals.at(i+1);
v2 = vals.at(i+2);
v3 = vals.at(i+3);
if (not mode1) {
v1 = vals.at(v1);
}
if (not mode2) {
v2 = vals.at(v2);
}
if (v1 == v2) {
vals[v3] = 1;
} else {
vals[v3] = 0;
}
}
class Amplifier {
public:
vector<int> state;
size_t i;
string name;
vector<int> input_vector;
size_t input_index;
pair<bool, int> process_code();
};
pair<bool, int> Amplifier::process_code() {
int output = 0;
while (i < state.size()) {
int v1 = 0;
int v2 = 0;
int v3 = 0;
int mode;
int mode1, mode2;
int instruction = state[i] % 100;
// cout << "Element i " << i << endl;
mode = state[i] / 100;
switch(instruction) {
case 1:
add(state, i, mode);
i += 4;
break;
case 2:
multiply(state, i, mode);
i += 4;
break;
case 3:
v1 = state.at(i+1);
// cout << "Get: " << endl;
v2 = input_vector.at(input_index);
++input_index;
state[v1] = v2;
i += 2;
break;
case 4:
v1 = state.at(i+1);
cout << "Output: " << state[v1] << endl;
output = state[v1];
i += 2;
return make_pair(false, output);
break;
case 5:
v1 = state.at(i+1);
v2 = state.at(i+2);
mode1 = mode % 10;
mode2 = (mode / 10) % 10;
if (not mode1) {
v1 = state.at(v1);
}
if (not mode2) {
v2 = state.at(v2);
}
if (v1 != 0) {
i = v2;
} else {
i+= 3;
}
break;
case 6:
v1 = state.at(i+1);
v2 = state.at(i+2);
mode1 = mode % 10;
mode2 = (mode / 10) % 10;
if (not mode1) {
v1 = state.at(v1);
}
if (not mode2) {
v2 = state.at(v2);
}
if (v1 == 0) {
i = v2;
} else {
i+= 3;
}
break;
case 7:
less_than(state, i, mode);
i += 4;
break;
case 8:
equal(state, i, mode);
i += 4;
break;
case 99:
// std::cout << "End of program" << std::endl;
i = state.size() + 1;
break;
default:
// cout << "Problem: " << i << '[' << vals[i] << ']' << endl;
i = state.size();
}
}
return make_pair(true, output);
}
/*
* class Amplifier {
public:
vector<int> state;
size_t i;
vector<int> input_vector;
vector<int>::iterator input_iterator;
pair<bool, int> process_code();
};
*/
int check_permutation(array<int, 5>& phases, vector<int>& vals) {
int input = 0;
array<Amplifier, 5> amps;
for (size_t i = 0; i < amps.size(); i++) {
auto& amp = amps[i];
amp.i = 0;
string name = "Amplifier";
name += ('1' + i);
amp.name = name;
amp.state = vals;
amp.input_index = 0;
amp.input_vector.push_back(phases[i]);
}
bool finished = false;
while (not finished) {
for (auto& amp: amps) {
amp.input_vector.push_back(input);
auto result = amp.process_code();
finished = result.first;
if (not finished) {
input = result.second;
}
}
}
cout << "Permutation output " << input << endl;
return input;
}
int main() {
std::vector<int> vals;
vals.reserve(200);
std::string line;
getline(std::cin, line);
stringstream ss { line };
for (std::string number; getline(ss, number, ',');) {
vals.push_back(std::stoi(number));
}
std::array<int, 5> phases = {5 ,6 ,7 , 8, 9};
// array<int,5> phases = { 9, 8, 7, 6, 5 };
int result = 0;
int max_result = 0;
do {
result = check_permutation(phases, vals);
if (result > max_result) {
max_result = result;
}
} while(std::next_permutation(phases.begin(), phases.end()));
cout << "Task 2 result: " << max_result << endl;
// for (auto& v: vals) {
// cout << v << ",";
// }
// cout << endl;
return 0;
}
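// --- Illustrative usage (not part of the original file) ---
// The program reads one comma-separated Intcode program from stdin and
// prints the best feedback-loop output over all phase permutations, e.g.:
//
//   g++ -std=c++11 task2.cpp -o task2
//   ./task2 < input.txt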
|
C++
|
MIT
|
zagura/aoc-2017/2019/07day/task2.cpp
|
3e8c79c1-d079-4b36-a737-4ae00031e960
|
[{"tag": "EMAIL", "value": "mzagorsk@student.agh.edu.pl", "start": 255, "end": 282, "context": " *\n * Author: Micha\u0142 Zag\u00f3rski (zagura), <mzagorsk@student.agh.edu.pl>\n * Organization: AGH University of Science an"}, {"tag": "USERNAME", "value": "zagura", "start": 245, "end": 251, "context": "7.12.2019\n *\n * Author: Micha\u0142 Zag\u00f3rski (zagura), <mzagorsk@student.agh.edu.pl>\n * Organization"}, {"tag": "NAME", "value": "Micha\u0142 Zag\u00f3rski", "start": 228, "end": 243, "context": " Created: 07.12.2019\n *\n * Author: Micha\u0142 Zag\u00f3rski (zagura), <mzagorsk@student.agh.edu.pl>\n * Orga"}]
|
[{"tag": "EMAIL", "value": "mzagorsk@student.agh.edu.pl", "start": 255, "end": 282, "context": " *\n * Author: Micha\u0142 Zag\u00f3rski (zagura), <mzagorsk@student.agh.edu.pl>\n * Organization: AGH University of Science an"}, {"tag": "USERNAME", "value": "zagura", "start": 245, "end": 251, "context": "7.12.2019\n *\n * Author: Micha\u0142 Zag\u00f3rski (zagura), <mzagorsk@student.agh.edu.pl>\n * Organization"}, {"tag": "NAME", "value": "Micha\u0142 Zag\u00f3rski", "start": 228, "end": 243, "context": " Created: 07.12.2019\n *\n * Author: Micha\u0142 Zag\u00f3rski (zagura), <mzagorsk@student.agh.edu.pl>\n * Orga"}]
|
/*global window, define, ns */
/*jslint nomen: true */
/*
* Copyright (c) 2015 Samsung Electronics Co., Ltd
*
* Licensed under the Flora License, Version 1.1 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://floralicense.org/license/
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* #Progress
* Progress component shows that an operation is in progress.
*
 * #Create widget
*
* Shows a control that indicates the progress percentage of an on-going operation by circular shape.
*
* @example template tau-progress
* <div class="ui-progress" data-type="bar" data-max="100" data-value="60">
* <span class="ui-current-value"> times</span>
* </div>
*
*
* ##Set and Get the value
* You can set or get the value with the value() method
*
* @since 2.0
* @class ns.widget.core.progress.Progress
* @component-selector .ui-circle-progress, .ui-progress
* @extends ns.widget.BaseWidget
* @author Heeju Joo <heeju.joo@samsung.com>
*/
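/*
 * Usage sketch (assumes the usual global `tau` namespace exposes this
 * widget; the element id below is illustrative):
 *
 *     var element = document.getElementById("progress"),
 *         progress = tau.widget.Progress(element);
 *
 *     progress.value(30);             // set
 *     var current = progress.value(); // get
 */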
(function (document, ns) {
"use strict";
//>>excludeStart("tauBuildExclude", pragmas.tauBuildExclude);
define(
[
"../../../util",
"../../../util/selectors",
"../../../util/DOM",
"../../../util/object",
"../../../event",
"../../core", // fetch namespace
"../../BaseWidget",
"../progress",
"./type/indeterminatebar",
"./type/indeterminatecircle",
"./type/progressbar",
"./type/progresscircle",
"../Page"
],
function () {
//>>excludeEnd("tauBuildExclude");
var BaseWidget = ns.widget.BaseWidget,
events = ns.event,
engine = ns.engine,
util = ns.util,
selectors = ns.util.selectors,
utilsObject = ns.util.object,
Page = ns.widget.core.Page,
eventType = {
/**
* Event is triggered when value of widget is changing.
* @event change
				 * @member ns.widget.core.progress.Progress
*/
CHANGE: "change"
},
progressType = {
PROGRESS_BAR: "bar",
PROGRESS_CIRCLE: "circle",
INDETERMINATE_BAR: "indeterminatebar",
INDETERMINATE_CIRCLE: "indeterminatecircle"
},
circleSize = {
SMALL_HEADER: "small-header",
SMALL: "small",
MEDIUM: "medium",
LARGE: "large",
FULL: "full"
},
/**
* Progress constructor
* @method Progress
*/
Progress = function () {
var self = this;
self.options = utilsObject.merge({}, Progress.defaults);
self._ui = {};
self._type = null;
self._progress = null;
self._isAnimating = false;
self._callbacks = {};
},
/**
* Dictionary object containing commonly used widget classes
* @property {Object} classes
* @member ns.widget.core.progress.Progress
* @private
* @static
* @readonly
*/
classes = {
uiProgress: "ui-progress"
},
defaults = {
type: progressType.PROGRESS_BAR,
size: circleSize.MEDIUM,
value: 100,
min: 0,
max: 100
},
prototype = new BaseWidget();
Progress.prototype = prototype;
Progress.classes = classes;
Progress.events = eventType;
Progress.defaults = defaults;
/**
* Build structure of Progress component
* @method _build
* @param {HTMLElement} element
* @member ns.widget.core.progress.Progress
* @protected
*/
prototype._build = function (element) {
var self = this,
options = self.options;
self._type = options.type;
element.classList.add(classes.uiProgress);
self._progress = ns.widget.core.progress.type[options.type];
self._progress.build(self, element);
return element;
};
/**
* Initialization of Progress component
* @method _init
* @param {HTMLElement} element
* @member ns.widget.core.progress.Progress
* @protected
*/
prototype._init = function (element) {
var self = this;
self._progress.init(self, element);
element.setAttribute("value", self.options.value);
return element;
};
/**
* Refresh of Progress
* @method _refresh
* @param {HTMLElement} element
* @member ns.widget.core.progress.Progress
* @protected
*/
prototype._refresh = function (element) {
var self = this,
options = self.options;
if (self._type !== options.type) {
self._destroy();
return ns.widget.Progress(element, {type: options.type});
} else {
self._progress.refresh(self);
self._setValue(self.options.value);
}
return element;
};
prototype._setValue = function (value) {
var self = this,
options = self.options,
element = self.element;
self._oldValue = options.value;
if (typeof value === "number") {
value = Math.min(options.max, Math.max(options.min, value));
// value changed
if (value !== self._oldValue) {
options.value = value;
if (!self.isCustomElement) {
element.setAttribute("data-value", value);
}
element.setAttribute("value", value);
events.trigger(element, eventType.CHANGE);
self._progress.changeValue(self, self._oldValue, value);
}
return true;
}
return false;
};
/**
* Return value of progress
* @return {number}
* @private
*/
prototype._getValue = function () {
return parseInt(this.element.getAttribute("value"), 10);
};
prototype._animate = function (duration, progressCallback, finishCallback) {
var self = this,
startTime = null,
step = function (timeStamp) {
var currentTimeGap = 0;
if (startTime === null) {
startTime = timeStamp;
}
currentTimeGap = timeStamp - startTime;
progressCallback(currentTimeGap);
if (self._isAnimating && duration > currentTimeGap) {
util.requestAnimationFrame(step);
} else {
self._isAnimating = false;
finishCallback();
}
};
self._isAnimating = true;
util.requestAnimationFrame(step);
};
/**
* Callback on event pagebeforeshow
* @method pageBeforeShow
* @param {ns.widget.core.progress.Progress} self
* @private
* @member ns.widget.core.progress.Progress
*/
function pageBeforeShow(self) {
self.refresh();
}
/**
* Bind events to Progress
* @method _bindEvents
* @member ns.widget.core.progress.Progress
* @protected
*/
prototype._bindEvents = function () {
var self = this,
element = self.element,
page = selectors.getClosestByClass(element, Page.classes.uiPage);
self._ui.page = page;
self._callbacks.onPageBeforeShow = pageBeforeShow.bind(null, self);
page.addEventListener(Page.events.BEFORE_SHOW, self._callbacks.onPageBeforeShow, false);
};
prototype._unbindEvents = function () {
var self = this;
if (self._callbacks.onPageBeforeShow) {
self._ui.page.removeEventListener(
Page.events.BEFORE_SHOW,
self._callbacks.onPageBeforeShow,
false);
}
};
/**
* Destroys Progress component
* @method _destroy
* @member ns.widget.core.progress.Progress
* @protected
*/
prototype._destroy = function () {
var self = this,
element = self.element;
this._unbindEvents();
if (!self._progress.destroy(self, element)) {
while (element.firstChild) {
element.removeChild(element.firstChild);
}
}
self._ui = null;
self._oldValue = null;
return element;
};
ns.widget.core.progress.Progress = Progress;
engine.defineWidget(
"Progress",
"[data-role='progress'], .ui-progress",
[],
Progress,
"core"
);
return Progress;
//>>excludeStart("tauBuildExclude", pragmas.tauBuildExclude);
}
);
//>>excludeEnd("tauBuildExclude");
}(window.document, ns));
|
JavaScript
|
SHL-0.51
|
SwimingKim/TAU/src/js/core/widget/core/progress/Progress.js
|
ea122fb5-b21e-4153-98e8-ba7867ad5f76
|
[{"tag": "EMAIL", "value": "heeju.joo@samsung.com", "start": 1324, "end": 1345, "context": "xtends ns.widget.BaseWidget\n * @author Heeju Joo <heeju.joo@samsung.com>\n */\n(function (document, ns) {\n\t\"use strict\";\n\t/"}, {"tag": "NAME", "value": "Heeju Joo", "start": 1313, "end": 1322, "context": "gress\n * @extends ns.widget.BaseWidget\n * @author Heeju Joo <heeju.joo@samsung.com>\n */\n(function (document, "}]
|
[{"tag": "EMAIL", "value": "heeju.joo@samsung.com", "start": 1324, "end": 1345, "context": "xtends ns.widget.BaseWidget\n * @author Heeju Joo <heeju.joo@samsung.com>\n */\n(function (document, ns) {\n\t\"use strict\";\n\t/"}, {"tag": "NAME", "value": "Heeju Joo", "start": 1313, "end": 1322, "context": "gress\n * @extends ns.widget.BaseWidget\n * @author Heeju Joo <heeju.joo@samsung.com>\n */\n(function (document, "}]
|
<?php
/*
* THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
* "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
* LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
* A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
* OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
* SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
* LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
* DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
* THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
* (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
* OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
*
* This software consists of voluntary contributions made by many individuals
* and is licensed under the MIT license. For more information, see
* <http://www.doctrine-project.org>.
*/
namespace Mockery;
use Closure;
use Exception;
use ReflectionClass;
use UnexpectedValueException;
use InvalidArgumentException;
/**
* This is a trimmed down version of https://github.com/doctrine/instantiator,
* basically without the caching
*
* @author Marco Pivetta <ocramius@gmail.com>
*/
final class Instantiator
{
/**
* Markers used internally by PHP to define whether {@see \unserialize} should invoke
* the method {@see \Serializable::unserialize()} when dealing with classes implementing
* the {@see \Serializable} interface.
*/
const SERIALIZATION_FORMAT_USE_UNSERIALIZER = 'C';
const SERIALIZATION_FORMAT_AVOID_UNSERIALIZER = 'O';
/**
* {@inheritDoc}
*/
public function instantiate($className)
{
$factory = $this->buildFactory($className);
$instance = $factory();
$reflection = new ReflectionClass($instance);
return $instance;
}
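    // Usage sketch (\My\HeavyService is a hypothetical class name): the
    // returned instance is produced without ever invoking its constructor.
    //
    //     $instantiator = new \Mockery\Instantiator();
    //     $instance = $instantiator->instantiate(\My\HeavyService::class);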
/**
* @internal
* @private
*
* Builds a {@see \Closure} capable of instantiating the given $className without
* invoking its constructor.
* This method is only exposed as public because of PHP 5.3 compatibility. Do not
* use this method in your own code
*
* @param string $className
*
* @return Closure
*/
public function buildFactory($className)
{
$reflectionClass = $this->getReflectionClass($className);
if ($this->isInstantiableViaReflection($reflectionClass)) {
return function () use ($reflectionClass) {
return $reflectionClass->newInstanceWithoutConstructor();
};
}
$serializedString = sprintf(
'%s:%d:"%s":0:{}',
$this->getSerializationFormat($reflectionClass),
strlen($className),
$className
);
$this->attemptInstantiationViaUnSerialization($reflectionClass, $serializedString);
return function () use ($serializedString) {
return unserialize($serializedString);
};
}
/**
* @param string $className
*
* @return ReflectionClass
*
* @throws InvalidArgumentException
*/
private function getReflectionClass($className)
{
if (! class_exists($className)) {
throw new InvalidArgumentException("Class:$className does not exist");
}
$reflection = new ReflectionClass($className);
if ($reflection->isAbstract()) {
throw new InvalidArgumentException("Class:$className is an abstract class");
}
return $reflection;
}
/**
* @param ReflectionClass $reflectionClass
* @param string $serializedString
*
* @throws UnexpectedValueException
*
* @return void
*/
private function attemptInstantiationViaUnSerialization(ReflectionClass $reflectionClass, $serializedString)
    {
        $error = null;

        set_error_handler(function ($code, $message, $file, $line) use ($reflectionClass, & $error) {
$msg = sprintf(
'Could not produce an instance of "%s" via un-serialization, since an error was triggered in file "%s" at line "%d"',
$reflectionClass->getName(),
$file,
$line
);
$error = new UnexpectedValueException($msg, 0, new \Exception($message, $code));
});
try {
unserialize($serializedString);
} catch (Exception $exception) {
restore_error_handler();
throw new UnexpectedValueException("An exception was raised while trying to instantiate an instance of \"{$reflectionClass->getName()}\" via un-serialization", 0, $exception);
}
restore_error_handler();
if ($error) {
throw $error;
}
}
/**
* @param ReflectionClass $reflectionClass
*
* @return bool
*/
private function isInstantiableViaReflection(ReflectionClass $reflectionClass)
{
if (\PHP_VERSION_ID >= 50600) {
return ! ($reflectionClass->isInternal() && $reflectionClass->isFinal());
}
return \PHP_VERSION_ID >= 50400 && ! $this->hasInternalAncestors($reflectionClass);
}
/**
* Verifies whether the given class is to be considered internal
*
* @param ReflectionClass $reflectionClass
*
* @return bool
*/
private function hasInternalAncestors(ReflectionClass $reflectionClass)
{
do {
if ($reflectionClass->isInternal()) {
return true;
}
} while ($reflectionClass = $reflectionClass->getParentClass());
return false;
}
/**
* Verifies if the given PHP version implements the `Serializable` interface serialization
* with an incompatible serialization format. If that's the case, use serialization marker
* "C" instead of "O".
*
* @link http://news.php.net/php.internals/74654
*
* @param ReflectionClass $reflectionClass
*
* @return string the serialization format marker, either self::SERIALIZATION_FORMAT_USE_UNSERIALIZER
* or self::SERIALIZATION_FORMAT_AVOID_UNSERIALIZER
*/
private function getSerializationFormat(ReflectionClass $reflectionClass)
{
if ($this->isPhpVersionWithBrokenSerializationFormat()
&& $reflectionClass->implementsInterface('Serializable')
) {
return self::SERIALIZATION_FORMAT_USE_UNSERIALIZER;
}
return self::SERIALIZATION_FORMAT_AVOID_UNSERIALIZER;
}
/**
* Checks whether the current PHP runtime uses an incompatible serialization format
*
* @return bool
*/
private function isPhpVersionWithBrokenSerializationFormat()
{
return PHP_VERSION_ID === 50429 || PHP_VERSION_ID === 50513;
}
}
|
PHP
|
MIT
|
ahmadelsaeed/invstoc_old/vendor/mockery/mockery/library/Mockery/Instantiator.php
|
dab6cfa4-cf9e-418f-90d7-6781da364ad8
|
[{"tag": "EMAIL", "value": "ocramius@gmail.com", "start": 1295, "end": 1313, "context": "ithout the caching\r\n *\r\n * @author Marco Pivetta <ocramius@gmail.com>\r\n */\r\nfinal class Instantiator\r\n{\r\n /**\r\n "}, {"tag": "NAME", "value": "Marco Pivetta", "start": 1280, "end": 1293, "context": "\n * basically without the caching\r\n *\r\n * @author Marco Pivetta <ocramius@gmail.com>\r\n */\r\nfinal class Instantiat"}]
|
[{"tag": "EMAIL", "value": "ocramius@gmail.com", "start": 1295, "end": 1313, "context": "ithout the caching\r\n *\r\n * @author Marco Pivetta <ocramius@gmail.com>\r\n */\r\nfinal class Instantiator\r\n{\r\n /**\r\n "}, {"tag": "NAME", "value": "Marco Pivetta", "start": 1280, "end": 1293, "context": "\n * basically without the caching\r\n *\r\n * @author Marco Pivetta <ocramius@gmail.com>\r\n */\r\nfinal class Instantiat"}]
|
/********************************************************************************/
/* Portable Graphics Library for Embedded Systems * (C) Componentality Oy, 2015 */
/* Initial design and development: Konstantin A. Khait */
/* Support, comments and questions: dev@componentality.com */
/********************************************************************************/
/* Alfa and gradient transparency support */
/********************************************************************************/
#include "transparency.h"
#include "sprite.h"
using namespace Componentality::Graphics;
void AlphaBrush::plot(ISurface& surface, const size_t x, const size_t y, const Color& color)
{
ColorRGB original_color = surface.peek(x, y);
ColorRGB new_color = color;
new_color.blue = ____applyAlpha(original_color.blue, new_color.blue, mAlpha);
new_color.green = ____applyAlpha(original_color.green, new_color.green, mAlpha);
new_color.red = ____applyAlpha(original_color.red, new_color.red, mAlpha);
surface.plot(x, y, new_color);
}
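// Note: ____applyAlpha is declared elsewhere (transparency.h / sprite.h are
// included above); a plausible reading is ordinary per-channel linear
// blending, roughly: blended = (original * (255 - alpha) + incoming * alpha) / 255.
// GradientBrush below derives a per-pixel alpha by bilinear interpolation
// between the four corner weights (mLeft/mRight/mTop/mBottom, in 1/10000
// fixed point) before applying the same blend.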
void GradientBrush::plot(ISurface& surface, const size_t x, const size_t y, const Color& color)
{
long long x_scale = 10000 * x / surface.getWidth();
long long y_scale = 10000 * y / surface.getHeight();
x_scale = (mRight * x_scale) + (mLeft * (10000 - x_scale));
y_scale = (mBottom * y_scale) + (mTop * (10000 - y_scale));
long long alpha = (x_scale + y_scale) / 20000;
ColorRGB original_color = surface.peek(x, y);
ColorRGB new_color = color;
new_color.blue = ____applyAlpha(original_color.blue, new_color.blue, (unsigned char) alpha);
new_color.green = ____applyAlpha(original_color.green, new_color.green, (unsigned char) alpha);
new_color.red = ____applyAlpha(original_color.red, new_color.red, (unsigned char) alpha);
surface.plot(x, y, new_color);
}
|
C++
|
MIT
|
YemSalat/jetcat/Graphics/Drawing/transparency.cpp
|
8d35e2c7-6187-4441-8805-ea0281196dc2
|
[{"tag": "EMAIL", "value": "dev@componentality.com", "start": 285, "end": 307, "context": " */\n/* Support, comments and questions: dev@componentality.com */\n/************************"}, {"tag": "NAME", "value": "Konstantin A. Khait", "start": 201, "end": 220, "context": "ty Oy, 2015 */\n/* Initial design and development: Konstantin A. Khait */\n/* Support, comments "}]
|
[{"tag": "EMAIL", "value": "dev@componentality.com", "start": 285, "end": 307, "context": " */\n/* Support, comments and questions: dev@componentality.com */\n/************************"}, {"tag": "NAME", "value": "Konstantin A. Khait", "start": 201, "end": 220, "context": "ty Oy, 2015 */\n/* Initial design and development: Konstantin A. Khait */\n/* Support, comments "}]
|
/*******************************************************************************
* Copyright 2012 Kim Herzig, Sascha Just
*
* Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on
* an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the
* specific language governing permissions and limitations under the License.
******************************************************************************/
package net.ownhero.dev.kanuni.annotations.file;
import java.lang.annotation.Documented;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import net.ownhero.dev.kanuni.annotations.factories.CreatorFile;
import net.ownhero.dev.kanuni.annotations.meta.FactoryClass;
/**
* The Interface ExecutableFile.
*
* @author Sascha Just <sascha.just@st.cs.uni-saarland.de>
*/
@Documented
@Retention (RetentionPolicy.RUNTIME)
@FactoryClass (CreatorFile.class)
@Target (value = { ElementType.PARAMETER })
public @interface ExecutableFile {
/**
* Value.
*
* @return the string
*/
String value() default "";
}
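// Usage sketch (hypothetical method signature; Kanuni evaluates annotated
// parameters at runtime):
//
//   public void execute(@ExecutableFile final String path) { ... }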
|
Java
|
Apache-2.0
|
SaschaJust/kanuni/src/main/java/net/ownhero/dev/kanuni/annotations/file/ExecutableFile.java
|
464721af-de7d-4b64-905d-d731a00d67db
|
[{"tag": "NAME", "value": "Sascha Just", "start": 111, "end": 122, "context": "*******************\n * Copyright 2012 Kim Herzig, Sascha Just\n * \n * Licensed under the Apache License, Version"}, {"tag": "NAME", "value": "Kim Herzig", "start": 99, "end": 109, "context": "*******************************\n * Copyright 2012 Kim Herzig, Sascha Just\n * \n * Licensed under the Apache Lic"}, {"tag": "EMAIL", "value": "sascha.just@st.cs.uni-saarland.de", "start": 1195, "end": 1228, "context": "erface ExecutableFile.\n *\n * @author Sascha Just <sascha.just@st.cs.uni-saarland.de>\n */\n@Documented\n@Retention (RetentionPolicy.RUNT"}, {"tag": "NAME", "value": "Sascha Just", "start": 1182, "end": 1193, "context": "**\n * The Interface ExecutableFile.\n *\n * @author Sascha Just <sascha.just@st.cs.uni-saarland.de>\n */\n@Document"}]
|
[{"tag": "NAME", "value": "Sascha Just", "start": 111, "end": 122, "context": "*******************\n * Copyright 2012 Kim Herzig, Sascha Just\n * \n * Licensed under the Apache License, Version"}, {"tag": "NAME", "value": "Kim Herzig", "start": 99, "end": 109, "context": "*******************************\n * Copyright 2012 Kim Herzig, Sascha Just\n * \n * Licensed under the Apache Lic"}, {"tag": "EMAIL", "value": "sascha.just@st.cs.uni-saarland.de", "start": 1195, "end": 1228, "context": "erface ExecutableFile.\n *\n * @author Sascha Just <sascha.just@st.cs.uni-saarland.de>\n */\n@Documented\n@Retention (RetentionPolicy.RUNT"}, {"tag": "NAME", "value": "Sascha Just", "start": 1182, "end": 1193, "context": "**\n * The Interface ExecutableFile.\n *\n * @author Sascha Just <sascha.just@st.cs.uni-saarland.de>\n */\n@Document"}]
|
<?php
/**
* 2007-2018 PrestaShop.
*
* NOTICE OF LICENSE
*
* This source file is subject to the Open Software License (OSL 3.0)
* that is bundled with this package in the file LICENSE.txt.
* It is also available through the world-wide-web at this URL:
* https://opensource.org/licenses/OSL-3.0
* If you did not receive a copy of the license and are unable to
* obtain it through the world-wide-web, please send an email
* to license@prestashop.com so we can send you a copy immediately.
*
* DISCLAIMER
*
* Do not edit or add to this file if you wish to upgrade PrestaShop to newer
* versions in the future. If you wish to customize PrestaShop for your
* needs please refer to http://www.prestashop.com for more information.
*
* @author PrestaShop SA <contact@prestashop.com>
* @copyright 2007-2018 PrestaShop SA
* @license https://opensource.org/licenses/OSL-3.0 Open Software License (OSL 3.0)
* International Registered Trademark & Property of PrestaShop SA
*/
namespace PrestaShop\PrestaShop\Adapter\Presenter\Order;
use PrestaShop\PrestaShop\Adapter\Presenter\AbstractLazyArray;
use PrestaShopBundle\Translation\TranslatorComponent;
use PrestaShop\PrestaShop\Adapter\Product\PriceFormatter;
use Cart;
use Configuration;
use Context;
use Order;
use TaxConfiguration;
use Currency;
class OrderSubtotalLazyArray extends AbstractLazyArray
{
/** @var Order */
private $order;
/** @var Context */
private $context;
/** @var TaxConfiguration */
private $taxConfiguration;
/** @var PriceFormatter */
private $priceFormatter;
/** @var bool */
private $includeTaxes;
/** @var TranslatorComponent */
private $translator;
/**
* OrderSubtotalLazyArray constructor.
*
* @param Order $order
*/
public function __construct(Order $order)
{
$this->context = Context::getContext();
$this->taxConfiguration = new TaxConfiguration();
$this->includeTaxes = $this->includeTaxes();
$this->priceFormatter = new PriceFormatter();
$this->translator = Context::getContext()->getTranslator();
$this->order = $order;
parent::__construct();
}
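    /*
     * Access sketch: AbstractLazyArray presumably maps each @arrayAccess
     * getter to an array key, so getProducts() backs $subtotals['products']
     * and is only evaluated on first access:
     *
     *     $subtotals = new OrderSubtotalLazyArray($order); // illustrative
     *     $row = $subtotals['products'];
     */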
/**
* @arrayAccess
*
* @return array
*/
public function getProducts()
{
$totalProducts = ($this->includeTaxes) ? $this->order->total_products_wt : $this->order->total_products;
return array(
'type' => 'products',
'label' => $this->translator->trans('Subtotal', array(), 'Shop.Theme.Checkout'),
'amount' => $totalProducts,
'value' => $this->priceFormatter->format(
$totalProducts,
Currency::getCurrencyInstance((int) $this->order->id_currency)
),
);
}
/**
* @arrayAccess
*
* @return array
*/
public function getDiscounts()
{
$discountAmount = ($this->includeTaxes)
? $this->order->total_discounts_tax_incl
: $this->order->total_discounts_tax_excl;
if ((float) $discountAmount) {
return array(
'type' => 'discount',
'label' => $this->translator->trans('Discount', array(), 'Shop.Theme.Checkout'),
'amount' => $discountAmount,
'value' => $this->priceFormatter->format(
$discountAmount,
Currency::getCurrencyInstance((int) $this->order->id_currency)
),
);
}
return array(
'type' => 'discount',
'label' => null,
'amount' => null,
'value' => '',
);
}
/**
* @arrayAccess
*
* @return array
*/
public function getShipping()
{
$cart = new Cart($this->order->id_cart);
if (!$cart->isVirtualCart()) {
$shippingCost = ($this->includeTaxes)
? $this->order->total_shipping_tax_incl : $this->order->total_shipping_tax_excl;
return array(
'type' => 'shipping',
'label' => $this->translator->trans('Shipping and handling', array(), 'Shop.Theme.Checkout'),
'amount' => $shippingCost,
'value' => $shippingCost != 0 ? $this->priceFormatter->format(
$shippingCost,
Currency::getCurrencyInstance((int) $this->order->id_currency)
)
: $this->translator->trans('Free', array(), 'Shop.Theme.Checkout'),
);
}
return array(
'type' => 'shipping',
'label' => null,
'amount' => null,
'value' => '',
);
}
/**
* @arrayAccess
*
* @return array
*/
public function getTax()
{
$tax = $this->order->total_paid_tax_incl - $this->order->total_paid_tax_excl;
if ((float) $tax && Configuration::get('PS_TAX_DISPLAY')) {
return array(
'type' => 'tax',
'label' => $this->translator->trans('Tax', array(), 'Shop.Theme.Checkout'),
'amount' => $tax,
'value' => $this->priceFormatter->format(
$tax,
Currency::getCurrencyInstance((int) $this->order->id_currency)
),
);
}
return array(
'type' => 'tax',
'label' => null,
'amount' => null,
'value' => '',
);
}
/**
* @arrayAccess
*
* @return array
*/
public function getGiftWrapping()
{
if ($this->order->gift) {
$giftWrapping = ($this->includeTaxes)
? $this->order->total_wrapping_tax_incl
: $this->order->total_wrapping_tax_excl;
return array(
'type' => 'gift_wrapping',
'label' => $this->translator->trans('Gift wrapping', array(), 'Shop.Theme.Checkout'),
'amount' => $giftWrapping,
'value' => $this->priceFormatter->format(
$giftWrapping,
Currency::getCurrencyInstance((int) $this->order->id_currency)
),
);
}
return array(
'type' => 'gift_wrapping',
'label' => null,
'amount' => null,
'value' => '',
);
}
/**
* @return bool
*/
private function includeTaxes()
{
return $this->taxConfiguration->includeTaxes();
}
}
|
PHP
|
MIT
|
RubyCouz/folio/prestashop/src/Adapter/Presenter/Order/OrderSubtotalLazyArray.php
|
57b3b1fb-067e-42ba-9de0-03e1e5bf15f0
|
[{"tag": "EMAIL", "value": "contact@prestashop.com", "start": 772, "end": 794, "context": "more information.\n *\n * @author PrestaShop SA <contact@prestashop.com>\n * @copyright 2007-2018 PrestaShop SA\n * @licens"}]
|
[{"tag": "EMAIL", "value": "contact@prestashop.com", "start": 772, "end": 794, "context": "more information.\n *\n * @author PrestaShop SA <contact@prestashop.com>\n * @copyright 2007-2018 PrestaShop SA\n * @licens"}]
|
# Copyright 2015 gRPC authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""The Python implementation of the GRPC helloworld.Greeter client."""
from __future__ import print_function
import logging
import grpc
import helloworld_pb2
import helloworld_pb2_grpc
import redis
def createLoginForm(stub):
    username = input("Enter your login: ")
    password = input("Enter your password: ")
    _redis = redis.Redis(
        host='localhost',
        port=6379,
        password='davi')  # hardcoded credentials: acceptable only for this local demo
_redis.set('username', username)
value = _redis.get('username')
print("variavel do redis:", value)
return stub.Login(helloworld_pb2.LoginRequest(username=username, password=password))
def runTurn(stub, auth_token):
    extraCard = input("Draw another card? Y/N: ")
return stub.TurnAction(helloworld_pb2.TurnRequest(auth_token=auth_token, dig = extraCard))
def run():
# NOTE(gRPC Python Team): .close() is possible on a channel and should be
# used in circumstances in which the with statement does not fit the needs
# of the code.
with grpc.insecure_channel('0.0.0.0:50051') as channel:
stub = helloworld_pb2_grpc.GreeterStub(channel)
login = createLoginForm(stub)
print("Suas cartas são: ", login.message)
while True:
turnResponse = runTurn(stub, login.auth_token)
print("Suas cartas são: ", turnResponse.cards)
if turnResponse.message:
print(turnResponse.message)
if turnResponse.playing == "False":
break
winner = stub.VerifyTurn(helloworld_pb2.VerifyTurnRequest(auth_token=login.auth_token))
print(winner.message)
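# Flow sketch: Login -> repeated TurnAction calls until the server replies
# with playing == "False" -> VerifyTurn reports the winner. The auth_token
# returned by Login identifies this player's session across calls.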
if __name__ == '__main__':
logging.basicConfig()
run()
|
Python
|
BSD-3-Clause
|
DMCDavi/grpc-stateful-less/examples/python/helloworld/greeter_client.py
|
c3621bb2-1735-4913-848b-aa487ba8373d
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0:50051", "start": 1718, "end": 1731, "context": " # of the code.\n with grpc.insecure_channel('0.0.0.0:50051') as channel:\n stub = helloworld_pb2_grpc."}, {"tag": "PASSWORD", "value": "davi", "start": 1077, "end": 1081, "context": "lhost',\n port= '6379',\n password = 'davi')\n\n _redis.set('username', username)\n "}]
|
[{"tag": "IP_ADDRESS", "value": "0.0.0.0:50051", "start": 1718, "end": 1731, "context": " # of the code.\n with grpc.insecure_channel('0.0.0.0:50051') as channel:\n stub = helloworld_pb2_grpc."}, {"tag": "PASSWORD", "value": "davi", "start": 1077, "end": 1081, "context": "lhost',\n port= '6379',\n password = 'davi')\n\n _redis.set('username', username)\n "}]
|
using Microsoft.AspNetCore.Builder;
using Microsoft.AspNetCore.Hosting;
using Microsoft.AspNetCore.Identity;
using Microsoft.EntityFrameworkCore;
using Microsoft.Extensions.Configuration;
using Microsoft.Extensions.DependencyInjection;
using IdentityServer4.Services;
using System.Reflection;
using IdentityServer4;
using System;
using Microsoft.AspNetCore.HttpOverrides;
using System.Security.Cryptography.X509Certificates;
using AspNetCoreSpa.STS.Models;
using AspNetCoreSpa.STS.Resources;
using Microsoft.IdentityModel.Tokens;
using AspNetCoreSpa.STS.Services.Certificate;
using System.Collections.Generic;
using System.Globalization;
using Microsoft.AspNetCore.Localization;
using AspNetCoreSpa.STS.Services;
using Microsoft.Extensions.Options;
using Microsoft.Extensions.Hosting;
using System.IO;
namespace AspNetCoreSpa.STS
{
public class Startup
{
public static IConfiguration Configuration { get; set; }
public IWebHostEnvironment Environment { get; }
public Startup(IConfiguration configuration, IWebHostEnvironment environment)
{
Configuration = configuration;
Environment = environment;
}
public void ConfigureServices(IServiceCollection services)
{
var x509Certificate2 = GetCertificate(Environment, Configuration);
services.Configure<StsConfig>(Configuration.GetSection("StsConfig"));
services.Configure<EmailSettings>(Configuration.GetSection("EmailSettings"));
services.AddSingleton<LocService>();
services.AddLocalization(options => options.ResourcesPath = "Resources");
services.Configure<RequestLocalizationOptions>(
options =>
{
var supportedCultures = new List<CultureInfo>
{
new CultureInfo("en-US"),
new CultureInfo("de-DE"),
new CultureInfo("de-CH"),
new CultureInfo("it-IT"),
new CultureInfo("gsw-CH"),
new CultureInfo("fr-FR")
};
options.DefaultRequestCulture = new RequestCulture(culture: "de-DE", uiCulture: "de-DE");
options.SupportedCultures = supportedCultures;
options.SupportedUICultures = supportedCultures;
var providerQuery = new LocalizationQueryProvider
{
                        QureyParamterName = "ui_locales" // (sic) property name as defined by LocalizationQueryProvider
};
options.RequestCultureProviders.Insert(0, providerQuery);
});
services.Configure<IISOptions>(options =>
{
options.AutomaticAuthentication = false;
options.AuthenticationDisplayName = "Windows";
});
var connectionString = Configuration.GetConnectionString("DefaultConnection");
var migrationsAssembly = typeof(Startup).GetTypeInfo().Assembly.GetName().Name;
// Add framework services.
            services.AddDbContextPool<ApplicationDbContext>(options =>
            {
                options.UseSqlite(connectionString, b => b.MigrationsAssembly(migrationsAssembly));
            });
services.AddIdentity<ApplicationUser, ApplicationRole>()
.AddEntityFrameworkStores<ApplicationDbContext>()
.AddDefaultTokenProviders();
services.AddCors(options =>
{
options.AddPolicy("CorsPolicy", corsBuilder =>
{
corsBuilder.AllowAnyHeader()
.AllowAnyMethod()
.AllowAnyOrigin();
});
});
services.AddTransient<ISeedData, SeedData>();
services.AddTransient<IProfileService, CustomProfileService>();
services.AddTransient<ApplicationDbContext>();
services.AddControllersWithViews();
services.AddRazorPages()
.AddViewLocalization()
.AddDataAnnotationsLocalization(options =>
{
options.DataAnnotationLocalizerProvider = (type, factory) =>
{
var assemblyName = new AssemblyName(typeof(SharedResource).GetTypeInfo().Assembly.FullName);
return factory.Create("SharedResource", assemblyName.Name);
};
});
services.AddTransient<IEmailSender, EmailSender>();
var identityServer = services.AddIdentityServer(options =>
{
options.Events.RaiseErrorEvents = true;
options.Events.RaiseInformationEvents = true;
options.Events.RaiseFailureEvents = true;
options.Events.RaiseSuccessEvents = true;
})
.AddSigningCredential(x509Certificate2)
// this adds the config data from DB (clients, resources, CORS)
.AddConfigurationStore(options =>
{
options.ConfigureDbContext = builder =>
builder.UseSqlite(connectionString,
sql => sql.MigrationsAssembly(migrationsAssembly));
})
// OR In memory config store
//.AddInMemoryApiResources(Config.GetApiResources())
//.AddInMemoryClients(Config.GetClients(Configuration["ClientUrls"]))
//.AddInMemoryIdentityResources(Config.GetIdentityResources())
// this adds the operational data from DB (codes, tokens, consents)
.AddOperationalStore(options =>
{
options.ConfigureDbContext = builder =>
builder.UseSqlite(connectionString,
sql => sql.MigrationsAssembly(migrationsAssembly));
// this enables automatic token cleanup. this is optional.
options.EnableTokenCleanup = true;
// options.TokenCleanupInterval = 15; // interval in seconds. 15 seconds useful for debugging
})
.AddAspNetIdentity<ApplicationUser>()
.AddProfileService<CustomProfileService>();
services.AddAuthentication()
.AddGoogle(options =>
{
options.SignInScheme = IdentityServerConstants.ExternalCookieAuthenticationScheme;
options.ClientId = "476611152863-ltgqfk9jhq1vsenin5039n58ogkraltb.apps.googleusercontent.com";
options.ClientSecret = "rSHvhgdOQUB4KMc5JS1alzhg";
})
.AddOpenIdConnect("aad", "Login with Azure AD", options =>
{
options.Authority = $"https://login.microsoftonline.com/common";
options.TokenValidationParameters = new TokenValidationParameters { ValidateIssuer = false };
options.ClientId = "99eb0b9d-ca40-476e-b5ac-6f4c32bfb530";
options.CallbackPath = "/signin-oidc";
options.SignInScheme = IdentityServerConstants.ExternalCookieAuthenticationScheme;
});
}
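        // Note: IdentityServer's configuration data (clients, resources) and
        // operational data (codes, tokens, consents) live in separate EF
        // stores above, but both reuse the same SQLite connection string and
        // migrations assembly.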
public void Configure(IApplicationBuilder app, IWebHostEnvironment env)
{
// https://github.com/openiddict/openiddict-core/issues/518
// And
// https://github.com/aspnet/Docs/issues/2384#issuecomment-297980490
            var forwardedHeaderOptions = new ForwardedHeadersOptions
            {
                ForwardedHeaders = ForwardedHeaders.XForwardedFor | ForwardedHeaders.XForwardedProto
            };
            forwardedHeaderOptions.KnownNetworks.Clear();
            forwardedHeaderOptions.KnownProxies.Clear();
            app.UseForwardedHeaders(forwardedHeaderOptions);
if (env.IsDevelopment())
{
app.UseDeveloperExceptionPage();
}
else
{
app.UseHsts();
app.UseExceptionHandler("/Home/Error");
}
var locOptions = app.ApplicationServices.GetService<IOptions<RequestLocalizationOptions>>();
app.UseRequestLocalization(locOptions.Value);
app.UseHttpsRedirection();
// app.UseMiddleware<AdminSafeListMiddleware>(
// Configuration["AdminSafeList"]);
app.UseStaticFiles();
app.UseRouting();
app.UseCors("CorsPolicy");
app.UseAuthentication();
app.UseIdentityServer();
app.UseAuthorization();
app.UseEndpoints(endpoints =>
{
endpoints.MapControllerRoute(
name: "default",
pattern: "{controller=Home}/{action=Index}/{id?}");
endpoints.MapRazorPages();
});
}
private static X509Certificate2 GetCertificate(IWebHostEnvironment environment, IConfiguration configuration)
{
var useDevCertificate = bool.Parse(configuration["UseDevCertificate"]);
X509Certificate2 cert = new X509Certificate2(Path.Combine(environment.ContentRootPath, "sts_dev_cert.pfx"), "1234");
if (environment.IsProduction() && !useDevCertificate)
{
var useLocalCertStore = Convert.ToBoolean(configuration["UseLocalCertStore"]);
if (useLocalCertStore)
{
using (X509Store store = new X509Store(StoreName.My, StoreLocation.LocalMachine))
{
var certificateThumbprint = configuration["CertificateThumbprint"];
store.Open(OpenFlags.ReadOnly);
var certs = store.Certificates.Find(X509FindType.FindByThumbprint, certificateThumbprint, false);
cert = certs[0];
store.Close();
}
}
else
{
// Azure deployment, will be used if deployed to Azure
var vaultConfigSection = configuration.GetSection("Vault");
var keyVaultService = new KeyVaultCertificateService(vaultConfigSection["Url"], vaultConfigSection["ClientId"], vaultConfigSection["ClientSecret"]);
cert = keyVaultService.GetCertificateFromKeyVault(vaultConfigSection["CertificateName"]);
}
}
return cert;
}
}
}
|
C#
|
MIT
|
ThomasAnjos/POC_Teste/src/AspNetCoreSpa.STS/Startup.cs
|
0276117a-d3a1-4b30-9d1a-81afee6ae81f
|
[{"tag": "API_KEY", "value": "476611152863-ltgqfk9jhq1vsenin5039n58ogkraltb.apps.googleusercontent.com", "start": 6842, "end": 6914, "context": "onScheme;\n\n options.ClientId = \"476611152863-ltgqfk9jhq1vsenin5039n58ogkraltb.apps.googleusercontent.com\";\n options.ClientSecret = \"rSHv"}, {"tag": "API_KEY", "value": "99eb0b9d-ca40-476e-b5ac-6f4c32bfb530", "start": 7336, "end": 7372, "context": " false };\n options.ClientId = \"99eb0b9d-ca40-476e-b5ac-6f4c32bfb530\";\n options.CallbackPath = \"/si"}, {"tag": "USERNAME", "value": "aspnet", "start": 7783, "end": 7789, "context": " // And\n // https://github.com/aspnet/Docs/issues/2384#issuecomment-297980490\n "}, {"tag": "USERNAME", "value": "openiddict", "start": 7692, "end": 7702, "context": " env)\n {\n // https://github.com/openiddict/openiddict-core/issues/518\n // And\n "}, {"tag": "API_KEY", "value": "rSHvhgdOQUB4KMc5JS1alzhg", "start": 6960, "end": 6984, "context": ".com\";\n options.ClientSecret = \"rSHvhgdOQUB4KMc5JS1alzhg\";\n })\n .AddOpenIdConn"}]
|
[{"tag": "KEY", "value": "476611152863-ltgqfk9jhq1vsenin5039n58ogkraltb.apps.googleusercontent.com", "start": 6842, "end": 6914, "context": "onScheme;\n\n options.ClientId = \"476611152863-ltgqfk9jhq1vsenin5039n58ogkraltb.apps.googleusercontent.com\";\n options.ClientSecret = \"rSHv"}, {"tag": "KEY", "value": "99eb0b9d-ca40-476e-b5ac-6f4c32bfb530", "start": 7336, "end": 7372, "context": " false };\n options.ClientId = \"99eb0b9d-ca40-476e-b5ac-6f4c32bfb530\";\n options.CallbackPath = \"/si"}, {"tag": "USERNAME", "value": "aspnet", "start": 7783, "end": 7789, "context": " // And\n // https://github.com/aspnet/Docs/issues/2384#issuecomment-297980490\n "}, {"tag": "USERNAME", "value": "openiddict", "start": 7692, "end": 7702, "context": " env)\n {\n // https://github.com/openiddict/openiddict-core/issues/518\n // And\n "}, {"tag": "KEY", "value": "rSHvhgdOQUB4KMc5JS1alzhg", "start": 6960, "end": 6984, "context": ".com\";\n options.ClientSecret = \"rSHvhgdOQUB4KMc5JS1alzhg\";\n })\n .AddOpenIdConn"}]
|
"use strict";
var People = (function () {
function People(obj) {
this.personId = obj && obj.personId || null;
this.isenable = obj && obj.isenable || null;
this.title = obj && obj.title || null;
this.firstName = obj && obj.firstName || null;
this.lastName = obj && obj.lastName || null;
this.dob = obj && obj.dob || null;
this.gender = obj && obj.gender || null;
this.phone = obj && obj.phone || null;
this.mobile = obj && obj.mobile || null;
this.occupation = obj && obj.occupation || null;
this.address = obj && obj.address || null;
this.suburbDistrict = obj && obj.suburbDistrict || null;
this.ward = obj && obj.ward || null;
this.postcode = obj && obj.postcode || null;
this.stateProvince = obj && obj.stateProvince || null;
this.country = obj && obj.country || null;
this.ispatient = obj && obj.ispatient || null;
this.isdoctor = obj && obj.isdoctor || null;
this.image = obj && obj.image || null;
this.createdBy = obj && obj.createdBy || null;
this.creationDate = obj && obj.creationDate || null;
this.lastUpdatedBy = obj && obj.lastUpdatedBy || null;
this.lastUpdateDate = obj && obj.lastUpdateDate || null;
}
return People;
}());
exports.People = People;
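// Usage sketch: every field falls back to null when absent. Note that the
// `obj && obj.x || null` pattern also nulls out legitimate falsy values
// (0, "", false), so e.g. isenable = 0 is stored as null.
//
//     var p = new People({ firstName: "Ada" }); // illustrative input
//     p.lastName === null; // true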
|
JavaScript
|
MIT
|
mp4nguyen/project1/dist/dev/people/models/people.model.js
|
96f1bc90-d03b-452b-8bfd-3f1e9848f27c
|
[]
|
[]
|
// Copyright 2014 Reece Heineke<reece.heineke@gmail.com>
#include "iem/session.hpp"
#include <algorithm>
#include <numeric>
#include <string>
#include <vector>
#include <utility>
#include "boost/property_tree/xml_parser.hpp"
namespace iem {
using ptree = boost::property_tree::ptree;
using StringPair = std::pair<std::string, std::string>;
using StringPairVector = std::vector<StringPair>;
constexpr auto _VAH("viewAssetHoldings");
Session::Session(const std::string& username, const std::string& password):
username_(username), password_(password), cookie_(""), client_() {
}
const std::string url_encode(const std::vector<StringPair>& nvs) {
using boost::network::uri::encoded;
using std::operator ""s;
auto op = [](const std::string& a, const StringPair& p) {
return a.empty() ?
encoded(p.first) + '=' + encoded(p.second) :
a + '&' + encoded(p.first) + '=' + encoded(p.second);
};
return std::accumulate(nvs.begin(), nvs.end(), ""s, op);
}
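// Example: {{"a", "1"}, {"b", "x y"}} encodes to "a=1&b=x%20y"
// (names and values percent-encoded via boost::network::uri::encoded).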
ClientRequest buildRequest(const std::string &path,
const StringPairVector& name_value_pairs = {}) {
// URI
boost::network::uri::uri url;
url << boost::network::uri::scheme("https")
<< boost::network::uri::host("iem.uiowa.edu")
<< boost::network::uri::path(path);
ClientRequest request(url);
// Add default headers
using boost::network::header;
// Python requests default headers
request << header("User-Agent", "python-requests/2.10.0")
<< header("Accept-Encoding", "gzip, deflate")
<< header("Accept", "*/*");
// Optional body
if (name_value_pairs.size()) {
const auto body = url_encode(name_value_pairs);
request << boost::network::body(body)
<< header("Content-Length", std::to_string(body.length()))
<< header("Content-Type", "application/x-www-form-urlencoded");
}
return request;
}
const ClientResponse Session::authenticate() {
auto location = login(false);
if (location == "") { // If location is empty string, then login failed
std::cout << "Login failed; attempting forcing login...\n";
location = login(true); // Force login
}
// Market select request
auto market_select_request = buildRequest(location);
// Set cookie
market_select_request << boost::network::header("Set-Cookie", cookie());
// GET request
return client_.get(market_select_request);
}
std::string Session::login(const bool force_login) {
// Login request
auto login_request = buildRequest(
"/iem/trader/TraderLogin.action",
{
{"forceLogin", force_login ? "true" : "false"}, // Required
{"username", this->username()},
{"password", this->password()},
{"loginSubmit", "Sign in"}, // Required
{"_sourcePage", ""}, // Required
});
// POST request
const auto login_response = client_.post(login_request);
// Extract important headers - IEM requests a redirect
std::string location;
for (const auto& h : login_response.headers()) {
if (h.first == "Set-Cookie") {
cookie_ = h.second;
} else if (h.first == "Location") {
location = h.second;
}
}
return location;
}
const ClientResponse Session::logout() {
// Construct request
auto logout_request = buildRequest("/iem/trader/TraderLogin.action?logout=");
// Set cookie
logout_request << boost::network::header("Cookie", this->cookie());
// GET request
const auto response = client_.get(logout_request);
// Set cookie_ to empty string if successful
if (status(response) == 200) {
this->cookie_ = "";
}
return response;
}
Price _parse_price(const std::string &px_str) {
if (px_str.size()) {
std::istringstream iss(px_str);
Price p;
iss >> p;
return p;
}
return nanPrice();
}
position_t _parse_quantity(const std::string &qty_str) {
if (qty_str.size()) {
return std::stoi(qty_str);
}
return 0;
}
const ptree& empty_ptree() {
static ptree t;
return t;
}
unsigned int num_open_orders(ptree::const_assoc_iterator it) {
auto obo_form = it->second.get_child("form", empty_ptree());
std::string value;
if (obo_form == empty_ptree()) {
value = it->second.data();
} else {
// viewAssetHoldings input
auto input_its = obo_form.equal_range("input");
for (auto input_it = input_its.first; input_it != input_its.second; input_it++) {
if (input_it->second.get<std::string>("<xmlattr>.name") == _VAH) {
value = input_it->second.get<std::string>("<xmlattr>.value");
}
}
}
boost::trim(value);
return std::stoi(value);
}
const OrderBook _read_orderbook_html(ptree::const_assoc_iterator tr_it,
const Market& market) {
// Orderbook values
std::string contract_name;
Price best_bid_px = nanPrice();
bool best_bid_priority = false;
Price best_ask_px = nanPrice();
bool best_ask_priority = false;
Price last_trade_px = nanPrice();
position_t position = 0;
unsigned int num_open_bid_orders = 0;
unsigned int num_open_ask_orders = 0;
auto td_its = tr_it->second.equal_range("td");
constexpr auto c("contract");
for (auto it = td_its.first; it != td_its.second; it++) {
// XML attribute called class
const auto& klass = it->second.get<std::string>("<xmlattr>.class", c);
if (klass == c) { // Contract name
contract_name = it->second.data();
boost::trim(contract_name);
} else if (klass == "change-cell bestBidPrice") { // Best bid price
auto data = it->second.get_child("p").data();
boost::trim(data);
best_bid_px = _parse_price(data);
best_bid_priority = data.find("*") != std::string::npos;
} else if (klass == "change-cell bestAskPrice") { // Best ask price
auto data = it->second.get_child("p").data();
boost::trim(data);
best_ask_px = _parse_price(data);
best_ask_priority = data.find("*") != std::string::npos;
} else if (klass == "change-cell lastPrice") { // Last trade price
auto data = it->second.get_child("p").data();
boost::trim(data);
last_trade_px = _parse_price(data);
} else if (klass == "change-cell quantity") { // Position
auto data = it->second.get<std::string>("<xmlattr>.value");
position = _parse_quantity(data);
} else if (klass == "yourBidsCell") { // Number of open bid orders
num_open_bid_orders = num_open_orders(it);
} else if (klass == "yourAsksCell") { // Number of open ask orders
num_open_ask_orders = num_open_orders(it);
}
}
return OrderBook(
Contract(market.name(), contract_name),
best_bid_px,
best_bid_priority,
best_ask_px,
best_ask_priority,
last_trade_px,
num_open_bid_orders,
num_open_ask_orders,
position);
}
const std::string _table_html_string(const std::string& body) {
// In order to increase likelihood of parsing valid XML, find substring that
// covers all html tables
const size_t pos = body.find("<table");
const size_t len = body.rfind("table") - pos + 6; // 6 is length of "<table"
return body.substr(pos, len);
}
ptree _tbody_ptree(const std::string& body) {
const auto tables_str = _table_html_string(body);
// Boost ptree works on streams only
std::istringstream is(tables_str);
// ptree
ptree pt;
read_xml(is, pt);
// XML nodes (terminology?) of interest
return pt.get_child("table.tbody");
}
const std::vector<OrderBook> _read_orderbooks_html(const std::string &body,
const Market& market) {
auto tbody = _tbody_ptree(body);
const auto tr_its = tbody.equal_range("tr");
std::vector<OrderBook> obs;
for (auto it = tr_its.first; it != tr_its.second; it++) {
obs.push_back(_read_orderbook_html(it, market));
}
return obs;
}
const std::vector<OrderBook> Session::orderbook(const Market& market) {
// Construct request
auto market_orderbook_request = buildRequest(
"/iem/trader/MarketTrader.action",
{
{"market", std::to_string(market.value())}
});
// Set cookie
market_orderbook_request << boost::network::header("Cookie", cookie());
// POST request
const auto& response = client_.post(market_orderbook_request);
const auto& obs = _read_orderbooks_html(body(response), market);
return obs;
}
const HoldingMessage _read_message_html(ptree::const_assoc_iterator tr_it) {
// Trader message values
boost::posix_time::ptime date;
std::string market_label;
Action action;
Quantity quantity = 0;
Price price = nanPrice();
auto td_its = tr_it->second.equal_range("td");
int i = 0;
for (auto it = td_its.first; it != td_its.second; it++) {
auto data_str = it->second.data();
boost::trim(data_str);
if (i == 0) { // date
date = date_from_string(data_str);
} else if (i == 1) { // market
market_label = data_str;
} else if (i == 2) { // action
data_str = it->second.get_child("a").data();
action = action_from_string(data_str);
} else if (i == 4) { // quantity
quantity = _parse_quantity(data_str);
} else if (i == 5) { // price
price = _parse_price(data_str);
}
i++;
}
return HoldingMessage(
date,
market_label,
action,
quantity,
price);
}
const std::vector<HoldingMessage> _read_messages_html(const std::string& body) {
auto tbody = _tbody_ptree(body);
const auto tr_its = tbody.equal_range("tr");
std::vector<HoldingMessage> msgs;
for (auto it = tr_its.first; it != tr_its.second; it++) {
msgs.push_back(_read_message_html(it));
}
return msgs;
}
const std::vector<HoldingMessage> Session::holdings(const Contract& contract) {
// Construct request
auto asset_holdings_request = buildRequest(
"/iem/trader/TraderActivity.action",
{
{"market", "contract.market"},
{"asset", std::to_string(contract.asset_id())},
{"activityType", "holdings"},
{_VAH, std::to_string(25)} // Number of transactions? Required?
});
// Set cookie
asset_holdings_request << boost::network::header("Cookie", cookie());
// POST request
const auto& response = client_.post(asset_holdings_request);
return _read_messages_html(body(response));
}
const std::string _activity_type(Side side) {
return (side == Side::BUY) ? "bid" : "ask";
}
const Side _side_from_string(const std::string& side_str) {
if (side_str == "bid") {
return Side::BUY;
} else if (side_str == "ask") {
return Side::SELL;
} else {
throw std::invalid_argument("Unknown side: " + side_str);
}
}
OrderId _order_id(const std::string& href_str, const Side& side) {
const std::string& key = (side == Side::BUY) ? "bidOrder=" : "askOrder=";
const size_t pos = href_str.find(key) + key.size();
const size_t len = href_str.find("&", pos) - pos;
return std::stoi(href_str.substr(pos, len));
}
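// Example: for side == Side::BUY and an href ending in
// "...?bidOrder=123&market=4", the extracted order id is 123.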
const Single _read_order_html(const ptree::const_assoc_iterator tr_it) {
// Outstanding order values
boost::posix_time::ptime order_date;
std::string market_name;
std::string contract_name;
std::string order_type;
Quantity quantity = 0;
Price price = nanPrice();
boost::posix_time::ptime expiration;
OrderId order_id = 0;
auto td_its = tr_it->second.equal_range("td");
int i = 0;
for (auto it = td_its.first; it != td_its.second; it++) {
auto data_str = it->second.data();
boost::trim(data_str);
if (i == 0) { // order date
order_date = date_from_string(data_str);
} else if (i == 1) { // market
market_name = data_str;
} else if (i == 2) { // contract name
contract_name = data_str;
} else if (i == 3) { // order type
order_type = data_str;
} else if (i == 4) { // quantity
quantity = _parse_quantity(data_str);
} else if (i == 5) { // price
price = _parse_price(data_str);
} else if (i == 6) { // expiration date
expiration = date_from_string(data_str);
} else if (i == 7) { // order id
order_id = _order_id(it->second.get<std::string>("a.<xmlattr>.href"),
_side_from_string(order_type));
}
i++;
}
Single so(
Contract(market_name, contract_name),
_side_from_string(order_type),
quantity,
PriceTimeLimit(price, expiration)
);
so.set_id(order_id);
return so;
}
const std::vector<Single> _read_orders_html(const std::string& body) {
auto tbody = _tbody_ptree(body);
const auto tr_its = tbody.equal_range("tr");
std::vector<Single> os;
for (auto it = tr_its.first; it != tr_its.second; it++) {
os.push_back(_read_order_html(it));
}
return os;
}
const SingleOrders Session::outstanding_orders(const Contract& contract,
const Side& side) {
// Construct request
auto outstanding_order_request = buildRequest(
"/iem/trader/TraderActivity.action",
{
{"market", std::to_string(contract.market().value())},
{"asset", std::to_string(contract.asset_id())},
{"activityType", _activity_type(side)},
});
// Set cookie
outstanding_order_request << boost::network::header("Cookie", cookie());
// POST request
const auto& response = client_.post(outstanding_order_request);
return _read_orders_html(body(response));
}
const ClientRequest limit_order_request(const Single& order) {
// Construct request
const auto& c = order.contract();
auto order_request = buildRequest(
"/iem/trader/order/LimitOrder.action",
{
{"limitOrderAssetToMarket", std::to_string(c.asset_to_market_id())},
{"orderType", _activity_type(order.side())},
{"expirationDate", to_string(order.price_time_limit().expiration())},
{"price", to_string(order.price_time_limit().price())},
{"limitOrderQuantity", std::to_string(order.quantity())},
{"placeLimitOrder", "Place Limit Order"},
{"market", std::to_string(c.market().value())},
{"_sourcePage", ""}, // Required?
});
return order_request;
}
const std::string _market_order_type(Side side) {
return (side == Side::BUY) ? "buy" : "sell";
}
const ClientRequest market_order_request(const Single& order) {
// Construct request
const auto& c = order.contract();
auto order_request = buildRequest(
"/iem/trader/order/MarketOrder.action",
{
{"limitOrderAssetToMarket", std::to_string(c.asset_to_market_id())},
{"orderType", _market_order_type(order.side())},
{"marketOrderQuantity", std::to_string(order.quantity())},
{"placeMarketOrder", "Place Market Order"},
{"market", std::to_string(c.market().value())}
});
return order_request;
}
const ClientRequest _single_order_request(const Single &order) {
if (order.price_time_limit().ioc()) {
return market_order_request(order);
} else {
return limit_order_request(order);
}
}
const ClientResponse Session::place_order(const Single& order) {
// Construct request
ClientRequest order_request = _single_order_request(order);
// Set cookie
order_request << boost::network::header("Cookie", cookie());
// POST request
const auto& response = client_.post(order_request);
return response;
}
const std::string _bundle_order_type(const Side& side, const Counterparty& cp) {
const auto action_str = _market_order_type(side);
const auto cp_str = (cp == Counterparty::EXCHANGE) ? "Fixed" : "Market";
return action_str + "At" + cp_str;
}
const ClientRequest _bundle_order_request(const Bundle& order) {
// Construct request
const auto& cb = order.contract_bundle();
auto order_request = buildRequest(
"/iem/trader/order/BundleOrder.action",
{
{"bundle", std::to_string(cb.bundle_id())},
{"orderType", _bundle_order_type(order.side(), order.counterparty())},
{"bundleOrderQuantity", std::to_string(order.quantity())},
{"placeBundleOrder", "Place Bundle Order"},
{"market", std::to_string(cb.market().value())}
});
return order_request;
}
const ClientResponse Session::place_order(const Bundle& order) {
// Construct request
ClientRequest order_request = _bundle_order_request(order);
// Set cookie
order_request << boost::network::header("Cookie", cookie());
// POST request
const auto& response = client_.post(order_request);
return response;
}
const std::string _action(const Side& side) {
return (side == Side::BUY) ? "cancelBidOrder" : "cancelAskOrder";
}
const ClientResponse Session::cancel_order(const Single& order) {
// Construct request
  if (!valid_id(order)) {
    // No exchange-assigned id to cancel against yet.
    // TODO: Return something useful
    return ClientResponse();
  }
const auto oid = (order.side() == Side::BUY) ? "bidOrder": "askOrder";
const auto cxl = url_encode(
{
{_action(order.side()), ""},
{"market", std::to_string(order.contract().market().value())},
{oid, std::to_string(order.id())},
{"asset", std::to_string(order.contract().asset_id())},
{"activityType", _activity_type(order.side())}
});
auto cxl_request = buildRequest("/iem/trader/TraderActivity.action?" + cxl);
// Set cookie
cxl_request << boost::network::header("Cookie", this->cookie());
  // GET request
const auto& response = client_.get(cxl_request);
return response;
}
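// For illustration (hypothetical values), the cancel request assembled above
// is a GET of the form:
//   /iem/trader/TraderActivity.action?cancelBidOrder=&market=51&bidOrder=12345&asset=3&activityType=bid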
const TraderMessage _read_message_html(const std::string& market_name,
ptree::const_assoc_iterator tr_it) {
// Trader message values
boost::posix_time::ptime date;
MessageType msg_type;
std::string contract_name;
Action action;
Quantity quantity = 0;
Price price;
boost::posix_time::ptime expiration_date;
auto td_its = tr_it->second.equal_range("td");
int i = 0;
for (auto it = td_its.first; it != td_its.second; it++) {
auto data_str = it->second.data();
boost::trim(data_str);
if (i == 0) { // date
date = date_from_string(data_str);
} else if (i == 1) { // msg_type
msg_type = message_type_from_string(data_str);
} else if (i == 2) { // contract_name
contract_name = data_str;
} else if (i == 3) { // action
action = action_from_string(data_str);
} else if (i == 4) { // quantity
quantity = std::stoi(data_str);
} else if (i == 5) { // price
price = _parse_price(data_str);
} else if (i == 6) { // expiration_date
expiration_date = expiration_date_from_string(data_str);
}
i++;
}
return TraderMessage(
date,
msg_type,
contract_name,
action,
quantity,
price,
expiration_date);
}
const std::vector<TraderMessage> _read_messages_html(const Market& market,
const std::string& body) {
auto tbody = _tbody_ptree(body);
const auto tr_its = tbody.equal_range("tr");
std::vector<TraderMessage> msgs;
for (auto it = tr_its.first; it != tr_its.second; it++) {
msgs.push_back(_read_message_html(market.name(), it));
}
return msgs;
}
const std::vector<TraderMessage> Session::messages(const Market& market) {
// Construct request
const auto request = market_client_request(market, "home");
// GET request
const auto& response = client_.get(request);
return _read_messages_html(market, body(response));
}
const auto Session::remove_messages(const Market& market) {
// Construct request
const auto request = market_client_request(market, "removeMessages");
// GET request
const auto& response = client_.get(request);
return response;
}
const std::vector<TraderMessage> Session::portfolio(const Market& market) {
// Construct request
const auto request = market_client_request(market, "viewPortfolio");
// GET request
const auto& response = client_.get(request);
return _read_messages_html(market, body(response));
}
ClientRequest Session::market_client_request(const Market& market,
const std::string& query) {
// Construct request
const auto msg = url_encode(
{
{query, ""},
{"market", std::to_string(market.value())}
});
auto request = buildRequest("/iem/trader/TraderMessages.action?" + msg);
request << boost::network::header("Cookie", this->cookie());
return request;
}
int snprintf_session(char* const str, const std::size_t size, const Session& s) {
  // Note: sizeof() applied to the pointer parameter would yield the pointer
  // size, not the buffer size, so the buffer length must be passed in.
  static const char fmt[] = "{\"name\":\"session\", \"username\":\"%s\", "
      "\"password\":\"%s\", \"cookie\":\"%s\"}";
  const auto buf_size = std::min<std::size_t>(size, 160);
  return snprintf(str, buf_size, fmt, s.username().c_str(),
                  s.password().c_str(), s.cookie().c_str());
}
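// Usage sketch for the signature above (the 160-byte cap mirrors the
// previous hard-coded limit and is an assumption, not a protocol constant):
//   char buf[160];
//   snprintf_session(buf, sizeof(buf), session);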
// TODO(rheineke): Write function
//UniqueSession make_unique_session(const std::string& username,
// const std::string& password) {
// return std::unique_ptr(new Session(username, password),
// decltype(&Session::logout));
//}
} // namespace iem
|
C++
|
MIT
|
rheineke/cpp-iem/src/iem/session.cpp
|
00f9b923-ab9d-4373-a6e0-d6172f7999fd
|
[{"tag": "USERNAME", "value": "rheineke", "start": 20654, "end": 20662, "context": "ssword().c_str(), s.cookie().c_str());\n}\n\n// TODO(rheineke): Write function\n//UniqueSession make_unique_sess"}, {"tag": "EMAIL", "value": "reece.heineke@gmail.com", "start": 32, "end": 55, "context": "// Copyright 2014 Reece Heineke<reece.heineke@gmail.com>\n#include \"iem/session.hpp\"\n\n#include <algorithm>"}, {"tag": "NAME", "value": "Reece Heineke", "start": 18, "end": 31, "context": "// Copyright 2014 Reece Heineke<reece.heineke@gmail.com>\n#include \"iem/session.hp"}]
|
[{"tag": "USERNAME", "value": "rheineke", "start": 20654, "end": 20662, "context": "ssword().c_str(), s.cookie().c_str());\n}\n\n// TODO(rheineke): Write function\n//UniqueSession make_unique_sess"}, {"tag": "EMAIL", "value": "reece.heineke@gmail.com", "start": 32, "end": 55, "context": "// Copyright 2014 Reece Heineke<reece.heineke@gmail.com>\n#include \"iem/session.hpp\"\n\n#include <algorithm>"}, {"tag": "NAME", "value": "Reece Heineke", "start": 18, "end": 31, "context": "// Copyright 2014 Reece Heineke<reece.heineke@gmail.com>\n#include \"iem/session.hp"}]
|
// -*- C++ -*-
/**
* @file region_maker.cpp
 * @brief A ROS node to make a correct region for obstacle measurement
*
* @author Yasushi SUMI <y.sumi@aist.go.jp>
*
* Copyright (C) 2021 AIST
* Released under the MIT license
* https://opensource.org/licenses/mit-license.php
*/
#ifdef HAVE_CONFIG_H
#include "config.h"
#endif
#include <ros/ros.h>
#include <UFV/types.h>
#include "region_maker.h"
/*!
* @brief main function
*/
int main(int argc, char** argv)
{
ros::init (argc, argv, "region_maker");
emulated_srs::RegionMaker region_maker;
ros::spin();
return 0;
}
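/*
 * Usage sketch (assumed invocation; the parameter names match the
 * getParam() calls below and resolve in whatever namespace node_handle_
 * was constructed with):
 *   $ rosparam set filename_region my_region.png
 *   $ rosparam set path_to_save /tmp/
 *   $ rosrun emulated_srs region_maker
 */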
emulated_srs::RegionMaker::RegionMaker(void)
:
emulated_srs::ObstacleDetector(),
param_fname_to_save_("Reg.png"),
param_path_to_save_("./")
{
node_handle_.getParam("filename_region", param_fname_to_save_);
node_handle_.getParam("path_to_save", param_path_to_save_);
ROS_INFO("filename_region: %s", param_fname_to_save_.c_str());
ROS_INFO("path_to_save: %s", param_path_to_save_.c_str());
}
void emulated_srs::RegionMaker::initializeMap(
const int width,
const int height,
const int point_step)
{
this->emulated_srs::ObstacleDetector::initializeMap(width,height,point_step);
map_for_detection_.setDrawLabel(true);
return;
}
int
emulated_srs::RegionMaker::save(void) noexcept
{
int width = map_for_showing_depth_data_.width();
int height = map_for_showing_depth_data_.height();
UFV::ImageData<unsigned char> regimg(width, height, 1);
unsigned char *rimg = regimg.data();
unsigned char *dimg = map_for_showing_depth_data_.data();
for(int i=0; i<width*height; i++)
{
if(*(dimg+2) > 0 && *dimg == 0) // R is positive, and B is zero
{
*rimg = 255;
}
rimg ++;
dimg += 3;
}
regimg.setWriteImage(true, param_path_to_save_);
regimg.writeImage(param_fname_to_save_);
ROS_INFO("Saved: %s", (param_path_to_save_ + param_fname_to_save_).c_str());
return(UFV::OK);
}
void emulated_srs::RegionMaker::displayAll(void)
{
  // Copy the depth image with detection results for display
map_for_detection_.normalize(map_for_showing_depth_data_);
if(has_rgb_data_)
{
// Copy the current RGB image.
map_for_showing_rgb_data_ = *(map_for_rgb_display_.getImageData<unsigned char>());
}
// Overwrite the depth image with the obstacle reasons.
//map_for_detection_.drawObstacleRegionWithLabel(map_for_showing_depth_data_);
map_for_detection_.setDrawLabel(false);
map_for_detection_.drawObstacleRegion(map_for_showing_depth_data_);
  UFV::KeyDef dret1{}, dret2{}, dret3{};  // value-initialized: dret1 is only assigned when RGB data exists
// Display the images.
if(has_rgb_data_)
{
dret1 = map_for_showing_rgb_data_.display("RGB", -1);
}
map_for_detection_.setDrawLabel(true);
dret2 = map_for_detection_.display("Detection",-1);
dret3 = map_for_showing_depth_data_.display("Depth", 10);
//std::cout << dret1 << ", " << dret2 << ", " << dret3 << std::endl;
if(dret1 == UFV::KEY_SAVE || dret2 == UFV::KEY_SAVE ||
dret3 == UFV::KEY_SAVE)
{
this->save();
}
return;
}
|
C++
|
MIT
|
yssmii/emulated_srs/src/region_maker.cpp
|
05ef95e0-b402-4189-bf78-5aca09202c79
|
[{"tag": "EMAIL", "value": "y.sumi@aist.go.jp", "start": 142, "end": 159, "context": "r obstacle measurment\n *\n * @author Yasushi SUMI <y.sumi@aist.go.jp>\n * \n * Copyright (C) 2021 AIST\n * Released unde"}, {"tag": "NAME", "value": "Yasushi SUMI", "start": 128, "end": 140, "context": "rect region for obstacle measurment\n *\n * @author Yasushi SUMI <y.sumi@aist.go.jp>\n * \n * Copyright (C) 2021 AI"}]
|
[{"tag": "EMAIL", "value": "y.sumi@aist.go.jp", "start": 142, "end": 159, "context": "r obstacle measurment\n *\n * @author Yasushi SUMI <y.sumi@aist.go.jp>\n * \n * Copyright (C) 2021 AIST\n * Released unde"}, {"tag": "NAME", "value": "Yasushi SUMI", "start": 128, "end": 140, "context": "rect region for obstacle measurment\n *\n * @author Yasushi SUMI <y.sumi@aist.go.jp>\n * \n * Copyright (C) 2021 AI"}]
|
# vim: syntax=ruby:expandtab:shiftwidth=2:softtabstop=2:tabstop=2
name 'cpe_helloit'
maintainer 'Pinterest'
maintainer_email 'itcpe@pinterest.com'
license 'Apache-2.0'
description 'Installs/Configures cpe_helloit'
version '0.1.0'
chef_version '>= 12.6'
supports 'mac_os_x'
depends 'cpe_launchd'
depends 'cpe_profiles'
depends 'cpe_remote'
depends 'cpe_utils'
|
Ruby
|
Apache-2.0
|
maotsu/it-cpe-cookbooks/cpe_helloit/metadata.rb
|
0ff2e299-4f49-4122-bfe2-407971b69386
|
[{"tag": "USERNAME", "value": "cpe_helloit", "start": 73, "end": 84, "context": "ndtab:shiftwidth=2:softtabstop=2:tabstop=2\n\nname 'cpe_helloit'\nmaintainer 'Pinterest'\nmaintainer_email 'itcpe@p"}, {"tag": "EMAIL", "value": "itcpe@pinterest.com", "start": 127, "end": 146, "context": "helloit'\nmaintainer 'Pinterest'\nmaintainer_email 'itcpe@pinterest.com'\nlicense 'Apache-2.0'\ndescription 'Installs/Confi"}]
|
[{"tag": "USERNAME", "value": "cpe_helloit", "start": 73, "end": 84, "context": "ndtab:shiftwidth=2:softtabstop=2:tabstop=2\n\nname 'cpe_helloit'\nmaintainer 'Pinterest'\nmaintainer_email 'itcpe@p"}, {"tag": "EMAIL", "value": "itcpe@pinterest.com", "start": 127, "end": 146, "context": "helloit'\nmaintainer 'Pinterest'\nmaintainer_email 'itcpe@pinterest.com'\nlicense 'Apache-2.0'\ndescription 'Installs/Confi"}]
|
---
layout: post
title: "Introducing boot.rackspace.com"
date: 2014-01-23 16:00
comments: true
author: Antony Messerli
published: true
categories:
- Cloud Servers
- Cloud Tools
- Images
- iPXE
- OpenStack
- Performance
---
We have had a number of customers ask for the ability to create their own Cloud Servers images rather than taking snapshots from our base installs. To fulfill this need, we are announcing a new tool, available as a preview today, called [boot.rackspace.com](http://boot.rackspace.com). The tool enables you to use the various Linux distributions' installers to install directly to the disk of your Cloud Server. <!-- more -->
# How It Works
When you create a Rackspace Cloud Server from the boot.rackspace.com image, it boots the Cloud Server with a small 1 MB [iPXE](http://www.ipxe.org)-based ISO. This in turn sets up the server's assigned networking within the virtual BIOS and netboots into a menu of operating system options hosted over HTTP on [boot.rackspace.com](http://boot.rackspace.com).
You will need to connect to the console of the Cloud Server in order to view the menu after booting the server. By default, the menu will boot from local disk after five minutes, so if you connect to the console too late, just issue a reboot of the server and then reconnect to the console.
Each option will either kick off the install kernels from the various operating systems or automatically load the ISO onto the Cloud Server. From there you can customize your Cloud Server to your heart's content and install directly to the OS disk. Once completed, you can install the Rackspace Cloud Agent, take a snapshot, and then redeploy the image as your golden master. We have also initially included a few useful tools, such as [Clonezilla](http://clonezilla.org/), for moving data around.
{% img center 2014-01-23-introducing-boot-dot-rackspace-dot-com/brc-linux-menu.png %}
# Contributing
We've put all the source for the iPXE scripts on [Github](https://github.com/rackerlabs/boot.rackspace.com/) and welcome contributions. We've also written up some [how-to's](https://github.com/rackerlabs/boot.rackspace.com/wiki) on Rackspace Cloud Servers image creation which will enable you to create images just like our base images.
As contributions are accepted, they will be deployed to the site automatically. Because of the flexibility iPXE provides, you can also create your own custom menus, host them on your own site, and chain load them from the iPXE command line.
The tool currently **only works on the newer Performance Flavors** *(not Standard)* so please keep that in mind when using the tool.
# Using Outside of Rackspace Cloud
The [README](https://github.com/rackerlabs/boot.rackspace.com/blob/master/README.md) also contains instructions for using the tool outside of Rackspace Cloud Servers. Using the iPXE ISO is great for working on your own servers (DRAC, iLO, etc) in the Datacenter because it's very lightweight and provides a lot of options at your fingertips as you can stream all of the needed packages over the network instead of using a large DVD/ISO.
# iPXE Community
The [iPXE](http://ipxe.org) community is great and very helpful. If you'd like to learn more about how network booting works, make sure to check out [networkboot.org](http://networkboot.org/).
# How to Get Started
So in summary, to get started, you can boot a server using the API with the image id:
**9aa0d346-c06f-4652-bbb1-4342a7d2d017**
and then connect to the console of the server. Current and future image id's will be tracked [here](https://github.com/rackerlabs/boot.rackspace.com/wiki/boot.rackspace.com-Image-UUIDs). If you have any questions or feedback, please don't hesitate to open up a github issue or contact us at <bootrax@rackspace.com>.
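If you prefer to script this step, here is a minimal sketch against the OpenStack-compatible Cloud Servers API (the region endpoint, tenant id, auth token, and flavor are placeholders to substitute from your own account; the image id is the one listed above):

```python
import json
import requests

ENDPOINT = "https://dfw.servers.api.rackspacecloud.com/v2/<tenant_id>"  # placeholder
TOKEN = "<auth_token>"  # placeholder token from the identity service

payload = {
    "server": {
        "name": "boot-rackspace-com-test",
        "imageRef": "9aa0d346-c06f-4652-bbb1-4342a7d2d017",  # iPXE image id above
        "flavorRef": "performance1-1",  # any Performance flavor
    }
}

resp = requests.post(
    ENDPOINT + "/servers",
    headers={"X-Auth-Token": TOKEN, "Content-Type": "application/json"},
    data=json.dumps(payload),
)
resp.raise_for_status()
print(resp.json()["server"]["id"])  # then watch the server console for the menu
```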
# About the Author
Antony Messerli is a Principal Engineer at Rackspace working on the Cloud Servers Engineering team. You can follow him on twitter [@ajmesserli](http://twitter.com/ajmesserli) and on Github as [amesserl](https://github.com/amesserl).
|
Markdown
|
Apache-2.0
|
Acidburn0zzz/developer.rackspace.com/src/site_source/_posts/2014-01-23-introducing-boot-dot-rackspace-dot-com.markdown
|
83dac12a-e203-4812-995f-a262e038db6b
|
[{"tag": "USERNAME", "value": "@ajmesserli", "start": 3960, "end": 3971, "context": " Engineering team. You can follow him on twitter [@ajmesserli](http://twitter.com/ajmesserli) and on Github as "}, {"tag": "USERNAME", "value": "amesserl", "start": 4051, "end": 4059, "context": "i) and on Github as [amesserl](https://github.com/amesserl).\n"}, {"tag": "USERNAME", "value": "amesserl", "start": 4022, "end": 4030, "context": "(http://twitter.com/ajmesserli) and on Github as [amesserl](https://github.com/amesserl).\n"}, {"tag": "USERNAME", "value": "ajmesserli", "start": 3992, "end": 4002, "context": "w him on twitter [@ajmesserli](http://twitter.com/ajmesserli) and on Github as [amesserl](https://github.com/a"}, {"tag": "NAME", "value": "Antony Messerli", "start": 3829, "end": 3844, "context": "s at <bootrax@rackspace.com>.\n\n# About the Author\nAntony Messerli is a Principal Engineer at Rackspace working on t"}, {"tag": "NAME", "value": "Antony Messerli", "start": 103, "end": 118, "context": "om\"\ndate: 2014-01-23 16:00\ncomments: true\nauthor: Antony Messerli\npublished: true\ncategories: \n- Cloud Servers\n- Cl"}]
|
[{"tag": "USERNAME", "value": "@ajmesserli", "start": 3960, "end": 3971, "context": " Engineering team. You can follow him on twitter [@ajmesserli](http://twitter.com/ajmesserli) and on Github as "}, {"tag": "USERNAME", "value": "amesserl", "start": 4051, "end": 4059, "context": "i) and on Github as [amesserl](https://github.com/amesserl).\n"}, {"tag": "USERNAME", "value": "amesserl", "start": 4022, "end": 4030, "context": "(http://twitter.com/ajmesserli) and on Github as [amesserl](https://github.com/amesserl).\n"}, {"tag": "USERNAME", "value": "ajmesserli", "start": 3992, "end": 4002, "context": "w him on twitter [@ajmesserli](http://twitter.com/ajmesserli) and on Github as [amesserl](https://github.com/a"}, {"tag": "NAME", "value": "Antony Messerli", "start": 3829, "end": 3844, "context": "s at <bootrax@rackspace.com>.\n\n# About the Author\nAntony Messerli is a Principal Engineer at Rackspace working on t"}, {"tag": "NAME", "value": "Antony Messerli", "start": 103, "end": 118, "context": "om\"\ndate: 2014-01-23 16:00\ncomments: true\nauthor: Antony Messerli\npublished: true\ncategories: \n- Cloud Servers\n- Cl"}]
|
from __future__ import print_function, division
import _init_paths
import math
import os.path as osp
from shapely.geometry import Polygon
from gen_data import get_cent
from bbox_util import is_rect
import argparse
import sys
from model.config import cfg
def parse_args():
"""
Parse input arguments
"""
parser = argparse.ArgumentParser(description='Generate txt result file')
parser.add_argument('--dir', dest='base_dir',
help='result base dir',
default='/home/hezheqi/data/frame/result', type=str)
parser.add_argument('--gt', dest='gt_dir',
help='gt base dir',
default='/data/hezheqi/frame/test/gt', type=str)
parser.add_argument('--name', dest='name',
help='out name', default=None, type=str)
parser.add_argument('--list', dest='img_list_dir',
help='image list', default='/data/hezheqi/frame/test/img_list.txt', type=str)
if len(sys.argv) == 1:
parser.print_help()
sys.exit(1)
args = parser.parse_args()
return args
def read_txt(name, use_bound=False, rect_label=None, is_gt=False):
ret = []
if not osp.exists(name):
return ret
with open(name) as fin:
for line in fin:
info = line.strip().split()
if len(info) == 1:
continue
if is_gt and len(info) != 9:
continue
info = list(map(int, info))
# for i in range(len(info)):
# info[i] = max(0, info[i])
      if rect_label is not None:  # only use rectangle gt
rect_label.append(is_rect(info[1:]))
pts = [(info[i], info[i + 1]) for i in range(1, len(info), 2)]
cx, cy = get_cent(info[1:])
pts.sort(key=lambda a: math.atan2(a[1] - cy, a[0] - cx))
# if is_gt:
# print(pts)
frame = Polygon(pts)
if use_bound:
x1, y1, x2, y2 = frame.bounds
# print(x1, y1, x2, y2)
frame = Polygon([[x1, y1], [x2, y1], [x2, y2], [x1, y2]])
if not frame.is_valid:
print(info[0])
continue
# frame = frame.convex_hull
ret.append(frame)
return ret
def calculate_iou(p1, p2):
a1 = p1.area
a2 = p2.area
# print(a1, a2)
# print(p1.is_valid, p2.is_valid)
intersection = p1.intersection(p2).area
return intersection / (a1 + a2 - intersection)
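# Worked example: two unit squares offset by 0.5 along x overlap in a
# 0.5 x 1 rectangle, so IoU = 0.5 / (1 + 1 - 0.5) = 1/3:
# calculate_iou(Polygon([(0, 0), (1, 0), (1, 1), (0, 1)]),
#               Polygon([(0.5, 0), (1.5, 0), (1.5, 1), (0.5, 1)]))  # ~0.333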
def verify_point_distance(poly1, poly2):
pts1 = list(poly1.exterior.coords)
pts2 = list(poly2.exterior.coords)
for p1, p2 in zip(pts1, pts2):
dis = math.pow(p1[0] - p2[0], 2) + math.pow(p1[1] - p2[1], 2)
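    # 2500 is a squared-pixel threshold: corresponding corners may be at
    # most 50 px apart for the two quadrilaterals to count as a match.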
if dis > 2500:
return False
return True
def eval_one(results, gts, point_dis=False, rect_label=None):
'''
:param results:
:param gts:
:param point_dis:
:param rect_label: use rectangle or not
:return right_num, error_num, mid_num
'''
m = len(gts)
is_used = [False] * m
right_num = 0
err_num = 0
mid_num = 0
for res in results:
if not point_dis:
max_iou = -1
max_index = -1
for j, gt in enumerate(gts):
if is_used[j]:
continue
iou = calculate_iou(res, gt)
if max_iou < iou:
max_iou = iou
max_index = j
if max_iou > th:
is_used[max_index] = True
        if rect_label is None:
          right_num += 1
        elif rect_label[max_index]:
          right_num += 1
        else:
          mid_num += 1
else:
err_num += 1
else:
flag = False
for j, gt in enumerate(gts):
if is_used[j]:
continue
if verify_point_distance(res, gt):
is_used[j] = True
right_num += 1
flag = True
break
if not flag:
err_num += 1
assert (right_num <= m)
assert (err_num <= len(results))
return right_num, err_num, mid_num
def evaluate(mean_f=True, point_dis=False, rect_flag=False):
name_list = open(name_list_dir).read().strip().split('\n')
fout = open(osp.join(cfg.DATA_DIR, 'wrong.txt'), 'w')
precision, recall, page_correct = 0, 0, 0
right_all, error_all, gt_all, res_all = 0, 0, 0, 0
for name in name_list:
results = read_txt(osp.join(res_base_dir, name + '.txt'), use_bound=False)
if rect_flag:
rect_label = []
else:
rect_label = None
gts = read_txt(osp.join(gt_base_dir, name + '.txt'), rect_label=rect_label,
is_gt=True, use_bound=False)
right_num, error_num, mid_num = eval_one(results, gts, rect_label=rect_label, point_dis=point_dis)
# right_num, error_num, mid_num = eval_one(results, gts)
right_all += right_num
error_all += error_num
gt_all += len(gts) - mid_num
res_all += len(results) - mid_num
if len(results) - mid_num > 0:
precision += right_num / (len(results) - mid_num)
if len(gts) - mid_num > 0:
recall += right_num / (len(gts) - mid_num)
if right_num == len(gts) and error_num == 0:
# if right_num == len(gts):
page_correct += 1
else:
fout.write('{}\n'.format(name))
n = len(name_list)
precision /= n
recall /= n
page_correct /= n
f1 = 2 * precision * recall / (precision + recall)
print('{} {:.5f} {:.5f} {:.5f} {:.5f}'.format(th, precision, recall, f1, page_correct))
if not mean_f:
precision = right_all / res_all
recall = right_all / gt_all
f1 = 2 * precision * recall / (precision + recall)
# print(th, precision, recall, f1, page_correct)
print('{} {:.5f} {:.5f} {:.5f} {:.5f}'.format(th, precision, recall, f1, page_correct))
if __name__ == '__main__':
# gt_base_dir = '/data/datasets/frame/test_2000/gt'
# res_base_dir = '/data/datasets/frame/result/result_all_0.8_th0.75'
# res_base_dir = '/data3/dt'
# res_base_dir = '/data/datasets/frame/result/result_ssd_th0.75'
# res_base_dir = '/home/hezheqi/data/frame/result/faster_reg2_poly'
# res_base_dir = '/home/hezheqi/Project/dpreg/net/results/pages_mult/txt'
# res_base_dir = '/home/cpdp/Documents/yf-workspace/data/2000_res_txt'
# res_base_dir = '/data3/20w_results/ly_crf_new'
# res_base_dir = '/data3/20w_results/dt'
# res_base_dir = '/home/cpdp/Documents/yf-workspace/data/29845_LD_DRR'
# res_base_dir = '/data/datasets/frame/result/result_2000_0.8_th0.75'
# name_list_dir = '/data/datasets/frame/test_2000/img_list.txt'
args = parse_args()
gt_base_dir = args.gt_dir
res_base_dir = osp.join(args.base_dir, args.name)
th = 0.9
name_list_dir = args.img_list_dir
evaluate(mean_f=False, point_dis=False)
# evaluate(False, True)
|
Python
|
MIT
|
lz20061213/quadrilateral/tools/eval_frame.py
|
e3a92ef8-f7c5-430b-bc6d-40b1422246e9
|
[]
|
[]
|
import datetime
import unittest
import unittest.mock as mock
from betdaq.apiclient import APIClient
from betdaq.endpoints.account import Account
class AccountTest(unittest.TestCase):
def setUp(self):
client = APIClient('username', 'password')
self.account = Account(client)
@mock.patch('betdaq.endpoints.account.Account.process_response')
@mock.patch('betdaq.endpoints.account.Account.request', return_value=mock.Mock())
def test_get_account_balances(self, mock_request, mock_process_response):
self.account.get_account_balances()
mock_request.assert_called_once_with('GetAccountBalances', {}, secure=True)
assert mock_process_response.call_count == 1
@mock.patch('betdaq.endpoints.account.Account.process_response')
@mock.patch('betdaq.endpoints.account.Account.request', return_value=mock.Mock())
def test_get_account_transactions(self, mock_request, mock_process_response):
        # Pin to UTC so the expected epoch values below hold regardless of the
        # machine's local timezone.
        self.account.get_account_transactions(
            StartTime=datetime.datetime(2017, 1, 1, tzinfo=datetime.timezone.utc).timestamp(),
            EndTime=datetime.datetime(2017, 1, 10, tzinfo=datetime.timezone.utc).timestamp())
mock_request.assert_called_once_with(
'ListAccountPostings', {'StartTime': 1483228800.0, 'EndTime': 1484006400.0}, secure=True
)
assert mock_process_response.call_count == 1
@mock.patch('betdaq.endpoints.account.Account.process_response')
@mock.patch('betdaq.endpoints.account.Account.request', return_value=mock.Mock())
def test_get_account_transactions_by_id(self, mock_request, mock_process_response):
self.account.get_account_transactions_by_id(TransactionId=1)
mock_request.assert_called_once_with('ListAccountPostingsById', {'TransactionId': 1}, secure=True)
assert mock_process_response.call_count == 1
@mock.patch('betdaq.endpoints.account.Account.process_response')
@mock.patch('betdaq.endpoints.account.Account.request', return_value=mock.Mock())
def test_change_account_password(self, mock_request, mock_process_response):
self.account.change_account_password(Password='new_password')
mock_request.assert_called_once_with('ChangePassword', {'Password': 'new_password'}, secure=True)
assert mock_process_response.call_count == 1
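
# Allow running this file directly (python test_account.py) as well as via a
# test runner.
if __name__ == '__main__':
    unittest.main()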
|
Python
|
MIT
|
ScoreX/betdaq/tests/test_account.py
|
78f0938f-e4e6-4202-9aca-f32983dfab07
|
[]
|
[]
|
/*globals hapyak */
'use strict';
hapyak.shopifyWidget = {
didSetup: false,
inEditor: false,
widgetData: null,
library: null,
shopifyLoaded: false,
buttonText: "Add-to-Cart",
buttonColor: '#1892BF',
client: {},
annotationConfig: {},
addStyle: function(node, style, color) {
var target = document.getElementById(node);
target.style[style] = color;
},
init: function mainSetup(isEditMode, data) {
this.widgetData = data;
this.library = hapyak && hapyak.widget && hapyak.widget.library || {};
this.inEditor = this.library.mode === 'view' && hapyak.widget.player.isEditMode;
this.annotationConfig = hapyak && hapyak.context && hapyak.context.annotationConfig || {};
        if (this.library.mode === 'edit') {
            return;
        }
hapyak.widget.stopEditing();
this.setup();
},
setupToggle: function mainSetupToggle() {
// Toggle for editing/viewing in hy edit mode
var toggleBtn = document.getElementById('change-mode'),
isEditMode = hapyak.widget.player.isEditMode,
addToCart = document.getElementById('add-to-cart'),
configSaveBtn = document.getElementById('save-widget-config');
        this.buttonText = hapyak.shopifyWidget.annotationConfig.text.value;
        this.buttonColor = "#" + hapyak.shopifyWidget.annotationConfig["background-color"].value;
        if (toggleBtn) {
            toggleBtn.style.display = isEditMode && this.library.mode === 'view' ? 'block' : 'none';
            toggleBtn.addEventListener('click', this.library.utils.reload, false);
        }
        if (addToCart) {
            this.addStyle('add-to-cart', 'backgroundColor', this.buttonColor);
            addToCart.innerHTML = this.buttonText;
        }
hapyak.context.player.addClass('hapyak-annotation-full-frame');
        $('#add-to-cart').click(function(){
            if ($('#shopify-container').hasClass('active')) {
                hapyak.context.player.play();
                // 'buttonText' alone is undefined in this handler's scope;
                // reference it through the widget object.
                addToCart.innerHTML = hapyak.shopifyWidget.buttonText;
            } else {
                hapyak.context.player.pause();
                addToCart.innerHTML = "Close";
            }
            $('#shopify-container').toggleClass('active');
        });
},
setup: function mainSetup() {
this.library.utils.display('#widget-body', true);
this.library.utils.display('#view-container', true);
this.setupToggle();
if (this.didSetup) {
return;
}
if (this.shopifyLoaded) {
this.shopifyConfig()
}
this.library.utils.applyConfig(this.library.config);
this.didSetup = true;
},
shopifyConfig: function shopifyConfig(domain, accessToken, options, idsArray) {
// if (!domain || !apiKey || !appId) {
// return alert('Please make sure to setup all configs properly.')
// }
this.client = ShopifyBuy.buildClient({
domain: domain || 'hapyak-test-store.myshopify.com',
storefrontAccessToken: accessToken || '23fedee89bcadec0487bf990c2c714d1',
});
// Sample Options:
var defaultOptions = {
"product": {
"layout": "horizontal",
"variantId": "all",
"width": "100%",
"contents": {
"img": false,
"imgWithCarousel": true,
"variantTitle": false,
"description": true,
"buttonWithQuantity": false,
"quantity": false
},
"styles": {
"product": {
"text-align": "left",
"@media (min-width: 601px)": {
"max-width": "100%",
"margin-left": "0",
"margin-bottom": "50px"
}
},
"title": {
"font-size": "26px"
},
"price": {
"font-size": "18px"
},
"compareAt": {
"font-size": "15px"
}
}
},
"cart": {
"contents": {
"button": true
},
"styles": {
"footer": {
"background-color": "#ffffff"
}
}
},
"modalProduct": {
"contents": {
"img": false,
"imgWithCarousel": true,
"variantTitle": false,
"buttonWithQuantity": true,
"button": false,
"quantity": false
},
"styles": {
"product": {
"@media (min-width: 601px)": {
"max-width": "100%",
"margin-left": "0px",
"margin-bottom": "0px"
}
}
}
},
"productSet": {
"styles": {
"products": {
"@media (min-width: 601px)": {
"margin-left": "-20px"
}
}
}
}
};
ShopifyBuy.UI.onReady(this.client).then(function (ui) {
ui.createComponent('product', {
id: idsArray || [1658849624164],
node: document.getElementById('shopify-container'),
moneyFormat: '%24%7B%7Bamount%7D%7D',
options: options || defaultOptions
});
});
},
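    // Example call with a hypothetical store (falsy arguments fall back to
    // the test-store defaults above):
    //   hapyak.shopifyWidget.shopifyConfig(
    //       'my-store.myshopify.com', '<storefront-access-token>', null, [1234567890]);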
    addProduct: function addProduct() {
},
trackAction: function hyTrackAction (action, mode, values) {
        var data;
if(!action || !mode || !values) {
return;
}
data = $.extend({}, values);
this.library.utils.track.event('hapyak', action, data);
},
loadScript: function loadScript() {
var script = document.createElement('script'),
scriptURL = 'https://sdks.shopifycdn.com/buy-button/latest/buy-button-storefront.min.js';
script.async = true;
script.src = scriptURL;
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(script);
        script.onload = function () {
            // Record whether the Buy Button SDK attached itself to the window.
            hapyak.shopifyWidget.shopifyLoaded = !!(window.ShopifyBuy && window.ShopifyBuy.UI);
        };
},
customLoad: function customLoad() {
/*
Required to init widget load for both editor and viewer.
Widgets may require unique events to occur before load, so this logic
is executed on a per widget basis.
*/
        if (window.ShopifyBuy && window.ShopifyBuy.UI) {
            hapyak.shopifyWidget.shopifyLoaded = true;
        } else {
            hapyak.shopifyWidget.loadScript();
        }
hapyak.widget.library.utils.startLoad();
}
};
hapyak.widget.library.utils.onWidgetLoad(hapyak.shopifyWidget.init.bind(hapyak.shopifyWidget));
hapyak.context.addEventListener('iframeshow', hapyak.shopifyWidget.customLoad, false);
|
JavaScript
|
MIT
|
mjmcgrath2010/shopify-widget/static/js/widget_main.js
|
1666bced-a69c-4362-9686-f2bec90b6b53
|
[]
|
[]
|
<?php
/*
* Fresns (https://fresns.org)
* Copyright (C) 2021-Present Jarvis Tang
* Released under the Apache-2.0 License.
*/
return [
'accepted' => 'باید ومنل شی :attribute.',
'accepted_if' => 'The :attribute must be accepted when :other is :value.',
'active_url' => ':attribute یو باوري لینک نه دی.',
'after' => 'باید:attribute تر نن ورځې نیټې پورې :date.',
'after_or_equal' => ':attribute باید وروستی نیټه وي یا د نیټې سره سمون ولري :date.',
'alpha' => 'دا باید شامل نه وي :attribute یوازې په حرفو کې.',
'alpha_dash' => 'دا باید شامل نه وي :attribute یوازې په حرفو کې، شمیرې او متره.',
'alpha_num' => 'شمیرې او متره :attribute یوازې خطونه او شمیرې.',
'array' => 'دا باید وي :attribute ًمیټرکس.',
'before' => 'باید:attribute د تاریخ پخوا تاریخ وټاکئ :date.',
'before_or_equal' => ':attribute دا باید وي د تیر نیټې یا نیټې سره سمون خوري :date.',
'between' => [
'array' => 'شمیرې او متره :attribute د عناصرو په منځ کې :min او :max.',
'file' => 'د دوتنې اندازه باید وي:attribute ما بين:min او :max كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute ما بين:min او :max.',
'string' => 'د متن حروف باید باید وي :attribute ما بين:min او :max.',
],
'boolean' => 'دا باید ارزښت وي :attribute او یا هم true یا false .',
'confirmed' => 'د تایید ساحه د ساحې سره سمون نه لري:attribute.',
'current_password' => 'The password is incorrect.',
'date' => ':attribute نېټه اعتبار نلري .',
'date_equals' => 'دا باید وي :attribute د نیټې سره سم:date.',
'date_format' => 'مطابقت نلري :attribute د شکل سره:format.',
'declined' => 'The :attribute must be declined.',
'declined_if' => 'The :attribute must be declined when :other is :value.',
'different' => 'ساحې باید وي :attribute و :other مختلف.',
'digits' => 'شمیرې او متره :attribute په :digits شمېر / شمېرې.',
'digits_between' => 'شمیرې او متره :attribute ما بين:min و :max شمېر / شمېرې .',
'dimensions' => 'د :attribute د ناباوره انځور اړخونه لري.',
'distinct' => 'د ساحې څخه :attribute د نقل ارزښت .',
'email' => 'دا باید وي :attribute یو باوري بریښلیک پته جوړښت.',
'ends_with' => 'The :attribute must end with one of the following: :values.',
'enum' => 'The selected :attribute is invalid.',
'exists' => 'مشخص ارزښت :attribute شتون نلري.',
'file' => 'د :attribute دا باید یوه فایل وي.',
'filled' => ':attribute لازمه ده.',
'gt' => [
'array' => 'شمیرې او متره :attribute له زیاتو څخه :value عناصر/عنصر.',
'file' => 'د دوتنې اندازه باید وي:attribute په پرتله ډیر :value كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute په پرتله ډیر :value.',
'string' => 'د متن اوږدوالی باید وي :attribute څخه زیات :value توري/توري.',
],
'gte' => [
'array' => 'شمیرې او متره :attribute لږ تر لږه :value عنصر / عناصر.',
'file' => 'د دوتنې اندازه باید وي:attribute لږترلږه :value كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute مساوی یا زیات :value.',
'string' => 'د متن اوږدوالی باید وي :attribute لږترلږه :value توري/توري.',
],
'image' => 'دا باید وي :attribute انځور.',
'in' => ':attribute غير موجود.',
'in_array' => ':attribute غير موجود في :other.',
'integer' => 'دا باید وي:attribute هو عدد صحيح.',
'ip' => 'دا باید وي:attribute عنوان IP ریښتیا.',
'ipv4' => 'دا باید وي:attribute عنوان IPv4 ریښتیا.',
'ipv6' => 'دا باید وي:attribute عنوان IPv6 ریښتیا.',
'json' => 'دا باید وي:attribute د اوریدلو ډول JSON.',
'lt' => [
'array' => 'شمیرې او متره :attribute له کم څخه :value عناصر/عنصر.',
'file' => 'د دوتنې اندازه باید وي:attribute لږ :value كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute لږ :value.',
'string' => 'د متن اوږدوالی باید وي :attribute له کم څخه :value توري/توري.',
],
'lte' => [
'array' => 'دا باید شامل نه وي :attribute له زیاتو څخه :value عناصر/عنصر.',
'file' => 'د دوتنې اندازه باید له حد نه زیاته نه وي :attribute :value كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute نسبت برابر یا کوچنی :value.',
'string' => 'د متن اوږدوالی باید له زیاتوالی نه وي:attribute :value توري/توري.',
],
'mac_address' => 'The :attribute must be a valid MAC address.',
'max' => [
'array' => 'دا باید شامل نه وي :attribute له زیاتو څخه :max عناصر/عنصر.',
'file' => 'د دوتنې اندازه باید له حد نه زیاته وي :attribute :max كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute نسبت برابر یا کوچنی :max.',
'string' => 'د متن اوږدوالی باید له زیاتوالی نه وي:attribute :max توري/توري.',
],
'mimes' => 'دا باید د ډول دوسیه وي : :values.',
'mimetypes' => 'دا باید یوه فایل وي: :values.',
'min' => [
'array' => 'شمیرې او متره :attribute لږ تر لږه :min عنصر / عناصر.',
'file' => 'د دوتنې اندازه باید وي:attribute لږترلږه :min كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute مساوی یا زیات :min.',
'string' => 'د متن اوږدوالی باید وي :attribute لږترلږه :min توري/توري.',
],
'multiple_of' => 'The :attribute must be a multiple of :value.',
'not_in' => ':attribute موجود.',
'not_regex' => 'فورمول :attribute غلط.',
'numeric' => 'باید:attribute یو شمېره.',
'password' => 'The password is incorrect.',
'present' => 'باید چمتو شی :attribute.',
'prohibited' => 'The :attribute field is prohibited.',
'prohibited_if' => 'The :attribute field is prohibited when :other is :value.',
'prohibited_unless' => 'The :attribute field is prohibited unless :other is in :values.',
'prohibits' => 'The :attribute field prohibits :other from being present.',
'regex' => 'فورمول :attribute .غير صحيح.',
'required' => ':attribute اړینه ده.',
'required_array_keys' => 'The :attribute field must contain entries for: :values.',
'required_if' => ':attribute که چیرې د اړتیا په صورت کې اړتیا وي:other مساو :value.',
'required_unless' => ':attribute که نه :other مساو :values.',
'required_with' => ':attribute که اړتیا وي شتون لري :values.',
'required_with_all' => ':attribute که اړتیا وي شتون لري :values.',
'required_without' => ':attribute د اړتیا پرته :values.',
'required_without_all' => ':attribute که اړتیا شتون نلري :values.',
'same' => 'اړینه ده :attribute سره :other.',
'size' => [
'array' => 'شمیرې او متره :attribute په :size عنصر/عناصر په سمه توګه.',
'file' => 'د دوتنې اندازه باید وي:attribute :size كيلوبايت.',
'numeric' => 'دا باید ارزښت وي :attribute سره برابر :size.',
'string' => 'شمیرې او متره متن :attribute په :size توري/توري په سمه توګه.',
],
'starts_with' => 'دا باید پیل شي :attribute د لاندې ارزښتونو څخه یو: :values',
'string' => 'دا باید وي:attribute متن.',
'timezone' => 'دا باید وي:attribute یو باوري نیټه.',
'unique' => 'ارزښتونه :attribute کارول شوی.',
'uploaded' => 'د پورته کولو توان نلري :attribute.',
'url' => 'د لینک بڼه :attribute غلط.',
'uuid' => ':attribute دا باید غیر رسمي وي UUID غږ.',
'custom' => [
'attribute-name' => [
'rule-name' => 'custom-message',
],
],
];
|
PHP
|
Apache-2.0
|
fresns/dto/src/Validate/lang/ps/validation.php
|
fce9a542-3eb4-41d6-8280-c076b6ae6a49
|
[]
|
[]
|
<?php
session_start();
mb_internal_encoding('UTF-8');
date_default_timezone_set('Europe/Rome');
/*
|--------------------------------------------------------------------------
| Optional security
|--------------------------------------------------------------------------
|
| if set to true only those will access RF whose url contains the access key(akey) like:
| <input type="button" href="../filemanager/dialog.php?field_id=imgField&lang=en_EN&akey=myPrivateKey" value="Files">
| in tinymce a new parameter added: filemanager_access_key:"myPrivateKey"
| example tinymce config:
|
| tiny init ...
| external_filemanager_path:"../filemanager/",
| filemanager_title:"Filemanager" ,
| filemanager_access_key:"myPrivateKey" ,
| ...
|
*/
define('USE_ACCESS_KEYS', false); // TRUE or FALSE
/*
|--------------------------------------------------------------------------
| DON'T COPY THIS VARIABLES IN FOLDERS config.php FILES
|--------------------------------------------------------------------------
*/
/*
|--------------------------------------------------------------------------
| Path configuration
|--------------------------------------------------------------------------
| In this configuration the folder tree is
| root
| |- source <- upload folder
| |- thumbs <- thumbnail folder [must have write permission (755)]
| |- filemanager
| |- js
| | |- tinymce
| | | |- plugins
| | | | |- responsivefilemanager
| | | | | |- plugin.min.js
*/
$config = array(
/*
|--------------------------------------------------------------------------
| DON'T TOUCH (base url (only domain) of site).
|--------------------------------------------------------------------------
|
| without final /
|
*/
'base_url' => ((isset($_SERVER['HTTPS']) && $_SERVER['HTTPS'] && ! in_array(strtolower($_SERVER['HTTPS']), array( 'off', 'no' ))) ? 'https' : 'http') . '://' . $_SERVER['HTTP_HOST'],
/*
|--------------------------------------------------------------------------
| path from base_url to base of upload folder
|--------------------------------------------------------------------------
|
| with start and final /
|
*/
'upload_dir' => '/public/upload2/',
/*
|--------------------------------------------------------------------------
| relative path from filemanager folder to upload folder
|--------------------------------------------------------------------------
|
| with final /
|
*/
'current_path' => '../../upload2/',
/*
|--------------------------------------------------------------------------
| relative path from filemanager folder to thumbs folder
|--------------------------------------------------------------------------
|
| with final /
| DO NOT put inside upload folder
|
*/
'thumbs_base_path' => '../../thumbs/',
/*
|--------------------------------------------------------------------------
| Access keys
|--------------------------------------------------------------------------
|
| add access keys eg: array('myPrivateKey', 'someoneElseKey');
| keys should only contain (a-z A-Z 0-9 \ . _ -) characters
| if you are integrating lets say to a cms for admins, i recommend making keys randomized something like this:
| $username = 'Admin';
| $salt = 'dsflFWR9u2xQa' (a hard coded string)
| $akey = md5($username.$salt);
| DO NOT use 'key' as access key!
| Keys are CASE SENSITIVE!
|
*/
'access_keys' => array(),
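// Illustrative only: with USE_ACCESS_KEYS set to true, an entry such as
//   'access_keys' => array(md5('Admin' . 'dsflFWR9u2xQa')),
// (reusing the example username/salt above) would admit only URLs carrying
// &akey=<that md5 hash>.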
//--------------------------------------------------------------------------------------------------------
// YOU CAN COPY AND CHANGE THESE VARIABLES INTO FOLDERS config.php FILES TO CUSTOMIZE EACH FOLDER OPTIONS
//--------------------------------------------------------------------------------------------------------
/*
|--------------------------------------------------------------------------
| Maximum upload size
|--------------------------------------------------------------------------
|
| in Megabytes
|
*/
'MaxSizeUpload' => 100,
/*
|--------------------------------------------------------------------------
| default language file name
|--------------------------------------------------------------------------
*/
'default_language' => "en_EN",
/*
|--------------------------------------------------------------------------
| Icon theme
|--------------------------------------------------------------------------
|
| Default available: ico and ico_dark
| Can be set to custom icon inside filemanager/img
|
*/
'icon_theme' => "ico",
//Show or hide folder sizes in the list view (computing the size of large folders can be slow)
'show_folder_size' => true,
//Show or not show sorting feature in filemanager
'show_sorting_bar' => true,
//activate or deactivate transliteration (converts accented and other special characters into plain A..Z a..z 0..9 characters)
'transliteration' => false,
//replace all spaces in file and folder names with the $replace_with value
'convert_spaces' => false,
//the string used to replace spaces in file and folder names
'replace_with' => "_",
// -1: There is no lazy loading at all, 0: Always lazy-load images, 0+: The minimum number of the files in a directory
// when lazy loading should be turned on.
'lazy_loading_file_number_threshold' => 0,
//*******************************************
//Images limit and resizing configuration
//*******************************************
// set maximum pixel width and/or maximum pixel height for all images
// If you set a maximum width or height, oversized images are converted to those limits. Images smaller than the limit(s) are unaffected
// if you don't need a limit set both to 0
'image_max_width' => 0,
'image_max_height' => 0,
'image_max_mode' => 'auto',
/*
# $option: 0 / exact = defined size;
# 1 / portrait = keep aspect set height;
# 2 / landscape = keep aspect set width;
# 3 / auto = auto;
# 4 / crop= resize and crop;
*/
//Automatic resizing //
// If you set $image_resizing to TRUE the script converts all uploaded images exactly to image_resizing_width x image_resizing_height dimension
// If you set width or height to 0 the script automatically calculates the other dimension
// Very large uploads may fail with the default PHP settings; to avoid this, increase PHP's memory and time limits
'image_resizing' => false,
'image_resizing_width' => 0,
'image_resizing_height' => 0,
'image_resizing_mode' => 'auto', // same as $image_max_mode
'image_resizing_override' => false,
// If set to TRUE then you can specify bigger images than $image_max_width & height otherwise if image_resizing is
// bigger than $image_max_width or height then it will be converted to those values
//******************
// Default layout setting
//
// 0 => boxes
// 1 => detailed list (1 column)
// 2 => columns list (multiple columns depending on the width of the page)
// YOU CAN ALSO PASS THIS PARAMETERS USING SESSION VAR => $_SESSION['RF']["VIEW"]=
//
//******************
'default_view' => 0,
//set if the filename is truncated when overflow first row
'ellipsis_title_after_first_row' => true,
//*************************
//Permissions configuration
//******************
'delete_files' => true,
'create_folders' => true,
'delete_folders' => true,
'upload_files' => true,
'rename_files' => true,
'rename_folders' => true,
'duplicate_files' => true,
'copy_cut_files' => true, // for copy/cut files
'copy_cut_dirs' => true, // for copy/cut directories
'chmod_files' => false, // change file permissions
'chmod_dirs' => false, // change folder permissions
'preview_text_files' => true, // eg.: txt, log etc.
'edit_text_files' => true, // eg.: txt, log etc.
'create_text_files' => true, // only create files with exts. defined in $editable_text_file_exts
// you can preview these type of files if $preview_text_files is true
'previewable_text_file_exts' => array( 'txt', 'log', 'xml', 'html', 'css', 'htm', 'js' ),
'previewable_text_file_exts_no_prettify' => array( 'txt', 'log' ),
// you can edit these type of files if $edit_text_files is true (only text based files)
// you can create these type of files if $create_text_files is true (only text based files)
// if you want you can add html,css etc.
// but for security reasons it's NOT RECOMMENDED!
'editable_text_file_exts' => array( 'txt', 'log', 'xml', 'html', 'css', 'htm', 'js' ),
// Preview with Google Documents
'googledoc_enabled' => true,
'googledoc_file_exts' => array( 'doc', 'docx', 'xls', 'xlsx', 'ppt', 'pptx' ),
// Preview with Viewer.js
'viewerjs_enabled' => true,
'viewerjs_file_exts' => array( 'pdf', 'odt', 'odp', 'ods' ),
// defines size limit for paste in MB / operation
// set 'FALSE' for no limit
'copy_cut_max_size' => 100,
// defines file count limit for paste / operation
// set 'FALSE' for no limit
'copy_cut_max_count' => 200,
//IF any of these limits reached, operation won't start and generate warning
//**********************
//Allowed extensions (lowercase insert)
//**********************
'ext_img' => array( 'jpg', 'jpeg', 'png', 'gif', 'bmp', 'tiff', 'svg' ), //Images
'ext_file' => array( 'doc', 'docx', 'rtf', 'pdf', 'xls', 'xlsx', 'txt', 'csv', 'html', 'xhtml', 'psd', 'sql', 'log', 'fla', 'xml', 'ade', 'adp', 'mdb', 'accdb', 'ppt', 'pptx', 'odt', 'ots', 'ott', 'odb', 'odg', 'otp', 'otg', 'odf', 'ods', 'odp', 'css', 'ai' ), //Files
'ext_video' => array( 'mov', 'mpeg', 'm4v', 'mp4', 'avi', 'mpg', 'wma', "flv", "webm" ), //Video
'ext_music' => array( 'mp3', 'm4a', 'ac3', 'aiff', 'mid', 'ogg', 'wav' ), //Audio
'ext_misc' => array( 'zip', 'rar', 'gz', 'tar', 'iso', 'dmg' ), //Archives
/******************
* AVIARY config
*******************/
'aviary_active' => true,
'aviary_apiKey' => "2444282ef4344e3dacdedc7a78f8877d",
'aviary_language' => "en",
'aviary_theme' => "light",
'aviary_tools' => "all",
'aviary_maxSize' => "1400",
// Add or modify the Aviary options below as needed - they will be json encoded when added to the configuration so arrays can be utilized as needed
//The filter and sorter are managed through both javascript and php scripts because if a folder contains
//many files the javascript cannot sort or filter them all, so the filemanager switches to the php script.
//The plugin switches from javascript to php automatically when the current folder exceeds the file number limit below
'file_number_limit_js' => 500,
//**********************
// Hidden files and folders
//**********************
// set the names of any folders you want hidden (eg "hidden_folder1", "hidden_folder2" ) Remember all folders with these names will be hidden (you can set any exceptions in config.php files on folders)
'hidden_folders' => array(),
// set the names of any files you want hidden. Remember these names will be hidden in all folders (eg "this_document.pdf", "that_image.jpg" )
'hidden_files' => array( 'config.php' ),
/*******************
* JAVA upload
*******************/
'java_upload' => false,
'JAVAMaxSizeUpload' => 200, //Gb
//************************************
//Thumbnail for external use creation
//************************************
// New image resized creation with fixed path from filemanager folder after uploading (thumbnails in fixed mode)
// If you want create images resized out of upload folder for use with external script you can choose this method,
// You can create also more than one image at a time just simply add a value in the array
// Remember than the image creation respect the folder hierarchy so if you are inside source/test/test1/ the new image will create at
// path_from_filemanager/test/test1/
// PS if there isn't write permission in your destination folder you must set it
//
'fixed_image_creation' => false, //activate or not the creation of one or more image resized with fixed path from filemanager folder
'fixed_path_from_filemanager' => array( '../test/', '../test1/' ), //fixed path of the image folder from the current position on upload folder
'fixed_image_creation_name_to_prepend' => array( '', 'test_' ), //name to prepend on filename
'fixed_image_creation_to_append' => array( '_test', '' ), //name to append on filename
'fixed_image_creation_width' => array( 300, 400 ), //width of image (you can leave empty if you set height)
'fixed_image_creation_height' => array( 200, '' ), //height of image (you can leave empty if you set width)
/*
# $option: 0 / exact = defined size;
# 1 / portrait = keep aspect set height;
# 2 / landscape = keep aspect set width;
# 3 / auto = auto;
# 4 / crop= resize and crop;
*/
'fixed_image_creation_option' => array( 'crop', 'auto' ), //set the type of the crop
// New image resized creation with relative path inside to upload folder after uploading (thumbnails in relative mode)
// With Responsive filemanager you can create automatically resized image inside the upload folder, also more than one at a time
// just simply add a value in the array
// The image creation path is always relative so if i'm inside source/test/test1 and I upload an image, the path start from here
//
'relative_image_creation' => false, //activate or not the creation of one or more image resized with relative path from upload folder
'relative_path_from_current_pos' => array( './', './' ), //relative path of the image folder from the current position on upload folder
'relative_image_creation_name_to_prepend' => array( '', '' ), //name to prepend on filename
'relative_image_creation_name_to_append' => array( '_thumb', '_thumb1' ), //name to append on filename
'relative_image_creation_width' => array( 300, 400 ), //width of image (you can leave empty if you set height)
'relative_image_creation_height' => array( 200, '' ), //height of image (you can leave empty if you set width)
/*
# $option: 0 / exact = defined size;
# 1 / portrait = keep aspect set height;
# 2 / landscape = keep aspect set width;
# 3 / auto = auto;
# 4 / crop= resize and crop;
*/
'relative_image_creation_option' => array( 'crop', 'crop' ), //set the type of the crop
// Remember text filter after close filemanager for future session
'remember_text_filter' => false,
);
return array_merge(
$config,
array(
'MaxSizeUpload' => ((int)(ini_get('post_max_size')) < $config['MaxSizeUpload'])
? (int)(ini_get('post_max_size')) : $config['MaxSizeUpload'],
'ext'=> array_merge(
$config['ext_img'],
$config['ext_file'],
$config['ext_misc'],
$config['ext_video'],
$config['ext_music']
),
// For a list of options see: https://developers.aviary.com/docs/web/setup-guide#constructor-config
'aviary_defaults_config' => array(
'apiKey' => $config['aviary_apiKey'],
'language' => $config['aviary_language'],
'theme' => $config['aviary_theme'],
'tools' => $config['aviary_tools'],
'maxSize' => $config['aviary_maxSize']
),
)
);
|
PHP
|
MIT
|
jonich/laraval_project_jonich/public/tinymce/filemanager/config/config.php
|
e417e58f-c39a-4376-9bfb-2ea87646b551
|
[{"tag": "USERNAME", "value": "Admin", "start": 3237, "end": 3242, "context": "s randomized something like this:\n\t| $username = 'Admin';\n\t| $salt = 'dsflFWR9u2xQa' (a hard coded string"}, {"tag": "PASSWORD", "value": "dsflFWR9u2xQa", "start": 3257, "end": 3270, "context": "ng like this:\n\t| $username = 'Admin';\n\t| $salt = 'dsflFWR9u2xQa' (a hard coded string)\n\t| $akey = md5($username.$"}]
|
[{"tag": "USERNAME", "value": "Admin", "start": 3237, "end": 3242, "context": "s randomized something like this:\n\t| $username = 'Admin';\n\t| $salt = 'dsflFWR9u2xQa' (a hard coded string"}, {"tag": "PASSWORD", "value": "dsflFWR9u2xQa", "start": 3257, "end": 3270, "context": "ng like this:\n\t| $username = 'Admin';\n\t| $salt = 'dsflFWR9u2xQa' (a hard coded string)\n\t| $akey = md5($username.$"}]
|
<?php
/**
* Text shown in error messaging.
*/
return [
// Permissions
'permission' => 'You do not have permission to access the requested page.',
'permissionJson' => 'You do not have permission to perform the requested action.',
// Auth
'error_user_exists_different_creds' => 'A user with the email :email already exists but with different credentials.',
    'email_already_confirmed' => 'Email has already been confirmed, try logging in.',
    'email_confirmation_invalid' => 'This confirmation token is not valid or has already been used, please try registering again.',
    'email_confirmation_expired' => 'The confirmation token has expired, a new confirmation email has been sent.',
'email_confirmation_awaiting' => 'The email address for the account in use needs to be confirmed',
'ldap_fail_anonymous' => 'LDAP access failed using anonymous bind',
'ldap_fail_authed' => 'LDAP access failed using given dn & password details',
'ldap_extension_not_installed' => 'LDAP PHP extension not installed',
    'ldap_cannot_connect' => 'Cannot connect to ldap server, initial connection failed',
'saml_already_logged_in' => 'Already logged in',
'saml_user_not_registered' => 'The user :name is not registered and automatic registration is disabled',
'saml_no_email_address' => 'Could not find an email address, for this user, in the data provided by the external authentication system',
'saml_invalid_response_id' => 'The request from the external authentication system is not recognised by a process started by this application. Navigating back after a login could cause this issue.',
'saml_fail_authed' => 'Login using :system failed, system did not provide successful authorization',
'social_no_action_defined' => 'No action defined',
'social_login_bad_response' => "Error received during :socialAccount login: \n:error",
    'social_account_in_use' => 'This :socialAccount account is already in use. Try logging in via the :socialAccount option.',
'social_account_email_in_use' => 'The email :email is already in use. If you already have an account you can connect your :socialAccount account from your profile settings.',
'social_account_existing' => 'This :socialAccount is already attached to your profile.',
'social_account_already_used_existing' => 'This :socialAccount account is already used by another user.',
    'social_account_not_used' => 'This :socialAccount account is not linked to any users. Please attach it in your profile settings.',
    'social_account_register_instructions' => 'If you do not yet have an account, you can register an account using the :socialAccount option.',
'social_driver_not_found' => 'Social driver not found',
'social_driver_not_configured' => 'Your :socialAccount social settings are not configured correctly.',
'invite_token_expired' => 'This invitation link has expired. You can instead try to reset your account password.',
// System
    'path_not_writable' => 'File path :filePath could not be uploaded to. Ensure it is writable by the server.',
'cannot_get_image_from_url' => 'Cannot get image from :url',
'cannot_create_thumbs' => 'The server cannot create thumbnails. Please check you have the GD PHP extension installed.',
'server_upload_limit' => 'The server does not allow uploads of this size. Please try a smaller file size.',
'uploaded' => 'The server does not allow uploads of this size. Please try a smaller file size.',
'image_upload_error' => 'An error occurred uploading the image',
'image_upload_type_error' => 'The image type being uploaded is invalid',
'file_upload_timeout' => 'The file upload has timed out.',
// Attachments
'attachment_not_found' => 'Attachment not found',
// Pages
    'page_draft_autosave_fail' => 'Failed to save draft. Ensure you have an internet connection before saving this page',
'page_custom_home_deletion' => 'Cannot delete a page while it is set as a homepage',
// Entities
'entity_not_found' => 'Entity not found',
'bookshelf_not_found' => 'Bookshelf not found',
'book_not_found' => 'Book not found',
'page_not_found' => 'Page not found',
'chapter_not_found' => 'Chapter not found',
'selected_book_not_found' => 'The selected book was not found',
'selected_book_chapter_not_found' => 'The selected Book or Chapter was not found',
'guests_cannot_save_drafts' => 'Guests cannot save drafts',
// Users
'users_cannot_delete_only_admin' => 'You cannot delete the only admin',
'users_cannot_delete_guest' => 'You cannot delete the guest user',
// Roles
'role_cannot_be_edited' => 'This role cannot be edited',
'role_system_cannot_be_deleted' => 'This role is a system role and cannot be deleted',
'role_registration_default_cannot_delete' => 'This role cannot be deleted while set as the default registration role',
'role_cannot_remove_only_admin' => 'This user is the only user assigned to the administrator role. Assign the administrator role to another user before attempting to remove it here.',
// Comments
'comment_list' => 'An error occurred while fetching the comments.',
'cannot_add_comment_to_draft' => 'You cannot add comments to a draft.',
'comment_add' => 'An error occurred while adding / updating the comment.',
'comment_delete' => 'An error occurred while deleting the comment.',
'empty_comment' => 'Cannot add an empty comment.',
// Error pages
'404_page_not_found' => 'Page Not Found',
    'sorry_page_not_found' => 'Sorry, the page you were looking for could not be found.',
'sorry_page_not_found_permission_warning' => 'If you expected this page to exist, you might not have permission to view it.',
'return_home' => 'Return to home',
'error_occurred' => 'An Error Occurred',
'app_down' => ':appName is down right now',
'back_soon' => 'It will be back up soon.',
// API errors
'api_no_authorization_found' => 'No authorization token found on the request',
'api_bad_authorization_format' => 'An authorization token was found on the request but the format appeared incorrect',
'api_user_token_not_found' => 'No matching API token was found for the provided authorization token',
    'api_incorrect_token_secret' => 'The secret provided for the API token used is incorrect',
'api_user_no_api_permission' => 'The owner of the used API token does not have permission to make API calls',
'api_user_token_expired' => 'The authorization token used has expired',
// Settings & Maintenance
'maintenance_test_email_failure' => 'Error thrown when sending a test email:',
];
|
PHP
|
MIT
|
18306205201/TaoPeng/resources/lang/th/errors.php
|
c3ad021f-7c8f-4f80-80e4-30eed21cf6ee
|
[]
|
[]
|
//------------------------------------------------------------------------------
// <auto-generated>
// This code was generated by a tool.
//
// Changes to this file may cause incorrect behavior and will be lost if
// the code is regenerated.
// </auto-generated>
//------------------------------------------------------------------------------
using System;
using System.Web;
namespace MvcAccount {
/// <summary>
/// Provides access to localized strings.
/// </summary>
public static partial class AccountResources {
/// <summary>
/// Looks up a localized string similar to 'Change password'.
/// </summary>
public static string Links_ChangePassword {
get { return GetResource(Keys.Links_ChangePassword); }
}
/// <summary>
/// Looks up a localized string similar to 'Can't access your account?'.
/// </summary>
public static string Links_Recovery {
get { return GetResource(Keys.Links_Recovery); }
}
/// <summary>
/// Looks up a localized string similar to 'Sign in'.
/// </summary>
public static string Links_SignIn {
get { return GetResource(Keys.Links_SignIn); }
}
/// <summary>
/// Looks up a localized string similar to 'Sign out'.
/// </summary>
public static string Links_SignOut {
get { return GetResource(Keys.Links_SignOut); }
}
/// <summary>
/// Looks up a localized string similar to 'The {1} is incorrect.'.
/// </summary>
public static string Validation_CurrentPasswordIncorrect {
get { return GetResource(Keys.Validation_CurrentPasswordIncorrect); }
}
/// <summary>
/// Looks up a localized string similar to 'User with email '{0}' already exists.'.
/// </summary>
public static string Validation_EmailAlreadyExists {
get { return GetResource(Keys.Validation_EmailAlreadyExists); }
}
/// <summary>
/// Looks up a localized string similar to 'You have reached the max. number of invalid attempts, you can try again in {0} minutes.'.
/// </summary>
public static string Validation_MaxInvalidSignInAttempts {
get { return GetResource(Keys.Validation_MaxInvalidSignInAttempts); }
}
/// <summary>
/// Looks up a localized string similar to 'We don't have your email address in our records, your request cannot be completed.'.
/// </summary>
public static string Validation_MissingEmail {
get { return GetResource(Keys.Validation_MissingEmail); }
}
/// <summary>
/// Looks up a localized string similar to 'Cannot authenticate.'.
/// </summary>
public static string Validation_MissingPasswordCannotAuthenticate {
get { return GetResource(Keys.Validation_MissingPasswordCannotAuthenticate); }
}
/// <summary>
/// Looks up a localized string similar to 'Passwords do not match.'.
/// </summary>
public static string Validation_PasswordsNotMatch {
get { return GetResource(Keys.Validation_PasswordsNotMatch); }
}
/// <summary>
/// Looks up a localized string similar to 'Required field cannot be left blank.'.
/// </summary>
public static string Validation_Required {
get { return GetResource(Keys.Validation_Required); }
}
/// <summary>
/// Looks up a localized string similar to 'Your account is disabled.'.
/// </summary>
public static string Validation_UserDisabled {
get { return GetResource(Keys.Validation_UserDisabled); }
}
/// <summary>
/// Looks up a localized string similar to 'User '{0}' does not exist.'.
/// </summary>
public static string Validation_UserNotExist {
get { return GetResource(Keys.Validation_UserNotExist); }
}
/// <summary>
/// Looks up a localized string similar to '{0} and password do not match.'.
/// </summary>
public static string Validation_UserPassNotMatch {
get { return GetResource(Keys.Validation_UserPassNotMatch); }
}
/// <summary>
/// Looks up a localized string similar to 'Change password'.
/// </summary>
public static string Views_Password_Change_Change_Title {
get { return GetResource(Keys.Views_Password_Change_Change_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Account options'.
/// </summary>
public static string Views_Account_Index_Title {
get { return GetResource(Keys.Views_Account_Index_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Your password has been reset.'.
/// </summary>
public static string Views_Password_Reset_Done_Message {
get { return GetResource(Keys.Views_Password_Reset_Done_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Password Reset'.
/// </summary>
public static string Views_Password_Reset_Done_Title {
get { return GetResource(Keys.Views_Password_Reset_Done_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Your new password has been saved.'.
/// </summary>
public static string Views_Password_Change_Saved_Message {
get { return GetResource(Keys.Views_Password_Change_Saved_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Password saved'.
/// </summary>
public static string Views_Password_Change_Saved_Title {
get { return GetResource(Keys.Views_Password_Change_Saved_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'To reset your password, follow the instructions sent to your <strong>{0}</strong> email address.'.
/// </summary>
public static string Views_Password_Reset_VerificationSent_Message {
get { return GetResource(Keys.Views_Password_Reset_VerificationSent_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Recovery Email Sent'.
/// </summary>
public static string Views_Password_Reset_VerificationSent_Title {
get { return GetResource(Keys.Views_Password_Reset_VerificationSent_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'To reset your password, enter your {0}.'.
/// </summary>
public static string Views_Password_Reset_Reset_Message {
get { return GetResource(Keys.Views_Password_Reset_Reset_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Forgot your password?'.
/// </summary>
public static string Views_Password_Reset_Reset_Title {
get { return GetResource(Keys.Views_Password_Reset_Reset_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Select your new password and enter it below.'.
/// </summary>
public static string Views_Password_Reset_Finish_Message {
get { return GetResource(Keys.Views_Password_Reset_Finish_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Reset Password'.
/// </summary>
public static string Views_Password_Reset_Finish_Title {
get { return GetResource(Keys.Views_Password_Reset_Finish_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Sign in'.
/// </summary>
public static string Views_Authentication_SignIn_Title {
get { return GetResource(Keys.Views_Authentication_SignIn_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Re-enter new password'.
/// </summary>
public static string Model_ConfirmNewPassword {
get { return GetResource(Keys.Model_ConfirmNewPassword); }
}
/// <summary>
/// Looks up a localized string similar to 'Current password'.
/// </summary>
public static string Model_CurrentPassword {
get { return GetResource(Keys.Model_CurrentPassword); }
}
/// <summary>
/// Looks up a localized string similar to 'Email'.
/// </summary>
public static string Model_Email {
get { return GetResource(Keys.Model_Email); }
}
/// <summary>
/// Looks up a localized string similar to 'New password'.
/// </summary>
public static string Model_NewPassword {
get { return GetResource(Keys.Model_NewPassword); }
}
/// <summary>
/// Looks up a localized string similar to 'Password'.
/// </summary>
public static string Model_Password {
get { return GetResource(Keys.Model_Password); }
}
/// <summary>
/// Looks up a localized string similar to 'Stay signed in'.
/// </summary>
public static string Model_RememberMe {
get { return GetResource(Keys.Model_RememberMe); }
}
/// <summary>
/// Looks up a localized string similar to 'Password Assistance'.
/// </summary>
public static string Model_PasswordResetVerificationMessageSubject {
get { return GetResource(Keys.Model_PasswordResetVerificationMessageSubject); }
}
/// <summary>
/// Looks up a localized string similar to 'Username'.
/// </summary>
public static string Model_Username {
get { return GetResource(Keys.Model_Username); }
}
/// <summary>
/// Looks up a localized string similar to 'New email address'.
/// </summary>
public static string Model_NewEmail {
get { return GetResource(Keys.Model_NewEmail); }
}
/// <summary>
/// Looks up a localized string similar to 'The {0} is invalid.'.
/// </summary>
public static string Validation_EmailPattern {
get { return GetResource(Keys.Validation_EmailPattern); }
}
/// <summary>
/// Looks up a localized string similar to 'The {0} cannot have more than {1} characters.'.
/// </summary>
public static string Validation_StringLength {
get { return GetResource(Keys.Validation_StringLength); }
}
/// <summary>
/// Looks up a localized string similar to 'The {0} must have between {2} and {1} characters.'.
/// </summary>
public static string Validation_StringLengthWithMin {
get { return GetResource(Keys.Validation_StringLengthWithMin); }
}
/// <summary>
/// Looks up a localized string similar to 'Email change notification'.
/// </summary>
public static string Model_EmailChangeNotificationMessageSubject {
get { return GetResource(Keys.Model_EmailChangeNotificationMessageSubject); }
}
/// <summary>
/// Looks up a localized string similar to 'Email change verification'.
/// </summary>
public static string Model_EmailChangeVerificationMessageSubject {
get { return GetResource(Keys.Model_EmailChangeVerificationMessageSubject); }
}
/// <summary>
/// Looks up a localized string similar to 'The {1} must be different from the current one.'.
/// </summary>
public static string Validation_NewEmailSameAsCurrent {
get { return GetResource(Keys.Validation_NewEmailSameAsCurrent); }
}
/// <summary>
/// Looks up a localized string similar to 'Change email address'.
/// </summary>
public static string Links_ChangeEmail {
get { return GetResource(Keys.Links_ChangeEmail); }
}
/// <summary>
/// Looks up a localized string similar to 'Change email address'.
/// </summary>
public static string Views_Email_Change_Change_Title {
get { return GetResource(Keys.Views_Email_Change_Change_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Your new email address has been saved.'.
/// </summary>
public static string Views_Email_Change_Saved_Message {
get { return GetResource(Keys.Views_Email_Change_Saved_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Email saved'.
/// </summary>
public static string Views_Email_Change_Saved_Title {
get { return GetResource(Keys.Views_Email_Change_Saved_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'To confirm your email address change, follow the instructions sent to your <strong>{0}</strong> email address.'.
/// </summary>
public static string Views_Email_Change_VerificationSent_Message {
get { return GetResource(Keys.Views_Email_Change_VerificationSent_Message); }
}
/// <summary>
/// Looks up a localized string similar to 'Verification Email Sent'.
/// </summary>
public static string Views_Email_Change_VerificationSent_Title {
get { return GetResource(Keys.Views_Email_Change_VerificationSent_Title); }
}
/// <summary>
/// Looks up a localized string similar to 'Go to My account'.
/// </summary>
public static string Links_ReturnToIndex {
get { return GetResource(Keys.Links_ReturnToIndex); }
}
/// <summary>
/// Looks up a localized string similar to 'Cancel'.
/// </summary>
public static string Views_Cancel {
get { return GetResource(Keys.Views_Cancel); }
}
/// <summary>
/// Looks up a localized string similar to 'Sign in'.
/// </summary>
public static string Views_Authentication_SignIn_Submit {
get { return GetResource(Keys.Views_Authentication_SignIn_Submit); }
}
/// <summary>
/// Looks up a localized string similar to 'Change email address'.
/// </summary>
public static string Views_Email_Change_Change_Submit {
get { return GetResource(Keys.Views_Email_Change_Change_Submit); }
}
/// <summary>
/// Looks up a localized string similar to 'Change password'.
/// </summary>
public static string Views_Password_Change_Change_Submit {
get { return GetResource(Keys.Views_Password_Change_Change_Submit); }
}
/// <summary>
/// Looks up a localized string similar to 'Submit'.
/// </summary>
public static string Views_Password_Reset_Finish_Submit {
get { return GetResource(Keys.Views_Password_Reset_Finish_Submit); }
}
/// <summary>
/// Looks up a localized string similar to 'Submit'.
/// </summary>
public static string Views_Password_Reset_Reset_Submit {
get { return GetResource(Keys.Views_Password_Reset_Reset_Submit); }
}
internal static class Keys {
public const string
Links_ChangePassword = "Links_ChangePassword",
Links_Recovery = "Links_Recovery",
Links_SignIn = "Links_SignIn",
Links_SignOut = "Links_SignOut",
Validation_CurrentPasswordIncorrect = "Validation_CurrentPasswordIncorrect",
Validation_EmailAlreadyExists = "Validation_EmailAlreadyExists",
Validation_MaxInvalidSignInAttempts = "Validation_MaxInvalidSignInAttempts",
Validation_MissingEmail = "Validation_MissingEmail",
Validation_MissingPasswordCannotAuthenticate = "Validation_MissingPasswordCannotAuthenticate",
Validation_PasswordsNotMatch = "Validation_PasswordsNotMatch",
Validation_Required = "Validation_Required",
Validation_UserDisabled = "Validation_UserDisabled",
Validation_UserNotExist = "Validation_UserNotExist",
Validation_UserPassNotMatch = "Validation_UserPassNotMatch",
Views_Password_Change_Change_Title = "Views_Password_Change_Change_Title",
Views_Account_Index_Title = "Views_Account_Index_Title",
Views_Password_Reset_Done_Message = "Views_Password_Reset_Done_Message",
Views_Password_Reset_Done_Title = "Views_Password_Reset_Done_Title",
Views_Password_Change_Saved_Message = "Views_Password_Change_Saved_Message",
Views_Password_Change_Saved_Title = "Views_Password_Change_Saved_Title",
Views_Password_Reset_VerificationSent_Message = "Views_Password_Reset_VerificationSent_Message",
Views_Password_Reset_VerificationSent_Title = "Views_Password_Reset_VerificationSent_Title",
Views_Password_Reset_Reset_Message = "Views_Password_Reset_Reset_Message",
Views_Password_Reset_Reset_Title = "Views_Password_Reset_Reset_Title",
Views_Password_Reset_Finish_Message = "Views_Password_Reset_Finish_Message",
Views_Password_Reset_Finish_Title = "Views_Password_Reset_Finish_Title",
Views_Authentication_SignIn_Title = "Views_Authentication_SignIn_Title",
Model_ConfirmNewPassword = "Model_ConfirmNewPassword",
Model_CurrentPassword = "Model_CurrentPassword",
Model_Email = "Model_Email",
Model_NewPassword = "Model_NewPassword",
Model_Password = "Model_Password",
Model_RememberMe = "Model_RememberMe",
Model_PasswordResetVerificationMessageSubject = "Model_PasswordResetVerificationMessageSubject",
Model_Username = "Model_Username",
Model_NewEmail = "Model_NewEmail",
Validation_EmailPattern = "Validation_EmailPattern",
Validation_StringLength = "Validation_StringLength",
Validation_StringLengthWithMin = "Validation_StringLengthWithMin",
Model_EmailChangeNotificationMessageSubject = "Model_EmailChangeNotificationMessageSubject",
Model_EmailChangeVerificationMessageSubject = "Model_EmailChangeVerificationMessageSubject",
Validation_NewEmailSameAsCurrent = "Validation_NewEmailSameAsCurrent",
Links_ChangeEmail = "Links_ChangeEmail",
Views_Email_Change_Change_Title = "Views_Email_Change_Change_Title",
Views_Email_Change_Saved_Message = "Views_Email_Change_Saved_Message",
Views_Email_Change_Saved_Title = "Views_Email_Change_Saved_Title",
Views_Email_Change_VerificationSent_Message = "Views_Email_Change_VerificationSent_Message",
Views_Email_Change_VerificationSent_Title = "Views_Email_Change_VerificationSent_Title",
Links_ReturnToIndex = "Links_ReturnToIndex",
Views_Cancel = "Views_Cancel",
Views_Authentication_SignIn_Submit = "Views_Authentication_SignIn_Submit",
Views_Email_Change_Change_Submit = "Views_Email_Change_Change_Submit",
Views_Password_Change_Change_Submit = "Views_Password_Change_Change_Submit",
Views_Password_Reset_Finish_Submit = "Views_Password_Reset_Finish_Submit",
Views_Password_Reset_Reset_Submit = "Views_Password_Reset_Reset_Submit";
}
}
}
|
C#
|
Apache-2.0
|
maxtoroq/MvcAccount/src/MvcAccount/AccountResources.autogen.cs
|
91f6c3df-2078-42a7-8462-7efb0aaade76
|
[]
|
[]
|
/*
* Twilio - Accounts
*
* This is the public Twilio REST API.
*
* API version: 1.24.0
* Contact: support@twilio.com
*/
// Code generated by OpenAPI Generator (https://openapi-generator.tech); DO NOT EDIT.
package openapi
import (
"encoding/json"
"fmt"
"net/url"
"strings"
"github.com/NellybettIrahola/twilio-go/client"
)
// Optional parameters for the method 'CreateCredentialAws'
type CreateCredentialAwsParams struct {
// The SID of the Subaccount that this Credential should be associated with. Must be a valid Subaccount of the account issuing the request.
AccountSid *string `json:"AccountSid,omitempty"`
// A string that contains the AWS access credentials in the format `<AWS_ACCESS_KEY_ID>:<AWS_SECRET_ACCESS_KEY>`. For example, `AKIAIOSFODNN7EXAMPLE:wJalrXUtnFEMI/K7MDENG/bPxRfiCYEXAMPLEKEY`
Credentials *string `json:"Credentials,omitempty"`
// A descriptive string that you create to describe the resource. It can be up to 64 characters long.
FriendlyName *string `json:"FriendlyName,omitempty"`
}
func (params *CreateCredentialAwsParams) SetAccountSid(AccountSid string) *CreateCredentialAwsParams {
params.AccountSid = &AccountSid
return params
}
func (params *CreateCredentialAwsParams) SetCredentials(Credentials string) *CreateCredentialAwsParams {
params.Credentials = &Credentials
return params
}
func (params *CreateCredentialAwsParams) SetFriendlyName(FriendlyName string) *CreateCredentialAwsParams {
params.FriendlyName = &FriendlyName
return params
}
// Create a new AWS Credential
func (c *ApiService) CreateCredentialAws(params *CreateCredentialAwsParams) (*AccountsV1CredentialAws, error) {
path := "/v1/Credentials/AWS"
data := url.Values{}
headers := make(map[string]interface{})
if params != nil && params.AccountSid != nil {
data.Set("AccountSid", *params.AccountSid)
}
if params != nil && params.Credentials != nil {
data.Set("Credentials", *params.Credentials)
}
if params != nil && params.FriendlyName != nil {
data.Set("FriendlyName", *params.FriendlyName)
}
resp, err := c.requestHandler.Post(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &AccountsV1CredentialAws{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Delete a Credential from your account
func (c *ApiService) DeleteCredentialAws(Sid string) error {
path := "/v1/Credentials/AWS/{Sid}"
path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1)
data := url.Values{}
headers := make(map[string]interface{})
resp, err := c.requestHandler.Delete(c.baseURL+path, data, headers)
if err != nil {
return err
}
defer resp.Body.Close()
return nil
}
// Fetch the AWS credentials specified by the provided Credential Sid
func (c *ApiService) FetchCredentialAws(Sid string) (*AccountsV1CredentialAws, error) {
path := "/v1/Credentials/AWS/{Sid}"
path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1)
data := url.Values{}
headers := make(map[string]interface{})
resp, err := c.requestHandler.Get(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &AccountsV1CredentialAws{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Optional parameters for the method 'ListCredentialAws'
type ListCredentialAwsParams struct {
// How many resources to return in each list page. The default is 50, and the maximum is 1000.
PageSize *int `json:"PageSize,omitempty"`
// Max number of records to return.
Limit *int `json:"limit,omitempty"`
}
func (params *ListCredentialAwsParams) SetPageSize(PageSize int) *ListCredentialAwsParams {
params.PageSize = &PageSize
return params
}
func (params *ListCredentialAwsParams) SetLimit(Limit int) *ListCredentialAwsParams {
params.Limit = &Limit
return params
}
// Retrieve a single page of CredentialAws records from the API. Request is executed immediately.
func (c *ApiService) PageCredentialAws(params *ListCredentialAwsParams, pageToken, pageNumber string) (*ListCredentialAwsResponse, error) {
path := "/v1/Credentials/AWS"
data := url.Values{}
headers := make(map[string]interface{})
if params != nil && params.PageSize != nil {
data.Set("PageSize", fmt.Sprint(*params.PageSize))
}
if pageToken != "" {
data.Set("PageToken", pageToken)
}
if pageNumber != "" {
data.Set("Page", pageNumber)
}
resp, err := c.requestHandler.Get(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &ListCredentialAwsResponse{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
// Lists CredentialAws records from the API as a list. Unlike stream, this operation is eager and loads 'limit' records into memory before returning.
func (c *ApiService) ListCredentialAws(params *ListCredentialAwsParams) ([]AccountsV1CredentialAws, error) {
if params == nil {
params = &ListCredentialAwsParams{}
}
params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit))
response, err := c.PageCredentialAws(params, "", "")
if err != nil {
return nil, err
}
curRecord := 0
var records []AccountsV1CredentialAws
for response != nil {
records = append(records, response.Credentials...)
var record interface{}
if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListCredentialAwsResponse); record == nil || err != nil {
return records, err
}
response = record.(*ListCredentialAwsResponse)
}
return records, err
}
// Streams CredentialAws records from the API as a channel stream. This operation lazily loads records as efficiently as possible until the limit is reached.
func (c *ApiService) StreamCredentialAws(params *ListCredentialAwsParams) (chan AccountsV1CredentialAws, error) {
if params == nil {
params = &ListCredentialAwsParams{}
}
params.SetPageSize(client.ReadLimits(params.PageSize, params.Limit))
response, err := c.PageCredentialAws(params, "", "")
if err != nil {
return nil, err
}
curRecord := 0
//set buffer size of the channel to 1
channel := make(chan AccountsV1CredentialAws, 1)
go func() {
for response != nil {
for item := range response.Credentials {
channel <- response.Credentials[item]
}
var record interface{}
if record, err = client.GetNext(c.baseURL, response, &curRecord, params.Limit, c.getNextListCredentialAwsResponse); record == nil || err != nil {
close(channel)
return
}
response = record.(*ListCredentialAwsResponse)
}
close(channel)
}()
return channel, err
}
func (c *ApiService) getNextListCredentialAwsResponse(nextPageUrl string) (interface{}, error) {
if nextPageUrl == "" {
return nil, nil
}
resp, err := c.requestHandler.Get(nextPageUrl, nil, nil)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &ListCredentialAwsResponse{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, nil
}
// Optional parameters for the method 'UpdateCredentialAws'
type UpdateCredentialAwsParams struct {
// A descriptive string that you create to describe the resource. It can be up to 64 characters long.
FriendlyName *string `json:"FriendlyName,omitempty"`
}
func (params *UpdateCredentialAwsParams) SetFriendlyName(FriendlyName string) *UpdateCredentialAwsParams {
params.FriendlyName = &FriendlyName
return params
}
// Update the properties of a given AWS Credential
func (c *ApiService) UpdateCredentialAws(Sid string, params *UpdateCredentialAwsParams) (*AccountsV1CredentialAws, error) {
path := "/v1/Credentials/AWS/{Sid}"
path = strings.Replace(path, "{"+"Sid"+"}", Sid, -1)
data := url.Values{}
headers := make(map[string]interface{})
if params != nil && params.FriendlyName != nil {
data.Set("FriendlyName", *params.FriendlyName)
}
resp, err := c.requestHandler.Post(c.baseURL+path, data, headers)
if err != nil {
return nil, err
}
defer resp.Body.Close()
ps := &AccountsV1CredentialAws{}
if err := json.NewDecoder(resp.Body).Decode(ps); err != nil {
return nil, err
}
return ps, err
}
|
GO
|
MIT
|
NellybettIrahola/twilio-go/rest/accounts/v1/credentials_aws.go
|
63c8f990-fd20-4f7d-b435-cdc1175ddc2c
|
[{"tag": "EMAIL", "value": "support@twilio.com", "start": 104, "end": 122, "context": "o REST API.\n *\n * API version: 1.24.0\n * Contact: support@twilio.com\n */\n\n// Code generated by OpenAPI Generator (http"}]
|
[{"tag": "EMAIL", "value": "support@twilio.com", "start": 104, "end": 122, "context": "o REST API.\n *\n * API version: 1.24.0\n * Contact: support@twilio.com\n */\n\n// Code generated by OpenAPI Generator (http"}]
|
---
layout: post
title: Web Vitals
description: Essential metrics for a healthy site
hero: image/admin/BHaoqqR73jDWe6FL2kfw.png
authors:
- philipwalton
date: 2020-04-30
updated: 2020-07-21
tags:
- metrics
- performance
- web-vitals
---
Optimizing for quality of user experience is key to the long-term success of any site on the web. Web Vitals helps business owners, marketers, and developers alike quantify the experience of their site and identify opportunities for improvement.
## Overview
Web Vitals is an initiative by Google to provide unified guidance for the quality signals that are essential to delivering a great user experience on the web.
Over the years, Google has provided a number of tools to measure and report on performance. While some developers are experts at using these tools, everyone else finds the abundance of tools and metrics hard to keep up with.
Site owners should not have to be performance gurus to understand the quality of the experience they deliver to their users. The Web Vitals initiative aims to simplify the landscape and help sites focus on the metrics that matter most: the **Core Web Vitals**.
## Core Web Vitals
Core Web Vitals are the subset of Web Vitals metrics used to assess web pages and surfaced across all Google tools. Site owners should measure these metrics. Each Core Web Vital represents a distinct facet of the user's experience with a site, is measured [in the field](/user-centric-performance-metrics/#how-metrics-are-measured), and reflects the real-world experience of a critical [user-centric](/user-centric-performance-metrics/#how-metrics-are-measured) outcome.
The metrics that make up Core Web Vitals will [evolve](#evolving-web-vitals) over time. The current set for 2020 focuses on three aspects of the user experience: *loading*, *interactivity*, and *visual stability*, and includes the following metrics (and their respective thresholds):
<div class="w-stack w-stack--center w-stack--md">{% Img src="image/tcFciHGuF3MxnTr1y5ue01OGLBn2/ZZU8Z7TMKXmzZT2mCjJU.svg", alt="Рекомендации по пороговому значению скорости загрузки основного контента", width="400", height="350" %} {% Img src="image/tcFciHGuF3MxnTr1y5ue01OGLBn2/iHYrrXKe4QRcb2uu8eV8.svg", alt="Рекомендации по пороговому значению времени ожидания до первого взаимодействия с контентом", width="400", height="350" %} {% Img src="image/tcFciHGuF3MxnTr1y5ue01OGLBn2/dgpDFckbHwwOKdIGDa3N.svg", alt="Рекомендации по пороговому значению совокупного смещения макета", width="400", height="350" %}</div>
- **[Largest Contentful Paint (LCP)](/lcp/)**: measures *loading* performance. To provide a good user experience, LCP should occur within **2.5 seconds** of when the page first starts loading.
- **[First Input Delay (FID)](/fid/)**: measures *interactivity*. To provide a good user experience, pages should have a FID of **100 milliseconds** or less.
- **[Cumulative Layout Shift (CLS)](/cls/)**: measures *visual stability*. To provide a good user experience, pages should maintain a CLS of **0.1** or less.
For each of the above metrics, a good threshold to measure is the **75th percentile** of page loads, segmented across mobile and desktop devices.
Tools that assess Core Web Vitals compliance should consider a page passing if it meets the recommended targets at the 75th percentile for all three of the above metrics.
{% Aside %} To learn more about the research and methodology behind these recommendations, see [Defining the Core Web Vitals metrics thresholds](/defining-core-web-vitals-thresholds/). {% endAside %}
### Tools to measure and report Core Web Vitals
Google believes that the Core Web Vitals are critical to assessing user experience. As a result, it is committed to surfacing these metrics [in all of its popular tools](/vitals-tools/). The following sections detail which tools support the Core Web Vitals.
#### Field tools to measure Core Web Vitals
The [Chrome User Experience Report](https://developers.google.com/web/tools/chrome-user-experience-report) collects anonymized, real-user measurement data for each Core Web Vital. This data enables site owners to quickly assess the performance of their pages without having to manually instrument analytics, and powers tools like [PageSpeed Insights](https://developers.google.com/speed/pagespeed/insights/) and Search Console's [Core Web Vitals report](https://support.google.com/webmasters/answer/9205520).
<div class="w-table-wrapper">
<table>
<tr>
<td> </td>
<td>LCP</td>
<td>FID</td>
<td>CLS</td>
</tr>
<tr>
<td><a href="https://developers.google.com/web/tools/chrome-user-experience-report">Chrome User Experience Report</a></td>
<td>✔</td>
<td>✔</td>
<td>✔</td>
</tr>
<tr>
<td><a href="https://developers.google.com/speed/pagespeed/insights/">PageSpeed Insights</a></td>
<td>✔</td>
<td>✔</td>
<td>✔</td>
</tr>
<tr>
<td><a href="https://support.google.com/webmasters/answer/9205520">Search Console (Core Web Vitals report)</a></td>
<td>✔</td>
<td>✔</td>
<td>✔</td>
</tr>
</table>
</div>
{% Aside %} For guidance on how to use these tools and which tool is right for your use case, see [Getting started with measuring Web Vitals](/vitals-measurement-getting-started/). {% endAside %}
The data provided by the Chrome User Experience Report offers a quick way to assess the performance of sites, but it does not provide the detailed, per-pageview telemetry that is often necessary to accurately diagnose, monitor, and quickly react to regressions. As a result, we strongly recommend that sites set up their own real-user monitoring.
#### Measure Core Web Vitals in JavaScript
Each of the Core Web Vitals can be measured in JavaScript using standard web APIs.
The easiest way to measure all the Core Web Vitals is to use the [web-vitals](https://github.com/GoogleChrome/web-vitals) JavaScript library, a small, production-ready wrapper around the underlying web APIs that measures each metric in a way that matches how it is reported by the Google tools listed above.
With the [web-vitals](https://github.com/GoogleChrome/web-vitals) library, measuring each metric is as simple as calling a single function (see the documentation for complete [usage](https://github.com/GoogleChrome/web-vitals#usage) and [API](https://github.com/GoogleChrome/web-vitals#api) details):
```js
import {getCLS, getFID, getLCP} from 'web-vitals';
function sendToAnalytics(metric) {
const body = JSON.stringify(metric);
// Use `navigator.sendBeacon()` if available, falling back to `fetch()`.
(navigator.sendBeacon && navigator.sendBeacon('/analytics', body)) ||
fetch('/analytics', {body, method: 'POST', keepalive: true});
}
getCLS(sendToAnalytics);
getFID(sendToAnalytics);
getLCP(sendToAnalytics);
```
Once you've configured your site to use the [web-vitals](https://github.com/GoogleChrome/web-vitals) library to measure and send your Core Web Vitals data to an analytics endpoint, the next step is to aggregate and report on that data to see whether your pages meet the recommended thresholds for at least 75% of page visits.
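To make that check concrete, here is a minimal sketch of a nearest-rank 75th-percentile calculation. The `percentile()` helper and the sample `lcpValues` array are hypothetical, shown only to illustrate how collected field values could be compared against the 2.5 second LCP threshold; they are not part of the web-vitals library or any Google tool.
```js
// Nearest-rank percentile: returns the value at or below which
// p percent of the observations fall.
function percentile(values, p) {
  const sorted = [...values].sort((a, b) => a - b);
  const index = Math.ceil((p / 100) * sorted.length) - 1;
  return sorted[Math.max(0, index)];
}
const lcpValues = [1800, 2100, 2400, 2600, 3200]; // example field data (ms)
const p75 = percentile(lcpValues, 75);
console.log(p75 <= 2500 ? 'LCP: good' : 'LCP: needs improvement');
```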
Analytics providers, both those with and those without built-in support for the Core Web Vitals metrics, should include basic custom-metric features that allow you to measure the Core Web Vitals in their tools.
One such example is the [Web Vitals Report](https://github.com/GoogleChromeLabs/web-vitals-report), which allows site owners to measure their Core Web Vitals using Google Analytics. For guidance on measuring the Core Web Vitals using other analytics tools, see [Best practices for measuring Web Vitals in the field](/vitals-field-measurement-best-practices/).
You can also report on each of the Core Web Vitals without writing any code by using the [Web Vitals Chrome Extension](https://github.com/GoogleChrome/web-vitals-extension). The extension uses the [web-vitals](https://github.com/GoogleChrome/web-vitals) library to measure each metric and display it to users as they browse the web.
The extension is helpful for understanding the performance of your own sites, your competitors' sites, and the web at large.
<div class="w-table-wrapper">
<table>
<thead>
<tr>
<th> </th>
<th>LCP</th>
<th>FID</th>
<th>CLS</th>
</tr>
</thead>
<tbody>
<tr>
<td><a href="https://github.com/GoogleChrome/web-vitals">Web-Vitals</a></td>
<td>✔</td>
<td>✔</td>
<td>✔</td>
</tr>
<tr>
<td><a href="https://github.com/GoogleChrome/web-vitals-extension">Расширение Web Vitals</a></td>
<td>✔</td>
<td>✔</td>
<td>✔</td>
</tr>
</tbody>
</table>
</div>
Developers who prefer to measure these metrics directly via the underlying web APIs can refer to the following metric guides for implementation details (a standalone LCP sketch follows the aside below):
- [Measure LCP in JavaScript](/lcp/#measure-lcp-in-javascript)
- [Measure FID in JavaScript](/fid/#measure-fid-in-javascript)
- [Measure CLS in JavaScript](/cls/#measure-cls-in-javascript)
{% Aside %} For additional guidance on how to measure these metrics using popular analytics services (or your own in-house analytics tools), see [Best practices for measuring Web Vitals in the field](/vitals-field-measurement-best-practices/). {% endAside %}
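As one example of the direct approach, here is a minimal sketch of observing LCP candidates via `PerformanceObserver`. It omits details the web-vitals library handles for you, such as stopping observation on user input and handling back/forward-cache restores, so treat it as an illustration rather than a production implementation:
```js
// Log each LCP candidate as the browser reports it; the last entry
// observed before user input is the page's LCP.
const observer = new PerformanceObserver((entryList) => {
  const entries = entryList.getEntries();
  const lastEntry = entries[entries.length - 1];
  console.log('LCP candidate:', lastEntry.startTime, lastEntry.element);
});
observer.observe({type: 'largest-contentful-paint', buffered: true});
```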
#### Lab tools to measure Core Web Vitals
While the Core Web Vitals are, first and foremost, field metrics, some of them are also measurable in the lab.
Lab measurement is the best way to test the performance of features during development, before they are released. It is also the best way to catch performance regressions before they happen.
The following tools can be used to measure the Core Web Vitals in a lab environment:
<div class="w-table-wrapper">
<table>
<thead>
<tr>
<th> </th>
<th>LCP</th>
<th>FID</th>
<th>CLS</th>
</tr>
</thead>
<tbody>
<tr>
<td><a href="https://developers.google.com/web/tools/chrome-devtools">Chrome DevTools</a></td>
<td>✔</td>
<td>✘ (use <a href="/tbt/">TBT</a> instead)
</td>
<td>✔</td>
</tr>
<tr>
<td><a href="https://developers.google.com/web/tools/lighthouse"> Lighthouse</a></td>
<td>✔</td>
<td>✘ (use <a href="/tbt/">TBT</a> instead)
</td>
<td>✔</td>
</tr>
</tbody>
</table>
</div>
{% Aside %} Tools like Lighthouse that load pages in a simulated environment without a user cannot measure FID (there is no user input). However, the Total Blocking Time (TBT) metric is lab-measurable and is an excellent proxy for FID: performance optimizations that improve TBT in the lab should improve FID in the field (see the performance recommendations below). {% endAside %}
While lab measurement is an essential part of delivering great user experiences, it is not a substitute for field measurement.
The performance of a site can vary dramatically based on a user's device capabilities, their network conditions, what other processes may be running on the device, and how they interact with the page. In fact, each of the Core Web Vitals metrics can have its score affected by user interaction. Only field measurement can capture the complete picture.
### Recommendations for improving your scores
Once you have measured the Core Web Vitals and identified areas for improvement, the next step is to optimize. The following guides offer specific recommendations for how to optimize your pages for each of the Core Web Vitals:
- [Optimize LCP](/optimize-lcp/)
- [Optimize FID](/optimize-fid/)
- [Optimize CLS](/optimize-cls/)
## Other Web Vitals
While the Core Web Vitals are the critical metrics for understanding and delivering a great user experience, there are other vital metrics as well.
These other Web Vitals often serve as proxy or supplemental metrics for the Core Web Vitals, helping to capture a larger part of the experience or to diagnose a specific issue.
For example, the metrics [Time to First Byte (TTFB)](/time-to-first-byte/) and [First Contentful Paint (FCP)](/fcp/) are both vital aspects of the *loading* experience and are useful in diagnosing issues with LCP (slow [server response times](/overloaded-server/) or [render-blocking resources](/render-blocking-resources/), respectively).
Similarly, metrics like [Total Blocking Time (TBT)](/tbt/) and [Time to Interactive (TTI)](/tti/) are vital lab metrics for catching and diagnosing potential *interactivity* issues that will impact FID. However, they are not part of the Core Web Vitals set because they are not field-measurable and do not reflect a [user-centric](/user-centric-performance-metrics/#how-metrics-are-measured) outcome.
## Evolving Web Vitals
Web Vitals and Core Web Vitals represent the best available signals developers have today to measure quality of experience across the web, but these signals are not perfect, and future improvements or additions should be expected.
The **Core Web Vitals** are relevant to all web pages and are surfaced across relevant Google tools. Changes to these metrics can have wide-reaching impact on ranking; as such, their definitions and thresholds will be stable, and updates will come with prior notice to developers on a predictable, annual cadence.
The other Web Vitals are often context- or tool-specific and may be more experimental than the Core Web Vitals. As such, their definitions and thresholds may change with greater frequency.
For all Web Vitals, changes will be clearly documented in this public [CHANGELOG](http://bit.ly/chrome-speed-metrics-changelog).
|
Markdown
|
Apache-2.0
|
AbiJarjani/web.dev/src/site/content/ru/vitals/index.md
|
a25b6d03-504e-429e-9972-be126d4a1f83
|
[{"tag": "USERNAME", "value": "philipwalton", "start": 147, "end": 159, "context": "image/admin/BHaoqqR73jDWe6FL2kfw.png\nauthors:\n - philipwalton\ndate: 2020-04-30\nupdated: 2020-07-21\ntags:\n - me"}]
|
[{"tag": "USERNAME", "value": "philipwalton", "start": 147, "end": 159, "context": "image/admin/BHaoqqR73jDWe6FL2kfw.png\nauthors:\n - philipwalton\ndate: 2020-04-30\nupdated: 2020-07-21\ntags:\n - me"}]
|
---
layout: post
title: "Vadym Bartko - Python Backend Engineer"
author: "Vadym Bartko"
permalink: /my_cv/
---
[My CV in PDF](/assets/other/Vadym Bartko - Python Backend Engineer.pdf)
## Contacts
* email: [hudvin@gmail.com](mailto:hudvin@gmail.com) or [vadym.bartko@protonmail.com](mailto:vadym.bartko@protonmail.com)
* skype: hudvin
## COMPUTER SKILLS AND COMPETENCES
**AWS:**
Lambda, SQS, SNS, S3, Textract
**Backend:**
Python Backend Stack - Flask, Django, DRF, gunicorn, RabbitMQ, Celery,
boto3, Tornado, Flasgger, dynaconf, MongoDB, ElasticSearch etc
**Pdf Processing:**
ghostscript, fitz
**AI:**
Convolutional Neural Networks, image classification, object detection, image
captioning, image segmentation(UNet), homography, keypoints detection,
image processing, FaceNet, classification and clusterization methods, ROS,
datasets preparation
**AI Tools:**
OpenCV, Scikit-learn, Scikit-image, Tensorflow, Pandas, Jupyter Notebook,
Keras, Dlib, Pillow
**Languages:**
Python, Java
**OS:**
Linux (Debian based), Windows
**Cloud and Deployment:**
Kubernetes, Docker, nvidia-docker, Helm, AWS, microservices and distributed systems
**Other:**
some experience with LIDARs and stereocameras, R&D, basic language
processing
## NOTABLE PROJECTS
### [pdf processing - fintech]
processing and analysis of financial pdf documents, OCR, dealing with pdf
format issues, pdf/image compression and optimization, ghostscript-based
processing, dealing with font-related issues
ghostscript, imagemagick, fitz, flask, AWS Lambda, AWS Textract
### Khumbu
image search engine based on AI and computer vision. Performs face
recognition, object detection, image classification, metadata extraction, query
processing.
Docker, RabbitMQ, Flask, Mongo, Keras, Tensorflow, Kubernetes, React
technical research, market research and analysis, prepare pitch deck,
interviewing potential customers, talk to investors, implement backend and
frontend parts
### [bots and image processing]
automatically train customer specific NLP bots, deploy ML apps to cloud
Kubernetes, Helm, nvidia-docker, Flask, Celery, Airflow
implement web api for third-party ML models, build distributed training system, Kubernetes management and deployment, performance optimization
### [media assets management software]
face detection and recognition, image tagging, inappropriate content detection
OpenCV, Keras, Tensorflow, Docker, dlib, pandas, scikit-image
implement image classification and nsfw content detection, build dataset for
face recognition tasks, test various face recognition approaches, integrate with
other services
### [autonomous robocar]
control, navigation, route planning and map building for small 4W robot
OpenCV, ROS, LIDAR, ZED Camera
hardware design, test various SLAM algorithms, setup ROS env for wheel
robot, experiments with LIDAR and stereocamera
### [gene variations classification]
detect pathogenic variations in genes
pandas, scikit-learn
### ITraffic
detect empty parking lots using webcamera
Opencv, Caffe, digits, Python, Docker, RabbitMQ; AKAZE/ORB/BRISK,
homography
create datasets, train models, implement API for models, deployment,
implement cv tasks
### [crawling/scraping]
large scale web crawling and data extraction
RabbitMQ, MySQL, Python, PhantomJS, RedShift, S3, EC2, ElasticSearch,
NLTK
### Firefly
web manager for scientific papers
GridFS, TitanDB, ElasticSearch
### Nomad
distributed crawler
TitanDB, OrientDB, gremlin
### Denigma
tools for ageing research project, server administration, knowledge extraction
CloudStack, Scala, NLTK, OpenNLP, semantic tools
research, backend implementation, some DevOps
### BigPlanet
offline map with different sources of geo data
Android 2.x
## WORK EXPERIENCE
* 2020 – present Zoral, Python Backend Engineer
* 2018 – 2019 Khumbu, founder
* 2015 – 2019 Lohika, Machine Learning Engineer
* 2014 – 2015 freelance(Python Engineer)
* 2013 – 2014 Codeminders, Java Engineer
* 2012 – 2013 Researcher, freelance
* 2011 – 2012 Ciklum, Java/Android Software Engineer, Team Lead
* 2009 – 2011 P-Product, Java/Android Software Engineer
* 2008 – 2009 N.S, co-founder
* 2007 – 2008 Exadel, Java Engineer
## EDUCATION AND TRAINING
* University Kharkiv National University of Radio Electronics, 2008 – 2014
* Degree BS in Information Technology
* Online course Machine Learning, Coursera
* Online course Introduction to Artificial Intelligence, EdX
* Certification MapR Certified Hadoop Developer, [https://accredible.com/10056013](https://accredible.com/10056013)
## OTHER
* Mother tongue(s): Russian
* Other language(s): English, Ukrainian, Deutsch(beginner)
* Hobbies: climbing, hiking, photography, history, art
* Personal blog: [https://vadym.bartko.me/](https://vadym.bartko.me/)
|
Markdown
|
MIT
|
hudvin/hudvin.github.io/_pages/my_cv.md
|
d71872f0-4ede-474c-aa97-5e843ce2a5b5
|
[{"tag": "EMAIL", "value": "vadym.bartko@protonmail.com", "start": 279, "end": 306, "context": "udvin@gmail.com) or [vadym.bartko@protonmail.com](vadym.bartko@protonmail.com)\n\n* skype: hudvin\n\n\n## COMPUTER SKILLS AND COMPET"}, {"tag": "NAME", "value": "Vadym Bartko", "start": 74, "end": 86, "context": "\"Vadym Bartko - Python Backend Engineer\"\nauthor: \"Vadym Bartko\"\npermalink: /my_cv/\n---\n\n\n[My CV in PDF](/assets/"}, {"tag": "EMAIL", "value": "hudvin@gmail.com", "start": 228, "end": 244, "context": "er.pdf)\n\n\n## Contacts\n* email: [hudvin@gmail.com](hudvin@gmail.com) or [vadym.bartko@protonmail.com](vadym.bartko@pr"}, {"tag": "EMAIL", "value": "hudvin@gmail.com", "start": 210, "end": 226, "context": "hon Backend Engineer.pdf)\n\n\n## Contacts\n* email: [hudvin@gmail.com](hudvin@gmail.com) or [vadym.bartko@protonmail.co"}, {"tag": "EMAIL", "value": "vadym.bartko@protonmail.com", "start": 250, "end": 277, "context": "* email: [hudvin@gmail.com](hudvin@gmail.com) or [vadym.bartko@protonmail.com](vadym.bartko@protonmail.com)\n\n* skype: hudvin\n\n\n"}, {"tag": "NAME", "value": "Vadym Bartko", "start": 25, "end": 37, "context": "---\nlayout: post\ntitle: \"Vadym Bartko - Python Backend Engineer\"\nauthor: \"Vadym Bartko\""}, {"tag": "NAME", "value": "Vadym Bartko", "start": 142, "end": 154, "context": "alink: /my_cv/\n---\n\n\n[My CV in PDF](/assets/other/Vadym Bartko - Python Backend Engineer.pdf)\n\n\n## Contacts\n* em"}]
|
[{"tag": "EMAIL", "value": "vadym.bartko@protonmail.com", "start": 279, "end": 306, "context": "udvin@gmail.com) or [vadym.bartko@protonmail.com](vadym.bartko@protonmail.com)\n\n* skype: hudvin\n\n\n## COMPUTER SKILLS AND COMPET"}, {"tag": "NAME", "value": "Vadym Bartko", "start": 74, "end": 86, "context": "\"Vadym Bartko - Python Backend Engineer\"\nauthor: \"Vadym Bartko\"\npermalink: /my_cv/\n---\n\n\n[My CV in PDF](/assets/"}, {"tag": "EMAIL", "value": "hudvin@gmail.com", "start": 228, "end": 244, "context": "er.pdf)\n\n\n## Contacts\n* email: [hudvin@gmail.com](hudvin@gmail.com) or [vadym.bartko@protonmail.com](vadym.bartko@pr"}, {"tag": "EMAIL", "value": "hudvin@gmail.com", "start": 210, "end": 226, "context": "hon Backend Engineer.pdf)\n\n\n## Contacts\n* email: [hudvin@gmail.com](hudvin@gmail.com) or [vadym.bartko@protonmail.co"}, {"tag": "EMAIL", "value": "vadym.bartko@protonmail.com", "start": 250, "end": 277, "context": "* email: [hudvin@gmail.com](hudvin@gmail.com) or [vadym.bartko@protonmail.com](vadym.bartko@protonmail.com)\n\n* skype: hudvin\n\n\n"}, {"tag": "NAME", "value": "Vadym Bartko", "start": 25, "end": 37, "context": "---\nlayout: post\ntitle: \"Vadym Bartko - Python Backend Engineer\"\nauthor: \"Vadym Bartko\""}, {"tag": "NAME", "value": "Vadym Bartko", "start": 142, "end": 154, "context": "alink: /my_cv/\n---\n\n\n[My CV in PDF](/assets/other/Vadym Bartko - Python Backend Engineer.pdf)\n\n\n## Contacts\n* em"}]
|
<?php
/* modular/main.html.twig */
class __TwigTemplate_7cb7b2d8d91a4b84073a773f2da41e819f04ec3ea517fe14bf3852960a2912f6 extends Twig_Template
{
public function __construct(Twig_Environment $env)
{
parent::__construct($env);
$this->parent = false;
$this->blocks = array(
);
}
protected function doDisplay(array $context, array $blocks = array())
{
// line 1
$context["grid_size"] = $this->env->getExtension('Grav\Common\Twig\TwigExtension')->themeVarFunc("grid-size");
// line 2
$context["hero_image"] = (($this->getAttribute($this->getAttribute(($context["page"] ?? null), "header", array()), "hero_image", array())) ? ($this->getAttribute($this->getAttribute(($context["page"] ?? null), "media", array()), $this->getAttribute($this->getAttribute(($context["page"] ?? null), "header", array()), "hero_image", array()), array(), "array")) : (twig_first($this->env, $this->getAttribute($this->getAttribute(($context["page"] ?? null), "media", array()), "images", array()))));
// line 3
echo "
";
// line 4
echo $this->env->getExtension('Grav\Common\Twig\TwigExtension')->dump($this->env, $context, ($context["hero_image"] ?? null));
echo "
<div style=\"background-image: ";
// line 6
echo $this->getAttribute($this->getAttribute($this->getAttribute(($context["page"] ?? null), "media", array()), $this->getAttribute($this->getAttribute(($context["page"] ?? null), "header", array()), "media_order", array()), array(), "array"), "url", array());
echo ";height:auto;width=100%;\"><img src=\"";
echo $this->getAttribute($this->getAttribute($this->getAttribute(($context["page"] ?? null), "media", array()), $this->getAttribute($this->getAttribute(($context["page"] ?? null), "header", array()), "media_order", array()), array(), "array"), "url", array());
echo "\" /></div>
";
}
public function getTemplateName()
{
return "modular/main.html.twig";
}
public function isTraitable()
{
return false;
}
public function getDebugInfo()
{
return array ( 31 => 6, 26 => 4, 23 => 3, 21 => 2, 19 => 1,);
}
/** @deprecated since 1.27 (to be removed in 2.0). Use getSourceContext() instead */
public function getSource()
{
@trigger_error('The '.__METHOD__.' method is deprecated since version 1.27 and will be removed in 2.0. Use getSourceContext() instead.', E_USER_DEPRECATED);
return $this->getSourceContext()->getCode();
}
public function getSourceContext()
{
return new Twig_Source("{% set grid_size = theme_var('grid-size') %}
{% set hero_image = page.header.hero_image ? page.media[page.header.hero_image] : page.media.images|first %}
{{ dump(hero_image) }}
<div style=\"background-image: {{ page.media[page.header.media_order].url }};height:auto;width=100%;\"><img src=\"{{ page.media[page.header.media_order].url }}\" /></div>
", "modular/main.html.twig", "/Library/WebServer/Documents/webpage/user/themes/quark/templates/modular/main.html.twig");
}
}
|
PHP
|
MIT
|
vortexntnu/webpage/cache/twig/ca/ca259ec54c46145850c48a846afbbbd1088b67eb650760332ef02dd80b05e17c.php
|
578bebc5-7f65-45ab-bb29-d15e1a660597
|
[]
|
[]
|
/*
* Hisilicon clock separated gate driver
*
* Copyright (c) 2012-2013 Hisilicon Limited.
* Copyright (c) 2012-2013 Linaro Limited.
*
* Author: Haojian Zhuang <haojian.zhuang@linaro.org>
* Xin Li <li.xin@linaro.org>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*
*/
#include <linux/kernel.h>
#include <linux/clk-provider.h>
#include <linux/io.h>
#include <linux/slab.h>
#include "clk.h"
/* clock separated gate register offset */
#define CLKGATE_SEPERATED_ENABLE 0x0
#define CLKGATE_SEPERATED_DISABLE 0x4
#define CLKGATE_SEPERATED_STATUS 0x8
struct clkgate_separated {
struct clk_hw hw;
void __iomem *enable; /* enable register */
u8 bit_idx; /* bits in enable/disable register */
u8 flags;
spinlock_t *lock;
};
static int clkgate_separated_enable(struct clk_hw *hw)
{
struct clkgate_separated *sclk;
unsigned long flags = 0;
u32 reg;
sclk = container_of(hw, struct clkgate_separated, hw);
if (sclk->lock)
spin_lock_irqsave(sclk->lock, flags);
reg = BIT(sclk->bit_idx);
writel_relaxed(reg, sclk->enable);
readl_relaxed(sclk->enable + CLKGATE_SEPERATED_STATUS);
if (sclk->lock)
spin_unlock_irqrestore(sclk->lock, flags);
return 0;
}
static void clkgate_separated_disable(struct clk_hw *hw)
{
struct clkgate_separated *sclk;
unsigned long flags = 0;
u32 reg;
sclk = container_of(hw, struct clkgate_separated, hw);
if (sclk->lock)
spin_lock_irqsave(sclk->lock, flags);
reg = BIT(sclk->bit_idx);
writel_relaxed(reg, sclk->enable + CLKGATE_SEPERATED_DISABLE);
readl_relaxed(sclk->enable + CLKGATE_SEPERATED_STATUS);
if (sclk->lock)
spin_unlock_irqrestore(sclk->lock, flags);
}
static int clkgate_separated_is_enabled(struct clk_hw *hw)
{
struct clkgate_separated *sclk;
u32 reg;
sclk = container_of(hw, struct clkgate_separated, hw);
reg = readl_relaxed(sclk->enable + CLKGATE_SEPERATED_STATUS);
reg &= BIT(sclk->bit_idx);
return reg ? 1 : 0;
}
static struct clk_ops clkgate_separated_ops = {
.enable = clkgate_separated_enable,
.disable = clkgate_separated_disable,
.is_enabled = clkgate_separated_is_enabled,
};
struct clk *hisi_register_clkgate_sep(struct device *dev, const char *name,
const char *parent_name,
unsigned long flags,
void __iomem *reg, u8 bit_idx,
u8 clk_gate_flags, spinlock_t *lock)
{
struct clkgate_separated *sclk;
struct clk *clk;
struct clk_init_data init;
sclk = kzalloc(sizeof(*sclk), GFP_KERNEL);
if (!sclk) {
pr_err("%s: fail to allocate separated gated clk\n", __func__);
return ERR_PTR(-ENOMEM);
}
init.name = name;
init.ops = &clkgate_separated_ops;
init.flags = flags | CLK_IS_BASIC;
init.parent_names = (parent_name ? &parent_name : NULL);
init.num_parents = (parent_name ? 1 : 0);
sclk->enable = reg + CLKGATE_SEPERATED_ENABLE;
sclk->bit_idx = bit_idx;
sclk->flags = clk_gate_flags;
sclk->hw.init = &init;
sclk->lock = lock;
clk = clk_register(dev, &sclk->hw);
if (IS_ERR(clk))
kfree(sclk);
return clk;
}
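/*
 * Illustrative use (a sketch only: the clock and parent names, register base
 * and bit index below are invented for the example, not from a real SoC):
 *
 *	static DEFINE_SPINLOCK(my_clk_lock);
 *	struct clk *clk = hisi_register_clkgate_sep(NULL, "uart0_gate",
 *						    "periph_pll", 0,
 *						    base + 0x20, 4, 0,
 *						    &my_clk_lock);
 */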
|
C
|
MIT
|
2016k8009929004/libmpk/kernel/drivers/clk/hisilicon/clkgate-separated.c
|
45306e67-f03f-4bda-a5c1-9533de550a81
|
[{"tag": "EMAIL", "value": "li.xin@linaro.org", "start": 207, "end": 224, "context": " Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@linaro.org>\n *\n * This program is free software; you can red"}, {"tag": "NAME", "value": "Xin Li", "start": 199, "end": 205, "context": " Haojian Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@linaro.org>\n *\n * This program is free so"}, {"tag": "EMAIL", "value": "haojian.zhuang@linaro.org", "start": 166, "end": 191, "context": "013 Linaro Limited.\n *\n * Author: Haojian Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@linaro.org>\n *\n * This prog"}, {"tag": "NAME", "value": "Haojian Zhuang", "start": 150, "end": 164, "context": "right (c) 2012-2013 Linaro Limited.\n *\n * Author: Haojian Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@"}]
|
[{"tag": "EMAIL", "value": "li.xin@linaro.org", "start": 207, "end": 224, "context": " Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@linaro.org>\n *\n * This program is free software; you can red"}, {"tag": "NAME", "value": "Xin Li", "start": 199, "end": 205, "context": " Haojian Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@linaro.org>\n *\n * This program is free so"}, {"tag": "EMAIL", "value": "haojian.zhuang@linaro.org", "start": 166, "end": 191, "context": "013 Linaro Limited.\n *\n * Author: Haojian Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@linaro.org>\n *\n * This prog"}, {"tag": "NAME", "value": "Haojian Zhuang", "start": 150, "end": 164, "context": "right (c) 2012-2013 Linaro Limited.\n *\n * Author: Haojian Zhuang <haojian.zhuang@linaro.org>\n *\t Xin Li <li.xin@"}]
|
/**
* The Forgotten Server - a free and open-source MMORPG server emulator
* Copyright (C) 2015 Mark Samman <mark.samman@gmail.com>
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation; either version 2 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License along
* with this program; if not, write to the Free Software Foundation, Inc.,
* 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
*/
#include "otpch.h"
#include "housetile.h"
#include "house.h"
#include "game.h"
extern Game g_game;
HouseTile::HouseTile(int32_t x, int32_t y, int32_t z, House* _house) :
DynamicTile(x, y, z)
{
house = _house;
setFlag(TILESTATE_HOUSE);
}
void HouseTile::addThing(int32_t index, Thing* thing)
{
Tile::addThing(index, thing);
if (!thing->getParent()) {
return;
}
if (Item* item = thing->getItem()) {
updateHouse(item);
}
}
void HouseTile::internalAddThing(uint32_t index, Thing* thing)
{
Tile::internalAddThing(index, thing);
if (!thing->getParent()) {
return;
}
if (Item* item = thing->getItem()) {
updateHouse(item);
}
}
void HouseTile::updateHouse(Item* item)
{
if (item->getParent() != this) {
return;
}
Door* door = item->getDoor();
if (door) {
if (door->getDoorId() != 0) {
house->addDoor(door);
}
} else {
BedItem* bed = item->getBed();
if (bed) {
house->addBed(bed);
}
}
}
ReturnValue HouseTile::queryAdd(int32_t index, const Thing& thing, uint32_t count, uint32_t flags, Creature* actor/* = nullptr*/) const
{
if (const Creature* creature = thing.getCreature()) {
if (const Player* player = creature->getPlayer()) {
if (!house->isInvited(player)) {
return RETURNVALUE_PLAYERISNOTINVITED;
}
} else {
return RETURNVALUE_NOTPOSSIBLE;
}
} else if (thing.getItem() && actor) {
Player* actorPlayer = actor->getPlayer();
if (!house->isInvited(actorPlayer)) {
return RETURNVALUE_CANNOTTHROW;
}
}
return Tile::queryAdd(index, thing, count, flags, actor);
}
Tile* HouseTile::queryDestination(int32_t& index, const Thing& thing, Item** destItem, uint32_t& flags)
{
if (const Creature* creature = thing.getCreature()) {
if (const Player* player = creature->getPlayer()) {
if (!house->isInvited(player)) {
const Position& entryPos = house->getEntryPosition();
Tile* destTile = g_game.map.getTile(entryPos);
if (!destTile) {
std::cout << "Error: [HouseTile::queryDestination] House entry not correct"
<< " - Name: " << house->getName()
<< " - House id: " << house->getId()
<< " - Tile not found: " << entryPos << std::endl;
destTile = g_game.map.getTile(player->getTemplePosition());
if (!destTile) {
destTile = &(Tile::nullptr_tile);
}
}
index = -1;
*destItem = nullptr;
return destTile;
}
}
}
return Tile::queryDestination(index, thing, destItem, flags);
}
|
C++
|
MIT
|
marcielazevedo/ll/housetile.cpp
|
9e54b9c1-8b63-45fd-ace7-498f605cf269
|
[{"tag": "NAME", "value": "Mark Samman", "start": 99, "end": 110, "context": "rce MMORPG server emulator\n * Copyright (C) 2015 Mark Samman <mark.samman@gmail.com>\n *\n * This program is fre"}, {"tag": "EMAIL", "value": "mark.samman@gmail.com", "start": 112, "end": 133, "context": "rver emulator\n * Copyright (C) 2015 Mark Samman <mark.samman@gmail.com>\n *\n * This program is free software; you can red"}]
|
[{"tag": "NAME", "value": "Mark Samman", "start": 99, "end": 110, "context": "rce MMORPG server emulator\n * Copyright (C) 2015 Mark Samman <mark.samman@gmail.com>\n *\n * This program is fre"}, {"tag": "EMAIL", "value": "mark.samman@gmail.com", "start": 112, "end": 133, "context": "rver emulator\n * Copyright (C) 2015 Mark Samman <mark.samman@gmail.com>\n *\n * This program is free software; you can red"}]
|
cask "tip" do
version "2.0.0"
sha256 "4d986a461d1b24bb5776fb49063b9a1891939f336b306a6bc75f58d0a4e98bcb"
url "https://github.com/tanin47/tip/releases/download/v#{version}/Tip.zip"
name "Tip"
desc "Programmable tooltip that can be used with any app"
homepage "https://github.com/tanin47/tip"
app "Tip.app"
zap trash: "~/Library/Application Scripts/tanin.tip"
end
|
Ruby
|
BSD-2-Clause
|
030/homebrew-cask/Casks/tip.rb
|
967aab0e-2e62-4fa0-9813-c30d32ec6f6a
|
[{"tag": "SSH_KEY", "value": "4d986a461d1b24bb5776fb49063b9a1891939f336b306a6bc75f58d0a4e98bcb", "start": 42, "end": 106, "context": "cask \"tip\" do\n version \"2.0.0\"\n sha256 \"4d986a461d1b24bb5776fb49063b9a1891939f336b306a6bc75f58d0a4e98bcb\"\n\n url \"https://github.com/tanin47/tip/releases/"}]
|
[{"tag": "KEY", "value": "4d986a461d1b24bb5776fb49063b9a1891939f336b306a6bc75f58d0a4e98bcb", "start": 42, "end": 106, "context": "cask \"tip\" do\n version \"2.0.0\"\n sha256 \"4d986a461d1b24bb5776fb49063b9a1891939f336b306a6bc75f58d0a4e98bcb\"\n\n url \"https://github.com/tanin47/tip/releases/"}]
|
"""
Off Multipage Cheatsheet
https://github.com/daniellewisDL/streamlit-cheat-sheet
@daniellewisDL : https://github.com/daniellewisDL
"""
import streamlit as st
from pathlib import Path
import base64
from modules.toc import *
# Initial page config
st.set_page_config(
page_title='Code Compendium Intro Page',
layout="wide",
# initial_sidebar_state="expanded",
)
# col2.title("Table of contents")
# col2.write("http://localhost:8502/#display-progress-and-status")
# toc.header("Header 1")
# toc.header("Header 2")
# toc.subheader("Subheader 1")
# toc.subheader("Subheader 2")
# toc.generate()
# Thanks to streamlitopedia for the following code snippet
def img_to_bytes(img_path):
img_bytes = Path(img_path).read_bytes()
encoded = base64.b64encode(img_bytes).decode()
return encoded
# sidebar
# def cs_sidebar():
# st.sidebar.markdown('''[<img src='data:image/png;base64,{}' class='img-fluid' width=32 height=32>](https://streamlit.io/)'''.format(img_to_bytes("logomark_website.png")), unsafe_allow_html=True)
# st.sidebar.header('Streamlit cheat sheet')
# st.sidebar.markdown('''
# <small>Summary of the [docs](https://docs.streamlit.io/en/stable/api.html), as of [Streamlit v1.0.0](https://www.streamlit.io/).</small>
# ''', unsafe_allow_html=True)
# st.sidebar.markdown('__How to install and import__')
# st.sidebar.code('$ pip install streamlit')
# st.sidebar.markdown('Import convention')
# st.sidebar.code('>>> import streamlit as st')
# st.sidebar.markdown('__Add widgets to sidebar__')
# st.sidebar.code('''
# st.sidebar.<widget>
# >>> a = st.sidebar.radio(\'R:\',[1,2])
# ''')
# st.sidebar.markdown('__Command line__')
# st.sidebar.code('''
# $ streamlit --help
# $ streamlit run your_script.py
# $ streamlit hello
# $ streamlit config show
# $ streamlit cache clear
# $ streamlit docs
# $ streamlit --version
# ''')
# st.sidebar.markdown('__Pre-release features__')
# st.sidebar.markdown('[Beta and experimental features](https://docs.streamlit.io/en/stable/api.html#beta-and-experimental-features)')
# st.sidebar.code('''
# pip uninstall streamlit
# pip install streamlit-nightly --upgrade
# ''')
# st.sidebar.markdown('''<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', unsafe_allow_html=True)
# return None
##########################
# Main body of cheat sheet
##########################
def cs_body():
col1 = st.columns(1)[0]  # st.columns() returns a list of columns; take the single one
col1.header('Ryan Paik')
col1.markdown(
'''
*“You don't learn to walk by following rules. You learn by doing, and by falling over.”*
-Richard Branson
-----
''')
col1.subheader("Welcome to my Code Compendium.")
col1.markdown('''
This website/webapp is my personal cheatsheet for all the code snippets that I have needed over the past 2 years. This ended up being a quick detour into Streamlit that I fell in love with while I was building Flask APIs.
-----
**Programming is only as deep as you want to dive in.**
This webapp features the basic code snippets from all the "googling" I have done while programming.
I have taken the plunge and have created my own markdown notebooks organizing information from quick solution tidbits to documentation for programming languages.
Please visit my GitHub for practical code and my research notebooks:
*[rypaik (Ryan Paik) · GitHub](https://github.com/rypaik)*
If you would like access to my Gist please email me.
ryanpaik@protonmail.com
-----
**Bio:**
Currently a sophomore at the University of Illinois at Urbana-Champaign
Working nights on my degree in the System Engineering Program
**Hobbies:**
Trying to become a real guitar hero minus the game system, playing Valorant with the St Mark's crew, getting interesting eats no matter where I am, and playing toss with my baseball field rat of a cousin.
The newest hobby is figuring out what I can build with all the new breakthroughs in technology.
**Currently Working On**
Frameworks and Languages:
- Flask, Django, FastAPI, PyTorch, Streamlit, OpenCV, shell scripting, Python, C++
Databases:
- Postgres, Redis, MongoDB, and applicable ORMs
When I can get up for air:
- React, Swift (iOS), Rust, GO!!
- Find a team to get a paper in arXiv
**This site will be constantly updated as long as I program. Feel free to pass on the URL.**
''')
# col2.subheader('Display interactive widgets')
# col2.code('''
# st.button('Hit me')
# st.download_button('On the dl', data)
# st.checkbox('Check me out')
# st.radio('Radio', [1,2,3])
# st.selectbox('Select', [1,2,3])
# st.multiselect('Multiselect', [1,2,3])
# st.slider('Slide me', min_value=0, max_value=10)
# st.select_slider('Slide to select', options=[1,'2'])
# st.text_input('Enter some text')
# st.number_input('Enter a number')
# st.text_area('Area for textual entry')
# st.date_input('Date input')
# st.time_input('Time entry')
# st.file_uploader('File uploader')
# st.color_picker('Pick a color')
# ''')
# col2.write('Use widgets\' returned values in variables:')
# col2.code('''
# >>> for i in range(int(st.number_input('Num:'))): foo()
# >>> if st.sidebar.selectbox('I:',['f']) == 'f': b()
# >>> my_slider_val = st.slider('Quinn Mallory', 1, 88)
# >>> st.write(slider_val)
# ''')
# # Control flow
# col2.subheader('Control flow')
# col2.code('''
# st.stop()
# ''')
# # Lay out your app
# col2.subheader('Lay out your app')
# col2.code('''
# st.form('my_form_identifier')
# st.form_submit_button('Submit to me')
# st.container()
# st.columns(spec)
# >>> col1, col2 = st.columns(2)
# >>> col1.subheader('Columnisation')
# st.expander('Expander')
# >>> with st.expander('Expand'):
# >>> st.write('Juicy deets')
# ''')
# col2.write('Batch widgets together in a form:')
# col2.code('''
# >>> with st.form(key='my_form'):
# >>> text_input = st.text_input(label='Enter some text')
# >>> submit_button = st.form_submit_button(label='Submit')
# ''')
# # Display code
# col2.subheader('Display code')
# col2.code('''
# st.echo()
# >>> with st.echo():
# >>> st.write('Code will be executed and printed')
# ''')
# # Display progress and status
# col2.subheader('Display progress and status')
# col2.code('''
# st.progress(progress_variable_1_to_100)
# st.spinner()
# >>> with st.spinner(text='In progress'):
# >>> time.sleep(5)
# >>> st.success('Done')
# st.balloons()
# st.error('Error message')
# st.warning('Warning message')
# st.info('Info message')
# st.success('Success message')
# st.exception(e)
# ''')
# # Placeholders, help, and options
# col2.subheader('Placeholders, help, and options')
# col2.code('''
# st.empty()
# >>> my_placeholder = st.empty()
# >>> my_placeholder.text('Replaced!')
# st.help(pandas.DataFrame)
# st.get_option(key)
# st.set_option(key, value)
# st.set_page_config(layout='wide')
# ''')
# # Mutate data
# col2.subheader('Mutate data')
# col2.code('''
# DeltaGenerator.add_rows(data)
# >>> my_table = st.table(df1)
# >>> my_table.add_rows(df2)
# >>> my_chart = st.line_chart(df1)
# >>> my_chart.add_rows(df2)
# ''')
# # Optimize performance
# col2.subheader('Optimize performance')
# col2.code('''
# @st.cache
# >>> @st.cache
# ... def fetch_and_clean_data(url):
# ... # Mutate data at url
# ... return data
# >>> # Executes d1 as first time
# >>> d1 = fetch_and_clean_data(ref1)
# >>> # Does not execute d1; returns cached value, d1==d2
# >>> d2 = fetch_and_clean_data(ref1)
# >>> # Different arg, so function d1 executes
# >>> d3 = fetch_and_clean_data(ref2)
# ''')
# col2.subheader('Other key parts of the API')
# col2.markdown('''
# <small>[State API](https://docs.streamlit.io/en/stable/session_state_api.html)</small><br>
# <small>[Theme option reference](https://docs.streamlit.io/en/stable/theme_options.html)</small><br>
# <small>[Components API reference](https://docs.streamlit.io/en/stable/develop_streamlit_components.html)</small><br>
# <small>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br>
# ''', unsafe_allow_html=True)
# Column 3 TOC Generator
# col3.subheader('test')
# toc = Toc(col3)
# # col2.title("Table of contents")
# col3.write("http://localhost:8502/#display-progress-and-status", unsafe_allow_html=True)
# toc.header("Header 1")
# toc.header("Header 2")
# toc.generate()
# toc.subheader("Subheader 1")
# toc.subheader("Subheader 2")
# toc.generate()
# return None
# Run main()
# if __name__ == '__main__':
# main()
# def main():
def app():
# cs_sidebar()
cs_body()
return None
|
Python
|
MIT
|
rypaik/Streamlit_Ref/.history/pages/intro_20220303154534.py
|
18f1d9fd-384b-4698-ad25-bcc8bf81b1c6
|
[{"tag": "USERNAME", "value": "daniellewisDL", "start": 48, "end": 61, "context": "\"\"\"\nOff Multipage Cheatsheet\nhttps://github.com/daniellewisDL/streamlit-cheat-sheet\n@daniellewisDL : https://gi"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 2303, "end": 2316, "context": "<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', un"}, {"tag": "NAME", "value": "Richard Branson", "start": 2700, "end": 2715, "context": "arn by doing, and by falling over.\u201d*\n -Richard Branson\n-----\n''')\n\ncol1.subheader(\"Welcome to my Code Co"}, {"tag": "NAME", "value": "Ryan Paik", "start": 2547, "end": 2556, "context": "body():\n col1 = st.columns(1)\n col1.header('Ryan Paik')\n col1.markdown(\n '''\n *\u201cYou do"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 85, "end": 98, "context": "//github.com/daniellewisDL/streamlit-cheat-sheet\n@daniellewisDL : https://github.com/daniellewisDL\n\n\"\"\"\n\nimport s"}, {"tag": "EMAIL", "value": "ryanpaik@protonmail.com", "start": 3540, "end": 3563, "context": "ou would like access to my Gist please email me.\n\nryanpaik@protonmail.com\n\n\n\n\n\n-----\n\n**Bio:**\n\nCurrently a Sophomore at Un"}, {"tag": "NAME", "value": "Quinn Mallory", "start": 5322, "end": 5335, "context": "f']) == 'f': b()\n# >>> my_slider_val = st.slider('Quinn Mallory', 1, 88)\n# >>> st.write(slider_val)\n# ''')\n\n#"}, {"tag": "USERNAME", "value": "rypaik", "start": 3428, "end": 3434, "context": " for practical code and my research notebooks:\n\n*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*"}, {"tag": "NAME", "value": "Ryan Paik", "start": 3436, "end": 3445, "context": "ctical code and my research notebooks:\n\n*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you wo"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 120, "end": 133, "context": "t-cheat-sheet\n@daniellewisDL : https://github.com/daniellewisDL\n\n\"\"\"\n\nimport streamlit as st\nfrom pathlib import "}, {"tag": "USERNAME", "value": "daniellewisdl", "start": 8203, "end": 8216, "context": "mall>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br>\n# ''', "}, {"tag": "USERNAME", "value": "rypaik", "start": 3476, "end": 3482, "context": "*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you would like access to my Gist please ema"}]
|
[{"tag": "USERNAME", "value": "daniellewisDL", "start": 48, "end": 61, "context": "\"\"\"\nOff Multipage Cheatsheet\nhttps://github.com/daniellewisDL/streamlit-cheat-sheet\n@daniellewisDL : https://gi"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 2303, "end": 2316, "context": "<small>[st.cheat_sheet v1.0.0](https://github.com/daniellewisDL/streamlit-cheat-sheet) | Oct 2021</small>''', un"}, {"tag": "NAME", "value": "Richard Branson", "start": 2700, "end": 2715, "context": "arn by doing, and by falling over.\u201d*\n -Richard Branson\n-----\n''')\n\ncol1.subheader(\"Welcome to my Code Co"}, {"tag": "NAME", "value": "Ryan Paik", "start": 2547, "end": 2556, "context": "body():\n col1 = st.columns(1)\n col1.header('Ryan Paik')\n col1.markdown(\n '''\n *\u201cYou do"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 85, "end": 98, "context": "//github.com/daniellewisDL/streamlit-cheat-sheet\n@daniellewisDL : https://github.com/daniellewisDL\n\n\"\"\"\n\nimport s"}, {"tag": "EMAIL", "value": "ryanpaik@protonmail.com", "start": 3540, "end": 3563, "context": "ou would like access to my Gist please email me.\n\nryanpaik@protonmail.com\n\n\n\n\n\n-----\n\n**Bio:**\n\nCurrently a Sophomore at Un"}, {"tag": "NAME", "value": "Quinn Mallory", "start": 5322, "end": 5335, "context": "f']) == 'f': b()\n# >>> my_slider_val = st.slider('Quinn Mallory', 1, 88)\n# >>> st.write(slider_val)\n# ''')\n\n#"}, {"tag": "USERNAME", "value": "rypaik", "start": 3428, "end": 3434, "context": " for practical code and my research notebooks:\n\n*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*"}, {"tag": "NAME", "value": "Ryan Paik", "start": 3436, "end": 3445, "context": "ctical code and my research notebooks:\n\n*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you wo"}, {"tag": "USERNAME", "value": "daniellewisDL", "start": 120, "end": 133, "context": "t-cheat-sheet\n@daniellewisDL : https://github.com/daniellewisDL\n\n\"\"\"\n\nimport streamlit as st\nfrom pathlib import "}, {"tag": "USERNAME", "value": "daniellewisdl", "start": 8203, "end": 8216, "context": "mall>[API cheat sheet](https://share.streamlit.io/daniellewisdl/streamlit-cheat-sheet/app.py)</small><br>\n# ''', "}, {"tag": "USERNAME", "value": "rypaik", "start": 3476, "end": 3482, "context": "*[rypaik (Ryan Paik) \u00b7 GitHub](https://github.com/rypaik)*\n\nIf you would like access to my Gist please ema"}]
|
import os
from typing import Optional
from pytorchltr.utils.downloader import DefaultDownloadProgress
from pytorchltr.utils.downloader import Downloader
from pytorchltr.utils.file import validate_and_download
from pytorchltr.utils.file import extract_zip
from pytorchltr.utils.file import dataset_dir
from pytorchltr.datasets.svmrank.svmrank import SVMRankDataset
class MSLR10K(SVMRankDataset):
"""
Utility class for downloading and using the MSLR-WEB10K dataset:
https://www.microsoft.com/en-us/research/project/mslr/.
This dataset is a smaller sampled version of the MSLR-WEB30K dataset.
"""
downloader = Downloader(
url="https://api.onedrive.com/v1.0/shares/s!AtsMfWUz5l8nbOIoJ6Ks0bEMp78/root/content", # noqa: E501
target="MSLR-WEB10K.zip",
sha256_checksum="2902142ea33f18c59414f654212de5063033b707d5c3939556124b1120d3a0ba", # noqa: E501
progress_fn=DefaultDownloadProgress(),
postprocess_fn=extract_zip)
per_fold_expected_files = {
1: [
{"path": "Fold1/train.txt", "sha256": "6eb3fae4e1186e1242a6520f53a98abdbcde5b926dd19a28e51239284b1d55dc"}, # noqa: E501
{"path": "Fold1/test.txt", "sha256": "33fe002374a4fce58c4e12863e4eee74745d5672a26f3e4ddacc20ccfe7d6ba0"}, # noqa: E501
{"path": "Fold1/vali.txt", "sha256": "e86fb3fe7e8a5f16479da7ce04f783ae85735f17f66016786c3ffc797dd9d4db"} # noqa: E501
],
2: [
{"path": "Fold2/train.txt", "sha256": "40e4a2fcc237d9c164cbb6a3f2fa91fe6cf7d46a419d2f73e21cf090285659eb"}, # noqa: E501
{"path": "Fold2/test.txt", "sha256": "44add582ccd674cf63af24d3bf6e1074e87a678db77f00b44c37980a3010917a"}, # noqa: E501
{"path": "Fold2/vali.txt", "sha256": "33fe002374a4fce58c4e12863e4eee74745d5672a26f3e4ddacc20ccfe7d6ba0"} # noqa: E501
],
3: [
{"path": "Fold3/train.txt", "sha256": "f13005ceb8de0db76c93b02ee4b2bded6f925097d3ab7938931e8d07aa72acd7"}, # noqa: E501
{"path": "Fold3/test.txt", "sha256": "c0a5a3c6bd7790d0b4ff3d5e961d0c8c5f8ff149089ce492540fa63035801b7a"}, # noqa: E501
{"path": "Fold3/vali.txt", "sha256": "44add582ccd674cf63af24d3bf6e1074e87a678db77f00b44c37980a3010917a"} # noqa: E501
],
4: [
{"path": "Fold4/train.txt", "sha256": "6c1677cf9b2ed491e26ac6b8c8ca7dfae9c1a375e2bce8cba6df36ab67ce5836"}, # noqa: E501
{"path": "Fold4/test.txt", "sha256": "dc6083c24a5f0c03df3c91ad3eed7542694115b998acf046e51432cb7a22b848"}, # noqa: E501
{"path": "Fold4/vali.txt", "sha256": "c0a5a3c6bd7790d0b4ff3d5e961d0c8c5f8ff149089ce492540fa63035801b7a"} # noqa: E501
],
5: [
{"path": "Fold5/train.txt", "sha256": "4249797a2f0f46bff279973f0fb055d4a78f67f337769eabd56e82332c044794"}, # noqa: E501
{"path": "Fold5/test.txt", "sha256": "e86fb3fe7e8a5f16479da7ce04f783ae85735f17f66016786c3ffc797dd9d4db"}, # noqa: E501
{"path": "Fold5/vali.txt", "sha256": "dc6083c24a5f0c03df3c91ad3eed7542694115b998acf046e51432cb7a22b848"} # noqa: E501
]
}
splits = {
"train": "train.txt",
"test": "test.txt",
"vali": "vali.txt"
}
def __init__(self, location: str = dataset_dir("MSLR10K"),
split: str = "train", fold: int = 1, normalize: bool = True,
filter_queries: Optional[bool] = None, download: bool = True,
validate_checksums: bool = True):
"""
Args:
location: Directory where the dataset is located.
split: The data split to load ("train", "test" or "vali")
fold: Which data fold to load (1...5)
normalize: Whether to perform query-level feature
normalization.
filter_queries: Whether to filter out queries that
have no relevant items. If not given this will filter queries
for the test set but not the train set.
download: Whether to download the dataset if it does not
exist.
validate_checksums: Whether to validate the dataset files
via sha256.
"""
# Check if specified split and fold exists.
if split not in MSLR10K.splits.keys():
raise ValueError("unrecognized data split '%s'" % str(split))
if fold not in MSLR10K.per_fold_expected_files.keys():
raise ValueError("unrecognized data fold '%s'" % str(fold))
# Validate dataset exists and is correct, or download it.
validate_and_download(
location=location,
expected_files=MSLR10K.per_fold_expected_files[fold],
downloader=MSLR10K.downloader if download else None,
validate_checksums=validate_checksums)
# Only filter queries on non-train splits.
if filter_queries is None:
filter_queries = split != "train"
# Initialize the dataset.
datafile = os.path.join(location, "Fold%d" % fold,
MSLR10K.splits[split])
super().__init__(file=datafile, sparse=False, normalize=normalize,
filter_queries=filter_queries, zero_based="auto")
|
Python
|
MIT
|
SuperXiang/pytorchltr/pytorchltr/datasets/svmrank/mslr10k.py
|
14650e97-235f-41d5-8b3b-c544984e5adb
|
[]
|
[]
|
/*
Module : AADATE.CPP
Purpose: Implementation for the algorithms which convert between the Gregorian and Julian calendars and the Julian Day
Created: PJN / 29-12-2003
History: PJN / 10-11-2004 1. Fix for CAADate::Get so that it works correctly for proleptic calendar dates
PJN / 15-05-2005 1. Fix for CAADate::Set(double JD, bool bGregorianCalendar) not setting the m_bGregorianCalendar
member variable correctly.
PJN / 26-01-2006 1. After a bug report from Ing. Taras Kapuszczak that a round trip of the date 25 January 100 as
specified in the Gregorian calendar to the Julian day number and then back again produces the
incorrect date 26 January 100, I've spent some time looking into the 2 key Meeus Julian Day
algorithms. It seems that the algorithm which converts from a Calendar date to JD works ok for
proleptic dates, but the reverse algorithm which converts from a JD to a Calendar date does not.
Since I made the change in behaviour to support proleptic Gregorian dates to address issues
with the Moslem calendar (and since then I have discovered further unresolved bugs in the Moslem
calendar algorithms and advised people to check out my AA+ library instead), I am now reverting
these changes so that the date algorithms are now as presented in Meeus's book. This means that
dates after 15 October 1582 are assumed to be in the Gregorian calendar and dates before are
assumed to be in the Julian calendar. This change also means that some of the CAADate class
methods no longer require the now defunct "bool" parameter to specify which calendar the date
represents. As part of the testing for this release, verification code has been added to AATest.cpp
to test all the dates from JD 0 (i.e. 1 January -4712) to a date long in the future. Hopefully
with this verification code, we should have no more reported issues with the class CAADate. Again
if you would prefer a much more robust and comprehensive Date time class framework, don't forget
to check out the author's DTime+ library.
2. Optimized CAADate constructor code
3. Provided a static version of CAADate::DaysInMonth() method
4. Discovered an issue in CAADate::JulianToGregorian. It seems the algorithm presented in the
book to do conversion from the Julian to Gregorian calendar fails for Julian dates before the
Gregorian calendar reform in 1582. I have sent an email to Jean Meeus to find out if this is a
bug in my code or a deficiency in the algorithm presented. Currently the code will assert in this
function if it is called for a date before the Gregorian reform.
PJN / 27-01-2007 1. The static version of the Set method has been renamed to DateToJD to avoid any confusion with
the other Set methods. Thanks to Ing. Taras Kapuszczak for reporting this issue.
2. The method InGregorianCalendar has now also been renamed to the more appropriate
AfterPapalReform.
3. Reinstated the bGregorianCalendar parameter for the CAADate constructors and Set methods.
4. Changed the parameter layout for the static version of DaysInMonth
5. Addition of a InGregorianCalendar method.
6. Addition of a SetInGregorianCalendar method.
7. Reworked implementation of GregorianToJulian method.
8. Reworked implementation of JulianToGregorian method.
PJN / 07-02-2009 1. Updated the static version of CAADate::DaysInMonth to compile cleanly using code analysis
PJN / 29-03-2015 1. Fixed up some variable initializations around the use of modf. Thanks to Arnaud Cueille for
reporting this issue.
Copyright (c) 2003 - 2015 by PJ Naughter (Web: www.naughter.com, Email: pjna@naughter.com)
All rights reserved.
Copyright / Usage Details:
You are allowed to include the source code in any product (commercial, shareware, freeware or otherwise)
when your product is released in binary form. You are allowed to modify the source code in any way you want
except you cannot modify the copyright details at the top of each module. If you want to distribute source
code with your application, then you are only allowed to distribute versions released by the author. This is
to maintain a single distribution point for the source code.
*/
//////////////////////////// Includes /////////////////////////////////////////
#include "stdafx.h"
#include "AADate.h"
#include <cmath>
#include <cassert>
using namespace std;
//////////////////////////// Implementation ///////////////////////////////////
CAADate::CAADate() : m_dblJulian(0),
m_bGregorianCalendar(false)
{
}
CAADate::CAADate(long Year, long Month, double Day, bool bGregorianCalendar)
{
Set(Year, Month, Day, 0, 0, 0, bGregorianCalendar);
}
CAADate::CAADate(long Year, long Month, double Day, double Hour, double Minute, double Second, bool bGregorianCalendar)
{
Set(Year, Month, Day, Hour, Minute, Second, bGregorianCalendar);
}
CAADate::CAADate(double JD, bool bGregorianCalendar)
{
Set(JD, bGregorianCalendar);
}
bool CAADate::AfterPapalReform(long Year, long Month, double Day)
{
return ((Year > 1582) || ((Year == 1582) && (Month > 10)) || ((Year == 1582) && (Month == 10) && (Day >= 15)));
}
bool CAADate::AfterPapalReform(double JD)
{
return (JD >= 2299160.5);
}
double CAADate::DateToJD(long Year, long Month, double Day, bool bGregorianCalendar)
{
long Y = Year;
long M = Month;
if (M < 3)
{
Y = Y - 1;
M = M + 12;
}
long B = 0;
if (bGregorianCalendar)
{
long A = INT(Y / 100.0);
B = 2 - A + INT(A / 4.0);
}
return INT(365.25 * (Y + 4716)) + INT(30.6001 * (M + 1)) + Day + B - 1524.5;
}
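//Worked example (Meeus, "Astronomical Algorithms", example 7.a): the launch of
//Sputnik 1, 1957 October 4.81 in the Gregorian calendar, gives
//DateToJD(1957, 10, 4.81, true) = 2436116.31.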
bool CAADate::IsLeap(long Year, bool bGregorianCalendar)
{
if (bGregorianCalendar)
{
if ((Year % 100) == 0)
return ((Year % 400) == 0) ? true : false;
else
return ((Year % 4) == 0) ? true : false;
}
else
return ((Year % 4) == 0) ? true : false;
}
void CAADate::Set(long Year, long Month, double Day, double Hour, double Minute, double Second, bool bGregorianCalendar)
{
double dblDay = Day + (Hour/24) + (Minute/1440) + (Second / 86400);
Set(DateToJD(Year, Month, dblDay, bGregorianCalendar), bGregorianCalendar);
}
void CAADate::Get(long& Year, long& Month, long& Day, long& Hour, long& Minute, double& Second) const
{
double JD = m_dblJulian + 0.5;
double tempZ = 0;
double F = modf(JD, &tempZ);
long Z = static_cast<long>(tempZ);
long A;
if (m_bGregorianCalendar) //There is a difference here between the Meeus implementation and this one
//if (Z >= 2299161) //The Meeus implementation automatically assumes the Gregorian Calendar
//came into effect on 15 October 1582 (JD: 2299161), while the CAADate
//implementation has a "m_bGregorianCalendar" value to decide if the date
//was specified in the Gregorian or Julian Calendars. This difference
//means in effect that CAADate fully supports a proleptic version of the
//Julian calendar. This allows you to construct Julian dates after the Papal
//reform in 1582. This is useful if you want to construct dates in countries
//which did not immediately adopt the Gregorian calendar
{
long alpha = INT((Z - 1867216.25) / 36524.25);
A = Z + 1 + alpha - INT(INT(alpha)/4.0);
}
else
A = Z;
long B = A + 1524;
long C = INT((B - 122.1) / 365.25);
long D = INT(365.25 * C);
long E = INT((B - D) / 30.6001);
double dblDay = B - D - INT(30.6001 * E) + F;
Day = static_cast<long>(dblDay);
if (E < 14)
Month = E - 1;
else
Month = E - 13;
if (Month > 2)
Year = C - 4716;
else
Year = C - 4715;
F = modf(dblDay, &tempZ);
Hour = INT(F*24);
Minute = INT((F - (Hour)/24.0)*1440.0);
Second = (F - (Hour / 24.0) - (Minute / 1440.0)) * 86400.0;
}
void CAADate::Set(double JD, bool bGregorianCalendar)
{
m_dblJulian = JD;
SetInGregorianCalendar(bGregorianCalendar);
}
void CAADate::SetInGregorianCalendar(bool bGregorianCalendar)
{
bool bAfterPapalReform = (m_dblJulian >= 2299160.5);
#ifdef _DEBUG
if (bGregorianCalendar) //We do not allow storage of proleptic Gregorian dates
assert(bAfterPapalReform);
#endif
m_bGregorianCalendar = bGregorianCalendar && bAfterPapalReform;
}
long CAADate::Day() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return Day;
}
long CAADate::Month() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return Month;
}
long CAADate::Year() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return Year;
}
long CAADate::Hour() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return Hour;
}
long CAADate::Minute() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return Minute;
}
double CAADate::Second() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return Second;
}
CAADate::DAY_OF_WEEK CAADate::DayOfWeek() const
{
return static_cast<DAY_OF_WEEK>((static_cast<long>(m_dblJulian + 1.5) % 7));
}
long CAADate::DaysInMonth(long Month, bool bLeap)
{
//Validate our parameters
assert(Month >= 1 && Month <= 12);
#ifdef _MSC_VER
__analysis_assume(Month >= 1 && Month <= 12);
#endif
int MonthLength[] =
{
31, 28, 31, 30, 31, 30,
31, 31, 30, 31, 30, 31
};
if (bLeap)
MonthLength[1]++;
return MonthLength[Month-1];
}
long CAADate::DaysInMonth() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return DaysInMonth(Month, IsLeap(Year, m_bGregorianCalendar));
}
long CAADate::DaysInYear() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
if (IsLeap(Year, m_bGregorianCalendar))
return 366;
else
return 365;
}
double CAADate::DayOfYear() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
return DayOfYear(m_dblJulian, Year, AfterPapalReform(Year, 1, 1));
}
double CAADate::DayOfYear(double JD, long Year, bool bGregorianCalendar)
{
return JD - DateToJD(Year, 1, 1, bGregorianCalendar) + 1;
}
double CAADate::FractionalYear() const
{
long Year = 0;
long Month = 0;
long Day = 0;
long Hour = 0;
long Minute = 0;
double Second = 0;
Get(Year, Month, Day, Hour, Minute, Second);
long DaysInYear;
if (IsLeap(Year, m_bGregorianCalendar))
DaysInYear = 366;
else
DaysInYear = 365;
return Year + ((m_dblJulian - DateToJD(Year, 1, 1, AfterPapalReform(Year, 1, 1))) / DaysInYear);
}
bool CAADate::Leap() const
{
return IsLeap(Year(), m_bGregorianCalendar);
}
void CAADate::DayOfYearToDayAndMonth(long DayOfYear, bool bLeap, long& DayOfMonth, long& Month)
{
long K = bLeap ? 1 : 2;
Month = INT(9*(K + DayOfYear)/275.0 + 0.98);
if (DayOfYear < 32)
Month = 1;
DayOfMonth = DayOfYear - INT((275*Month)/9.0) + (K*INT((Month + 9)/12.0)) + 30;
}
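//Worked example: in a leap year, day of year 113 gives Month = 4 and
//DayOfMonth = 22, i.e. 22 April.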
long CAADate::INT(double value)
{
if (value >= 0)
return static_cast<long>(value);
else
return static_cast<long>(value - 1);
}
CAACalendarDate CAADate::JulianToGregorian(long Year, long Month, long Day)
{
CAADate date(Year, Month, Day, false);
date.SetInGregorianCalendar(true);
CAACalendarDate GregorianDate;
long Hour = 0;
long Minute = 0;
double Second = 0;
date.Get(GregorianDate.Year, GregorianDate.Month, GregorianDate.Day, Hour, Minute, Second);
return GregorianDate;
}
CAACalendarDate CAADate::GregorianToJulian(long Year, long Month, long Day)
{
CAADate date(Year, Month, Day, true);
date.SetInGregorianCalendar(false);
CAACalendarDate JulianDate;
long Hour = 0;
long Minute = 0;
double Second = 0;
date.Get(JulianDate.Year, JulianDate.Month, JulianDate.Day, Hour, Minute, Second);
return JulianDate;
}
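//Worked example: GregorianToJulian(1582, 10, 15) returns 1582 October 5 in the
//Julian calendar, since the papal reform made the day after Julian
//1582 October 4 become Gregorian 1582 October 15.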
|
C++
|
MIT
|
farhad01/SimpleVectorMapRenderer/Example/Pods/WhirlyGlobe/WhirlyGlobeSrc/local_libs/aaplus/AADate.cpp
|
e9b57134-829b-4182-b4e7-627366f86bd1
|
[{"tag": "EMAIL", "value": "pjna@naughter.com", "start": 4535, "end": 4552, "context": "015 by PJ Naughter (Web: www.naughter.com, Email: pjna@naughter.com)\r\n\r\nAll rights reserved.\r\n\r\nCopyright / Usage Det"}, {"tag": "NAME", "value": "PJ Naughter", "start": 4492, "end": 4503, "context": "rting this issue.\r\n\r\nCopyright (c) 2003 - 2015 by PJ Naughter (Web: www.naughter.com, Email: pjna@naughter.com)"}]
|
[{"tag": "EMAIL", "value": "pjna@naughter.com", "start": 4535, "end": 4552, "context": "015 by PJ Naughter (Web: www.naughter.com, Email: pjna@naughter.com)\r\n\r\nAll rights reserved.\r\n\r\nCopyright / Usage Det"}, {"tag": "NAME", "value": "PJ Naughter", "start": 4492, "end": 4503, "context": "rting this issue.\r\n\r\nCopyright (c) 2003 - 2015 by PJ Naughter (Web: www.naughter.com, Email: pjna@naughter.com)"}]
|
/*
* SpanDSP - a series of DSP components for telephony
*
* super_tone_rx.h - Flexible telephony supervisory tone detection.
*
* Written by Steve Underwood <steveu@coppice.org>
*
* Copyright (C) 2003 Steve Underwood
*
* All rights reserved.
*
* This program is free software; you can redistribute it and/or modify
* it under the terms of the GNU Lesser General Public License version 2.1,
* as published by the Free Software Foundation.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public
* License along with this program; if not, write to the Free Software
* Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
*
* $Id: super_tone_rx.h,v 1.21 2009/02/10 13:06:47 steveu Exp $
*/
#if !defined(_SPANDSP_SUPER_TONE_RX_H_)
#define _SPANDSP_SUPER_TONE_RX_H_
/*! \page super_tone_rx_page Supervisory tone detection
\section super_tone_rx_page_sec_1 What does it do?
The supervisory tone detector may be configured to detect most of the world's
telephone supervisory tones - things like ringback, busy, number unobtainable,
and so on.
\section super_tone_rx_page_sec_2 How does it work?
The supervisory tone detector is passed a series of data structures describing
the tone patterns - the frequencies and cadencing - of the tones to be searched
for. It constructs one or more Goertzel filters to monitor the required tones.
If tones are close in frequency, a single Goertzel filter set to the centre of the
frequency range will be used. This optimises the efficiency of the detector. The
Goertzel filters are applied without any special window function
(i.e. they use a rectangular window), so they have a sinc-like response.
However, for most tone patterns their rejection qualities are adequate.
The detector aims to meet the need of the standard call progress tones, to
ITU-T E.180/Q.35 (busy, dial, ringback, reorder). Also, the extended tones,
to ITU-T E.180, Supplement 2 and EIA/TIA-464-A (recall dial tone, special
ringback tone, intercept tone, call waiting tone, busy verification tone,
executive override tone, confirmation tone).
*/
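/* A minimal usage sketch (illustrative only: the 425Hz tone with a roughly
   0.5s on/off cadence below is a made-up busy-tone description, and "amp" and
   "samples" are assumed to come from the caller's audio source):

    static void on_tone(void *user_data, int code, int level, int delay)
    {
        // React to the detected (or ended) tone here.
    }

    super_tone_rx_descriptor_t *desc = super_tone_rx_make_descriptor(NULL);
    int busy_tone = super_tone_rx_add_tone(desc);
    super_tone_rx_add_element(desc, busy_tone, 425, -1, 400, 600);
    super_tone_rx_add_element(desc, busy_tone, -1, -1, 400, 600);
    super_tone_rx_state_t *rx = super_tone_rx_init(NULL, desc, on_tone, NULL);
    super_tone_rx(rx, amp, samples);
*/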
/*! Tone detection indication callback routine */
typedef void (*tone_report_func_t)(void *user_data, int code, int level, int delay);
typedef struct super_tone_rx_segment_s super_tone_rx_segment_t;
typedef struct super_tone_rx_descriptor_s super_tone_rx_descriptor_t;
typedef struct super_tone_rx_state_s super_tone_rx_state_t;
#if defined(__cplusplus)
extern "C"
{
#endif
/*! Create a new supervisory tone detector descriptor.
\param desc The supervisory tone set descriptor. If NULL, the routine will allocate space for a
descriptor.
\return The supervisory tone set descriptor.
*/
SPAN_DECLARE(super_tone_rx_descriptor_t *) super_tone_rx_make_descriptor(super_tone_rx_descriptor_t *desc);
/*! Free a supervisory tone detector descriptor.
\param desc The supervisory tone set descriptor.
\return 0 for OK, -1 for fail.
*/
SPAN_DECLARE(int) super_tone_rx_free_descriptor(super_tone_rx_descriptor_t *desc);
/*! Add a new tone pattern to a supervisory tone detector set.
\param desc The supervisory tone set descriptor.
\return The new tone ID. */
SPAN_DECLARE(int) super_tone_rx_add_tone(super_tone_rx_descriptor_t *desc);
/*! Add a new tone pattern element to a tone pattern in a supervisory tone detector.
\param desc The supervisory tone set descriptor.
\param tone The tone ID within the descriptor.
\param f1 Frequency 1 (-1 for a silent period).
\param f2 Frequency 2 (-1 for a silent period, or only one frequency).
\param min The minimum duration, in ms.
\param max The maximum duration, in ms.
\return The new number of elements in the tone description.
*/
SPAN_DECLARE(int) super_tone_rx_add_element(super_tone_rx_descriptor_t *desc,
int tone,
int f1,
int f2,
int min,
int max);
/*! Initialise a supervisory tone detector.
\param s The supervisory tone detector context.
\param desc The tone descriptor.
\param callback The callback routine called to report the valid detection or termination of
one of the monitored tones.
\param user_data An opaque pointer passed when calling the callback routine.
\return The supervisory tone detector context.
*/
SPAN_DECLARE(super_tone_rx_state_t *) super_tone_rx_init(super_tone_rx_state_t *s,
super_tone_rx_descriptor_t *desc,
tone_report_func_t callback,
void *user_data);
/*! Release a supervisory tone detector.
\param s The supervisory tone context.
\return 0 for OK, -1 for fail.
*/
SPAN_DECLARE(int) super_tone_rx_release(super_tone_rx_state_t *s);
/*! Free a supervisory tone detector.
\param s The supervisory tone context.
\return 0 for OK, -1 for fail.
*/
SPAN_DECLARE(int) super_tone_rx_free(super_tone_rx_state_t *s);
/*! Define a callback routine to be called each time a tone pattern element is complete. This is
mostly used when analysing a tone.
\param s The supervisory tone context.
\param callback The callback routine.
*/
SPAN_DECLARE(void) super_tone_rx_segment_callback(super_tone_rx_state_t *s,
void (*callback)(void *data, int f1, int f2, int duration));
/*! Apply supervisory tone detection processing to a block of audio samples.
\brief Apply supervisory tone detection processing to a block of audio samples.
\param super The supervisory tone context.
\param amp The audio sample buffer.
\param samples The number of samples in the buffer.
\return The number of samples processed.
*/
SPAN_DECLARE(int) super_tone_rx(super_tone_rx_state_t *super, const int16_t amp[], int samples);
#if defined(__cplusplus)
}
#endif
#endif
/*- End of file ------------------------------------------------------------*/
|
C
|
MIT
|
BRILLIANT-ESYSTEMS-LIMITED/sipdroid/app/src/main/jni/spandsp/spandsp/super_tone_rx.h
|
428abf5d-a5c9-4b01-aac5-fa1e92f65c4f
|
[{"tag": "NAME", "value": "Steve Underwood", "start": 145, "end": 160, "context": "hony supervisory tone detection.\n *\n * Written by Steve Underwood <steveu@coppice.org>\n *\n * Copyright (C) 2003 Ste"}, {"tag": "EMAIL", "value": "steveu@coppice.org", "start": 162, "end": 180, "context": "tone detection.\n *\n * Written by Steve Underwood <steveu@coppice.org>\n *\n * Copyright (C) 2003 Steve Underwood\n *\n * A"}, {"tag": "NAME", "value": "Steve Underwood", "start": 207, "end": 222, "context": "ood <steveu@coppice.org>\n *\n * Copyright (C) 2003 Steve Underwood\n *\n * All rights reserved.\n *\n * This program is "}]
|
[{"tag": "NAME", "value": "Steve Underwood", "start": 145, "end": 160, "context": "hony supervisory tone detection.\n *\n * Written by Steve Underwood <steveu@coppice.org>\n *\n * Copyright (C) 2003 Ste"}, {"tag": "EMAIL", "value": "steveu@coppice.org", "start": 162, "end": 180, "context": "tone detection.\n *\n * Written by Steve Underwood <steveu@coppice.org>\n *\n * Copyright (C) 2003 Steve Underwood\n *\n * A"}, {"tag": "NAME", "value": "Steve Underwood", "start": 207, "end": 222, "context": "ood <steveu@coppice.org>\n *\n * Copyright (C) 2003 Steve Underwood\n *\n * All rights reserved.\n *\n * This program is "}]
|
/*
* yosys -- Yosys Open SYnthesis Suite
*
* Copyright (C) 2012 Clifford Wolf <clifford@clifford.at>
*
* Permission to use, copy, modify, and/or distribute this software for any
* purpose with or without fee is hereby granted, provided that the above
* copyright notice and this permission notice appear in all copies.
*
* THE SOFTWARE IS PROVIDED "AS IS" AND THE AUTHOR DISCLAIMS ALL WARRANTIES
* WITH REGARD TO THIS SOFTWARE INCLUDING ALL IMPLIED WARRANTIES OF
* MERCHANTABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR BE LIABLE FOR
* ANY SPECIAL, DIRECT, INDIRECT, OR CONSEQUENTIAL DAMAGES OR ANY DAMAGES
* WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS, WHETHER IN AN
* ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS ACTION, ARISING OUT OF
* OR IN CONNECTION WITH THE USE OR PERFORMANCE OF THIS SOFTWARE.
*
* ---
*
* The Verilog frontend.
*
* This frontend is using the AST frontend library (see frontends/ast/).
* Thus this frontend does not generate RTLIL code directly but creates an
* AST directly from the Verilog parse tree and then passes this AST to
* the AST frontend library.
*
* ---
*
* Ad-hoc implementation of a Verilog preprocessor. The directives `define,
* `include, `ifdef, `ifndef, `else and `endif are handled here. All other
* directives are handled by the lexer (see verilog_lexer.l).
*
*/
#include "preproc.h"
#include "verilog_frontend.h"
#include "kernel/log.h"
#include <assert.h>
#include <stdarg.h>
#include <stdio.h>
#include <string.h>
YOSYS_NAMESPACE_BEGIN
using namespace VERILOG_FRONTEND;
static std::list<std::string> output_code;
static std::list<std::string> input_buffer;
static size_t input_buffer_charp;
static void return_char(char ch)
{
if (input_buffer_charp == 0)
input_buffer.push_front(std::string() + ch);
else
input_buffer.front()[--input_buffer_charp] = ch;
}
static void insert_input(std::string str)
{
if (input_buffer_charp != 0) {
input_buffer.front() = input_buffer.front().substr(input_buffer_charp);
input_buffer_charp = 0;
}
input_buffer.push_front(str);
}
static char next_char()
{
if (input_buffer.empty())
return 0;
log_assert(input_buffer_charp <= input_buffer.front().size());
if (input_buffer_charp == input_buffer.front().size()) {
input_buffer_charp = 0;
input_buffer.pop_front();
return next_char();
}
char ch = input_buffer.front()[input_buffer_charp++];
return ch == '\r' ? next_char() : ch;
}
static std::string skip_spaces()
{
std::string spaces;
while (1) {
char ch = next_char();
if (ch == 0)
break;
if (ch != ' ' && ch != '\t') {
return_char(ch);
break;
}
spaces += ch;
}
return spaces;
}
static std::string next_token(bool pass_newline = false)
{
std::string token;
char ch = next_char();
if (ch == 0)
return token;
token += ch;
if (ch == '\n') {
if (pass_newline) {
output_code.push_back(token);
return "";
}
return token;
}
if (ch == ' ' || ch == '\t')
{
while ((ch = next_char()) != 0) {
if (ch != ' ' && ch != '\t') {
return_char(ch);
break;
}
token += ch;
}
}
else if (ch == '"')
{
while ((ch = next_char()) != 0) {
token += ch;
if (ch == '"')
break;
if (ch == '\\') {
if ((ch = next_char()) != 0)
token += ch;
}
}
if (token == "\"\"" && (ch = next_char()) != 0) {
if (ch == '"')
token += ch;
else
return_char(ch);
}
}
else if (ch == '/')
{
if ((ch = next_char()) != 0) {
if (ch == '/') {
token += '*';
char last_ch = 0;
while ((ch = next_char()) != 0) {
if (ch == '\n') {
return_char(ch);
break;
}
if (last_ch != '*' || ch != '/') {
token += ch;
last_ch = ch;
}
}
token += " */";
}
else if (ch == '*') {
token += '*';
int newline_count = 0;
char last_ch = 0;
while ((ch = next_char()) != 0) {
if (ch == '\n') {
newline_count++;
token += ' ';
} else
token += ch;
if (last_ch == '*' && ch == '/')
break;
last_ch = ch;
}
while (newline_count-- > 0)
return_char('\n');
}
else
return_char(ch);
}
}
else
{
const char *ok = "abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ$0123456789";
if (ch == '`' || strchr(ok, ch) != NULL)
{
char first = ch;
ch = next_char();
if (first == '`' && (ch == '"' || ch == '`')) {
token += ch;
} else do {
if (strchr(ok, ch) == NULL) {
return_char(ch);
break;
}
token += ch;
} while ((ch = next_char()) != 0);
}
}
return token;
}
struct macro_arg_t
{
macro_arg_t(const std::string &name_, const char *default_value_)
: name(name_),
has_default(default_value_ != nullptr),
default_value(default_value_ ? default_value_ : "")
{}
std::string name;
bool has_default;
std::string default_value;
};
static bool all_white(const std::string &str)
{
for (char c : str)
if (!isspace(c))
return false;
return true;
}
struct arg_map_t
{
arg_map_t()
{}
void add_arg(const std::string &name, const char *default_value)
{
if (find(name)) {
log_error("Duplicate macro arguments with name `%s'.\n", name.c_str());
}
name_to_pos[name] = args.size();
args.push_back(macro_arg_t(name, default_value));
}
// Find an argument by name; return nullptr if it doesn't exist. If pos is not null, write
// the argument's position to it on success.
const macro_arg_t *find(const std::string &name, int *pos = nullptr) const
{
auto it = name_to_pos.find(name);
if (it == name_to_pos.end())
return nullptr;
if (pos) *pos = it->second;
return &args[it->second];
}
// Construct the name for the local macro definition we use for the given argument
// (something like macro_foobar_arg2). This doesn't include the leading backtick.
static std::string str_token(const std::string ¯o_name, int pos)
{
return stringf("macro_%s_arg%d", macro_name.c_str(), pos);
}
// Return definitions for the macro arguments (so that substituting in the macro body and
// then performing macro expansion will do argument substitution properly).
std::vector<std::pair<std::string, std::string>>
get_vals(const std::string ¯o_name, const std::vector<std::string> &arg_vals) const
{
std::vector<std::pair<std::string, std::string>> ret;
for (int i = 0; i < GetSize(args); ++ i) {
// The SystemVerilog rules are:
//
// - If the call site specifies an argument and it's not whitespace, use
// it.
//
// - Otherwise, if the argument has a default value, use it.
//
// - Otherwise, if the call site specified whitespace, use that.
//
// - Otherwise, error.
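//
// For example, given a hypothetical `define M(a, b = 42):
// `M(1, 2) expands with a = "1", b = "2" (given, non-whitespace);
// `M(1) expands with b = "42" (argument missing, default used); and
// `M(1, ) also expands with b = "42" (whitespace given, default wins).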
const std::string *dflt = nullptr;
if (args[i].has_default)
dflt = &args[i].default_value;
const std::string *given = nullptr;
if (i < GetSize(arg_vals))
given = &arg_vals[i];
const std::string *val = nullptr;
if (given && (! (dflt && all_white(*given))))
val = given;
else if (dflt)
val = dflt;
else if (given)
val = given;
else
log_error("Cannot expand macro `%s by giving only %d argument%s "
"(argument %d has no default).\n",
macro_name.c_str(), GetSize(arg_vals),
(GetSize(arg_vals) == 1 ? "" : "s"), i + 1);
assert(val);
ret.push_back(std::make_pair(str_token(macro_name, i), * val));
}
return ret;
}
std::vector<macro_arg_t> args;
std::map<std::string, int> name_to_pos;
};
struct define_body_t
{
define_body_t(const std::string &body, const arg_map_t *args = nullptr)
: body(body),
has_args(args != nullptr),
args(args ? *args : arg_map_t())
{}
std::string body;
bool has_args;
arg_map_t args;
};
define_map_t::define_map_t()
{
add("YOSYS", "1");
add(formal_mode ? "FORMAL" : "SYNTHESIS", "1");
}
// We must define this destructor here (rather than relying on the default), because we need to
// define it somewhere we've got a complete definition of define_body_t.
define_map_t::~define_map_t()
{}
void
define_map_t::add(const std::string &name, const std::string &txt, const arg_map_t *args)
{
defines[name] = std::unique_ptr<define_body_t>(new define_body_t(txt, args));
}
void define_map_t::merge(const define_map_t &map)
{
for (const auto &pr : map.defines) {
// These contortions are so that we take a copy of each definition body in
// map.defines.
defines[pr.first] = std::unique_ptr<define_body_t>(new define_body_t(*pr.second));
}
}
const define_body_t *define_map_t::find(const std::string &name) const
{
auto it = defines.find(name);
return (it == defines.end()) ? nullptr : it->second.get();
}
void define_map_t::erase(const std::string &name)
{
defines.erase(name);
}
void define_map_t::clear()
{
defines.clear();
}
void define_map_t::log() const
{
for (auto &it : defines) {
const std::string &name = it.first;
const define_body_t &body = *it.second;
Yosys::log("`define %s%s %s\n",
name.c_str(), body.has_args ? "()" : "", body.body.c_str());
}
}
static void input_file(std::istream &f, std::string filename)
{
char buffer[513];
int rc;
insert_input("");
auto it = input_buffer.begin();
input_buffer.insert(it, "`file_push \"" + filename + "\"\n");
while ((rc = readsome(f, buffer, sizeof(buffer)-1)) > 0) {
buffer[rc] = 0;
input_buffer.insert(it, buffer);
}
input_buffer.insert(it, "\n`file_pop\n");
}
// Read tokens to get one argument (either a macro argument at a callsite or a default argument in a
// macro definition). Writes the argument to dest. Returns true if we finished with ')' (the end of
// the argument list); false if we finished with ','.
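// For example, on the remaining input "f(a, b), c)" the first call writes
// "f(a,b)" to dest (leading whitespace within the argument is skipped) and
// returns false at the comma; a second call writes "c" and returns true.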
static bool read_argument(std::string &dest)
{
std::vector<char> openers;
for (;;) {
skip_spaces();
std::string tok = next_token(true);
if (tok == ")") {
if (openers.empty())
return true;
if (openers.back() != '(')
log_error("Mismatched brackets in macro argument: %c and %c.\n",
openers.back(), tok[0]);
openers.pop_back();
dest += tok;
continue;
}
if (tok == "]") {
char opener = openers.empty() ? '(' : openers.back();
if (opener != '[')
log_error("Mismatched brackets in macro argument: %c and %c.\n",
opener, tok[0]);
openers.pop_back();
dest += tok;
continue;
}
if (tok == "}") {
char opener = openers.empty() ? '(' : openers.back();
if (opener != '{')
log_error("Mismatched brackets in macro argument: %c and %c.\n",
opener, tok[0]);
openers.pop_back();
dest += tok;
continue;
}
if (tok == "," && openers.empty()) {
return false;
}
if (tok == "(" || tok == "[" || tok == "{")
openers.push_back(tok[0]);
dest += tok;
}
}
static bool try_expand_macro(define_map_t &defines, std::string &tok)
{
if (tok == "`\"") {
std::string literal("\"");
// Expand string literal
while (!input_buffer.empty()) {
std::string ntok = next_token();
if (ntok == "`\"") {
insert_input(literal+"\"");
return true;
} else if (!try_expand_macro(defines, ntok)) {
literal += ntok;
}
}
return false; // error - unmatched `"
}
if (tok == "``") {
// Swallow `` in macro expansion
return true;
}
if (tok.size() <= 1 || tok[0] != '`')
return false;
// This token looks like a macro name (`foo).
std::string macro_name = tok.substr(1);
const define_body_t *body = defines.find(macro_name);
if (!body) {
// Apparently not a name we know.
return false;
}
std::string skipped_spaces = skip_spaces();
tok = next_token(false);
if (tok == "(" && body->has_args) {
std::vector<std::string> args;
bool done = false;
while (!done) {
std::string arg;
done = read_argument(arg);
args.push_back(arg);
}
for (const auto &pr : body->args.get_vals(macro_name, args)) {
defines.add(pr.first, pr.second);
}
} else {
insert_input(tok);
insert_input(skipped_spaces);
}
insert_input(body->body);
return true;
}
// Read the arguments for a `define preprocessor directive with formal arguments. This is called
// just after reading the token containing "(". Returns the number of newlines to emit afterwards to
// keep line numbers in sync, together with the map from argument name to data (pos and default
// value).
static std::pair<int, arg_map_t>
read_define_args()
{
// Each argument looks like one of the following:
//
// identifier
// identifier = default_text
// identifier =
//
// The first example is an argument with no default value. The second is an argument whose
// default value is default_text. The third is an argument with default value the empty
// string.
int newline_count = 0;
arg_map_t args;
// FSM state.
//
// 0: At start of identifier
// 1: After identifier (stored in arg_name)
// 2: After closing paren
int state = 0;
std::string arg_name;
skip_spaces();
for (;;) {
if (state == 2)
// We've read the closing paren.
break;
std::string tok = next_token();
// Cope with escaped EOLs
if (tok == "\\") {
char ch = next_char();
if (ch == '\n') {
// Eat the \, the \n and any trailing space and keep going, but
// count the eaten newline so it can be re-emitted afterwards to
// keep line numbers in sync (that is what newline_count is for).
newline_count++;
skip_spaces();
continue;
} else {
// There aren't any other situations where a backslash makes sense.
log_error("Backslash in macro arguments (not at end of line).\n");
}
}
switch (state) {
case 0:
// At start of argument. If the token is ')', we've presumably just seen
// something like "`define foo() ...". Set state to 2 to finish. Otherwise,
// the token should be a valid simple identifier, but we'll allow anything
// here.
if (tok == ")") {
state = 2;
} else {
arg_name = tok;
state = 1;
}
skip_spaces();
break;
case 1:
// After argument. The token should either be an equals sign or a comma or
// closing paren.
if (tok == "=") {
std::string default_val;
// Read an argument into default_val and set state to 2 if we're at
// the end of the argument list; 0 if we hit a comma.
state = read_argument(default_val) ? 2 : 0;
args.add_arg(arg_name, default_val.c_str());
skip_spaces();
break;
}
if (tok == ",") {
// Take the identifier as an argument with no default value.
args.add_arg(arg_name, nullptr);
state = 0;
skip_spaces();
break;
}
if (tok == ")") {
// As with comma, but set state to 2 (end of args)
args.add_arg(arg_name, nullptr);
state = 2;
skip_spaces();
break;
}
log_error("Trailing contents after identifier in macro argument `%s': "
"expected '=', ',' or ')'.\n",
arg_name.c_str());
default:
// The only FSM states are 0-2 and we dealt with 2 at the start of the loop.
__builtin_unreachable();
}
}
return std::make_pair(newline_count, args);
}
// Read a `define preprocessor directive. This is called just after reading the token containing
// "`define".
static void
read_define(const std::string &filename,
define_map_t &defines_map,
define_map_t &global_defines_cache)
{
std::string name, value;
arg_map_t args;
skip_spaces();
name = next_token(true);
bool here_doc_mode = false;
int newline_count = 0;
// The FSM state starts at 0. If it sees space (or enters here_doc_mode), it assumes this is
// a macro without formal arguments and jumps to state 1.
//
// In state 0, if it sees an opening parenthesis, it assumes this is a macro with formal
// arguments. It reads the arguments with read_define_args() and then jumps to state 2.
//
// In states 1 or 2, the FSM reads tokens to the end of line (or end of here_doc): this is
// the body of the macro definition.
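//
// Two illustrative inputs (added for clarity; the names are assumptions):
//
//     `define WIDTH 8                  -- space after the name: state 0 -> 1
//     `define MAX(a, b=0) ((a)>(b))    -- '(' after the name: state 0 -> 2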
int state = 0;
if (skip_spaces() != "")
state = 1;
for (;;) {
std::string tok = next_token();
if (tok.empty())
break;
// printf("define-tok: >>%s<<\n", tok != "\n" ? tok.c_str() : "NEWLINE");
if (tok == "\"\"\"") {
here_doc_mode = !here_doc_mode;
continue;
}
if (state == 0 && tok == "(") {
auto pr = read_define_args();
newline_count += pr.first;
args = pr.second;
state = 2;
continue;
}
// This token isn't an opening parenthesis immediately following the macro name, so
// it's presumably at or after the start of the macro body. If state isn't already 2
// (which would mean we'd parsed an argument list), set it to 1.
if (state == 0) {
state = 1;
}
if (tok == "\n") {
if (here_doc_mode) {
value += " ";
newline_count++;
} else {
return_char('\n');
break;
}
continue;
}
if (tok == "\\") {
char ch = next_char();
if (ch == '\n') {
value += " ";
newline_count++;
} else {
value += std::string("\\");
return_char(ch);
}
continue;
}
// Is this token the name of a macro argument? If so, replace it with a magic symbol
// that we'll replace with the argument value.
int arg_pos;
if (args.find(tok, &arg_pos)) {
value += '`' + args.str_token(name, arg_pos);
continue;
}
// This token is nothing special. Insert it verbatim into the macro body.
value += tok;
}
// Append some newlines so that we don't mess up line counts in error messages.
while (newline_count-- > 0)
return_char('\n');
if (strchr("abcdefghijklmnopqrstuvwxyz_ABCDEFGHIJKLMNOPQRSTUVWXYZ$0123456789", name[0])) {
// printf("define: >>%s<< -> >>%s<<\n", name.c_str(), value.c_str());
defines_map.add(name, value, (state == 2) ? &args : nullptr);
global_defines_cache.add(name, value, (state == 2) ? &args : nullptr);
} else {
log_file_error(filename, 0, "Invalid name for macro definition: >>%s<<.\n", name.c_str());
}
}
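// Preprocessor entry point: run over the stream `f` and return the expanded
// source as one string. An illustrative call (a sketch, not from the original
// source; the file name and include directory are assumptions):
//
//     define_map_t pre_defines, cache;
//     std::ifstream in("top.v");
//     std::string src = frontend_verilog_preproc(in, "top.v", pre_defines,
//                                                cache, {"./include"});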
std::string
frontend_verilog_preproc(std::istream &f,
std::string filename,
const define_map_t &pre_defines,
define_map_t &global_defines_cache,
const std::list<std::string> &include_dirs)
{
define_map_t defines;
defines.merge(pre_defines);
defines.merge(global_defines_cache);
std::vector<std::string> filename_stack;
int ifdef_fail_level = 0;
bool in_elseif = false;
output_code.clear();
input_buffer.clear();
input_buffer_charp = 0;
input_file(f, filename);
while (!input_buffer.empty())
{
std::string tok = next_token();
// printf("token: >>%s<<\n", tok != "\n" ? tok.c_str() : "NEWLINE");
if (tok == "`endif") {
if (ifdef_fail_level > 0)
ifdef_fail_level--;
if (ifdef_fail_level == 0)
in_elseif = false;
continue;
}
if (tok == "`else") {
if (ifdef_fail_level == 0)
ifdef_fail_level = 1;
else if (ifdef_fail_level == 1 && !in_elseif)
ifdef_fail_level = 0;
continue;
}
if (tok == "`elsif") {
skip_spaces();
std::string name = next_token(true);
if (ifdef_fail_level == 0)
ifdef_fail_level = 1, in_elseif = true;
else if (ifdef_fail_level == 1 && defines.find(name))
ifdef_fail_level = 0, in_elseif = true;
continue;
}
if (tok == "`ifdef") {
skip_spaces();
std::string name = next_token(true);
if (ifdef_fail_level > 0 || !defines.find(name))
ifdef_fail_level++;
continue;
}
if (tok == "`ifndef") {
skip_spaces();
std::string name = next_token(true);
if (ifdef_fail_level > 0 || defines.find(name))
ifdef_fail_level++;
continue;
}
if (ifdef_fail_level > 0) {
if (tok == "\n")
output_code.push_back(tok);
continue;
}
if (tok == "`include") {
skip_spaces();
std::string fn = next_token(true);
while (try_expand_macro(defines, fn)) {
fn = next_token();
}
while (1) {
size_t pos = fn.find('"');
if (pos == std::string::npos)
break;
if (pos == 0)
fn = fn.substr(1);
else
fn = fn.substr(0, pos) + fn.substr(pos+1);
}
std::ifstream ff;
ff.clear();
std::string fixed_fn = fn;
ff.open(fixed_fn.c_str());
bool filename_path_sep_found;
bool fn_relative;
#ifdef _WIN32
// Both forward and backslash are acceptable separators on Windows.
filename_path_sep_found = (filename.find_first_of("/\\") != std::string::npos);
// Easier just to invert the check for an absolute path (e.g. C:\ or C:/)
fn_relative = !(fn.size() >= 3 && fn[1] == ':' && (fn[2] == '/' || fn[2] == '\\'));
#else
filename_path_sep_found = (filename.find('/') != std::string::npos);
fn_relative = (fn[0] != '/');
#endif
if (ff.fail() && fn.size() > 0 && fn_relative && filename_path_sep_found) {
// if the include file was not found, it is not given with an absolute path, and the
// currently read file is given with a path, then try again relative to its directory
ff.clear();
#ifdef _WIN32
fixed_fn = filename.substr(0, filename.find_last_of("/\\")+1) + fn;
#else
fixed_fn = filename.substr(0, filename.rfind('/')+1) + fn;
#endif
ff.open(fixed_fn);
}
if (ff.fail() && fn.size() > 0 && fn_relative) {
// if the include file was not found and it is not given with an absolute path, then
// search it in the include path
for (auto incdir : include_dirs) {
ff.clear();
fixed_fn = incdir + '/' + fn;
ff.open(fixed_fn);
if (!ff.fail()) break;
}
}
if (ff.fail()) {
output_code.push_back("`file_notfound " + fn);
} else {
input_file(ff, fixed_fn);
yosys_input_files.insert(fixed_fn);
}
continue;
}
if (tok == "`file_push") {
skip_spaces();
std::string fn = next_token(true);
if (!fn.empty() && fn.front() == '"' && fn.back() == '"')
fn = fn.substr(1, fn.size()-2);
output_code.push_back(tok + " \"" + fn + "\"");
filename_stack.push_back(filename);
filename = fn;
continue;
}
if (tok == "`file_pop") {
output_code.push_back(tok);
filename = filename_stack.back();
filename_stack.pop_back();
continue;
}
if (tok == "`define") {
read_define(filename, defines, global_defines_cache);
continue;
}
if (tok == "`undef") {
std::string name;
skip_spaces();
name = next_token(true);
// printf("undef: >>%s<<\n", name.c_str());
defines.erase(name);
global_defines_cache.erase(name);
continue;
}
if (tok == "`timescale") {
skip_spaces();
while (!tok.empty() && tok != "\n")
tok = next_token(true);
if (tok == "\n")
return_char('\n');
continue;
}
if (tok == "`resetall") {
defines.clear();
global_defines_cache.clear();
continue;
}
if (try_expand_macro(defines, tok))
continue;
output_code.push_back(tok);
}
std::string output;
for (auto &str : output_code)
output += str;
output_code.clear();
input_buffer.clear();
input_buffer_charp = 0;
return output;
}
YOSYS_NAMESPACE_END
|
C++
|
ISC
|
Afromaniac/yosys/frontends/verilog/preproc.cc
|
2221b8e9-931e-450f-bfcd-6a8bd1120aa7
|
[{"tag": "NAME", "value": "Clifford Wolf", "start": 70, "end": 83, "context": "s Open SYnthesis Suite\n *\n * Copyright (C) 2012 Clifford Wolf <clifford@clifford.at>\n *\n * Permission to use, "}, {"tag": "EMAIL", "value": "clifford@clifford.at", "start": 85, "end": 105, "context": "s Suite\n *\n * Copyright (C) 2012 Clifford Wolf <clifford@clifford.at>\n *\n * Permission to use, copy, modify, and/or d"}]
|
[{"tag": "NAME", "value": "Clifford Wolf", "start": 70, "end": 83, "context": "s Open SYnthesis Suite\n *\n * Copyright (C) 2012 Clifford Wolf <clifford@clifford.at>\n *\n * Permission to use, "}, {"tag": "EMAIL", "value": "clifford@clifford.at", "start": 85, "end": 105, "context": "s Suite\n *\n * Copyright (C) 2012 Clifford Wolf <clifford@clifford.at>\n *\n * Permission to use, copy, modify, and/or d"}]
|
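# Sinatra-style routes for assignments nested under courses. The create and
# update actions expect the form to post an `assignment` params hash; update
# optionally takes a `student` collection of { id, grade } pairs used for
# per-student grading below.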
class AssignmentController < ApplicationController
get '/courses/:id/assignments' do
@course = Course.find_by(id: params[:id])
redirect to "/courses/#{@course.id}"
end
get '/courses/:id/assignments/new' do
@course = Course.find_by(id: params[:id])
erb :'assignments/new'
end
get '/courses/:course_id/assignments/:id' do
@course = Course.find_by(id: params[:course_id])
@assignment = Assignment.find_by(id: params[:id])
erb :'assignments/show'
end
get '/courses/:course_id/assignments/:id/edit' do
@course = Course.find_by(id: params[:course_id])
@assignment = Assignment.find_by(id: params[:id])
erb :'assignments/edit'
end
post '/courses/:id/assignments' do
@course = Course.find_by(id: params[:assignment][:course_id])
if !params[:assignment][:name].empty?
@assignment = Assignment.create(params[:assignment])
@assignment.course = @course
@assignment.save # persist the course association set above
redirect to "/courses/#{@course.id}"
else
flash[:message] = "ASSIGNMENT NOT CREATED. Name is required."
redirect to "/courses/#{@course.id}"
end
end
patch '/courses/:course_id/assignments/:id' do
@course = Course.find_by(id: params[:course_id])
@assignment = Assignment.find_by(id: params[:id])
if @assignment.update(params[:assignment])
if params[:student] && !params[:student].empty?
params[:student].each do |s|
student = Student.find_by(id: s[:id])
student.student_assignments.joins(:assignment).where("assignments.id = ?", @assignment.id).update(grade: s[:grade])
end
end
flash[:message] = "Assignment successfully updated!"
redirect to "/courses/#{@course.id}"
else
flash[:message] = "Assignment must have a name."
redirect to "/courses/#{@course.id}/assignments/#{@assignment.id}"
end
end
delete '/courses/:course_id/assignments/:id' do
@course = Course.find_by(id: params[:course_id])
@assignment = Assignment.find_by(id: params[:id])
if @course.teacher_id == session[:user_id]
@assignment.destroy
flash[:message] = "Assignment successfully removed."
redirect "/courses/#{@course.id}"
else
flash[:message] = "You do not have access to this function."
redirect '/courses'
end
end
end
|
Ruby
|
MIT
|
hanreitz/sinatra-project-gradebook/app/controllers/assignment_controller.rb
|
fd3b305b-5b70-4d15-be1a-3f1b1de43442
|
[]
|
[]
|
# frozen_string_literal: true
# Copyright 2020 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# Auto-generated by gapic-generator-ruby. DO NOT EDIT!
require "gapic/operation"
require "google/longrunning/operations_pb"
module Google
module Cloud
module Dialogflow
module V2
module Intents
# Service that implements Longrunning Operations API.
class Operations
# @private
attr_reader :operations_stub
##
# Configuration for the Intents Operations API.
#
# @yield [config] Configure the Operations client.
# @yieldparam config [Operations::Configuration]
#
# @return [Operations::Configuration]
#
def self.configure
@configure ||= Operations::Configuration.new
yield @configure if block_given?
@configure
end
##
# Configure the Intents Operations instance.
#
# The configuration is set to the derived mode, meaning that values can be changed,
# but structural changes (adding new fields, etc.) are not allowed. Structural changes
# should be made on {Operations.configure}.
#
# @yield [config] Configure the Operations client.
# @yieldparam config [Operations::Configuration]
#
# @return [Operations::Configuration]
#
def configure
yield @config if block_given?
@config
end
##
# Create a new Operations client object.
#
# @yield [config] Configure the Client client.
# @yieldparam config [Operations::Configuration]
#
def initialize
# These require statements are intentionally placed here to initialize
# the gRPC module only when it's required.
# See https://github.com/googleapis/toolkit/issues/446
require "gapic/grpc"
require "google/longrunning/operations_services_pb"
# Create the configuration object
@config = Configuration.new Operations.configure
# Yield the configuration if needed
yield @config if block_given?
# Create credentials
credentials = @config.credentials
credentials ||= Credentials.default scope: @config.scope
if credentials.is_a?(String) || credentials.is_a?(Hash)
credentials = Credentials.new credentials, scope: @config.scope
end
@quota_project_id = @config.quota_project
@quota_project_id ||= credentials.quota_project_id if credentials.respond_to? :quota_project_id
@operations_stub = ::Gapic::ServiceStub.new(
::Google::Longrunning::Operations::Stub,
credentials: credentials,
endpoint: @config.endpoint,
channel_args: @config.channel_args,
interceptors: @config.interceptors
)
end
# Service calls
##
# Lists operations that match the specified filter in the request. If the
# server doesn't support this method, it returns `UNIMPLEMENTED`.
#
# NOTE: the `name` binding allows API services to override the binding
# to use different resource name schemes, such as `users/*/operations`. To
# override the binding, API services can add a binding such as
# `"/v1/{name=users/*}/operations"` to their service configuration.
# For backwards compatibility, the default name includes the operations
# collection id, however overriding users must ensure the name binding
# is the parent resource, without the operations collection id.
#
# @overload list_operations(request, options = nil)
# Pass arguments to `list_operations` via a request object, either of type
# {::Google::Longrunning::ListOperationsRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::ListOperationsRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload list_operations(name: nil, filter: nil, page_size: nil, page_token: nil)
# Pass arguments to `list_operations` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation's parent resource.
# @param filter [::String]
# The standard list filter.
# @param page_size [::Integer]
# The standard list page size.
# @param page_token [::String]
# The standard list page token.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::PagedEnumerable<::Gapic::Operation>]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::PagedEnumerable<::Gapic::Operation>]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
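# @example Basic usage (an illustrative sketch; the parent resource name is
#   an assumption, not taken from this file):
#   client = ::Google::Cloud::Dialogflow::V2::Intents::Operations.new
#   response = client.list_operations name: "projects/my-project/operations"
#   response.each { |operation| puts operation.name }
#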
def list_operations request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::ListOperationsRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.list_operations.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Dialogflow::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"name" => request.name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.list_operations.timeout,
metadata: metadata,
retry_policy: @config.rpcs.list_operations.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :list_operations, request, options: options do |response, operation|
wrap_lro_operation = ->(op_response) { ::Gapic::Operation.new op_response, @operations_client }
response = ::Gapic::PagedEnumerable.new @operations_stub, :list_operations, request, response, operation, options, format_resource: wrap_lro_operation
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Gets the latest state of a long-running operation. Clients can use this
# method to poll the operation result at intervals as recommended by the API
# service.
#
# @overload get_operation(request, options = nil)
# Pass arguments to `get_operation` via a request object, either of type
# {::Google::Longrunning::GetOperationRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::GetOperationRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload get_operation(name: nil)
# Pass arguments to `get_operation` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation resource.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
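# @example Polling one operation (an illustrative sketch, given a client as
#   above; the operation name is an assumption):
#   operation = client.get_operation name: "projects/my-project/operations/12345"
#   puts "finished" if operation.done?
#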
def get_operation request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::GetOperationRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.get_operation.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Dialogflow::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"name" => request.name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.get_operation.timeout,
metadata: metadata,
retry_policy: @config.rpcs.get_operation.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :get_operation, request, options: options do |response, operation|
response = ::Gapic::Operation.new response, @operations_client, options: options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Deletes a long-running operation. This method indicates that the client is
# no longer interested in the operation result. It does not cancel the
# operation. If the server doesn't support this method, it returns
# `google.rpc.Code.UNIMPLEMENTED`.
#
# @overload delete_operation(request, options = nil)
# Pass arguments to `delete_operation` via a request object, either of type
# {::Google::Longrunning::DeleteOperationRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::DeleteOperationRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload delete_operation(name: nil)
# Pass arguments to `delete_operation` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation resource to be deleted.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
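# @example Dropping interest in a result (illustrative sketch; the operation
#   name is an assumption):
#   client.delete_operation name: "projects/my-project/operations/12345"
#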
def delete_operation request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::DeleteOperationRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.delete_operation.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Dialogflow::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"name" => request.name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.delete_operation.timeout,
metadata: metadata,
retry_policy: @config.rpcs.delete_operation.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :delete_operation, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Starts asynchronous cancellation on a long-running operation. The server
# makes a best effort to cancel the operation, but success is not
# guaranteed. If the server doesn't support this method, it returns
# `google.rpc.Code.UNIMPLEMENTED`. Clients can use
# Operations.GetOperation or
# other methods to check whether the cancellation succeeded or whether the
# operation completed despite cancellation. On successful cancellation,
# the operation is not deleted; instead, it becomes an operation with
# an {::Google::Longrunning::Operation#error Operation.error} value with a {::Google::Rpc::Status#code google.rpc.Status.code} of 1,
# corresponding to `Code.CANCELLED`.
#
# @overload cancel_operation(request, options = nil)
# Pass arguments to `cancel_operation` via a request object, either of type
# {::Google::Longrunning::CancelOperationRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::CancelOperationRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload cancel_operation(name: nil)
# Pass arguments to `cancel_operation` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation resource to be cancelled.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Google::Protobuf::Empty]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Google::Protobuf::Empty]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
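# @example Requesting best-effort cancellation (illustrative sketch; the
#   operation name is an assumption):
#   client.cancel_operation name: "projects/my-project/operations/12345"
#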
def cancel_operation request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::CancelOperationRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.cancel_operation.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Dialogflow::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
header_params = {
"name" => request.name
}
request_params_header = header_params.map { |k, v| "#{k}=#{v}" }.join("&")
metadata[:"x-goog-request-params"] ||= request_params_header
options.apply_defaults timeout: @config.rpcs.cancel_operation.timeout,
metadata: metadata,
retry_policy: @config.rpcs.cancel_operation.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :cancel_operation, request, options: options do |response, operation|
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Waits for the specified long-running operation until it is done or reaches
# at most a specified timeout, returning the latest state. If the operation
# is already done, the latest state is immediately returned. If the timeout
# specified is greater than the default HTTP/RPC timeout, the HTTP/RPC
# timeout is used. If the server does not support this method, it returns
# `google.rpc.Code.UNIMPLEMENTED`.
# Note that this method is on a best-effort basis. It may return the latest
# state before the specified timeout (including immediately), meaning even an
# immediate response is no guarantee that the operation is done.
#
# @overload wait_operation(request, options = nil)
# Pass arguments to `wait_operation` via a request object, either of type
# {::Google::Longrunning::WaitOperationRequest} or an equivalent Hash.
#
# @param request [::Google::Longrunning::WaitOperationRequest, ::Hash]
# A request object representing the call parameters. Required. To specify no
# parameters, or to keep all the default parameter values, pass an empty Hash.
# @param options [::Gapic::CallOptions, ::Hash]
# Overrides the default settings for this call, e.g. timeout, retries, etc. Optional.
#
# @overload wait_operation(name: nil, timeout: nil)
# Pass arguments to `wait_operation` via keyword arguments. Note that at
# least one keyword argument is required. To specify no parameters, or to keep all
# the default parameter values, pass an empty Hash as a request object (see above).
#
# @param name [::String]
# The name of the operation resource to wait on.
# @param timeout [::Google::Protobuf::Duration, ::Hash]
# The maximum duration to wait before timing out. If left blank, the wait
# will be at most the time permitted by the underlying HTTP/RPC protocol.
# If RPC context deadline is also specified, the shorter one will be used.
#
# @yield [response, operation] Access the result along with the RPC operation
# @yieldparam response [::Gapic::Operation]
# @yieldparam operation [::GRPC::ActiveCall::Operation]
#
# @return [::Gapic::Operation]
#
# @raise [::Google::Cloud::Error] if the RPC is aborted.
#
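# @example Waiting with a bounded timeout (illustrative sketch; 30 seconds is
#   an arbitrary choice and the operation name is an assumption):
#   operation = client.wait_operation name: "projects/my-project/operations/12345",
#                                     timeout: { seconds: 30 }
#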
def wait_operation request, options = nil
raise ::ArgumentError, "request must be provided" if request.nil?
request = ::Gapic::Protobuf.coerce request, to: ::Google::Longrunning::WaitOperationRequest
# Converts hash and nil to an options object
options = ::Gapic::CallOptions.new(**options.to_h) if options.respond_to? :to_h
# Customize the options with defaults
metadata = @config.rpcs.wait_operation.metadata.to_h
# Set x-goog-api-client and x-goog-user-project headers
metadata[:"x-goog-api-client"] ||= ::Gapic::Headers.x_goog_api_client \
lib_name: @config.lib_name, lib_version: @config.lib_version,
gapic_version: ::Google::Cloud::Dialogflow::V2::VERSION
metadata[:"x-goog-user-project"] = @quota_project_id if @quota_project_id
options.apply_defaults timeout: @config.rpcs.wait_operation.timeout,
metadata: metadata,
retry_policy: @config.rpcs.wait_operation.retry_policy
options.apply_defaults metadata: @config.metadata,
retry_policy: @config.retry_policy
@operations_stub.call_rpc :wait_operation, request, options: options do |response, operation|
response = ::Gapic::Operation.new response, @operations_client, options: options
yield response, operation if block_given?
return response
end
rescue ::GRPC::BadStatus => e
raise ::Google::Cloud::Error.from_error(e)
end
##
# Configuration class for the Operations API.
#
# This class represents the configuration for Operations,
# providing control over timeouts, retry behavior, logging, transport
# parameters, and other low-level controls. Certain parameters can also be
# applied individually to specific RPCs. See
# {::Google::Longrunning::Operations::Client::Configuration::Rpcs}
# for a list of RPCs that can be configured independently.
#
# Configuration can be applied globally to all clients, or to a single client
# on construction.
#
# # Examples
#
# To modify the global config, setting the timeout for list_operations
# to 20 seconds, and all remaining timeouts to 10 seconds:
#
# ::Google::Longrunning::Operations::Client.configure do |config|
# config.timeout = 10.0
# config.rpcs.list_operations.timeout = 20.0
# end
#
# To apply the above configuration only to a new client:
#
# client = ::Google::Longrunning::Operations::Client.new do |config|
# config.timeout = 10.0
# config.rpcs.list_operations.timeout = 20.0
# end
#
# @!attribute [rw] endpoint
# The hostname or hostname:port of the service endpoint.
# Defaults to `"dialogflow.googleapis.com"`.
# @return [::String]
# @!attribute [rw] credentials
# Credentials to send with calls. You may provide any of the following types:
# * (`String`) The path to a service account key file in JSON format
# * (`Hash`) A service account key as a Hash
# * (`Google::Auth::Credentials`) A googleauth credentials object
# (see the [googleauth docs](https://googleapis.dev/ruby/googleauth/latest/index.html))
# * (`Signet::OAuth2::Client`) A signet oauth2 client object
# (see the [signet docs](https://googleapis.dev/ruby/signet/latest/Signet/OAuth2/Client.html))
# * (`GRPC::Core::Channel`) a gRPC channel with included credentials
# * (`GRPC::Core::ChannelCredentials`) a gRPC credentials object
# * (`nil`) indicating no credentials
# @return [::Object]
# @!attribute [rw] scope
# The OAuth scopes
# @return [::Array<::String>]
# @!attribute [rw] lib_name
# The library name as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] lib_version
# The library version as recorded in instrumentation and logging
# @return [::String]
# @!attribute [rw] channel_args
# Extra parameters passed to the gRPC channel. Note: this is ignored if a
# `GRPC::Core::Channel` object is provided as the credential.
# @return [::Hash]
# @!attribute [rw] interceptors
# An array of interceptors that are run before calls are executed.
# @return [::Array<::GRPC::ClientInterceptor>]
# @!attribute [rw] timeout
# The call timeout in seconds.
# @return [::Numeric]
# @!attribute [rw] metadata
# Additional gRPC headers to be sent with the call.
# @return [::Hash{::Symbol=>::String}]
# @!attribute [rw] retry_policy
# The retry policy. The value is a hash with the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
# @return [::Hash]
# @!attribute [rw] quota_project
# A separate project against which to charge quota.
# @return [::String]
#
class Configuration
extend ::Gapic::Config
config_attr :endpoint, "dialogflow.googleapis.com", ::String
config_attr :credentials, nil do |value|
allowed = [::String, ::Hash, ::Proc, ::Symbol, ::Google::Auth::Credentials, ::Signet::OAuth2::Client, nil]
allowed += [::GRPC::Core::Channel, ::GRPC::Core::ChannelCredentials] if defined? ::GRPC
allowed.any? { |klass| klass === value }
end
config_attr :scope, nil, ::String, ::Array, nil
config_attr :lib_name, nil, ::String, nil
config_attr :lib_version, nil, ::String, nil
config_attr(:channel_args, { "grpc.service_config_disable_resolution"=>1 }, ::Hash, nil)
config_attr :interceptors, nil, ::Array, nil
config_attr :timeout, nil, ::Numeric, nil
config_attr :metadata, nil, ::Hash, nil
config_attr :retry_policy, nil, ::Hash, ::Proc, nil
config_attr :quota_project, nil, ::String, nil
# @private
def initialize parent_config = nil
@parent_config = parent_config unless parent_config.nil?
yield self if block_given?
end
##
# Configurations for individual RPCs
# @return [Rpcs]
#
def rpcs
@rpcs ||= begin
parent_rpcs = nil
parent_rpcs = @parent_config.rpcs if defined?(@parent_config) && @parent_config&.respond_to?(:rpcs)
Rpcs.new parent_rpcs
end
end
##
# Configuration RPC class for the Operations API.
#
# Includes fields providing the configuration for each RPC in this service.
# Each configuration object is of type `Gapic::Config::Method` and includes
# the following configuration fields:
#
# * `timeout` (*type:* `Numeric`) - The call timeout in seconds
# * `metadata` (*type:* `Hash{Symbol=>String}`) - Additional gRPC headers
# * `retry_policy` (*type:* `Hash`) - The retry policy. The policy fields
# include the following keys:
# * `:initial_delay` (*type:* `Numeric`) - The initial delay in seconds.
# * `:max_delay` (*type:* `Numeric`) - The max delay in seconds.
# * `:multiplier` (*type:* `Numeric`) - The incremental backoff multiplier.
# * `:retry_codes` (*type:* `Array<String>`) - The error codes that should
# trigger a retry.
#
class Rpcs
##
# RPC-specific configuration for `list_operations`
# @return [::Gapic::Config::Method]
#
attr_reader :list_operations
##
# RPC-specific configuration for `get_operation`
# @return [::Gapic::Config::Method]
#
attr_reader :get_operation
##
# RPC-specific configuration for `delete_operation`
# @return [::Gapic::Config::Method]
#
attr_reader :delete_operation
##
# RPC-specific configuration for `cancel_operation`
# @return [::Gapic::Config::Method]
#
attr_reader :cancel_operation
##
# RPC-specific configuration for `wait_operation`
# @return [::Gapic::Config::Method]
#
attr_reader :wait_operation
# @private
def initialize parent_rpcs = nil
list_operations_config = parent_rpcs&.list_operations if parent_rpcs&.respond_to? :list_operations
@list_operations = ::Gapic::Config::Method.new list_operations_config
get_operation_config = parent_rpcs&.get_operation if parent_rpcs&.respond_to? :get_operation
@get_operation = ::Gapic::Config::Method.new get_operation_config
delete_operation_config = parent_rpcs&.delete_operation if parent_rpcs&.respond_to? :delete_operation
@delete_operation = ::Gapic::Config::Method.new delete_operation_config
cancel_operation_config = parent_rpcs&.cancel_operation if parent_rpcs&.respond_to? :cancel_operation
@cancel_operation = ::Gapic::Config::Method.new cancel_operation_config
wait_operation_config = parent_rpcs&.wait_operation if parent_rpcs&.respond_to? :wait_operation
@wait_operation = ::Gapic::Config::Method.new wait_operation_config
yield self if block_given?
end
end
end
end
end
end
end
end
end
|
Ruby
|
Apache-2.0
|
Neenu1995/google-cloud-ruby/google-cloud-dialogflow-v2/lib/google/cloud/dialogflow/v2/intents/operations.rb
|
dbd5d3b2-4d2d-4ff5-a57a-b2d27e18012f
|
[]
|
[]
|
import * as angularDevkitSchematics from '@angular-devkit/schematics';
import {
SchematicTestRunner,
UnitTestTree,
} from '@angular-devkit/schematics/testing';
import * as path from 'path';
import { readJsonInTree } from '../../src/utils';
const { Tree } = angularDevkitSchematics;
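// Re-export the real module through jest.mock so that jest.spyOn can
// intercept `externalSchematic` in the first test below.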
jest.mock(
'@angular-devkit/schematics',
() =>
({
__esModule: true,
...jest.requireActual('@angular-devkit/schematics'),
// For some reason TS (BUT only via ts-jest, not in VSCode) has an issue with this spread usage of requireActual(), so suppressing with any
// eslint-disable-next-line @typescript-eslint/no-explicit-any
} as any),
);
const schematicRunner = new SchematicTestRunner(
'@angular-eslint/schematics',
path.join(__dirname, '../../src/collection.json'),
);
describe('library', () => {
let appTree: UnitTestTree;
beforeEach(() => {
appTree = new UnitTestTree(Tree.empty());
appTree.create('package.json', JSON.stringify({}));
appTree.create(
'angular.json',
JSON.stringify({
$schema: './node_modules/@angular/cli/lib/config/schema.json',
version: 1,
newProjectRoot: 'projects',
projects: {},
}),
);
});
it('should pass all the given options directly to the @schematics/angular schematic', async () => {
const spy = jest.spyOn(angularDevkitSchematics, 'externalSchematic');
const options = {
name: 'bar',
};
expect(spy).not.toHaveBeenCalled();
await schematicRunner
.runSchematicAsync('library', options, appTree)
.toPromise();
expect(spy).toHaveBeenCalledTimes(1);
expect(spy).toHaveBeenCalledWith(
'@schematics/angular',
'library',
expect.objectContaining(options),
);
});
it('should change the lint target to use the @angular-eslint builder', async () => {
const tree = await schematicRunner
.runSchematicAsync('application', { name: 'bar' }, appTree)
.toPromise();
expect(readJsonInTree(tree, 'angular.json').projects.bar.architect.lint)
.toMatchInlineSnapshot(`
Object {
"builder": "@angular-eslint/builder:lint",
"options": Object {
"lintFilePatterns": Array [
"projects/bar/**/*.ts",
"projects/bar/**/*.html",
],
},
}
`);
});
it('should add the ESLint config for the project and delete the TSLint config', async () => {
const tree = await schematicRunner
.runSchematicAsync(
'application',
{ name: 'bar', prefix: 'something-else-custom' },
appTree,
)
.toPromise();
expect(tree.exists('projects/bar/tslint.json')).toBe(false);
expect(tree.read('projects/bar/.eslintrc.json')?.toString())
.toMatchInlineSnapshot(`
"{
\\"extends\\": \\"../../.eslintrc.json\\",
\\"ignorePatterns\\": [
\\"!**/*\\"
],
\\"overrides\\": [
{
\\"files\\": [
\\"*.ts\\"
],
\\"parserOptions\\": {
\\"project\\": [
\\"projects/bar/tsconfig.app.json\\",
\\"projects/bar/tsconfig.spec.json\\",
\\"projects/bar/e2e/tsconfig.json\\"
],
\\"createDefaultProgram\\": true
},
\\"rules\\": {
\\"@angular-eslint/directive-selector\\": [
\\"error\\",
{
\\"type\\": \\"attribute\\",
\\"prefix\\": \\"something-else-custom\\",
\\"style\\": \\"camelCase\\"
}
],
\\"@angular-eslint/component-selector\\": [
\\"error\\",
{
\\"type\\": \\"element\\",
\\"prefix\\": \\"something-else-custom\\",
\\"style\\": \\"kebab-case\\"
}
]
}
},
{
\\"files\\": [
\\"*.html\\"
],
\\"rules\\": {}
}
]
}
"
`);
});
});
|
TypeScript
|
MIT
|
MitchellCash/angular-eslint/packages/schematics/tests/library/index.test.ts
|
da1f358c-1aec-4d9f-ab7f-9c27f12266dd
|
[]
|
[]
|
<?php
#############################################
# Bitrix Site Manager Forum #
# Copyright (c) 2002-2009 Bitrix #
# http://www.bitrixsoft.com #
# mailto:admin@bitrixsoft.com #
#############################################
IncludeModuleLangFile(__FILE__);
class CAllVoteAnswer
{
public static function err_mess()
{
$module_id = "vote";
return "<br>Module: ".$module_id."<br>Class: CAllVoteAnswer<br>File: ".__FILE__;
}
public static function CheckFields($ACTION, &$arFields, $ID = 0)
{
global $APPLICATION;
$aMsg = array();
$ID = intval($ID);
$ACTION = ($ID > 0 && $ACTION == "UPDATE" ? "UPDATE" : "ADD");
unset($arFields["ID"]);
if (is_set($arFields, "QUESTION_ID") || $ACTION == "ADD"):
$arFields["QUESTION_ID"] = intval($arFields["QUESTION_ID"]);
if ($arFields["QUESTION_ID"] <= 0):
$aMsg[] = array(
"id" => "QUESTION_ID",
"text" => GetMessage("VOTE_FORGOT_QUESTION_ID"));
endif;
endif;
if (is_set($arFields, "MESSAGE") || $ACTION == "ADD"):
//$arFields["MESSAGE"] = trim($arFields["MESSAGE"]);
$arFields["MESSAGE"] = ($arFields["MESSAGE"] != ' ') ? trim($arFields["MESSAGE"]):' ';
if ($arFields["MESSAGE"] == ''):
$aMsg[] = array(
"id" => "MESSAGE",
"text" => GetMessage("VOTE_FORGOT_MESSAGE"));
endif;
endif;
if (array_key_exists("IMAGE_ID", $arFields))
{
if (!is_array($arFields["IMAGE_ID"]))
{
$arFields["IMAGE_ID"] = intval($arFields["IMAGE_ID"]);
}
else if ($arFields["IMAGE_ID"]["name"] == '' && $arFields["IMAGE_ID"]["del"] == '')
{
unset($arFields["IMAGE_ID"]);
}
else if ($str = CFile::CheckImageFile($arFields["IMAGE_ID"]))
{
$aMsg[] = array(
"id" => "IMAGE_ID",
"text" => "Answer: ".$str);
}
else
{
$arFields["IMAGE_ID"]["MODULE_ID"] = "vote";
}
}
if (is_set($arFields, "ACTIVE") || $ACTION == "ADD") $arFields["ACTIVE"] = ($arFields["ACTIVE"] == "N" ? "N" : "Y");
unset($arFields["TIMESTAMP_X"]);
if (is_set($arFields, "C_SORT") || $ACTION == "ADD") $arFields["C_SORT"] = (intval($arFields["C_SORT"]) > 0 ? intval($arFields["C_SORT"]) : 100);
if (is_set($arFields, "COUNTER") || $ACTION == "ADD") $arFields["COUNTER"] = intval($arFields["COUNTER"]);
if (is_set($arFields, "FIELD_TYPE") || $ACTION == "ADD") $arFields["FIELD_TYPE"] = intval($arFields["FIELD_TYPE"]);
if (is_set($arFields, "FIELD_WIDTH") || $ACTION == "ADD") $arFields["FIELD_WIDTH"] = intval($arFields["FIELD_WIDTH"]);
if (is_set($arFields, "FIELD_HEIGHT") || $ACTION == "ADD") $arFields["FIELD_HEIGHT"] = intval($arFields["FIELD_HEIGHT"]);
if (is_set($arFields, "FIELD_PARAM") || $ACTION == "ADD") $arFields["FIELD_PARAM"] = mb_substr(trim($arFields["FIELD_PARAM"]), 0, 255)?: "";
if (is_set($arFields, "COLOR") || $ACTION == "ADD") $arFields["COLOR"] = mb_substr(trim($arFields["COLOR"]), 0, 7)?: "";
if(!empty($aMsg))
{
$e = new CAdminException(array_reverse($aMsg));
$APPLICATION->ThrowException($e);
return false;
}
return true;
}
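/**
 * Create a new answer row. An illustrative call (a sketch; the field values
 * are assumptions, not taken from the original source):
 *
 *   $id = CVoteAnswer::Add(array(
 *       "QUESTION_ID" => 42,
 *       "MESSAGE"     => "Yes",
 *       "ACTIVE"      => "Y",
 *       "C_SORT"      => 100,
 *   ));
 */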
public static function Add($arFields)
{
global $DB;
if (!CVoteAnswer::CheckFields("ADD", $arFields))
return false;
/***************** Event onBeforeVoteAnswerAdd *********************/
foreach (GetModuleEvents("vote", "onBeforeVoteAnswerAdd", true) as $arEvent)
if (ExecuteModuleEventEx($arEvent, array(&$arFields)) === false)
return false;
/***************** /Event ******************************************/
if (empty($arFields))
return false;
if (
array_key_exists("IMAGE_ID", $arFields) &&
is_array($arFields["IMAGE_ID"])
)
{
$arFields["IMAGE_ID"]["MODULE_ID"] = "vote";
CFile::SaveForDB($arFields, "IMAGE_ID", "vote");
}
if ($DB->type == "ORACLE")
$arFields["ID"] = $DB->NextID("SQ_B_VOTE_ANSWER");
$arInsert = $DB->PrepareInsert("b_vote_answer", $arFields);
$DB->QueryBind("INSERT INTO b_vote_answer (".$arInsert[0].", TIMESTAMP_X) VALUES(".$arInsert[1].", ".$DB->GetNowFunction().")", array("MESSAGE" => $arFields["MESSAGE"]), false);
$ID = intval($DB->type == "ORACLE" ? $arFields["ID"] : $DB->LastID());
/***************** Event onAfterVoteAnswerAdd **********************/
foreach (GetModuleEvents("vote", "onAfterVoteAnswerAdd", true) as $arEvent)
ExecuteModuleEventEx($arEvent, array($ID, $arFields));
/***************** /Event ******************************************/
return $ID;
}
public static function Update($ID, $arFields)
{
global $DB;
$arBinds = array();
$ID = intval($ID);
$err_mess = (self::err_mess())."<br>Function: Update<br>Line: ";
if ($ID <= 0 || !CVoteAnswer::CheckFields("UPDATE", $arFields, $ID))
return false;
/***************** Event onBeforeVoteQuestionUpdate ****************/
foreach (GetModuleEvents("vote", "onBeforeVoteAnswerUpdate", true) as $arEvent)
if (ExecuteModuleEventEx($arEvent, array($ID, &$arFields)) === false)
return false;
/***************** /Event ******************************************/
if (empty($arFields))
return false;
if (
array_key_exists("IMAGE_ID", $arFields) &&
is_array($arFields["IMAGE_ID"])
)
{
$arFields["IMAGE_ID"]["MODULE_ID"] = "vote";
CFile::SaveForDB($arFields, "IMAGE_ID", "vote");
}
$arFields["~TIMESTAMP_X"] = $DB->GetNowFunction();
$strUpdate = $DB->PrepareUpdate("b_vote_answer", $arFields);
if (is_set($arFields, "MESSAGE"))
$arBinds["MESSAGE"] = $arFields["MESSAGE"];
if (!empty($strUpdate)):
$strSql = "UPDATE b_vote_answer SET ".$strUpdate." WHERE ID=".$ID;
$DB->QueryBind($strSql, $arBinds, false, $err_mess);
// $DB->Query($strSql, false, $err_mess);
endif;
/***************** Event onAfterVoteAnswerUpdate *******************/
foreach (GetModuleEvents("vote", "onAfterVoteAnswerUpdate", true) as $arEvent)
ExecuteModuleEventEx($arEvent, array($ID, $arFields));
/***************** /Event ******************************************/
return $ID;
}
public static function Delete($ID, $QUESTION_ID = false, $VOTE_ID = false)
{
global $DB;
$err_mess = (self::err_mess())."<br>Function: Delete<br>Line: ";
/***************** Event onBeforeVoteAnswerDelete ******************/
foreach (GetModuleEvents("vote", "onBeforeVoteAnswerDelete", true) as $arEvent)
{
if (ExecuteModuleEventEx($arEvent, array(&$ID, &$QUESTION_ID, &$VOTE_ID)) === false)
return false;
}
/***************** /Event ******************************************/
$ID = (intval($ID) > 0 ? intval($ID) : false);
$QUESTION_ID = (intval($QUESTION_ID) > 0 ? intval($QUESTION_ID) : false);
$VOTE_ID = (intval($VOTE_ID) > 0 ? intval($VOTE_ID) : false);
if ($ID != false):
$strSqlEventAnswer = "DELETE FROM b_vote_event_answer WHERE ANSWER_ID=".$ID;
$strSqlAnswer = "DELETE FROM b_vote_answer WHERE ID=".$ID;
elseif ($QUESTION_ID != false):
$strSqlEventAnswer = "DELETE FROM b_vote_event_answer WHERE ANSWER_ID IN (
SELECT VA.ID FROM b_vote_answer VA WHERE VA.QUESTION_ID = ".$QUESTION_ID.")";
$strSqlAnswer = "DELETE FROM b_vote_answer WHERE QUESTION_ID = ".$QUESTION_ID;
elseif ($VOTE_ID != false):
$strSqlEventAnswer = "DELETE FROM b_vote_event_answer WHERE ANSWER_ID IN (
SELECT VA.ID
FROM b_vote_answer VA, b_vote_question VQ
WHERE VA.QUESTION_ID = VQ.ID AND VQ.VOTE_ID = ".$VOTE_ID.")";
$strSqlAnswer = "DELETE FROM b_vote_answer WHERE QUESTION_ID IN (
SELECT VQ.ID FROM b_vote_question VQ WHERE VQ.VOTE_ID = ".$VOTE_ID.")";
else:
return false;
endif;
$DB->Query($strSqlEventAnswer, false, $err_mess.__LINE__);
$DB->Query($strSqlAnswer, false, $err_mess.__LINE__);
/***************** Event onAfterVoteAnswerDelete *******************/
foreach (GetModuleEvents("vote", "onAfterVoteAnswerDelete", true) as $arEvent)
ExecuteModuleEventEx($arEvent, array($ID, $QUESTION_ID, $VOTE_ID));
/***************** /Event ******************************************/
return true;
}
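/**
 * Answers for a single question. Per the switch below, $arFilter understands
 * the keys ID, FIELD_TYPE, MESSAGE, FIELD_PARAM and ACTIVE, and $by is one of
 * "s_id", "s_counter" or "s_c_sort" (anything else falls back to C_SORT).
 */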
public static function GetList($QUESTION_ID, $by="s_c_sort", $order="asc", $arFilter=array(), $arAddParams = array())
{
global $DB;
$QUESTION_ID = intval($QUESTION_ID);
$arSqlSearch = Array();
$arFilter = (is_array($arFilter) ? $arFilter : array());
foreach ($arFilter as $key => $val)
{
if(empty($val) || $val === "NOT_REF")
continue;
$key = mb_strtoupper($key);
switch($key)
{
case "ID":
case "FIELD_TYPE":
$match = ($arFilter[$key."_EXACT_MATCH"]=="N" ? "Y" : "N");
$arSqlSearch[] = GetFilterQuery("A.".$key, $val, $match);
break;
case "MESSAGE":
case "FIELD_PARAM":
$match = ($arFilter[$key."_EXACT_MATCH"]=="Y" ? "N" : "Y");
$arSqlSearch[] = GetFilterQuery("A.".$key, $val, $match);
break;
case "ACTIVE":
$arSqlSearch[] = ($val=="Y") ? "A.ACTIVE='Y'" : "A.ACTIVE='N'";
break;
}
}
$order = ($order!="desc" ? "asc" : "desc");
$by = (($by == "s_id" || $by == "s_counter") ? $by : "s_c_sort");
if ($by == "s_id") $strSqlOrder = " ORDER BY A.ID";
elseif ($by == "s_counter") $strSqlOrder = " ORDER BY A.COUNTER";
else $strSqlOrder = " ORDER BY A.C_SORT";
$strSqlOrder .= " ".$order;
$strSqlSearch = GetFilterSqlSearch($arSqlSearch);
$strSqlFrom = "FROM b_vote_answer A WHERE ".$strSqlSearch." and A.QUESTION_ID=".$QUESTION_ID."";
$strSql = "SELECT A.* ".$strSqlFrom.$strSqlOrder;
if ($arAddParams["nTopCount"] > 0)
{
$arAddParams["nTopCount"] = intval($arAddParams["nTopCount"]);
if ($DB->type=="MSSQL")
$strSql = "SELECT TOP ".$arAddParams["nTopCount"]." A.* ".$strSqlFrom.$strSqlOrder;
else if ($DB->type=="ORACLE")
$strSql = "SELECT * FROM(".$strSql.") WHERE ROWNUM<=".$arAddParams["nTopCount"];
else
$strSql = "SELECT A.* ".$strSqlFrom.$strSqlOrder." LIMIT 0,".$arAddParams["nTopCount"];
}
else if (is_set($arAddParams, "bDescPageNumbering"))
{
$db_res = $DB->Query("SELECT COUNT(A.ID) as CNT ".$strSqlFrom, false, "File: ".__FILE__."<br>Line: ".__LINE__);
$iCnt = (($db_res && ($ar_res = $db_res->Fetch())) ? intval($ar_res["CNT"]) : 0 );
$db_res = new CDBResult();
$db_res->NavQuery($strSql, $iCnt, $arAddParams);
return $db_res;
}
return $DB->Query($strSql, false, "File: ".__FILE__."<br>Line: ".__LINE__);
}
public static function GetListEx($arOrder = array("ID" => "ASC"), $arFilter=array())
{
global $DB;
$arSqlSearch = Array();
$strSqlSearch = "";
$arSqlOrder = Array();
$strSqlOrder = "";
$arFilter = (is_array($arFilter) ? $arFilter : array());
foreach ($arFilter as $key => $val)
{
if ($val === "NOT_REF")
continue;
$key_res = VoteGetFilterOperation($key);
$strNegative = $key_res["NEGATIVE"];
$strOperation = $key_res["OPERATION"];
$key = mb_strtoupper($key_res["FIELD"]);
switch($key)
{
case "ID":
case "QUESTION_ID":
$str = ($strNegative=="Y"?"NOT":"")."(VA.".$key." IS NULL OR VA.".$key."<=0)";
if (!empty($val))
{
$str = ($strNegative=="Y"?" VA.".$key." IS NULL OR NOT ":"")."(VA.".$key." ".$strOperation." ".intval($val).")";
if ($strOperation == "IN")
{
$val = array_unique(array_map("intval", (is_array($val) ? $val : explode(",", $val))), SORT_NUMERIC);
if (!empty($val))
{
$str = ($strNegative=="Y"?" NOT ":"")."(VA.".$key." IN (".implode(",", $val)."))";
}
}
}
$arSqlSearch[] = $str;
break;
case "VOTE_ID":
$str = ($strNegative=="Y"?"NOT":"")."(VQ.".$key." IS NULL OR VQ.".$key."<=0)";
if (!empty($val))
{
$str = ($strNegative=="Y"?" VQ.".$key." IS NULL OR NOT ":"")."(VQ.".$key." ".$strOperation." ".intval($val).")";
if ($strOperation == "IN")
{
$val = array_unique(array_map("intval", (is_array($val) ? $val : explode(",", $val))), SORT_NUMERIC);
if (!empty($val))
{
$str = ($strNegative=="Y"?" NOT ":"")."(VQ.".$key." IN (".implode(",", $val)."))";
}
}
}
$arSqlSearch[] = $str;
break;
case "CHANNEL_ID":
$str = ($strNegative=="Y"?"NOT":"")."(V.".$key." IS NULL OR V.".$key."<=0)";
if (!empty($val))
{
$str = ($strNegative=="Y"?" V.".$key." IS NULL OR NOT ":"")."(V.".$key." ".$strOperation." ".intval($val).")";
if ($strOperation == "IN")
{
$val = array_unique(array_map("intval", (is_array($val) ? $val : explode(",", $val))), SORT_NUMERIC);
if (!empty($val))
{
$str = ($strNegative=="Y"?" NOT ":"")."(V.".$key." IN (".implode(",", $val)."))";
}
}
}
$arSqlSearch[] = $str;
break;
case "ACTIVE":
if (empty($val))
$arSqlSearch[] = ($strNegative=="Y"?"NOT":"")."(VA.".$key." IS NULL OR ".($DB->type == "MSSQL" ? "LEN" : "LENGTH")."(VA.".$key.")<=0)";
else
$arSqlSearch[] = ($strNegative=="Y"?" VA.".$key." IS NULL OR NOT ":"")."(VA.".$key." ".$strOperation." '".$DB->ForSql($val)."')";
break;
}
}
if (count($arSqlSearch) > 0)
$strSqlSearch = " AND (".implode(") AND (", $arSqlSearch).") ";
foreach ($arOrder as $by => $order)
{
$by = mb_strtoupper($by);
$order = mb_strtoupper($order);
$by = (in_array($by, array("ACTIVE", "QUESTION_ID", "C_SORT", "COUNTER")) ? $by : "ID");
if ($order!="ASC") $order = "DESC";
if ($by == "ACTIVE") $arSqlOrder[] = " VA.ACTIVE ".$order." ";
elseif ($by == "QUESTION_ID") $arSqlOrder[] = " VA.QUESTION_ID ".$order." ";
elseif ($by == "C_SORT") $arSqlOrder[] = " VA.C_SORT ".$order." ";
elseif ($by == "COUNTER") $arSqlOrder[] = " VA.COUNTER ".$order." ";
else $arSqlOrder[] = " VA.ID ".$order." ";
}
DelDuplicateSort($arSqlOrder);
if (count($arSqlOrder) > 0)
$strSqlOrder = " ORDER BY ".implode(", ", $arSqlOrder);
$strSql = "
SELECT V.CHANNEL_ID, VQ.VOTE_ID, VA.*
FROM b_vote_answer VA
INNER JOIN b_vote_question VQ ON (VA.QUESTION_ID = VQ.ID)
INNER JOIN b_vote V ON (VQ.VOTE_ID = V.ID)
WHERE 1=1 ".$strSqlSearch." ".$strSqlOrder;
return $DB->Query($strSql, false, "File: ".__FILE__."<br>Line: ".__LINE__);
}
public static function GetGroupAnswers($ANSWER_ID)
{
$err_mess = (self::err_mess())."<br>Function: GetGroupAnswers<br>Line: ";
global $DB;
$ANSWER_ID = intval($ANSWER_ID);
$strSql =
"SELECT A.MESSAGE, count(A.ID) as COUNTER ".
"FROM
b_vote_event_answer A,
b_vote_event_question Q,
b_vote_event E
WHERE
A.ANSWER_ID = '$ANSWER_ID'
and Q.ID = A.EVENT_QUESTION_ID
and E.ID = Q.EVENT_ID
and E.VALID = 'Y'
GROUP BY A.MESSAGE
ORDER BY COUNTER desc";
$res = $DB->Query($strSql, false, $err_mess.__LINE__);
return $res;
}
}
|
PHP
|
MIT
|
Evil1991/bitrixdock/www/html/bitrix/modules/vote/classes/general/answer.php
|
0ed5aa61-60b6-45a9-8768-de2932e78be2
|
[{"tag": "EMAIL", "value": "admin@bitrixsoft.com", "start": 163, "end": 183, "context": "ix\t\t\t#\n# http://www.bitrixsoft.com\t\t\t\t\t#\n# mailto:admin@bitrixsoft.com\t\t\t\t#\n############################################"}]
|
[{"tag": "EMAIL", "value": "admin@bitrixsoft.com", "start": 163, "end": 183, "context": "ix\t\t\t#\n# http://www.bitrixsoft.com\t\t\t\t\t#\n# mailto:admin@bitrixsoft.com\t\t\t\t#\n############################################"}]
|
version https://git-lfs.github.com/spec/v1
oid sha256:a4febfd4a34ac7ce0118127c17496cae5d9d7eaa6b94a92b2815d131bfc6a310
size 2579
|
JavaScript
|
MIT
|
Lavish883/Loki-Stream/node_modules/@react-navigation/stack/lib/commonjs/views/GestureHandlerNative.js
|
18e10dbd-cc55-493a-94d0-8adc66933f52
|
[]
|
[]
|
require 'vanagon/platform'
require 'vanagon/project'
require 'vanagon/common'
# These constants are defined for the purpose of the project/generic file merge tests
# to point these directories to test areas under the /tmp directory.
# This allows individual test cases to be specified accurately.
# The actual resources/windows/wix files under vanagon are avoided, as the necessary
# data structures are not available under the test conditions, which causes failures
# in the ERB template translation.
WORK_BASE = "/tmp/vanwintest"
VANAGON_ROOT = "#{WORK_BASE}/generic"
PROJ_ROOT = "#{WORK_BASE}/project"
WORKDIR = "#{WORK_BASE}/workdir"
# Admittedly this might not be the best-placed statement, but my limited RSpec
# knowledge defeated me when it came to using "let" for WIXTESTFILES.
WIXTESTFILES = File.expand_path("./spec/fixtures/wix/resources/windows/wix")
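# NOTE: the tests below copy fixture files from WIXTESTFILES into the per-test
# PROJ_ROOT/VANAGON_ROOT trees to exercise project-vs-generic file precedence.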
describe "Vanagon::Platform::Windows" do
platforms = [
{
:name => "windows-2012r2-x64",
:os_name => "windows",
:os_version => "2012r2",
:architecture => "x64",
:output_dir => "windows/x64",
:output_dir_with_target => "windows/thing/x64",
:target_user => "Administrator",
:projname => "test-proj",
:block => %Q[ platform "windows-2012r2-x64" do |plat| plat.servicetype 'windows' end ]
},
]
platforms.each do |plat|
context "on #{plat[:name]}" do
let(:platform) { plat }
let(:cur_plat) { Vanagon::Platform::DSL.new(plat[:name]) }
let(:project_block) {
<<-HERE
project 'test-fixture' do |proj|
proj.setting(:company_name, "Test Name")
proj.setting(:company_id, "TestID")
proj.setting(:product_id, "TestProduct")
proj.setting(:base_dir, "ProgramFilesFolder")
end
HERE
}
before do
cur_plat.instance_eval(plat[:block])
end
describe "#output_dir" do
it "returns an output dir consistent with the packaging repo" do
expect(cur_plat._platform.output_dir).to eq(plat[:output_dir])
end
it "adds the target repo in the right way" do
expect(cur_plat._platform.output_dir('thing')).to eq(plat[:output_dir_with_target])
end
end
describe '#target_user' do
it "sets the target_user to 'Administrator'" do
expect(cur_plat._platform.target_user).to eq(plat[:target_user])
end
end
describe '#wix_product_version' do
it "returns first three digits only" do
expect(cur_plat._platform.wix_product_version("1.0.0.1")).to eq("1.0.0")
end
it "returns only numbers" do
expect(cur_plat._platform.wix_product_version("1.0.g0")).to eq("1.0.0")
end
end
describe '#generate_msi_packaging_artifacts' do
before(:each) do
# Create Workdir and temp root directory
FileUtils.mkdir_p("#{WORKDIR}/wix")
FileUtils.mkdir_p("#{VANAGON_ROOT}/resources/windows/wix")
FileUtils.mkdir_p("#{PROJ_ROOT}/resources/windows/wix")
# Switch directory so that project specific folder points to tmp area
@pwd = Dir.pwd
Dir.chdir(PROJ_ROOT)
end
after(:each) do
# Cleanup the complete work directory tree
FileUtils.rm_rf("#{WORK_BASE}")
Dir.chdir(@pwd)
end
it "Copies Wix File from product specific directory to output directory" do
# setup source directories and run artifact generation
FileUtils.cp("#{WIXTESTFILES}/file-1.wxs", "#{PROJ_ROOT}/resources/windows/wix/file-1.wxs")
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(File).to exist("#{WORKDIR}/wix/file-1.wxs")
end
it "Copies Wix File from Vanagon directory to work directory" do
# setup source directories and run artifact generation
FileUtils.cp("#{WIXTESTFILES}/file-1.wxs", "#{VANAGON_ROOT}/resources/windows/wix/file-1.wxs")
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(File).to exist("#{WORKDIR}/wix/file-1.wxs")
end
it "Picks Project Specific Wix File in favour of Generic Wix file" do
# setup source directories and run artifact generation
FileUtils.cp("#{WIXTESTFILES}/file-1.wxs", "#{PROJ_ROOT}/resources/windows/wix/file-wix.wxs")
FileUtils.cp("#{WIXTESTFILES}/file-2.wxs", "#{VANAGON_ROOT}/resources/windows/wix/file-wix.wxs")
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(FileUtils.compare_file("#{WIXTESTFILES}/file-1.wxs", "#{WORKDIR}/wix/file-wix.wxs")).to be_truthy
end
it "Picks Project Specific Wix File in favour of Generic ERB file" do
# setup source directories and run artifact generation
FileUtils.cp("#{WIXTESTFILES}/file-1.wxs", "#{PROJ_ROOT}/resources/windows/wix/file-wix.wxs")
FileUtils.cp("#{WIXTESTFILES}/file-3.wxs.erb", "#{VANAGON_ROOT}/resources/windows/wix/file-wix.wxs.erb")
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(FileUtils.compare_file("#{WIXTESTFILES}/file-1.wxs", "#{WORKDIR}/wix/file-wix.wxs")).to be_truthy
end
it "Picks Project Specific ERB File in favour of Generic Wix file" do
# setup source directories and run artifact generation
FileUtils.cp("#{WIXTESTFILES}/file-3.wxs.erb", "#{PROJ_ROOT}/resources/windows/wix/file-wix.wxs.erb")
FileUtils.cp("#{WIXTESTFILES}/file-2.wxs", "#{VANAGON_ROOT}/resources/windows/wix/file-wix.wxs")
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(FileUtils.compare_file("#{WIXTESTFILES}/file-3.wxs.erb", "#{WORKDIR}/wix/file-wix.wxs")).to be_truthy
end
it "Picks Project Specific ERB File in favour of Generic ERB file" do
# setup source directories and run artifact generation
FileUtils.cp("#{WIXTESTFILES}/file-3.wxs.erb", "#{PROJ_ROOT}/resources/windows/wix/file-wix.wxs.erb")
FileUtils.cp("#{WIXTESTFILES}/file-4.wxs.erb", "#{VANAGON_ROOT}/resources/windows/wix/file-wix.wxs.erb")
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(FileUtils.compare_file("#{WIXTESTFILES}/file-3.wxs.erb", "#{WORKDIR}/wix/file-wix.wxs")).to be_truthy
end
it "Copies Hierarchy of files from Product Specific Directory to output directory with ERB translation as necessary" do
# setup source directories and run artifact generation
FileUtils.cp_r("#{WIXTESTFILES}/", "#{PROJ_ROOT}/resources/windows/", :verbose => true)
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(File).to exist("#{WORKDIR}/wix/file-1.wxs")
expect(File).to exist("#{WORKDIR}/wix/file-2.wxs")
expect(File).to exist("#{WORKDIR}/wix/file-3.wxs")
expect(File).to exist("#{WORKDIR}/wix/file-4.wxs")
expect(File).to exist("#{WORKDIR}/wix/project.filter.xslt")
expect(File).to exist("#{WORKDIR}/wix/project.wxs")
expect(File).to exist("#{WORKDIR}/wix/include/include-sample-1.wxs")
expect(File).to exist("#{WORKDIR}/wix/ui/ui-sample-1.wxs")
expect(File).to exist("#{WORKDIR}/wix/ui/bitmaps/bitmap.bmp")
expect(File).not_to exist("#{WORKDIR}/wix/project.filter.xslt.erb")
expect(File).not_to exist("#{WORKDIR}/wix/file-3.wxs.erb")
expect(File).not_to exist("#{WORKDIR}/wix/file-4.wxs.erb")
end
it "Copies Hierarchy of files from vanagon directory to output directory with ERB translation as necessary" do
# setup source directories and run artifact generation
FileUtils.cp_r("#{WIXTESTFILES}/", "#{VANAGON_ROOT}/resources/windows/", :verbose => true)
cur_plat._platform.generate_msi_packaging_artifacts(WORKDIR, plat[:projname], binding)
# check the result
expect(File).to exist("#{WORKDIR}/wix/file-1.wxs")
expect(File).to exist("#{WORKDIR}/wix/file-2.wxs")
expect(File).to exist("#{WORKDIR}/wix/file-3.wxs")
expect(File).to exist("#{WORKDIR}/wix/file-4.wxs")
expect(File).to exist("#{WORKDIR}/wix/project.filter.xslt")
expect(File).to exist("#{WORKDIR}/wix/project.wxs")
expect(File).to exist("#{WORKDIR}/wix/include/include-sample-1.wxs")
expect(File).to exist("#{WORKDIR}/wix/ui/ui-sample-1.wxs")
expect(File).to exist("#{WORKDIR}/wix/ui/bitmaps/bitmap.bmp")
expect(File).not_to exist("#{WORKDIR}/wix/project.filter.xslt.erb")
expect(File).not_to exist("#{WORKDIR}/wix/file-3.wxs.erb")
expect(File).not_to exist("#{WORKDIR}/wix/file-4.wxs.erb")
end
describe "generate_wix_dirs" do
it "returns one directory with install_service defaults" do
proj = Vanagon::Project::DSL.new('test-fixture', {}, [])
proj.instance_eval(project_block)
cur_plat.instance_eval(plat[:block])
comp = Vanagon::Component::DSL.new('service-test', {}, cur_plat._platform)
comp.install_service('SourceDir/ProgramFilesFolder/TestID/TestProduct/opt/bin.exe')
expect(cur_plat._platform.generate_service_bin_dirs([comp._component.service].flatten.compact, proj._project)).to eq( \
<<-HERE
<Directory Name="opt" Id="opt">
<Directory Id="SERVICETESTBINDIR" />
</Directory>
HERE
)
end
it "returns one directory with non-default name" do
proj = Vanagon::Project::DSL.new('test-fixture', {}, [])
proj.instance_eval(project_block)
cur_plat.instance_eval(plat[:block])
comp = Vanagon::Component::DSL.new('service-test', {}, cur_plat._platform)
comp.install_service('SourceDir/ProgramFilesFolder/TestID/TestProduct/opt/bin.exe', nil, "service-test-2")
expect(cur_plat._platform.generate_service_bin_dirs([comp._component.service].flatten.compact, proj._project)).to eq( \
<<-HERE
<Directory Name="opt" Id="opt">
<Directory Id="SERVICETEST2BINDIR" />
</Directory>
HERE
)
end
it "returns nested directory correctly with \\" do
proj = Vanagon::Project::DSL.new('test-fixture', {}, [])
proj.instance_eval(project_block)
cur_plat.instance_eval(plat[:block])
comp = Vanagon::Component::DSL.new('service-test', {}, cur_plat._platform)
comp.install_service('SourceDir\\ProgramFilesFolder\\TestID\\TestProduct\\somedir\\someotherdir\\bin.exe')
expect(cur_plat._platform.generate_service_bin_dirs([comp._component.service].flatten.compact, proj._project)).to eq( \
<<-HERE
<Directory Name="somedir" Id="somedir">
<Directory Name="someotherdir" Id="someotherdir">
<Directory Id="SERVICETESTBINDIR" />
</Directory>
</Directory>
HERE
)
end
it "adds a second directory for the same input but different components" do
proj = Vanagon::Project::DSL.new('test-fixture', {}, [])
proj.instance_eval(project_block)
cur_plat.instance_eval(plat[:block])
comp = Vanagon::Component::DSL.new('service-test', {}, cur_plat._platform)
comp.install_service('SourceDir\\ProgramFilesFolder\\TestID\\TestProduct\\somedir\\bin.exe')
comp2 = Vanagon::Component::DSL.new('service-test-2', {}, cur_plat._platform)
comp2.install_service('SourceDir\\ProgramFilesFolder\\TestID\\TestProduct\\somedir\\bin.exe')
expect(cur_plat._platform.generate_service_bin_dirs([comp._component.service, comp2._component.service].flatten.compact, proj._project)).to eq( \
<<-HERE
<Directory Name="somedir" Id="somedir">
<Directory Id="SERVICETESTBINDIR" />
<Directory Id="SERVICETEST2BINDIR" />
</Directory>
HERE
)
end
it "returns correctly formatted multiple nested directories" do
proj = Vanagon::Project::DSL.new('test-fixture', {}, [])
proj.instance_eval(project_block)
cur_plat.instance_eval(plat[:block])
comp = Vanagon::Component::DSL.new('service-test-1', {}, cur_plat._platform)
comp.install_service('SourceDir\\ProgramFilesFolder\\TestID\\TestProduct\\somedir\\oneUp\\twoUp\\bin.exe')
comp2 = Vanagon::Component::DSL.new('service-test-2', {}, cur_plat._platform)
comp2.install_service('SourceDir\\ProgramFilesFolder\\TestID\\TestProduct\\somedir\\oneUpAgain\\twoUp\\bin.exe')
comp3 = Vanagon::Component::DSL.new('service-test-3', {}, cur_plat._platform)
comp3.install_service('SourceDir\\ProgramFilesFolder\\TestID\\TestProduct\\somedir\\oneUpAgain\\twoUpAgain\\bin.exe')
expect(cur_plat._platform.generate_service_bin_dirs([comp._component.service, comp2._component.service, comp3._component.service].flatten.compact, proj._project)).to eq( \
<<-HERE
<Directory Name="somedir" Id="somedir">
<Directory Name="oneUp" Id="oneUp">
<Directory Name="twoUp" Id="twoUp">
<Directory Id="SERVICETEST1BINDIR" />
</Directory>
</Directory>
<Directory Name="oneUpAgain" Id="oneUpAgain">
<Directory Name="twoUp" Id="twoUp">
<Directory Id="SERVICETEST2BINDIR" />
</Directory>
<Directory Name="twoUpAgain" Id="twoUpAgain">
<Directory Id="SERVICETEST3BINDIR" />
</Directory>
</Directory>
</Directory>
HERE
)
end
end
end
end
end
end
|
Ruby
|
Apache-2.0
|
Marcushawley/vanagon/spec/lib/vanagon/platform/windows_spec.rb
|
37874d01-3e2d-40f9-a1ff-a6575ef5baec
|
[]
|
[]
|
import {structUtils} from '@yarnpkg/core';
import {NativePath, PortablePath, Filename} from '@yarnpkg/fslib';
import {toFilename, npath, ppath} from '@yarnpkg/fslib';
import {PnpApi, PhysicalPackageLocator, PackageInformation} from '@yarnpkg/pnp';
import {hoist, HoisterTree, HoisterResult} from './hoist';
// Babel doesn't support const enums, that's why we use a non-const enum for LinkType in @yarnpkg/pnp
// But because of this TypeScript requires @yarnpkg/pnp during runtime
// To prevent this we redeclare LinkType enum here, to not depend on @yarnpkg/pnp during runtime
export enum LinkType {HARD = `HARD`, SOFT = `SOFT`}
// The list of directories stored within a node_modules (or node_modules/@foo)
export type NodeModulesBaseNode = {
dirList: Set<Filename>
};
// The entry for a package within a node_modules
export type NodeModulesPackageNode = {
locator: LocatorKey,
// The source path. Note that the virtual paths have been resolved/lost!
target: PortablePath,
// Hard links are copies of the target; soft links are symlinks to it
linkType: LinkType,
// Contains ["node_modules"] if there are nested node_modules entries
dirList?: undefined,
aliases: Array<string>,
};
/**
 * Node modules tree - a map of every folder within node_modules, along with its
 * directory listing, whether it is a symlink, and its location.
*
* Sample contents:
* /home/user/project/node_modules -> {dirList: ['foo', 'bar']}
* /home/user/project/node_modules/foo -> {target: '/home/user/project/.yarn/.cache/foo.zip/node_modules/foo', linkType: 'HARD'}
* /home/user/project/node_modules/bar -> {target: '/home/user/project/packages/bar', linkType: 'SOFT'}
*/
export type NodeModulesTree = Map<PortablePath, NodeModulesBaseNode | NodeModulesPackageNode>;
export interface NodeModulesTreeOptions {
pnpifyFs?: boolean;
}
/** node_modules path segment */
const NODE_MODULES = toFilename(`node_modules`);
/** Package locator key for usage inside maps */
type LocatorKey = string;
/**
* Returns path to archive, if package location is inside the archive.
*
* @param packagePath package location
*
 * @returns path to the archive if the location is inside the archive, or null otherwise
*/
export const getArchivePath = (packagePath: PortablePath): PortablePath | null =>
packagePath.indexOf(`.zip/${NODE_MODULES}/`) >= 0 ?
npath.toPortablePath(packagePath.split(`/${NODE_MODULES}/`)[0]) :
null;
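// For example (illustrative paths only):
//   getArchivePath(`/cache/foo.zip/node_modules/foo` as PortablePath)
//     === `/cache/foo.zip`
//   getArchivePath(`/project/packages/bar` as PortablePath) === null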
/**
* Retrieve full package list and build hoisted `node_modules` directories
* representation in-memory.
*
* @param pnp PnP API
*
* @returns hoisted `node_modules` directories representation in-memory
*/
export const buildNodeModulesTree = (pnp: PnpApi, options: NodeModulesTreeOptions): NodeModulesTree => {
const packageTree = buildPackageTree(pnp, options);
const hoistedTree = hoist(packageTree);
return populateNodeModulesTree(pnp, hoistedTree, options);
};
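// A minimal usage sketch (assuming a PnP API instance is available, e.g. via
// require(`pnpapi`) inside a PnP-enabled project):
//   const tree = buildNodeModulesTree(pnp, {pnpifyFs: false});
//   const locatorMap = buildLocatorMap(tree);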
const stringifyLocator = (locator: PhysicalPackageLocator): LocatorKey => `${locator.name}@${locator.reference}`;
export type NodeModulesLocatorMap = Map<LocatorKey, {
target: PortablePath;
linkType: LinkType;
locations: Array<PortablePath>;
aliases: Array<string>;
}>
export const buildLocatorMap = (nodeModulesTree: NodeModulesTree): NodeModulesLocatorMap => {
const map = new Map();
for (const [location, val] of nodeModulesTree.entries()) {
if (!val.dirList) {
let entry = map.get(val.locator);
if (!entry) {
entry = {target: val.target, linkType: val.linkType, locations: [], aliases: val.aliases};
map.set(val.locator, entry);
}
entry.locations.push(location);
}
}
for (const val of map.values()) {
// Sort locations by depth first and then alphabetically for determinism
val.locations = val.locations.sort((loc1: PortablePath, loc2: PortablePath) => {
// Depth = number of path segments, so split on the path separator, not the
// PATH-style delimiter
const len1 = loc1.split(ppath.sep).length;
const len2 = loc2.split(ppath.sep).length;
return len1 !== len2 ? len2 - len1 : loc2.localeCompare(loc1);
});
}
return map;
};
function isPortalLocator(locatorKey: LocatorKey): boolean {
let descriptor = structUtils.parseDescriptor(locatorKey);
if (structUtils.isVirtualDescriptor(descriptor))
descriptor = structUtils.devirtualizeDescriptor(descriptor);
return descriptor.range.startsWith(`portal:`);
}
/**
* Traverses PnP tree and produces input for the `RawHoister`
*
* @param pnp PnP API
*
 * @returns package tree
*/
const buildPackageTree = (pnp: PnpApi, options: NodeModulesTreeOptions): HoisterTree => {
const pnpRoots = pnp.getDependencyTreeRoots();
const topPkg = pnp.getPackageInformation(pnp.topLevel);
if (topPkg === null)
throw new Error(`Assertion failed: Expected the top-level package to have been registered`);
const topLocator = pnp.findPackageLocator(topPkg.packageLocation);
if (topLocator === null)
throw new Error(`Assertion failed: Expected the top-level package to have a physical locator`);
const topLocatorKey = stringifyLocator(topLocator);
for (const locator of pnpRoots) {
if (stringifyLocator(locator) !== topLocatorKey) {
topPkg.packageDependencies.set(`$wsroot$${locator.name}`, locator.reference);
}
}
const packageTree: HoisterTree = {
name: topLocator.name,
reference: topLocator.reference,
peerNames: topPkg.packagePeers,
dependencies: new Set<HoisterTree>(),
};
const nodes = new Map<LocatorKey, HoisterTree>();
const addPackageToTree = (pkg: PackageInformation<NativePath>, locator: PhysicalPackageLocator, parent: HoisterTree, parentPkg: PackageInformation<NativePath>) => {
const locatorKey = stringifyLocator(locator);
let node = nodes.get(locatorKey);
const isSeen = !!node;
if (!isSeen && locatorKey === topLocatorKey) {
node = packageTree;
nodes.set(locatorKey, packageTree);
}
if (!node) {
nodes.set(locatorKey, node = {
name: locator.name,
reference: locator.reference,
dependencies: new Set(),
peerNames: pkg.packagePeers,
});
}
parent.dependencies.add(node);
// If we link dependencies to file system we must not try to install children dependencies inside portal folders
const shouldAddChildrenDependencies = options.pnpifyFs || !isPortalLocator(locatorKey);
if (!isSeen && shouldAddChildrenDependencies) {
for (const [name, referencish] of pkg.packageDependencies) {
if (referencish !== null && !node.peerNames.has(name)) {
const depLocator = pnp.getLocator(name, referencish);
const pkgLocator = pnp.getLocator(name.replace(`$wsroot$`, ``), referencish);
const depPkg = pnp.getPackageInformation(pkgLocator);
if (depPkg === null)
throw new Error(`Assertion failed: Expected the package to have been registered`);
// Skip package self-references
if (stringifyLocator(depLocator) === locatorKey)
continue;
addPackageToTree(depPkg, depLocator, node, pkg);
}
}
}
};
addPackageToTree(topPkg, topLocator, packageTree, topPkg);
return packageTree;
};
function getTargetLocatorPath(locator: PhysicalPackageLocator, pnp: PnpApi, options: NodeModulesTreeOptions): {linkType: LinkType, target: PortablePath} {
const pkgLocator = pnp.getLocator(locator.name.replace(/^\$wsroot\$/, ``), locator.reference);
const info = pnp.getPackageInformation(pkgLocator);
if (info === null)
throw new Error(`Assertion failed: Expected the package to be registered`);
let linkType;
let target;
if (options.pnpifyFs) {
// In case of pnpifyFs we represent modules as symlinks to archives in NodeModulesFS
// `/home/user/project/foo` is a symlink to `/home/user/project/.yarn/.cache/foo.zip/node_modules/foo`
// To make this fs layout work with legacy tools we make
// `/home/user/project/.yarn/.cache/foo.zip/node_modules/foo/node_modules` (which normally does not exist inside archive) a symlink to:
// `/home/user/project/node_modules/foo/node_modules`, so that the tools are able to access it
target = npath.toPortablePath(info.packageLocation);
linkType = LinkType.SOFT;
} else {
const truePath = pnp.resolveVirtual && locator.reference && locator.reference.startsWith(`virtual:`)
? pnp.resolveVirtual(info.packageLocation)
: info.packageLocation;
target = npath.toPortablePath(truePath || info.packageLocation);
linkType = info.linkType;
}
return {linkType, target};
}
/**
* Converts hoisted tree to node modules map
*
 * @param pnp PnP API
 * @param hoistedTree hoisted package tree from `RawHoister`
 * @param options tree building options
*
* @returns node modules map
*/
const populateNodeModulesTree = (pnp: PnpApi, hoistedTree: HoisterResult, options: NodeModulesTreeOptions): NodeModulesTree => {
const tree: NodeModulesTree = new Map();
const makeLeafNode = (locator: PhysicalPackageLocator, aliases: Array<string>): {locator: LocatorKey, target: PortablePath, linkType: LinkType, aliases: Array<string>} => {
const {linkType, target} = getTargetLocatorPath(locator, pnp, options);
return {
locator: stringifyLocator(locator),
target,
linkType,
aliases,
};
};
const getPackageName = (locator: PhysicalPackageLocator): { name: Filename, scope: Filename | null } => {
const [nameOrScope, name] = locator.name.split(`/`);
return name ? {
scope: toFilename(nameOrScope),
name: toFilename(name),
} : {
scope: null,
name: toFilename(nameOrScope),
};
};
const seenNodes = new Set<HoisterResult>();
const buildTree = (pkg: HoisterResult, locationPrefix: PortablePath) => {
if (seenNodes.has(pkg))
return;
seenNodes.add(pkg);
for (const dep of pkg.dependencies) {
// We do not want self-references in node_modules, since they confuse existing tools
if (dep === pkg)
continue;
const references = Array.from(dep.references).sort();
const locator = {name: dep.name, reference: references[0]};
const {name, scope} = getPackageName(locator);
const packageNameParts = scope
? [scope, name]
: [name];
const nodeModulesDirPath = ppath.join(locationPrefix, NODE_MODULES);
const nodeModulesLocation = ppath.join(nodeModulesDirPath, ...packageNameParts);
const leafNode = makeLeafNode(locator, references.slice(1));
if (!dep.name.startsWith(`$wsroot$`)) {
tree.set(nodeModulesLocation, leafNode);
const segments = nodeModulesLocation.split(`/`);
const nodeModulesIdx = segments.indexOf(NODE_MODULES);
let segCount = segments.length - 1;
while (nodeModulesIdx >= 0 && segCount > nodeModulesIdx) {
const dirPath = npath.toPortablePath(segments.slice(0, segCount).join(ppath.sep));
const targetDir = toFilename(segments[segCount]);
const subdirs = tree.get(dirPath);
if (!subdirs) {
tree.set(dirPath, {dirList: new Set([targetDir])});
} else if (subdirs.dirList) {
if (subdirs.dirList.has(targetDir)) {
break;
} else {
subdirs.dirList.add(targetDir);
}
}
segCount--;
}
}
buildTree(dep, leafNode.linkType === LinkType.SOFT ? leafNode.target : nodeModulesLocation);
}
};
const rootNode = makeLeafNode({name: hoistedTree.name, reference: Array.from(hoistedTree.references)[0] as string}, []);
const rootPath = rootNode.target;
tree.set(rootPath, rootNode);
buildTree(hoistedTree, rootPath);
return tree;
};
/**
* Benchmarks raw hoisting performance.
*
* The function is used for troubleshooting purposes only.
*
 * @param packageTree package tree
*
* @returns average raw hoisting time
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const benchmarkRawHoisting = (packageTree: HoisterTree) => {
const iterCount = 10;
const startTime = Date.now();
for (let iter = 0; iter < iterCount; iter++)
hoist(packageTree);
const endTime = Date.now();
return (endTime - startTime) / iterCount;
};
/**
* Benchmarks node_modules tree building.
*
* The function is used for troubleshooting purposes only.
*
 * @param pnp PnP API
 * @param options tree building options
 *
 * @returns average node_modules tree building time
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const benchmarkBuildTree = (pnp: PnpApi, options: NodeModulesTreeOptions): number => {
const iterCount = 100;
const startTime = Date.now();
for (let iter = 0; iter < iterCount; iter++) {
const packageTree = buildPackageTree(pnp, options);
const hoistedTree = hoist(packageTree);
populateNodeModulesTree(pnp, hoistedTree, options);
}
const endTime = Date.now();
return (endTime - startTime) / iterCount;
};
/**
* Pretty-prints node_modules tree.
*
* The function is used for troubleshooting purposes only.
*
* @param tree node_modules tree
* @param rootPath top-level project root folder
*
 * @returns pretty-printed node_modules tree
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const dumpNodeModulesTree = (tree: NodeModulesTree, rootPath: PortablePath): string => {
const sortedTree: NodeModulesTree = new Map();
const keys = Array.from(tree.keys()).sort();
for (const key of keys) {
const val = tree.get(key)!;
sortedTree.set(key, val.dirList ? {dirList: new Set(Array.from(val.dirList).sort())} : val);
}
const seenPaths = new Set();
const dumpTree = (nodePath: PortablePath, prefix: string = ``, dirPrefix = ``): string => {
const node = sortedTree.get(nodePath);
if (!node)
return ``;
seenPaths.add(nodePath);
let str = ``;
if (node.dirList) {
const dirs = Array.from(node.dirList);
for (let idx = 0; idx < dirs.length; idx++) {
const dir = dirs[idx];
str += `${prefix}${idx < dirs.length - 1 ? `├─` : `└─`}${dirPrefix}${dir}\n`;
str += dumpTree(ppath.join(nodePath, dir), `${prefix}${idx < dirs.length - 1 ?`│ ` : ` `}`);
}
} else {
const {target, linkType} = node;
str += dumpTree(ppath.join(nodePath, NODE_MODULES), `${prefix}│ `, `${NODE_MODULES}/`);
str += `${prefix}└─${linkType === LinkType.SOFT ? `s>` : `>`}${target}\n`;
}
return str;
};
let str = dumpTree(ppath.join(rootPath, NODE_MODULES));
for (const key of sortedTree.keys()) {
if (!seenPaths.has(key)) {
str += `${key.replace(rootPath, ``)}\n${dumpTree(key)}`;
}
}
return str;
};
/**
* Pretty-prints dependency tree in the `yarn why`-like format
*
* The function is used for troubleshooting purposes only.
*
 * @param tree hoisted dependency tree
 *
 * @returns pretty-printed dependency tree
*/
// eslint-disable-next-line @typescript-eslint/no-unused-vars
const dumpDepTree = (tree: HoisterResult) => {
const dumpLocator = (locator: PhysicalPackageLocator): string => {
if (locator.reference === `workspace:.`) {
return `.`;
} else if (!locator.reference) {
return `${locator.name}@${locator.reference}`;
} else {
const version = (locator.reference.indexOf(`#`) > 0 ? locator.reference.split(`#`)[1] : locator.reference).replace(`npm:`, ``);
if (locator.reference.startsWith(`virtual`)) {
return `v:${locator.name}@${version}`;
} else {
return `${locator.name}@${version}`;
}
}
};
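// e.g. `.` for the workspace root, `v:foo@1.2.3` for a virtual reference,
// `foo@1.2.3` otherwise.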
const dumpPackage = (pkg: HoisterResult, parents: Array<HoisterResult>, prefix = ``): string => {
if (parents.includes(pkg))
return ``;
const dependencies = Array.from(pkg.dependencies);
let str = ``;
for (let idx = 0; idx < dependencies.length; idx++) {
const dep = dependencies[idx];
str += `${prefix}${idx < dependencies.length - 1 ? `├─` : `└─`}${(parents.includes(dep) ? `>` : ``) + dumpLocator({name: dep.name, reference: Array.from(dep.references)[0]})}\n`;
str += dumpPackage(dep, [...parents, dep], `${prefix}${idx < dependencies.length - 1 ?`│ ` : ` `}`);
}
return str;
};
return dumpPackage(tree, []);
};
|
TypeScript
|
BSD-2-Clause
|
7rulnik/berry/packages/yarnpkg-pnpify/sources/buildNodeModulesTree.ts
|
fd43c17c-43bb-49f2-916c-8d483d42c623
|
[]
|
[]
|
from datetime import datetime
import logging
import os
import subprocess
import sys
from argparse import Namespace
logging.getLogger("transformers").setLevel(logging.WARNING)
import click
import torch
from luke.utils.model_utils import ModelArchive
from zero.utils.experiment_logger import commet_logger_args, CometLogger, NullLogger
LOG_FORMAT = "[%(asctime)s] [%(levelname)s] %(message)s (%(funcName)s@%(filename)s:%(lineno)s)"
try:
import absl.logging
# https://github.com/tensorflow/tensorflow/issues/27045#issuecomment-519642980
logging.getLogger().removeHandler(absl.logging._absl_handler)
absl.logging._warn_preinit_stderr = False
except ImportError:
pass
logger = logging.getLogger(__name__)
@click.group()
@click.option(
"--output-dir", default="models", type=click.Path()
)
@click.option("--num-gpus", default=1)
@click.option("--experiment-logger", "--logger", type=click.Choice(["comet"]))
@click.option("--master-port", default=29500)
@click.option("--local-rank", "--local_rank", default=-1)
@click.option("--model-file", type=click.Path(exists=True))
@click.option("--device-id", type=int)
@commet_logger_args
@click.pass_context
def cli(ctx, **kwargs):
args = Namespace(**kwargs)
if args.local_rank == -1 and args.num_gpus > 1:
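# Emulate torch.distributed.launch: re-invoke this CLI once per GPU as a
# subprocess, passing the rendezvous settings (MASTER_ADDR, MASTER_PORT,
# WORLD_SIZE, RANK, LOCAL_RANK) to each worker through its environment.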
current_env = os.environ.copy()
current_env["MASTER_ADDR"] = "127.0.0.1"
current_env["MASTER_PORT"] = str(args.master_port)
current_env["WORLD_SIZE"] = str(args.num_gpus)
processes = []
for args.local_rank in range(0, args.num_gpus):
current_env["RANK"] = str(args.local_rank)
current_env["LOCAL_RANK"] = str(args.local_rank)
cmd = [sys.executable, "-u", "-m", "examples.cli", "--local-rank={}".format(args.local_rank)]
cmd.extend(sys.argv[1:])
process = subprocess.Popen(cmd, env=current_env)
processes.append(process)
for process in processes:
process.wait()
if process.returncode != 0:
raise subprocess.CalledProcessError(returncode=process.returncode, cmd=cmd)
sys.exit(0)
else:
if args.local_rank not in (-1, 0):
logging.basicConfig(format=LOG_FORMAT, level=logging.WARNING)
else:
logging.basicConfig(format=LOG_FORMAT, level=logging.INFO)
if not os.path.exists(args.output_dir) and args.local_rank in [-1, 0]:
os.makedirs(args.output_dir)
logger.info("Output dir: %s", args.output_dir)
# NOTE: ctx.obj is documented here: http://click.palletsprojects.com/en/7.x/api/#click.Context.obj
ctx.obj = dict(local_rank=args.local_rank, output_dir=args.output_dir)
if args.num_gpus == 0:
ctx.obj["device"] = torch.device("cpu")
elif args.local_rank == -1:
ctx.obj["device"] = torch.device("cuda:{}".format(args.device_id))
else:
torch.cuda.set_device(args.local_rank)
ctx.obj["device"] = torch.device("cuda", args.local_rank)
torch.distributed.init_process_group(backend="nccl")
experiment_logger = NullLogger()
if args.local_rank in (-1, 0) and args.experiment_logger == "comet":
experiment_logger = CometLogger(args)
experiment_logger.log_parameters({p.name: getattr(args, p.name) for p in cli.params})
ctx.obj["experiment"] = experiment_logger
if args.model_file:
model_archive = ModelArchive.load(args.model_file)
ctx.obj["tokenizer"] = model_archive.tokenizer
ctx.obj["entity_vocab"] = model_archive.entity_vocab
ctx.obj["bert_model_name"] = model_archive.bert_model_name
ctx.obj["model_config"] = model_archive.config
ctx.obj["max_mention_length"] = model_archive.max_mention_length
ctx.obj["model_weights"] = model_archive.state_dict
experiment_logger.log_parameter("model_file_name", os.path.basename(args.model_file))
from zero.ner.main import cli as ner_cli
cli.add_command(ner_cli)
if __name__ == "__main__":
cli()
|
Python
|
MIT
|
nguyenvanhoang7398/nndl2-project/zero/cli.py
|
05ddc276-d2a2-4498-badb-4549074ba7c0
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1365, "end": 1374, "context": "iron.copy()\n current_env[\"MASTER_ADDR\"] = \"127.0.0.1\"\n current_env[\"MASTER_PORT\"] = str(args.ma"}]
|
[{"tag": "IP_ADDRESS", "value": "127.0.0.1", "start": 1365, "end": 1374, "context": "iron.copy()\n current_env[\"MASTER_ADDR\"] = \"127.0.0.1\"\n current_env[\"MASTER_PORT\"] = str(args.ma"}]
|
//
// Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018
//
// Distributed under the Boost Software License, Version 1.0. (See accompanying
// file LICENSE_1_0.txt or copy at http://www.boost.org/LICENSE_1_0.txt)
//
#include "td/utils/port/SocketFd.h"
#include "td/utils/logging.h"
#if TD_PORT_WINDOWS
#include "td/utils/misc.h"
#endif
#if TD_PORT_POSIX
#include <arpa/inet.h>
#include <fcntl.h>
#include <netinet/in.h>
#include <netinet/tcp.h>
#include <sys/socket.h>
#include <sys/types.h>
#include <unistd.h>
#endif
namespace td {
Result<SocketFd> SocketFd::open(const IPAddress &address) {
SocketFd socket;
TRY_STATUS(socket.init(address));
return std::move(socket);
}
#if TD_PORT_POSIX
Result<SocketFd> SocketFd::from_native_fd(int fd) {
auto fd_guard = ScopeExit() + [fd]() { ::close(fd); };
TRY_STATUS(detail::set_native_socket_is_blocking(fd, false));
// TODO remove copypaste
int flags = 1;
setsockopt(fd, SOL_SOCKET, SO_REUSEADDR, reinterpret_cast<const char *>(&flags), sizeof(flags));
setsockopt(fd, SOL_SOCKET, SO_KEEPALIVE, reinterpret_cast<const char *>(&flags), sizeof(flags));
setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, reinterpret_cast<const char *>(&flags), sizeof(flags));
// TODO: SO_REUSEADDR, SO_KEEPALIVE, TCP_NODELAY, SO_SNDBUF, SO_RCVBUF, TCP_QUICKACK, SO_LINGER
fd_guard.dismiss();
SocketFd socket;
socket.fd_ = Fd(fd, Fd::Mode::Owner);
return std::move(socket);
}
#endif
Status SocketFd::init(const IPAddress &address) {
auto fd = socket(address.get_address_family(), SOCK_STREAM, 0);
#if TD_PORT_POSIX
if (fd == -1) {
#elif TD_PORT_WINDOWS
if (fd == INVALID_SOCKET) {
#endif
return OS_SOCKET_ERROR("Failed to create a socket");
}
auto fd_guard = ScopeExit() + [fd]() {
#if TD_PORT_POSIX
::close(fd);
#elif TD_PORT_WINDOWS
::closesocket(fd);
#endif
};
TRY_STATUS(detail::set_native_socket_is_blocking(fd, false));
#if TD_PORT_POSIX
int flags = 1;
#elif TD_PORT_WINDOWS
BOOL flags = TRUE;
#endif
setsockopt(fd, SOL_SOCKET, SO_REUSEADDR, reinterpret_cast<const char *>(&flags), sizeof(flags));
setsockopt(fd, SOL_SOCKET, SO_KEEPALIVE, reinterpret_cast<const char *>(&flags), sizeof(flags));
setsockopt(fd, IPPROTO_TCP, TCP_NODELAY, reinterpret_cast<const char *>(&flags), sizeof(flags));
// TODO: SO_REUSEADDR, SO_KEEPALIVE, TCP_NODELAY, SO_SNDBUF, SO_RCVBUF, TCP_QUICKACK, SO_LINGER
#if TD_PORT_POSIX
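// With a non-blocking socket, connect() normally returns -1 with
// errno == EINPROGRESS; completion is reported later via fd writability.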
int e_connect = connect(fd, address.get_sockaddr(), static_cast<socklen_t>(address.get_sockaddr_len()));
if (e_connect == -1) {
auto connect_errno = errno;
if (connect_errno != EINPROGRESS) {
return Status::PosixError(connect_errno, PSLICE() << "Failed to connect to " << address);
}
}
fd_ = Fd(fd, Fd::Mode::Owner);
#elif TD_PORT_WINDOWS
auto bind_addr = address.get_any_addr();
auto e_bind = bind(fd, bind_addr.get_sockaddr(), narrow_cast<int>(bind_addr.get_sockaddr_len()));
if (e_bind != 0) {
return OS_SOCKET_ERROR("Failed to bind a socket");
}
fd_ = Fd::create_socket_fd(fd);
fd_.connect(address);
#endif
fd_guard.dismiss();
return Status::OK();
}
const Fd &SocketFd::get_fd() const {
return fd_;
}
Fd &SocketFd::get_fd() {
return fd_;
}
void SocketFd::close() {
fd_.close();
}
bool SocketFd::empty() const {
return fd_.empty();
}
int32 SocketFd::get_flags() const {
return fd_.get_flags();
}
Status SocketFd::get_pending_error() {
return fd_.get_pending_error();
}
Result<size_t> SocketFd::write(Slice slice) {
return fd_.write(slice);
}
Result<size_t> SocketFd::read(MutableSlice slice) {
return fd_.read(slice);
}
} // namespace td
|
C++
|
MIT
|
vilkoz/nchat/ext/td/tdutils/td/utils/port/SocketFd.cpp
|
4584479a-7bf4-4366-a4a3-4cf150306e36
|
[{"tag": "EMAIL", "value": "arseny30@gmail.com", "start": 70, "end": 88, "context": "ksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018\n//\n// Distributed under the Boost Soft"}, {"tag": "NAME", "value": "Aliaksei Levin", "start": 16, "end": 30, "context": "//\n// Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@g"}, {"tag": "NAME", "value": "Arseny Smirnov", "start": 54, "end": 68, "context": "/ Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018\n//\n// Distributed "}, {"tag": "EMAIL", "value": "levlam@telegram.org", "start": 32, "end": 51, "context": "//\n// Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018\n/"}]
|
[{"tag": "EMAIL", "value": "arseny30@gmail.com", "start": 70, "end": 88, "context": "ksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018\n//\n// Distributed under the Boost Soft"}, {"tag": "NAME", "value": "Aliaksei Levin", "start": 16, "end": 30, "context": "//\n// Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@g"}, {"tag": "NAME", "value": "Arseny Smirnov", "start": 54, "end": 68, "context": "/ Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018\n//\n// Distributed "}, {"tag": "EMAIL", "value": "levlam@telegram.org", "start": 32, "end": 51, "context": "//\n// Copyright Aliaksei Levin (levlam@telegram.org), Arseny Smirnov (arseny30@gmail.com) 2014-2018\n/"}]
|
// Copyright 2021 gRPC authors.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
//
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
#include <grpc/support/port_platform.h>
#include "src/core/ext/transport/binder/server/binder_server.h"
#ifndef GRPC_NO_BINDER
#include <memory>
#include <string>
#include <utility>
#include "absl/memory/memory.h"
#include <grpc/grpc.h>
#include "src/core/ext/transport/binder/transport/binder_transport.h"
#include "src/core/ext/transport/binder/wire_format/binder_android.h"
#include "src/core/lib/iomgr/exec_ctx.h"
#include "src/core/lib/surface/server.h"
#include "src/core/lib/transport/error_utils.h"
#ifdef GPR_SUPPORT_BINDER_TRANSPORT
#include <android/binder_ibinder.h>
#include <android/binder_ibinder_jni.h>
#include <jni.h>
extern "C" {
// This will be invoked from
// src/core/ext/transport/binder/java/io/grpc/binder/cpp/GrpcCppServerBuilder.java
JNIEXPORT jobject JNICALL
Java_io_grpc_binder_cpp_GrpcCppServerBuilder_GetEndpointBinderInternal__Ljava_lang_String_2(
JNIEnv* jni_env, jobject, jstring conn_id_jstring) {
AIBinder* ai_binder = nullptr;
{
// This block is the scope of conn_id c-string
jboolean isCopy;
const char* conn_id = jni_env->GetStringUTFChars(conn_id_jstring, &isCopy);
ai_binder =
static_cast<AIBinder*>(grpc_get_endpoint_binder(std::string(conn_id)));
if (ai_binder == nullptr) {
gpr_log(GPR_ERROR, "Cannot find endpoint binder with connection id = %s",
conn_id);
}
if (isCopy == JNI_TRUE) {
jni_env->ReleaseStringUTFChars(conn_id_jstring, conn_id);
}
}
if (ai_binder == nullptr) {
return nullptr;
}
return AIBinder_toJavaBinder(jni_env, ai_binder);
}
}
#endif
namespace grpc {
namespace experimental {
namespace binder {
void* GetEndpointBinder(const std::string& service) {
return grpc_get_endpoint_binder(service);
}
void AddEndpointBinder(const std::string& service, void* endpoint_binder) {
grpc_add_endpoint_binder(service, endpoint_binder);
}
void RemoveEndpointBinder(const std::string& service) {
grpc_remove_endpoint_binder(service);
}
} // namespace binder
} // namespace experimental
} // namespace grpc
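// The pool below maps service/connection ids to raw endpoint binders so that
// the Java side can look a binder up by id (see the JNI entry point above).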
static absl::flat_hash_map<std::string, void*>* g_endpoint_binder_pool =
nullptr;
namespace {
grpc_core::Mutex* GetBinderPoolMutex() {
static grpc_core::Mutex* mu = new grpc_core::Mutex();
return mu;
}
} // namespace
void grpc_add_endpoint_binder(const std::string& service,
void* endpoint_binder) {
grpc_core::MutexLock lock(GetBinderPoolMutex());
if (g_endpoint_binder_pool == nullptr) {
g_endpoint_binder_pool = new absl::flat_hash_map<std::string, void*>();
}
(*g_endpoint_binder_pool)[service] = endpoint_binder;
}
void grpc_remove_endpoint_binder(const std::string& service) {
grpc_core::MutexLock lock(GetBinderPoolMutex());
if (g_endpoint_binder_pool == nullptr) {
return;
}
g_endpoint_binder_pool->erase(service);
}
void* grpc_get_endpoint_binder(const std::string& service) {
grpc_core::MutexLock lock(GetBinderPoolMutex());
if (g_endpoint_binder_pool == nullptr) {
return nullptr;
}
auto iter = g_endpoint_binder_pool->find(service);
return iter == g_endpoint_binder_pool->end() ? nullptr : iter->second;
}
namespace grpc_core {
class BinderServerListener : public Server::ListenerInterface {
public:
BinderServerListener(
Server* server, std::string addr, BinderTxReceiverFactory factory,
std::shared_ptr<grpc::experimental::binder::SecurityPolicy>
security_policy)
: server_(server),
addr_(std::move(addr)),
factory_(std::move(factory)),
security_policy_(security_policy) {}
void Start(Server* /*server*/,
const std::vector<grpc_pollset*>* /*pollsets*/) override {
tx_receiver_ = factory_(
[this](transaction_code_t code, grpc_binder::ReadableParcel* parcel,
int uid) { return OnSetupTransport(code, parcel, uid); });
endpoint_binder_ = tx_receiver_->GetRawBinder();
grpc_add_endpoint_binder(addr_, endpoint_binder_);
}
channelz::ListenSocketNode* channelz_listen_socket_node() const override {
return nullptr;
}
void SetOnDestroyDone(grpc_closure* on_destroy_done) override {
on_destroy_done_ = on_destroy_done;
}
void Orphan() override { delete this; }
~BinderServerListener() override {
ExecCtx::Get()->Flush();
if (on_destroy_done_) {
ExecCtx::Run(DEBUG_LOCATION, on_destroy_done_, GRPC_ERROR_NONE);
ExecCtx::Get()->Flush();
}
grpc_remove_endpoint_binder(addr_);
}
private:
absl::Status OnSetupTransport(transaction_code_t code,
grpc_binder::ReadableParcel* parcel, int uid) {
grpc_core::ExecCtx exec_ctx;
if (grpc_binder::BinderTransportTxCode(code) !=
grpc_binder::BinderTransportTxCode::SETUP_TRANSPORT) {
return absl::InvalidArgumentError("Not a SETUP_TRANSPORT request");
}
gpr_log(GPR_ERROR, "calling uid = %d", uid);
if (!security_policy_->IsAuthorized(uid)) {
// TODO(mingcl): For now we just ignore this unauthorized
// SETUP_TRANSPORT transaction and ghost the client. Check if we should
// send back a SHUTDOWN_TRANSPORT in this case.
return absl::PermissionDeniedError(
"UID " + std::to_string(uid) +
" is not allowed to connect to this "
"server according to security policy.");
}
int version;
absl::Status status = parcel->ReadInt32(&version);
if (!status.ok()) {
return status;
}
gpr_log(GPR_INFO, "version = %d", version);
// TODO(waynetu): Check supported version.
std::unique_ptr<grpc_binder::Binder> client_binder{};
status = parcel->ReadBinder(&client_binder);
if (!status.ok()) {
return status;
}
if (!client_binder) {
return absl::InvalidArgumentError("NULL binder read from the parcel");
}
client_binder->Initialize();
// Finish the second half of SETUP_TRANSPORT in
// grpc_create_binder_transport_server().
grpc_transport* server_transport = grpc_create_binder_transport_server(
std::move(client_binder), security_policy_);
GPR_ASSERT(server_transport);
grpc_channel_args* args = grpc_channel_args_copy(server_->channel_args());
grpc_error_handle error = server_->SetupTransport(server_transport, nullptr,
args, nullptr, nullptr);
grpc_channel_args_destroy(args);
return grpc_error_to_absl_status(error);
}
Server* server_;
grpc_closure* on_destroy_done_ = nullptr;
std::string addr_;
BinderTxReceiverFactory factory_;
std::shared_ptr<grpc::experimental::binder::SecurityPolicy> security_policy_;
void* endpoint_binder_ = nullptr;
std::unique_ptr<grpc_binder::TransactionReceiver> tx_receiver_;
};
bool AddBinderPort(const std::string& addr, grpc_server* server,
BinderTxReceiverFactory factory,
std::shared_ptr<grpc::experimental::binder::SecurityPolicy>
security_policy) {
// TODO(mingcl): Check if the addr is valid here after binder address resolver
// related code are merged.
const std::string kBinderUriScheme = "binder:";
if (addr.compare(0, kBinderUriScheme.size(), kBinderUriScheme) != 0) {
return false;
}
std::string conn_id = addr.substr(kBinderUriScheme.size());
grpc_core::Server* core_server = server->core_server.get();
core_server->AddListener(
grpc_core::OrphanablePtr<grpc_core::Server::ListenerInterface>(
new grpc_core::BinderServerListener(
core_server, conn_id, std::move(factory), security_policy)));
return true;
}
} // namespace grpc_core
#endif
|
C++
|
BSD-3-Clause
|
ZeBraHack0/mygrpc/src/core/ext/transport/binder/server/binder_server.cc
|
ca6f4cba-8d63-4b2a-95c4-9ed8be03a73f
|
[]
|
[]
|
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class ComplianceResultsOperations:
"""ComplianceResultsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.security.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def list(
self,
scope: str,
**kwargs: Any
) -> AsyncIterable["_models.ComplianceResultList"]:
"""Security compliance results in the subscription.
:param scope: Scope of the query, can be subscription
(/subscriptions/0b06d9ea-afe6-4779-bd59-30e5c2d9d13f) or management group
(/providers/Microsoft.Management/managementGroups/mgName).
:type scope: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either ComplianceResultList or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.security.models.ComplianceResultList]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ComplianceResultList"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-08-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'scope': self._serialize.url("scope", scope, 'str', skip_quote=True),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('ComplianceResultList', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/{scope}/providers/Microsoft.Security/complianceResults'} # type: ignore
async def get(
self,
resource_id: str,
compliance_result_name: str,
**kwargs: Any
) -> "_models.ComplianceResult":
"""Security Compliance Result.
:param resource_id: The identifier of the resource.
:type resource_id: str
:param compliance_result_name: name of the desired assessment compliance result.
:type compliance_result_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: ComplianceResult, or the result of cls(response)
:rtype: ~azure.mgmt.security.models.ComplianceResult
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.ComplianceResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2017-08-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceId': self._serialize.url("resource_id", resource_id, 'str', skip_quote=True),
'complianceResultName': self._serialize.url("compliance_result_name", compliance_result_name, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('ComplianceResult', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/{resourceId}/providers/Microsoft.Security/complianceResults/{complianceResultName}'} # type: ignore
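# A usage sketch (the "client" variable and the scope value are illustrative
# only, not taken from this file):
#
#   async for compliance_result in client.compliance_results.list(scope="/subscriptions/<id>"):
#       print(compliance_result.name)
#   result = await client.compliance_results.get(resource_id, compliance_result_name)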
|
Python
|
MIT
|
AFengKK/azure-sdk-for-python/sdk/security/azure-mgmt-security/azure/mgmt/security/aio/operations/_compliance_results_operations.py
|
a8d209c1-3c44-4a65-b6b0-dad15c0fc2a7
|
[]
|
[]
|
// Copyright 2019, California Institute of Technology ("Caltech").
// U.S. Government sponsorship acknowledged.
//
// All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are met:
//
// * Redistributions of source code must retain the above copyright notice,
// this list of conditions and the following disclaimer.
// * Redistributions must reproduce the above copyright notice, this list of
// conditions and the following disclaimer in the documentation and/or other
// materials provided with the distribution.
// * Neither the name of Caltech nor its operating division, the Jet Propulsion
// Laboratory, nor the names of its contributors may be used to endorse or
// promote products derived from this software without specific prior written
// permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
// AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
// IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
// ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE
// LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
// CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
// SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
// INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
// CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
// ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
// POSSIBILITY OF SUCH DAMAGE.
package gov.nasa.pds.objectAccess.example;
import static org.testng.Assert.assertEquals;
import static org.testng.Assert.assertTrue;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileOutputStream;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.OutputStream;
import java.io.UnsupportedEncodingException;
import java.io.Writer;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.ArrayList;
import java.util.List;
import org.apache.commons.csv.CSVFormat;
import org.apache.commons.csv.CSVParser;
import org.apache.commons.csv.CSVRecord;
import org.testng.annotations.AfterMethod;
import org.testng.annotations.BeforeClass;
import org.testng.annotations.BeforeMethod;
import org.testng.annotations.DataProvider;
import org.testng.annotations.Test;
import freemarker.template.Configuration;
import freemarker.template.Template;
import freemarker.template.TemplateException;
import freemarker.template.TemplateExceptionHandler;
public class ExtractTableTest {
private static final String UTF8_CHARSET = "UTF-8";
private static final String CRLF = "\r\n";
private static final String FIELD_NAME = "field,1";
private static final String[] STRING_VALUES = {
"",
"x",
"this is a test"
};
private static final String[] INT8_VALUES = {
"0", "1", "-1", Long.toString(- (1L << 7)), Long.toString((1L << 7) - 1)
};
private static final String[] INT16_VALUES = {
"0", "1", "-1", Long.toString(- (1L << 15)), Long.toString((1L << 15) - 1)
};
private static final String[] INT32_VALUES = {
"0", "1", "-1", Long.toString(- (1L << 31)), Long.toString((1L << 31) - 1)
};
private static final String[] INT64_VALUES = {
"0", "1", "-1", Long.toString(1L << 63), Long.toString((1L << 63) - 1)
};
private static final String[] UINT8_VALUES = {
"0", "1", Long.toString(1L << 7), Long.toString((1L << 8) - 1)
};
private static final String[] UINT16_VALUES = {
"0", "1", Long.toString(1L << 15), Long.toString((1L << 16) - 1)
};
private static final String[] UINT32_VALUES = {
"0", "1", Long.toString(1L << 31), Long.toString((1L << 32) - 1)
};
private static final String[] UINT64_VALUES = {
"0", "1", BigInteger.ONE.shiftLeft(63).toString(), BigInteger.ONE.shiftLeft(64).subtract(BigInteger.ONE).toString()
};
private static final String[] REAL_VALUES = {
"0.0", "1.0", "-1.0", "1000.0", "-1000.0"
};
// Text and delimited field data types.
private static final FieldType[] TEXT_AND_DELIMITED_FIELD_TYPES = {
new FieldType("ASCII_AnyURI", new TextAccessor(false), 20, new String[] {"http://www.yahoo.com"}),
new FieldType("ASCII_Boolean", new BooleanTextAccessor(), 20, new String[] {"false", "true"}),
new FieldType("ASCII_DOI", new TextAccessor(false), 20, STRING_VALUES),
new FieldType("ASCII_Date", new TextAccessor(false), 20, new String[] {"2000-01-01Z"}),
new FieldType("ASCII_Date_DOY", new TextAccessor(false), 20, new String[] {"2000-001Z"}),
new FieldType("ASCII_Date_Time", new TextAccessor(false), 20, new String[] {"2000-01-01T00:00:00Z"}),
new FieldType("ASCII_Date_Time_DOY", new TextAccessor(false), 20, new String[] {"2000-001T00:00:00Z"}),
new FieldType("ASCII_Date_Time_UTC", new TextAccessor(false), 20, new String[] {"2000-01-01T00:00:00Z"}),
new FieldType("ASCII_Date_Time_YMD", new TextAccessor(false), 20, new String[] {"2000-01-01T00:00:00Z"}),
new FieldType("ASCII_Date_YMD", new TextAccessor(false), 20, new String[] {"2000-01-01Z"}),
new FieldType("ASCII_Directory_Path_Name", new TextAccessor(false), 20, new String[] {"abc/def/ghi"}),
new FieldType("ASCII_File_Name", new TextAccessor(false), 20, new String[] {"sample.dat"}),
new FieldType("ASCII_File_Specification_Name", new TextAccessor(false), 20, new String[] {"sample.dat"}),
new FieldType("ASCII_Integer", new IntegerTextAccessor(), 30, INT64_VALUES),
new FieldType("ASCII_LID", new TextAccessor(false), 50, new String[] {"urn:nasa:pds:bundle:collection:product"}),
new FieldType("ASCII_LIDVID", new TextAccessor(false), 50, new String[] {"urn:nasa:pds:bundle:collection:product:1.0"}),
new FieldType("ASCII_LIDVID_LID", new TextAccessor(false), 50, new String[] {"urn:nasa:pds:bundle:collection:product:1.0"}),
new FieldType("ASCII_MD5_Checksum", new TextAccessor(false), 40, new String[] {"b1946ac92492d2347c6235b4d2611184"}),
new FieldType("ASCII_NonNegative_Integer", new IntegerTextAccessor(), 30, UINT64_VALUES),
new FieldType("ASCII_Numeric_Base16", new TextAccessor(false), 20, new String[] {}),
new FieldType("ASCII_Numeric_Base2", new TextAccessor(false), 20, new String[] {}),
new FieldType("ASCII_Numeric_Base8", new TextAccessor(false), 20, new String[] {}),
new FieldType("ASCII_Real", new DecimalTextAccessor(), 20, REAL_VALUES),
new FieldType("ASCII_String", new TextAccessor(false), 20, STRING_VALUES),
new FieldType("ASCII_Time", new TextAccessor(false), 20, new String[] {"00:00:00Z"}),
new FieldType("ASCII_VID", new TextAccessor(false), 20, new String[] {"1.0"}),
new FieldType("UTF8_String", new TextAccessor(false), 20, STRING_VALUES),
};
// Binary field data types.
private static final FieldType[] BINARY_FIELD_TYPES = {
new FieldType("ASCII_AnyURI", new TextAccessor(false), 20, STRING_VALUES),
new FieldType("ASCII_Boolean", new BooleanTextAccessor(), 20, new String[] {"false", "true"}),
new FieldType("ASCII_DOI", new TextAccessor(false), 20, STRING_VALUES),
new FieldType("ASCII_Date", new TextAccessor(false), 20, new String[] {"2000-01-01Z"}),
new FieldType("ASCII_Date_DOY", new TextAccessor(false), 20, new String[] {"2000-001Z"}),
new FieldType("ASCII_Date_Time", new TextAccessor(false), 20, new String[] {"2000-01-01T00:00:00Z"}),
new FieldType("ASCII_Date_Time_DOY", new TextAccessor(false), 20, new String[] {"2000-001T00:00:00Z"}),
new FieldType("ASCII_Date_Time_UTC", new TextAccessor(false), 20, new String[] {"2000-01-01T00:00:00Z"}),
new FieldType("ASCII_Date_Time_YMD", new TextAccessor(false), 20, new String[] {"2000-01-01T00:00:00Z"}),
new FieldType("ASCII_Date_YMD", new TextAccessor(false), 20, new String[] {"2000-01-01Z"}),
new FieldType("ASCII_Directory_Path_Name", new TextAccessor(false), 20, new String[] {"abc/def/ghi"}),
new FieldType("ASCII_File_Name", new TextAccessor(false), 20, new String[] {"sample.dat"}),
new FieldType("ASCII_File_Specification_Name", new TextAccessor(false), 20, new String[] {"sample.dat"}),
new FieldType("ASCII_Integer", new IntegerTextAccessor(), 20, new String[] {}),
new FieldType("ASCII_LID", new TextAccessor(false), 50, new String[] {"urn:nasa:pds:bundle:collection:product"}),
new FieldType("ASCII_LIDVID", new TextAccessor(false), 50, new String[] {"urn:nasa:pds:bundle:collection:product:1.0"}),
new FieldType("ASCII_LIDVID_LID", new TextAccessor(false), 50, new String[] {"urn:nasa:pds:bundle:collection:product:1.0"}),
new FieldType("ASCII_MD5_Checksum", new TextAccessor(false), 40, new String[] {"b1946ac92492d2347c6235b4d2611184"}),
new FieldType("ASCII_NonNegative_Integer", new TextAccessor(false), 30, UINT64_VALUES),
new FieldType("ASCII_Numeric_Base16", new TextAccessor(false), 20, new String[] {}),
new FieldType("ASCII_Numeric_Base2", new TextAccessor(false), 20, new String[] {}),
new FieldType("ASCII_Numeric_Base8", new TextAccessor(false), 20, new String[] {}),
new FieldType("ASCII_Real", new DecimalTextAccessor(), 20, REAL_VALUES),
new FieldType("ASCII_String", new TextAccessor(false), 20, STRING_VALUES),
new FieldType("ASCII_Time", new TextAccessor(false), 20, new String[] {"00:00:00Z"}),
new FieldType("ASCII_VID", new TextAccessor(false), 20, new String[] {"1.0"}),
/*
new FieldType("ComplexLSB16", new TextAccessor(false), 20, new String[] {}),
new FieldType("ComplexLSB8", new TextAccessor(false), 20, new String[] {}),
new FieldType("ComplexMSB16", new TextAccessor(false), 20, new String[] {}),
new FieldType("ComplexMSB8", new TextAccessor(false), 20, new String[] {}),
*/
new FieldType("IEEE754LSBDouble", new BinaryDoubleAccessor(false), 8, REAL_VALUES),
new FieldType("IEEE754LSBSingle", new BinaryFloatAccessor(false), 4, REAL_VALUES),
new FieldType("IEEE754MSBDouble", new BinaryDoubleAccessor(true), 8, REAL_VALUES),
new FieldType("IEEE754MSBSingle", new BinaryFloatAccessor(true), 4, REAL_VALUES),
/*
new FieldType("SignedBitString", new TextAccessor(false), 20, new String[] {}),
*/
new FieldType("SignedByte", new BinaryIntegerAccessor(true, true), 1, INT8_VALUES),
new FieldType("SignedLSB2", new BinaryIntegerAccessor(true, false), 2, INT16_VALUES),
new FieldType("SignedLSB4", new BinaryIntegerAccessor(true, false), 4, INT32_VALUES),
new FieldType("SignedLSB8", new BinaryIntegerAccessor(true, false), 8, INT64_VALUES),
new FieldType("SignedMSB2", new BinaryIntegerAccessor(true, true), 2, INT16_VALUES),
new FieldType("SignedMSB4", new BinaryIntegerAccessor(true, true), 4, INT32_VALUES),
new FieldType("SignedMSB8", new BinaryIntegerAccessor(true, true), 8, INT64_VALUES),
new FieldType("UTF8_String", new TextAccessor(false), 20, STRING_VALUES),
/*
new FieldType("UnsignedBitString", new TextAccessor(false), 20, new String[] {}),
*/
new FieldType("UnsignedByte", new BinaryIntegerAccessor(false, true), 1, UINT8_VALUES),
new FieldType("UnsignedLSB2", new BinaryIntegerAccessor(false, false), 2, UINT16_VALUES),
new FieldType("UnsignedLSB4", new BinaryIntegerAccessor(false, false), 4, UINT32_VALUES),
new FieldType("UnsignedLSB8", new BinaryIntegerAccessor(false, false), 8, UINT64_VALUES),
new FieldType("UnsignedMSB2", new BinaryIntegerAccessor(false, true), 2, UINT16_VALUES),
new FieldType("UnsignedMSB4", new BinaryIntegerAccessor(false, true), 4, UINT32_VALUES),
new FieldType("UnsignedMSB8", new BinaryIntegerAccessor(false, true), 8, UINT64_VALUES),
};
private Configuration config;
private File labelFile;
private File dataFile;
private File resultFile;
@BeforeClass
public void configureFreeMarker() throws IOException {
config = new Configuration(Configuration.VERSION_2_3_21);
config.setDirectoryForTemplateLoading(new File("src/test/resources/data_type_tests"));
config.setDefaultEncoding(UTF8_CHARSET);
config.setTemplateExceptionHandler(TemplateExceptionHandler.DEBUG_HANDLER);
}
@BeforeMethod
public void createFiles() throws IOException {
labelFile = File.createTempFile("label-", ".xml");
dataFile = File.createTempFile("data-", ".dat");
resultFile = File.createTempFile("result-", ".txt");
}
@AfterMethod
public void deleteFiles() {
labelFile.delete();
dataFile.delete();
resultFile.delete();
}
@Test(dataProvider="TextAndDelimitedTypeTests")
public void testReadTableCharacterCSV(FieldType fieldType) throws IOException, TemplateException {
writeLabel("TableCharacter-template.xml", getTableProperties(fieldType, true), labelFile);
writeTextData(dataFile, fieldType.getValues(), fieldType.getLength(), fieldType.getAccessor());
ExtractTable.main(new String[] {"--csv", "-o", resultFile.getAbsolutePath(), labelFile.getAbsolutePath()});
checkCSVValues(resultFile, fieldType.getValues());
}
@Test(dataProvider="TextAndDelimitedTypeTests")
public void testReadTableCharacterFixed(FieldType fieldType) throws IOException, TemplateException {
writeLabel("TableCharacter-template.xml", getTableProperties(fieldType, true), labelFile);
writeTextData(dataFile, fieldType.getValues(), fieldType.getLength(), fieldType.getAccessor());
ExtractTable.main(new String[] {"-o", resultFile.getAbsolutePath(), labelFile.getAbsolutePath()});
checkTextValues(resultFile, fieldType.getLength(), fieldType.isRightJustified(), fieldType.getValues());
}
@Test(dataProvider="TextAndDelimitedTypeTests")
public void testReadTableDelimitedCSV(FieldType fieldType) throws IOException, TemplateException {
writeLabel("TableDelimited-template.xml", getTableProperties(fieldType, true), labelFile);
writeDelimitedData(dataFile, fieldType.getValues(), fieldType.getAccessor());
ExtractTable.main(new String[] {"--csv", "-o", resultFile.getAbsolutePath(), labelFile.getAbsolutePath()});
checkCSVValues(resultFile, fieldType.getValues());
}
@DataProvider(name="TextAndDelimitedTypeTests")
private Object[][] getTextAndDelimitedTypeTests() {
Object[][] result = new Object[TEXT_AND_DELIMITED_FIELD_TYPES.length][];
for (int i=0; i < TEXT_AND_DELIMITED_FIELD_TYPES.length; ++i) {
result[i] = new Object[] {TEXT_AND_DELIMITED_FIELD_TYPES[i]};
}
return result;
}
@Test(dataProvider="BinaryTypeTests")
public void testReadTableBinaryCSV(FieldType fieldType) throws IOException, TemplateException {
writeLabel("TableBinary-template.xml", getTableProperties(fieldType, false), labelFile);
writeBinaryData(dataFile, fieldType.getValues(), fieldType.getLength(), fieldType.getAccessor());
ExtractTable.main(new String[] {"--csv", "-o", resultFile.getAbsolutePath(), labelFile.getAbsolutePath()});
checkCSVValues(resultFile, fieldType.getValues());
}
@DataProvider(name="BinaryTypeTests")
private Object[][] getBinaryTypeTests() {
Object[][] result = new Object[BINARY_FIELD_TYPES.length][];
for (int i=0; i < BINARY_FIELD_TYPES.length; ++i) {
result[i] = new Object[] {BINARY_FIELD_TYPES[i]};
}
return result;
}
private TableProperties getTableProperties(FieldType fieldType, boolean isText) {
TableProperties props = new TableProperties();
props.setFileName(dataFile.getName());
props.setRecordLength(fieldType.getLength() + (isText ? CRLF.length() : 0));
props.setRecordCount(fieldType.getValues().length);
props.setFieldName(FIELD_NAME);
props.setFieldType(fieldType.getTypeName());
props.setFieldLength(fieldType.getLength());
return props;
}
private void writeLabel(String templateName, TableProperties props, File labelFile) throws IOException, TemplateException {
Template template = config.getTemplate(templateName);
Writer out = new FileWriter(labelFile);
template.process(props, out);
out.close();
}
private void writeTextData(File outputFile, String[] values, int fieldLength, FieldAccessor accessor) throws IOException {
OutputStream out = new FileOutputStream(outputFile);
for (String value : values) {
byte[] b = accessor.string2Bytes(value, fieldLength);
out.write(b);
out.write(CRLF.getBytes(UTF8_CHARSET));
}
out.close();
}
private void writeDelimitedData(File outputFile, String[] values, FieldAccessor accessor) throws IOException {
OutputStream out = new FileOutputStream(outputFile);
for (String value : values) {
out.write(value.getBytes(UTF8_CHARSET));
out.write(CRLF.getBytes(UTF8_CHARSET));
}
out.close();
}
private void writeBinaryData(File outputFile, String[] values, int fieldLength, FieldAccessor accessor) throws IOException {
OutputStream out = new FileOutputStream(outputFile);
for (String value : values) {
byte[] b = accessor.string2Bytes(value, fieldLength);
out.write(b);
}
out.close();
}
private void checkCSVValues(File f, String[] values) throws IOException {
CSVParser parser = CSVFormat.DEFAULT.parse(new FileReader(f));
List<CSVRecord> records = parser.getRecords();
assertEquals(records.size(), values.length + 1);
int row=0;
for (CSVRecord record : records) {
assertEquals(record.size(), 1);
if (row == 0) {
assertEquals(record.get(0), FIELD_NAME);
} else {
assertEquals(record.get(0), values[row-1]);
}
++row;
}
}
private void checkTextValues(File f, int length, boolean isRightJustified, String[] values) throws IOException {
BufferedReader in = new BufferedReader(new FileReader(f));
List<String> lines = new ArrayList<String>();
for (;;) {
String line = in.readLine();
if (line == null) {
break;
}
lines.add(line);
}
assertEquals(lines.size(), values.length + 1);
assertEquals(lines.get(0).trim(), FIELD_NAME);
for (int row=1; row <= values.length; ++row) {
if (!isRightJustified) {
assertTrue(lines.get(row).startsWith(values[row-1]));
} else {
assertTrue(lines.get(row).endsWith(values[row-1]));
}
}
}
private void writeTableRow(OutputStream out, FieldType fieldType, int length, String strValue) throws IOException {
out.write(fieldType.getAccessor().string2Bytes(strValue, length));
}
private static class FieldType {
private String typeName;
private FieldAccessor accessor;
private int length;
private String[] values;
public FieldType(String typeName, FieldAccessor accessor, int length, String[] values) {
this.typeName = typeName;
this.accessor = accessor;
this.length = length;
this.values = values;
}
public boolean isRightJustified() {
return accessor.isRightJustified();
}
public String getTypeName() {
return typeName;
}
public void setTypeName(String typeName) {
this.typeName = typeName;
}
public FieldAccessor getAccessor() {
return accessor;
}
public void setAccessor(FieldAccessor accessor) {
this.accessor = accessor;
}
public int getLength() {
return length;
}
public void setLength(int length) {
this.length = length;
}
public String[] getValues() {
return values;
}
public void setValues(String[] values) {
this.values = values;
}
@Override
public String toString() {
return "{field:" + typeName + "}";
}
}
private static abstract class FieldAccessor {
public abstract byte[] string2Bytes(String value, int length);
public final String bytes2String(byte[] b) {
return bytes2String(b, 0, b.length);
}
public abstract String bytes2String(byte[] b, int offset, int length);
public boolean isRightJustified() {
return false;
}
protected String bytes2StringText(byte[] b, int offset, int length) {
try {
return new String(b, offset, length, UTF8_CHARSET);
} catch (UnsupportedEncodingException e) {
// Cannot happen - UTF-8 must be supported in Java.
throw new RuntimeException("UTF-8 character set is missing");
}
}
protected byte[] orderBytes(byte[] b, int offset, int length, boolean isBigEndian) {
// Reverse the field bytes in place for little-endian data; big-endian data is
// already in the order BigInteger and the bit-assembly code expect.
if (!isBigEndian) {
for (int i=offset, j=offset+length-1; i < j; ++i, --j) {
byte temp = b[i];
b[i] = b[j];
b[j] = temp;
}
}
return b;
}
}
private static class TextAccessor extends FieldAccessor {
private boolean isRightJustified;
public TextAccessor(boolean isRightJustified) {
this.isRightJustified = isRightJustified;
}
@Override
public byte[] string2Bytes(String value, int length) {
try {
return justify(value, length).getBytes(UTF8_CHARSET);
} catch (UnsupportedEncodingException e) {
// Cannot happen - UTF-8 must be supported in Java.
throw new RuntimeException("UTF-8 character set is missing");
}
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
return justify(bytes2StringText(b, offset, length).trim(), length);
}
protected String justify(String s, int length) {
if (isRightJustified) {
return String.format("%" + length + "s", s);
} else {
return String.format("%-" + length + "s", s);
}
}
@Override
public boolean isRightJustified() {
return isRightJustified;
}
}
private static class IntegerTextAccessor extends TextAccessor {
public IntegerTextAccessor() {
super(true);
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
String strValue = bytes2StringText(b, offset, length).trim();
BigInteger integerValue = new BigInteger(strValue);
return justify(integerValue.toString(), length);
}
}
private static class DecimalTextAccessor extends TextAccessor {
public DecimalTextAccessor() {
super(true);
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
String strValue = bytes2StringText(b, offset, length).trim();
BigDecimal decimalValue = new BigDecimal(strValue);
return justify(String.format("%.1f", decimalValue), length);
}
}
private static class BooleanTextAccessor extends TextAccessor {
public BooleanTextAccessor() {
super(true);
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
String strValue = bytes2StringText(b, offset, length).trim();
boolean booleanValue = Boolean.valueOf(strValue);
return justify(String.valueOf(booleanValue), length);
}
}
private static class BinaryIntegerAccessor extends FieldAccessor {
private boolean isSigned;
private boolean isBigEndian;
public BinaryIntegerAccessor(boolean isSigned, boolean isBigEndian) {
this.isSigned = isSigned;
this.isBigEndian = isBigEndian;
}
@Override
public byte[] string2Bytes(String value, int length) {
byte[] b = new BigInteger(value).toByteArray();
if (b.length > length) {
byte[] temp = new byte[length];
System.arraycopy(b, b.length-length, temp, 0, length);
b = temp;
} else if (b.length < length) {
byte[] temp = new byte[length];
System.arraycopy(b, 0, temp, length - b.length, b.length);
if (isSigned && b[0] < 0) {
for (int i=0; i < length - b.length; ++i) {
temp[i] = (byte) 0xFF;
}
}
b = temp;
}
return orderBytes(b, 0, b.length, isBigEndian);
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
byte[] temp = new byte[length + 1];
System.arraycopy(b, offset, temp, 1, length);
// Normalize to big-endian order first, then sign-extend into the extra leading byte.
orderBytes(temp, 1, length, isBigEndian);
temp[0] = (isSigned && temp[1] < 0) ? (byte) 0xFF : 0;
return new BigInteger(temp).toString();
}
}
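// Round-trip example: with a signed big-endian accessor, string2Bytes("-1", 2) yields
// {0xFF, 0xFF}, and bytes2String on those bytes recovers "-1". The extra leading byte
// written in bytes2String keeps BigInteger from misreading large unsigned values as negative.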
private static class BinaryFloatAccessor extends FieldAccessor {
private boolean isBigEndian;
public BinaryFloatAccessor(boolean isBigEndian) {
this.isBigEndian = isBigEndian;
}
@Override
public byte[] string2Bytes(String value, int length) {
assert (length == Float.SIZE/Byte.SIZE);
int bits = Float.floatToIntBits(Float.parseFloat(value));
byte[] b = {
(byte) ((bits >> 24) & 0xFF),
(byte) ((bits >> 16) & 0xFF),
(byte) ((bits >> 8) & 0xFF),
(byte) (bits & 0xFF),
};
return orderBytes(b, 0, length, isBigEndian);
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
assert (length == Float.SIZE/Byte.SIZE);
byte[] temp = orderBytes(b, offset, length, isBigEndian);
int bits =
((temp[0] & 0xFF) << 24)
| ((temp[1] & 0xFF) << 16)
| ((temp[2] & 0xFF) << 8)
| (temp[3] & 0xFF);
return Float.toString(Float.intBitsToFloat(bits));
}
}
private static class BinaryDoubleAccessor extends FieldAccessor {
private boolean isBigEndian;
public BinaryDoubleAccessor(boolean isBigEndian) {
this.isBigEndian = isBigEndian;
}
@Override
public byte[] string2Bytes(String value, int length) {
assert (length == Double.SIZE/Byte.SIZE);
long bits = Double.doubleToLongBits(Double.parseDouble(value));
byte[] b = {
(byte) ((bits >> 56) & 0xFF),
(byte) ((bits >> 48) & 0xFF),
(byte) ((bits >> 40) & 0xFF),
(byte) ((bits >> 32) & 0xFF),
(byte) ((bits >> 24) & 0xFF),
(byte) ((bits >> 16) & 0xFF),
(byte) ((bits >> 8) & 0xFF),
(byte) (bits & 0xFF),
};
return orderBytes(b, 0, length, isBigEndian);
}
@Override
public String bytes2String(byte[] b, int offset, int length) {
assert (length == Double.SIZE/Byte.SIZE);
byte[] temp = orderBytes(b, offset, length, isBigEndian);
long bits =
((long) (temp[0] & 0xFF) << 56)
| ((long) (temp[1] & 0xFF) << 48)
| ((long) (temp[2] & 0xFF) << 40)
| ((long) (temp[3] & 0xFF) << 32)
| ((long) (temp[4] & 0xFF) << 24)
| ((long) (temp[5] & 0xFF) << 16)
| ((long) (temp[6] & 0xFF) << 8)
| (temp[7] & 0xFFL);
return Double.toString(Double.longBitsToDouble(bits));
}
}
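// TableProperties is the FreeMarker data model handed to template.process() above; each
// bean getter backs the corresponding placeholder in the label templates.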
public static class TableProperties {
private String fileName;
private int recordCount;
private int recordLength;
private String fieldName;
private String fieldType;
private int fieldLength;
public String getFileName() {
return fileName;
}
public void setFileName(String fileName) {
this.fileName = fileName;
}
public int getRecordCount() {
return recordCount;
}
public void setRecordCount(int recordCount) {
this.recordCount = recordCount;
}
public int getRecordLength() {
return recordLength;
}
public void setRecordLength(int recordLength) {
this.recordLength = recordLength;
}
public String getFieldName() {
return fieldName;
}
public void setFieldName(String fieldName) {
this.fieldName = fieldName;
}
public String getFieldType() {
return fieldType;
}
public void setFieldType(String fieldType) {
this.fieldType = fieldType;
}
public int getFieldLength() {
return fieldLength;
}
public void setFieldLength(int fieldLength) {
this.fieldLength = fieldLength;
}
}
}
|
Java
|
Apache-2.0
|
autoplot/pds4-jparser/src/test/java/gov/nasa/pds/objectAccess/example/ExtractTableTest.java
|
12e944c7-3c49-4f3d-94f1-100d70a2d5d5
|
[]
|
[]
|
<?php
namespace App\Http\Requests;
use Illuminate\Foundation\Http\FormRequest;
class LoginRequest extends FormRequest
{
/**
* Determine if the user is authorized to make this request.
*
* @return bool
*/
public function authorize()
{
return true;
}
/**
* Get the validation rules that apply to the request.
*
* @return array
*/
public function rules()
{
return [
'txtUsername' => 'required',
'txtPassword' => 'required'
];
}
public function messages() {
return [
'txtUsername.required' => 'You must enter a username',
'txtPassword.required' => 'You have not entered a password'
];
}
}
|
PHP
|
MIT
|
lek571993/tiembanh/app/Http/Requests/LoginRequest.php
|
7a31e993-fac0-4d0b-8854-ca7875017190
|
[]
|
[]
|
/*
Example usage of MediaMachine SDK for video transcoding.
This example shows how to use s3 for storage, however, using Azure blob store or GCP buckets is also supported:
Simply change the scheme in InputURL/OutputURL to "azure://example-bucket..." or "gcp://example-bucket..." according
to your requirements.
*/
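// As noted above, a hypothetical Azure or GCP variant of the job below would only swap
// the URL schemes, e.g.:
//   InputURL:  "azure://example-bucket/my-awesome-video.mp4"
//   OutputURL: "gcp://example-bucket/my-awesome-video-transcode.webm"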
package main
import (
"github.com/stackrock/mediamachinego/mediamachine"
"log"
"time"
)
// Use your MediaMachine API Key to initialize the SDK
const apiKey = "my-mediamachine-key"
func main() {
mm := mediamachine.MediaMachine{APIKey: apiKey}
// Using S3: input video from s3, output uploaded to s3
// It is a good security practice to make narrow scoped AWS access keys
// that restrict access to a specific bucket (or even specific prefixes and objects if needed).
creds := mediamachine.CredsAWS{
AccessKeyID: "my-aws-access-key-id",
SecretAccessKey: "my-aws-secret-access-key",
Region: "my-aws-region",
}
// If your video assets are served via a file server, you can directly use their urls. You can also mix-and-match
// using file server urls and bucket stores.
// See Thumbnail example for working with videos served via a file server.
s3TranscodeJob, err := mm.Transcode(mediamachine.TranscodeConfig{
InputURL: "s3://example-bucket/my-awesome-video.mp4",
OutputURL: "s3://example-bucket/my-awesome-video-transcode.webm",
// Note: You can use a different set of creds for input and output if you want to upload to a totally different
// account for example or to a different bucket etc.
InputCreds: creds,
OutputCreds: creds,
// Make sure the encoder and container are compatible with each other.
Container: mediamachine.ContainerWebm,
Encoder: mediamachine.EncoderVp8,
// You can opt to get notified via webhooks here or periodically check job status depending on your preferred setup
SuccessURL: "https://example.com/mediamachine/jobdone",
FailureURL: "https://example.com/mediamachine/jobfailed",
})
// Handle any errors returned during job creation
if err != nil {
log.Panicf("failed to create a transcode job: %+v", err)
}
// Example function for waiting for job completion
waitForJob := func(job mediamachine.Job, done chan struct{}) {
defer close(done)
ticker := time.NewTicker(time.Second * 60)
defer ticker.Stop() // avoid leaking the ticker once the job settles
for range ticker.C {
status, err := job.FetchStatus()
if err != nil {
log.Printf("failed to fetch status for job: %s", job.ID)
return
}
switch status {
case mediamachine.JobStatusDone:
log.Printf("transcode is ready! JobId: %s", job.ID)
return
case mediamachine.JobStatusErrored:
log.Printf("transcode creation failed :( JobId: %s", job.ID)
return
}
}
}
jobsDone := make(chan struct{})
go waitForJob(s3TranscodeJob, jobsDone)
// Wait for job to finish
<-jobsDone
log.Printf("All done!")
}
|
GO
|
Apache-2.0
|
stackrock/mediamachinego/examples/transcode/main.go
|
abfe6028-8763-4aa8-98b7-02cdbdf8ff51
|
[{"tag": "USERNAME", "value": "stackrock", "start": 351, "end": 360, "context": "uirements.\n*/\npackage main\n\nimport (\n\t\"github.com/stackrock/mediamachinego/mediamachine\"\n\t\"log\"\n\t\"time\"\n)\n\n//"}]
|
[{"tag": "USERNAME", "value": "stackrock", "start": 351, "end": 360, "context": "uirements.\n*/\npackage main\n\nimport (\n\t\"github.com/stackrock/mediamachinego/mediamachine\"\n\t\"log\"\n\t\"time\"\n)\n\n//"}]
|
cask 'spectacle' do
version '1.2'
sha256 '766d5bf3b404ec567110a25de1d221290bc829302283b28ed0fbe73b9557f30c'
# spectacle.s3.amazonaws.com/ was verified as official when first introduced to the cask
url "https://spectacle.s3.amazonaws.com/downloads/Spectacle+#{version}.zip"
appcast 'https://www.spectacleapp.com/updates/appcast.xml'
name 'Spectacle'
homepage 'https://www.spectacleapp.com/'
auto_updates true
app 'Spectacle.app'
uninstall quit: 'com.divisiblebyzero.Spectacle'
zap trash: [
'~/Library/Application Support/Spectacle',
'~/Library/Caches/com.divisiblebyzero.Spectacle',
'~/Library/Caches/com.plausiblelabs.crashreporter.data/com.divisiblebyzero.Spectacle',
'~/Library/Cookies/com.divisiblebyzero.Spectacle.binarycookies',
'~/Library/Preferences/com.divisiblebyzero.Spectacle.plist',
]
caveats do
discontinued
end
end
|
Ruby
|
BSD-2-Clause
|
22dm/homebrew-cask/Casks/spectacle.rb
|
5aa0bb54-919a-489b-b242-e1efe1365cbf
|
[]
|
[]
|
#include "Profile.hpp"
#include <cassert>
#include <limits>
#include "../Math/Constants.hpp"
#include "Distance.hpp"
#include "Intersections.hpp"
namespace JEBMath {
using namespace std;
namespace
{
int compare(double a, double b)
{
if (a < b)
return -1;
return a > b ? 1 : 0;
}
double getTAtX(const LineSegment<double, 2>& l, double xc)
{
return (xc - getX(l.getStart())) / getX(l.getVector());
}
double getYAtX(const LineSegment<double, 2>& l, double xc)
{
return getY(l.getStart()) + getTAtX(l, xc) * getY(l.getVector());
}
bool firstIntersectionOnVertLineSegment(Vector<double, 2>& isect,
const LineSegment<double, 2>& line,
const Profile& prof)
{
double lMin = std::min(getY(line.getStart()), getY(line.getEnd()));
double lMax = std::max(getY(line.getStart()), getY(line.getEnd()));
double pMin = interpolateY(prof, getX(line.getStart()), PickLowest);
double pMax = interpolateY(prof, getX(line.getStart()), PickHighest);
if (getY(line.getVector()) >= 0)
{
if (pMin <= lMin && lMin <= pMax)
{
isect = line.getStart();
return true;
}
else if (lMin <= pMin && pMin <= lMax)
{
isect = vector2(getX(line.getStart()), pMin);
return true;
}
}
else
{
if (pMin <= lMax && lMax <= pMax)
{
isect = line.getStart();
return true;
}
else if (lMin <= pMax && pMax <= lMax)
{
isect = vector2(getX(line.getStart()), pMax);
return true;
}
}
return false;
}
}
bool isProfile(const Profile& prof)
{
if (getSegmentCount(prof) == 0)
return false;
for (size_t i = 1; i < prof.size(); ++i)
{
if (getX(prof[i - 1]) > getX(prof[i]))
return false;
int direction = 0;
while (getX(prof[i - 1]) == getX(prof[i]))
{
int curDirection = compare(getY(prof[i - 1]), getY(prof[i]));
if (direction == 0)
direction = curDirection;
else if (curDirection != 0 && curDirection != direction)
return false;
i++;
}
}
return true;
}
size_t indexOfBottomPoint(const Profile& prof)
{
size_t index = ::JEBMath::InvalidIndex;
double bottom = std::numeric_limits<double>::max();
for (size_t i = 0; i < prof.size(); i++)
{
if (getY(prof[i]) < bottom)
{
index = i;
bottom = getY(prof[i]);
}
}
return index;
}
double interpolateY(const Profile& prof,
double xc,
ConflictResolution pick)
{
auto i = firstSegmentAt(prof, xc);
if (i == ::JEBMath::InvalidIndex || i == getSegmentCount(prof))
return ::JEBMath::InvalidDouble;
else if (xc != getX(prof[i + 1]))
return getYAtX(getSegment(prof, i), xc);
// If x equals the end of a line segment, it is necessary to check
// the next segment to see if it's vertical or not. If it is, use the
// conflict resolution method to pick the elevation.
if (getX(prof[i]) != xc)
i++;
auto min = getY(prof[i]), max = getY(prof[i]);
while (i + 1 < prof.size() && getX(prof[i]) == getX(prof[i + 1]))
{
i++;
if (getY(prof[i]) < min)
min = getY(prof[i]);
else if (max < getY(prof[i]))
max = getY(prof[i]);
}
if (pick == PickHighest)
return max;
else if (pick == PickMiddle)
return (min + max) / 2;
else
return min;
}
size_t findProfileSegment(const Profile& prof, double xc)
{
size_t min = 0, max = prof.size() - 1;
while (min < max)
{
// Bias the midpoint upward so the interval always shrinks when min == max - 1.
auto mid = (min + max + 1) / 2;
if (getX(prof[mid]) > xc)
max = mid - 1;
else
min = mid;
}
return min;
}
struct LineStringPos
{
LineStringPos() : segment(0), t(0) {}
LineStringPos(size_t segment, double t) : segment(segment), t(t) {}
size_t segment;
double t;
};
struct IntersectionPos
{
LineStringPos a;
LineStringPos b;
};
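// lowerBound and upperBound below mirror std::lower_bound / std::upper_bound over the
// profile points' x-coordinates; firstSegmentAt and lastSegmentAt translate those point
// indices into segment indices.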
size_t lowerBound(const Profile& prof, double xc)
{
size_t min = 0, max = prof.size();
while (min < max)
{
size_t mid = (min + max) / 2;
if (getX(prof[mid]) < xc)
min = mid + 1;
else
max = mid;
}
return min;
}
size_t upperBound(const Profile& prof, double xc)
{
size_t min = 0, max = prof.size();
while (min < max)
{
size_t mid = (min + max) / 2;
if (xc < getX(prof[mid]))
max = mid;
else
min = mid + 1;
}
return min;
}
size_t firstSegmentAt(const Profile& prof, double xc)
{
auto n = lowerBound(prof, xc);
if (n > 0)
return n - 1;
else if (getX(prof[0]) == xc)
return 0;
else
return ::JEBMath::InvalidIndex;
}
size_t lastSegmentAt(const Profile& prof, double xc)
{
auto n = upperBound(prof, xc);
if (n == 0)
return ::JEBMath::InvalidIndex;
else if (n < prof.size())
return n - 1;
else if (getX(prof.back()) == xc)
return n - 2;
else
return n - 1;
}
bool nearestPointBefore(Vector<double, 2>& nearestPoint,
size_t& segmentIndex,
const Profile& prof,
const Vector<double, 2>& point)
{
auto i = lastSegmentAt(prof, getX(point));
auto minDist = numeric_limits<double>::max();
if (i < getSegmentCount(prof) && getX(prof[i + 1]) > getX(point))
{
auto yt = getYAtX(getSegment(prof, i), getX(point));
auto dy = getY(getSegment(prof, i).getVector());
if ((yt <= getY(point) && dy >= 0) || (yt >= getY(point) && dy <= 0))
{
minDist = fabs(yt - getY(point));
nearestPoint = vector2(getX(point), yt);
segmentIndex = i;
i--;
}
}
else if (i == getSegmentCount(prof))
{
i--;
}
while (::JEBMath::isValid(i) && getX(point) - getX(prof[i + 1]) < minDist)
{
auto candidate = getNearestPoint(getSegment(prof, i), point);
auto dist = getDistance(candidate, point);
if (dist < minDist)
{
nearestPoint = candidate;
segmentIndex = i;
minDist = dist;
}
i--;
}
return minDist != numeric_limits<double>::max();
}
bool nearestPointAfter(Vector<double, 2>& nearestPoint,
size_t& segmentIndex,
const Profile& prof,
const Vector<double, 2>& point)
{
auto i = firstSegmentAt(prof, getX(point));
auto minDist = numeric_limits<double>::max();
if (i < getSegmentCount(prof) && getX(prof[i]) < getX(point))
{
auto yt = getYAtX(getSegment(prof, i), getX(point));
auto dy = getY(getSegment(prof, i).getVector());
if ((yt <= getY(point) && dy <= 0) || (yt >= getY(point) && dy >= 0))
{
minDist = fabs(yt - getY(point));
nearestPoint = vector2(getX(point), yt);
segmentIndex = i;
i++;
}
}
else if (!isValid(i))
{
i = 0;
}
while (i < getSegmentCount(prof) && getX(prof[i]) - getX(point) < minDist)
{
auto candidate = getNearestPoint(getSegment(prof, i), point);
auto dist = getDistance(candidate, point);
if (dist < minDist)
{
nearestPoint = candidate;
segmentIndex = i;
minDist = dist;
}
i++;
}
return minDist != numeric_limits<double>::max();
}
bool firstIntersection(Vector<double, 2>& isect,
const LineSegment<double, 2>& line,
const Profile& prof)
{
auto v = line.getVector();
if (getX(v) == 0)
return firstIntersectionOnVertLineSegment(isect, line, prof);
auto xMin = std::min(getX(line.getStart()), getX(line.getEnd()));
auto xMax = std::max(getX(line.getStart()), getX(line.getEnd()));
auto iMin = firstSegmentAt(prof, xMin);
auto iMax = lastSegmentAt(prof, xMax);
if (iMax == ::JEBMath::InvalidIndex || iMin == getSegmentCount(prof))
return false;
if (iMax == getSegmentCount(prof))
iMax = getSegmentCount(prof) - 1;
if (iMin == ::JEBMath::InvalidIndex)
iMin = 0;
if (getX(v) < 0)
{
for (auto i = iMax; i != ::JEBMath::InvalidIndex; i--)
{
if (intersection(isect, getSegment(prof, i), line, 0) == INTERSECTING)
return true;
}
}
else
{
for (auto i = iMin; i <= iMax; i++)
{
if (intersection(isect, getSegment(prof, i), line, 0) == INTERSECTING)
return true;
}
}
return false;
}
}
|
C++
|
BSD-3-Clause
|
jebreimo/JEBLib/JEBMath/JEBMath/Geometry/Profile.cpp
|
e2d20cc7-cb51-4c03-a24e-71370a510f38
|
[]
|
[]
|
#pragma once
#include "../Eigen/Dense"
using namespace Eigen;
namespace ode
{
/**
* \brief Representation of the current and past states for linear multistep ODE methods
* such as BDF.
* Note that the number of past states is fixed at 8 even though the maximum usable order of
* BDF is 6, which would only require storing 7 elements; this is done for alignment reasons
* and fixed at 8 for simplicity.
* TODO : review whether this should be reduced to improve performance of low-order methods
* \tparam Scalar float or double
* \tparam Dim state space dimension, should be <= 4
*/
template<typename Scalar, int Dim>
using StateMatrix = Eigen::Matrix<Scalar, Dim, 8>;
template<typename Scalar, int Dim>
using StateVector = Eigen::Matrix<Scalar, Dim, 1>;
template<typename Function, typename Scalar, int Dim, int Order >
class SubStep
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x);
};
template<typename Scalar>
struct BDFNewtonConstants
{
static constexpr Scalar incrementEps{1.0e-12};
};
template<typename Function, typename Scalar, int Dim>
void BDFNewtonSolve(Ref<StateMatrix<Scalar, Dim>> state, const Function& f, Scalar x, const StateVector<Scalar,Dim>& midTerm, Scalar rightCoeff, Scalar T, Scalar tolerance = 1.0e-7,
int maxIterations = 10);
/**
* Backward differentiation linear multistep ODE solver.
* Important note : in the current version this should only be used for small Dimensions
* i.e Dim <= 4, a dynamic allocation version should be written for larger Dimensions.
*/
template<typename Function, typename Scalar, int Dim, int Order >
class BDF
{
private:
Scalar _T;
StateMatrix<Scalar, Dim> _state;
int _numStepsKnown{};
public:
BDF(const BDF &) = delete;
const BDF &operator=(const BDF &) = delete;
EIGEN_MAKE_ALIGNED_OPERATOR_NEW
BDF()
{
_state = StateMatrix<Scalar, Dim>::Zero();
}
void SetSampleRate(Scalar sampleRate)
{
_T = Scalar(1.0) / sampleRate;
}
void SetInitConditions(const StateVector<Scalar,Dim>& init)
{
_state = StateMatrix<Scalar,Dim>::Zero();
_state.col(1) = init;
//Note setting the first column has no effect unless we take the current state before solving a step
_state.col(0) = init;
_numStepsKnown = 1;
}
void Step(const Function& f, Scalar x)
{
SubStep<Function, Scalar, Dim, Order>::StepSolve(_state, _numStepsKnown, f, _T, x);
}
StateVector<Scalar,Dim> CurrentState() const
{
return _state.col(0);
}
Ref<StateMatrix<Scalar,Dim>> FullState()
{
return _state;
}
};
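/*
Minimal usage sketch (hypothetical ExampleRC type; any Function passed to Step must
provide DyDt(y, x) and Jacobian(y, x), as used by BDFNewtonSolve below):

    struct ExampleRC // dy/dt = (x - y) / tau, a one-dimensional RC lowpass
    {
        double tau{1.0e-3};
        StateVector<double, 1> DyDt(const StateVector<double, 1>& y, double x) const
        {
            StateVector<double, 1> d;
            d(0) = (x - y(0)) / tau;
            return d;
        }
        Eigen::Matrix<double, 1, 1> Jacobian(const StateVector<double, 1>&, double) const
        {
            Eigen::Matrix<double, 1, 1> j;
            j(0, 0) = -1.0 / tau;
            return j;
        }
    };

    BDF<ExampleRC, double, 1, 2> solver;
    solver.SetSampleRate(48000.0);
    solver.SetInitConditions(StateVector<double, 1>::Zero());
    ExampleRC rc;
    solver.Step(rc, 1.0);           // advance one sample with input x = 1
    auto y = solver.CurrentState(); // filtered output
*/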
//Order 1 BDF ie implicit Euler method
template<typename Function, typename Scalar, int Dim>
class SubStep<Function,Scalar,Dim,1>
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x)
{
//Solve y[n] - y[n-1] = T * ( f(y[n] , x[n]) )
BDFNewtonSolve<Function,Scalar,Dim>(state, f,x,
-state.col(1),
Scalar(1),
T);
//Note : always keep one more column in case the method is used as an initialization for a
//higher order method
state.col(2) = state.col(1);
state.col(1) = state.col(0);
}
};
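// Each higher-order specialization below bootstraps itself with one step of the order
// N-1 method until enough past states (numPastSteps) are available.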
template<typename Function, typename Scalar, int Dim>
class SubStep<Function,Scalar,Dim,2>
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x)
{
if(numPastSteps < 2)
{
SubStep<Function, Scalar, Dim, 1>::StepSolve(state, numPastSteps, f, T, x);
numPastSteps = 2;
}
//Solve y[n] - 4/3*y[n-1] + 1/3*y[n-2] = 2/3 * T * ( f(y[n] , x[n]) )
BDFNewtonSolve<Function, Scalar, Dim>(state, f, x,
-Scalar(4) / 3 * state.col(1)
+ Scalar(1) / 3 * state.col(2),
Scalar(2) / 3,
T);
//Note : always keep one more column in case the method is used as an initialization for a
//higher order method
state.col(3) = state.col(2);
state.col(2) = state.col(1);
state.col(1) = state.col(0);
}
};
template<typename Function, typename Scalar, int Dim>
class SubStep<Function,Scalar,Dim,3>
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x)
{
if(numPastSteps < 3)
{
SubStep<Function, Scalar, Dim, 2>::StepSolve(state, numPastSteps, f, T, x);
numPastSteps = 3;
}
BDFNewtonSolve<Function, Scalar, Dim>(state, f, x,
-Scalar(18) / 11 * state.col(1)
+ Scalar(9) / 11 * state.col(2)
- Scalar(2) / 11 * state.col(3),
Scalar(6) / 11,
T);
//Note : always keep one more column in case the method is used as an initialization for a
//higher order method
state.col(4) = state.col(3);
state.col(3) = state.col(2);
state.col(2) = state.col(1);
state.col(1) = state.col(0);
}
};
template<typename Function, typename Scalar, int Dim>
class SubStep<Function,Scalar,Dim,4>
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x)
{
if(numPastSteps < 4)
{
SubStep<Function, Scalar, Dim, 3>::StepSolve(state, numPastSteps, f, T, x);
numPastSteps = 4;
}
BDFNewtonSolve<Function, Scalar, Dim>(state, f, x,
-Scalar(48) / 25 * state.col(1)
+ Scalar(36) / 25 * state.col(2)
- Scalar(16) / 25 * state.col(3)
+ Scalar(3) / 25 * state.col(4),
Scalar(12) / 25,
T);
//Note : always keep one more column in case the method is used as an initialization for a
//higher order method
state.col(5) = state.col(4);
state.col(4) = state.col(3);
state.col(3) = state.col(2);
state.col(2) = state.col(1);
state.col(1) = state.col(0);
}
};
template<typename Function, typename Scalar, int Dim>
class SubStep<Function,Scalar,Dim,5>
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x)
{
if(numPastSteps < 5)
{
SubStep<Function, Scalar, Dim, 4>::StepSolve(state, numPastSteps, f, T, x);
numPastSteps = 5;
}
BDFNewtonSolve<Function, Scalar, Dim>(state, f, x,
-Scalar(300) / 137 * state.col(1)
+ Scalar(300) / 137 * state.col(2)
- Scalar(200) / 137 * state.col(3)
+ Scalar(75) / 137 * state.col(4)
- Scalar(12) / 137 * state.col(5),
Scalar(60) / 137,
T);
//Note : always keep one more column in case the method is used as an initialization for a
//higher order method
state.col(6) = state.col(5);
state.col(5) = state.col(4);
state.col(4) = state.col(3);
state.col(3) = state.col(2);
state.col(2) = state.col(1);
state.col(1) = state.col(0);
}
};
//Note this is the maximum stable order for BDF
template<typename Function, typename Scalar, int Dim>
class SubStep<Function,Scalar,Dim,6>
{
public:
static void StepSolve(Ref<StateMatrix<Scalar, Dim>> state, int& numPastSteps, const Function& f, Scalar T, Scalar x)
{
if(numPastSteps < 6)
{
SubStep<Function, Scalar, Dim, 5>::StepSolve(state, numPastSteps, f, T, x);
numPastSteps = 6;
}
BDFNewtonSolve<Function, Scalar, Dim>(state, f, x,
-Scalar(360) / 147 * state.col(1)
+ Scalar(450) / 147 * state.col(2)
- Scalar(400) / 147 * state.col(3)
+ Scalar(225) / 147 * state.col(4)
- Scalar(72) / 147 * state.col(5)
+ Scalar(10) / 147 * state.col(6),
Scalar(60) / 147,
T);
state.col(7) = state.col(6);
state.col(6) = state.col(5);
state.col(5) = state.col(4);
state.col(4) = state.col(3);
state.col(3) = state.col(2);
state.col(2) = state.col(1);
state.col(1) = state.col(0);
}
};
template<>
struct BDFNewtonConstants<float>
{
static constexpr float incrementEps{ 1.0e-10f };
};
template<>
struct BDFNewtonConstants<double>
{
static constexpr double incrementEps{ 1.0e-12 };
};
template<typename Function, typename Scalar, int Dim>
void BDFNewtonSolve(Ref<StateMatrix<Scalar, Dim>> state,
const Function& f,
Scalar x,
const StateVector<Scalar,Dim>& midTerm,
Scalar rightCoeff,
Scalar T,
Scalar tolerance,
int maxIterations)
{
//Solve Phi(Y) = Y + midTerm - T* rightCoeff * f(Y) = 0
//where Y = the first column of state
//and midterm is a function of the other columns
//1 step euler for the initial guess
StateVector<Scalar, Dim> y = state.col(1) + T * f.DyDt(state.col(1), x);
StateVector<Scalar, Dim> phi = y + midTerm - T * rightCoeff * f.DyDt(y, x);
int i = 0;
while( i++ < maxIterations && phi.norm() > tolerance)
{
Matrix<Scalar, Dim, Dim> J = Matrix<Scalar, Dim, Dim>::Identity() - T * rightCoeff * f.Jacobian(y, x);
//Solve J * (Y[n+1] - Y[n]) = - Phi(Y[n])
StateVector<Scalar, Dim> increment = J.fullPivLu().solve(-phi);
if (increment.norm() <= BDFNewtonConstants<Scalar>::incrementEps)
break;
y = y + increment;
phi = y + midTerm - T * rightCoeff * f.DyDt(y, x);
}
//Store solution
state.col(0) = y;
}
}
|
C++
|
BSD-3-Clause
|
JTriggerFish/TriggerFish-VCV/src/models/ode.hpp
|
0a9095d4-31b8-4224-9c29-5dda6ec187b1
|
[]
|
[]
|