repo_id
stringclasses 208
values | file_path
stringlengths 31
190
| content
stringlengths 1
2.65M
| __index_level_0__
int64 0
0
|
---|---|---|---|
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/store_webapps_deltas/api_functions.py
|
"""
Module to contain functions related to accessing the Google Analytics Data API v1
"""
import os
import sys
sys.path.append(os.path.dirname(os.path.abspath(__file__)))
from google.analytics.data_v1beta import BetaAnalyticsDataClient
from google.analytics.data_v1beta.types import (
DateRange, Metric, RunReportRequest)
def get_data_api_response(property_id, extraction_date):
    "Builds the API request and fetches total Users count from GA4 properties"
    try:
        analytics_client = BetaAnalyticsDataClient()
        # Single-day report: start and end dates are both the extraction date.
        report_request = RunReportRequest(
            property=f"properties/{property_id}",
            metrics=[Metric(name="totalUsers")],
            date_ranges=[DateRange(start_date=extraction_date, end_date=extraction_date)],
        )
        report = analytics_client.run_report(report_request)
    except Exception as error:
        print(f'\n Python says: {error}')
        raise Exception('\n Exception while accessing the API.') from error
    return report
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/store_webapps_deltas/requirements.txt
|
google-analytics-data==0.12.1
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/store_webapps_deltas/dynamodb_functions.py
|
"""
Module to contain the DynamoDB operations related to QElo.
"""
import boto3
from botocore.exceptions import ClientError
def write_into_table(items, table_name):
    """Writes items/records into a DynamoDB table.

    items      -- iterable of dicts, each a complete DynamoDB item
    table_name -- name of the target table
    Raises Exception (chained) on any write failure.
    """
    # The previous `dynamodb = None; if not dynamodb:` guard was dead code —
    # the resource is always created fresh here.
    dynamodb = boto3.resource('dynamodb')
    table = dynamodb.Table(table_name)
    try:
        # batch_writer buffers and flushes puts automatically (25 per batch).
        with table.batch_writer() as batch:
            for item in items:
                batch.put_item(Item=item)
        print(f"\n Added yesterday's records successfully into DynamoDB table {table_name}.")
    except ClientError as dynamodb_error:
        print(f'\n Error while writing into table {table_name} : \n {dynamodb_error.response}')
        raise Exception('Exception encountered and run aborted!.') from dynamodb_error
    except Exception as error:
        raise Exception('Exception while inserting data into table.') from error
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo/store_webapps_deltas
|
qxf2_public_repos/qxf2-lambdas/qelo/store_webapps_deltas/conf/webapps_conf.json
|
{
"webapps":[
{"app_name": "Database trainer",
"property_id": "391331833"},
{"app_name": "QA Interview Application",
"property_id": "368011034"},
{"app_name": "QXf2 Trainer",
"property_id": "391318124"},
{"app_name": "Weather Shopper",
"property_id": "361260080"},
{"app_name": "Interview Scheduler",
"property_id": "362478383"}
]
}
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/read_task_completion_sheet/requirements.txt
|
oauth2client<4.0.0
google-auth==1.25.0
gspread==3.6.0
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/read_task_completion_sheet/utils.py
|
"""
This module contains all the required methods for the lambda
"""
import calendar
import csv
import json
import os
import re
from datetime import date
from io import StringIO
import boto3
import gspread
import pandas as pd
from botocore.exceptions import ClientError
from google.oauth2 import service_account
def get_sheet():
    "Connect to Google Sheets and get the sheet"
    try:
        # Drive scope lets gspread open spreadsheets shared with the
        # service account whose JSON creds come from the environment.
        drive_scope = ['https://www.googleapis.com/auth/drive']
        credentials = service_account.Credentials.from_service_account_info(
            json.loads(os.environ['API_CREDENTIALS']), scopes=drive_scope)
        sheets_client = gspread.authorize(credentials)
        gsheet = sheets_client.open(os.environ['SHEET_NAME']).worksheet(os.environ['WORKSHEET_NAME'])
    except gspread.exceptions.SpreadsheetNotFound as spreadsheet_not_found_exception:
        print(f'\n Spreadsheet by name {os.environ["SHEET_NAME"]} is not found\n!')
        raise Exception('\n Spreadsheet not found!') from spreadsheet_not_found_exception
    except gspread.exceptions.WorksheetNotFound as worksheet_not_found_exception:
        print(f'\n Worksheet by name {os.environ["WORKSHEET_NAME"]} is not found\n!')
        raise Exception('\n Worksheet not found!') from worksheet_not_found_exception
    except Exception as error:
        print(f'\n Python says : {error}\n')
        raise Exception('\n Unable to read spreadsheet!') from error
    return gsheet
def read_sheet():
    "Reads the spreadsheet into a dataframe"
    worksheet = get_sheet()
    rows = worksheet.get_all_values()
    # First row of the sheet holds the column headers; pop it off so
    # `rows` contains data records only.
    header_row = rows.pop(0)
    return rows, header_row
def get_current_date_info():
    "Gets the current month and year info."
    # %b -> abbreviated month name ('Jan'), %Y -> four-digit year.
    today = date.today()
    return today.strftime('%b'), today.strftime('%Y')
def get_months():
    """Extracts month names in MMM format.

    Returns a list of the twelve three-letter month abbreviations,
    ['Jan', ..., 'Dec'] (calendar.month_name index 0 is empty).
    """
    # Comprehension replaces the old append loop, which also used
    # enumerate() without ever using the index.
    return [calendar.month_name[i][:3] for i in range(1, 13)]
def filter_current_quarter_data(spreadsheet_data):
    """Filters the current quarter's data from the overall spreadsheet data.

    A row belongs to the current quarter when its date column (index 2)
    matches `<day> <month-of-quarter> <current-year>`.  The four previously
    copy-pasted quarter branches are collapsed into one data-driven pass.
    """
    data = []
    current_month, current_year = get_current_date_info()
    months = get_months()
    # Quarters as slices of the MMM month list: Q1=0-2, Q2=3-5, Q3=6-8, Q4=9-11.
    pattern = None
    for start in (0, 3, 6, 9):
        triple = months[start:start + 3]
        if re.search('|'.join(triple), current_month, re.IGNORECASE):
            pattern = r'([0-9]{1,2}).(\b%s|%s|%s\b).(\b%s\b)' \
                % (triple[0], triple[1], triple[2], current_year)
            break
    if pattern is None:
        return data
    for sheet_data in spreadsheet_data:
        if re.search(pattern, sheet_data[2], re.IGNORECASE):
            data.append(sheet_data)
    return data
def to_csv(data, headers):
    "Writes the data into a csv"
    # Materialise the rows as a DataFrame, then serialise it (without the
    # integer index) into an in-memory text buffer.
    csv_buffer = StringIO()
    frame = pd.DataFrame(data, columns=headers)
    frame.to_csv(csv_buffer, index=False)
    return csv_buffer
def upload_to_s3(spreadsheet_data, headers, filename, content):
    """Upload the data to S3 bucket.

    content == 'Complete' uploads the full sheet; anything else uploads
    only the current quarter's rows.  Returns 1 on success, raises on failure.
    """
    if content == 'Complete':
        csv_data = to_csv(spreadsheet_data, headers)
    else:
        filtered_data = filter_current_quarter_data(spreadsheet_data)
        csv_data = to_csv(filtered_data, headers)
    try:
        s3_resource = boto3.resource('s3')
        s3_resource.Object(os.environ['S3_BUCKET'], filename).put(Body=csv_data.getvalue())
        # Log the uploaded object key (the key was missing from this message).
        print(f"{content} backup of R&D task completion data at {os.environ['S3_BUCKET']} S3 bucket, as {filename}")
        status = 1
    except ClientError as s3_error:
        print(f"\n {s3_error.response['Error']['Message']}! \n")
        raise Exception ('\n Error on upload to S3!') from s3_error
    except Exception as error:
        print(f'\n Python says : {error}\n')
        raise Exception('Error on upload to S3!') from error
    return status
def prepare_data(filename):
    "Prepare the data to transfer it to DynamoDB"
    try:
        s3_client = boto3.client('s3')
        s3_object = s3_client.get_object(Bucket=os.environ['S3_BUCKET'], Key=filename)
        # Body is a streaming response; read and decode it in one go.
        csv_text = s3_object['Body'].read().decode('utf-8')
        rows = csv_text.split("\n")
    except ClientError as s3_error:
        print(f"\n {s3_error.response['Error']['Message']}! \n")
        raise Exception ('\n Error on reading CSV from S3!') from s3_error
    except Exception as error:
        print(f'\n Python says : {error}\n')
        raise Exception('Error on reading CSV from S3!') from error
    return rows
def init_table(table_name):
    "Initializes and returns the DynamoDB table resource."
    try:
        # Table() is lazy — no network call happens until the table is used.
        table = boto3.resource('dynamodb').Table(table_name)
    except ClientError as dynamodb_error:
        print(f"\n {dynamodb_error.response['Error']['Message']}! \n")
        raise Exception ('\n Error on initializing DynamoDB resource!') from dynamodb_error
    except Exception as error:
        print(f'\n Python says : {error}\n')
        raise Exception('Error on initializing DynamoDB resource!') from error
    return table
def initiate_migrate(row_data, table):
    "Initiates the process of migrating data from S3 to DynamoDB"
    # The Email column (index 4) may hold several comma-separated addresses;
    # one DynamoDB item is written per (TaskNo, Email) pair so each person
    # gets their own record for the same task.
    with table.batch_writer() as batch:
        for email in row_data[4].split(','):
            batch.put_item(
                Item={
                    "TaskNo": int(row_data[0]),   # partition key
                    "Task" :row_data[1],
                    "DateOfCompletion" :row_data[2],
                    "TrelloTicket" : row_data[3],
                    "Email":email.strip(),        # sort key
                    "ArtifactLocation":row_data[5],
                    "Techs" : row_data[6],
                    "NextSteps":row_data[7],
                    "Level":row_data[8],
                    "Stream":row_data[9],
                    "Substream":row_data[10],
                    "Share":row_data[11]
                }
            )
def migrate_to_dynamodb():
    "Populate DynamoDB with complete sheet data"
    is_empty = False
    #Prepare the data to initiate transfer to DynamoDB
    data = prepare_data(os.environ['COMPLETE_SHEET_S3_KEY'])
    table = init_table(os.environ['TABLE_NAME'])
    # NOTE(review): DynamoDB's item_count is refreshed by AWS only about
    # every six hours, so this emptiness check is approximate — confirm
    # that staleness is acceptable for the caller's update/migrate decision.
    if table.item_count == 0 :
        is_empty = True
    # Skip the CSV header (data[0]) and batch-write every non-empty row.
    for row_data in csv.reader(data[1:], quotechar='"',\
        delimiter=',', quoting=csv.QUOTE_ALL, skipinitialspace=True):
        if row_data:
            try:
                initiate_migrate(row_data, table)
            except ClientError as dynamodb_error:
                print(f"\n Error while migrating data into table {os.environ['TABLE_NAME']}: \n {dynamodb_error.response}")
                raise Exception('Exception encountered and run aborted!.') from dynamodb_error
            except Exception as error:
                raise Exception('Exception while migrating data into table.') from error
    print(f"R&D task completion data migrated to {os.environ['TABLE_NAME']} table in DynamoDB.")
    return is_empty
def initiate_update(row_data, table):
    "Initiates the updation of current quarter data into DynamoDB"
    # One update per email in the comma-separated Email column, mirroring
    # the (TaskNo, Email) item layout used during migration.
    for email in row_data[4].split(','):
        table.update_item(
            # Level, Stream and Share are DynamoDB reserved words, so they
            # must be aliased via ExpressionAttributeNames.
            ExpressionAttributeNames= {"#lev":"Level", "#s":"Stream", "#sh":"Share"},
            ExpressionAttributeValues={
                ":att1":row_data[1], # Task attribute
                ":att2":row_data[2], # DateOfCompletion attribute
                ":att3":row_data[3], # TrelloTicket attribute
                ":att5":row_data[5], # ArtifactLocation attribute
                ":att6":row_data[6], # Techs attribute
                ":att7":row_data[7], # NextSteps attribute
                ":Level":row_data[8], # Level attribute
                ":Stream":row_data[9], # Stream attribute
                ":att10":row_data[10],# Substream attribute
                ":Share":row_data[11] # Share attribute
            },
            Key={
                "TaskNo": int(row_data[0]), #primary key
                "Email": email.strip(), #sort key
            },
            # SET overwrites every non-key attribute with the row's values.
            UpdateExpression="SET Task = :att1,\
                DateOfCompletion = :att2, TrelloTicket = :att3,\
                ArtifactLocation = :att5, Techs = :att6,\
                NextSteps = :att7, #lev = :Level, #s = :Stream,\
                Substream = :att10, #sh = :Share"
        )
def update_dynamodb():
    "Update the dynamodb with the current quarter data"
    rows = prepare_data(os.environ['CURRENT_QUARTER_S3_KEY'])
    table = init_table(os.environ['TABLE_NAME'])
    # Skip the header row, then push each non-empty CSV record as an update.
    reader = csv.reader(rows[1:], quotechar='"', delimiter=',',
                        quoting=csv.QUOTE_ALL, skipinitialspace=True)
    for record in reader:
        if not record:
            continue
        try:
            initiate_update(record, table)
        except ClientError as dynamodb_error:
            print(f"\n Error while updating data into table {os.environ['TABLE_NAME']}: \n {dynamodb_error.response}")
            raise Exception('Exception encountered and run aborted!.') from dynamodb_error
        except Exception as exception:
            print(f"\n Failed with exception {exception.__class__.__name__}")
            raise Exception('Exception while updating data into table.') from exception
    print(f"Updated {os.environ['TABLE_NAME']} table in DynamoDB with current quarters R&D task completion data.")
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/read_task_completion_sheet/read_task_completion_sheet.py
|
"""
This script is intended to backup R&D task completion data in S3 and DynamoDB, every Saturday.
"""
import os
import utils
def lambda_handler(event, context):
    "Lambda entry point"
    # Pull the whole sheet (rows + header names) once.
    spreadsheet_data, headers = utils.read_sheet()
    # Back up the complete sheet first; only on success back up the
    # current-quarter slice as well.
    upload_status = utils.upload_to_s3(
        spreadsheet_data, headers,
        os.environ['COMPLETE_SHEET_S3_KEY'], 'Complete')
    if upload_status == 1:
        utils.upload_to_s3(
            spreadsheet_data, headers,
            os.environ['CURRENT_QUARTER_S3_KEY'], 'Current quarter data')
    # Full migration populates DynamoDB; it reports whether the table was
    # empty beforehand.  A non-empty table only needs the quarter update.
    table_empty = utils.migrate_to_dynamodb()
    if table_empty is False:
        utils.update_dynamodb()
    return "Read task completion sheet and, populated S3 and Dynamodb"
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/store_github_stats/github_stats.py
|
"""
Collect and store the GitHub stats for all Qxf2 repositories
- DynamoDB table format being:
date, repo_name, stars, forks, clones, visitors
This script is meant to be run daily at 11pm UST (ie 4.30am IST)
"""
from datetime import datetime
import json
import os
import time
from github import Github
import dynamodb_functions
def get_github_instance():
    "Returns a GitHub instance"
    # Authenticate with the personal access token from the environment.
    return Github(os.environ["GITHUB_ACCESS_KEY"])
def get_all_repos():
    "Gets all repos for the given username from GitHub"
    github_obj = get_github_instance()
    username = os.environ["GITHUB_USER"]
    user = github_obj.get_user(username)
    # Keep only repos whose full name contains "<user>/", i.e. repos owned
    # by the account rather than ones it merely collaborates on.
    prefix = username + '/'
    return [repo.full_name for repo in user.get_repos('all') if prefix in repo.full_name]
def prepare_substreams(date, repos):
    "Prepare substream data by taking all repo info"
    # One {date, repo_name} record per repository.
    return [{'date': date, 'repo_name': repo_name} for repo_name in repos]
def prepare_stats(date, repos):
    "Prepare stats data by taking all repo info"
    github_obj = get_github_instance()
    stats = []
    for repo_name in repos:
        repo_obj = github_obj.get_repo(repo_name)
        # Traffic endpoints report unique visitors/cloners over the last
        # 14 days; default to 0 when the key is absent.
        visitor_stats = repo_obj.get_views_traffic()
        clone_stats = repo_obj.get_clones_traffic()
        stats.append({
            'date': date,
            'repo_name': repo_name,
            'stars': repo_obj.stargazers_count,
            'forks': repo_obj.forks,
            'clones': clone_stats.get('uniques', 0),
            'visitors': visitor_stats.get('uniques', 0),
        })
    return stats
def lambda_handler(event, context):
    "Entry point for Lambda execution"
    # Retry the whole collect-and-store cycle up to RETRIES_COUNT times,
    # sleeping 10s between attempts.  Returns {} if every attempt fails.
    response = {}
    for attempt in range(int(os.environ["RETRIES_COUNT"])):
        try:
            print(f'Retry attempt : {attempt}')
            all_repos = get_all_repos()
            print("Fetched all GitHub repos!")
            current_date = datetime.now().strftime('%d-%b-%Y')
            substreams_data = prepare_substreams(current_date, all_repos)
            stats_data = prepare_stats(current_date, all_repos)
            print("Fetched stats for all GitHub repos!")
            dynamodb_functions.write_into_table(substreams_data,
                                                os.environ["GITHUB_SUBSTREAMS_TABLE"])
            dynamodb_functions.write_into_table(stats_data,
                                                os.environ["GITHUB_STATS_TABLE"])
            response = {
                'statusCode': 200,
                'body': json.dumps({
                    'message' : 'github-stats-prod lambda executed successully!',
                    'stats_status_flag' : 1})
            }
            break
        except Exception as error:
            print(f'Exception : {error}')
            time.sleep(10)
    return response
| 0 |
qxf2_public_repos/qxf2-lambdas/qelo
|
qxf2_public_repos/qxf2-lambdas/qelo/store_github_stats/dynamodb_functions.py
|
"""
Module to contain the DynamoDB functions.
"""
import boto3
from botocore.exceptions import ClientError
def init_table(table_name):
    "Initializes and returns the DynamoDB table resource"
    # Table() is lazy; no request is made until the table is first used.
    return boto3.resource('dynamodb').Table(table_name)
def write_into_table(items, table_name):
    "Writes items/records into DynamoDB table"
    # Errors are logged but deliberately not re-raised: the calling lambda
    # drives its own retry loop.
    try:
        table = init_table(table_name)
        with table.batch_writer() as writer:
            for record in items:
                writer.put_item(Item=record)
        print(f"Added today's records successfully into DynamoDB table {table_name}!")
    except ClientError as dynamodb_error:
        print(f'\n Error while writing into table {table_name} :\n {dynamodb_error.response}')
    except Exception as error:
        print(f'Exception while inserting data into {table_name} table :\n {error}')
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/raspberry_notifier.py
|
"""
Lambda to connect to raspberry pi using remote.it.
All devices must be registered with remote.it account.
It will connect to all online devices and play out an audio message.
"""
from multiprocessing import Pipe, Process
from skype_sender import send_skype_message
from remoteit_helper import list_devices, connect_and_run
def run_in_parallel(command):
    "Makes connections to different devices in parallel"
    parent_connections = []
    processes = []
    # One worker process per online device; each gets its own pipe endpoint.
    for device_address, device_alias in list_devices().items():
        parent_conn, child_conn = Pipe()
        parent_connections.append(parent_conn)
        processes.append(
            Process(
                target=connect_and_run,
                args=(command, device_address, device_alias, child_conn),
            )
        )
    # Start everything first, then wait for all workers to finish.
    for process in processes:
        process.start()
    for process in processes:
        process.join()
def trigger_notifications(event, context):
    "lambda entry point"
    # The message to announce comes straight from the triggering event payload.
    TRIGGER_COMMAND = event['msg']
    run_in_parallel(TRIGGER_COMMAND)
    if "buddy" in TRIGGER_COMMAND:
        send_skype_message(TRIGGER_COMMAND)
        # NOTE(review): this rebinding happens AFTER send_skype_message and
        # the new value is never used, so it has no observable effect —
        # presumably the URL was meant to be appended before sending; confirm.
        TRIGGER_COMMAND = (
            TRIGGER_COMMAND + "https://water-cooler-talks.qxf2.com/water-cooler-talks"
        )
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/config.py
|
"""Configuration for the raspberry_notifier lambda.

Every value is read from the environment at import time, so a missing
variable fails fast with KeyError.
"""
import os
# SSH credentials for the Raspberry Pi devices.
PI_USERNAME = os.environ["PI_USERNAME"]
PI_PASSWORD = os.environ["PI_PASSWORD"]
# remote.it API credentials and host.
DEVELOPER_KEY = os.environ["DEVELOPER_KEY"]
ACCESS_KEY_ID = os.environ["ACCESS_KEY_ID"]
SECRET_ACCESS_KEY = os.environ["SECRET_ACCESS_KEY"]
REMOTEIT_HOST = os.environ["REMOTEIT_HOST"]
# Skype sender endpoint, target channel and its API key.
SKYPE_URL = os.environ["SKYPE_URL"]
SKYPE_CHANNEL = os.environ["SKYPE_CHANNEL"]
API_KEY = os.environ["API_KEY"]
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/requirements.txt
|
paramiko
requests
httpsig>=1.3.0
bcrypt==3.2.2
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/remoteit_helper.py
|
"""
This script contains functions that fetch the devices registered with remote.it account, connect to the devices that are online
and runs a command
"""
import json
import sys
import time
import requests
from http_signature_helper import get_headers
from raspberry_helper import run_command_in_pi
from config import REMOTEIT_HOST
def list_devices():
    """Returns the list of online devices in remote.it.

    Result maps each online SSH service's deviceaddress -> human-readable
    device alias.  Exits the process (sys.exit) when the API call fails or
    no device is online.
    """
    listing_url = "/apv/v27/device/list/all"
    headers = get_headers("GET", listing_url)
    try:
        response = requests.get("https://" + REMOTEIT_HOST + listing_url, headers=headers)
        list_data = json.loads(response.text)
        # remote.it reports status as the *string* "true"/"false".
        if list_data["status"] != "true":
            print("Unable to fetch list of devices: ", list_data["reason"])
            sys.exit(2)
        device_data = list_data["devices"]
        device_address_alias_map = {}
        ip_alias_map = {}
        # Pass 1: the "Bulk Service" entry carries the device alias; key it
        # by the device's last known IP (port stripped).
        for device in device_data:
            if device["servicetitle"] == "Bulk Service":
                ip_alias_map[device["devicelastip"].split(":")[0]] = device[
                    "devicealias"
                ]
        # Pass 2: for every *active* SSH service, join back to the alias
        # via the shared IP so callers get deviceaddress -> alias.
        for device in device_data:
            if device["servicetitle"] == "SSH" and device["devicestate"] == "active":
                device_address = device["deviceaddress"]
                device_address_alias_map[device_address] = ip_alias_map[
                    device["devicelastip"].split(":")[0]
                ]
        if len(device_address_alias_map) == 0:
            print("No devices online")
            sys.exit(3)
    except Exception as error:
        print("Error while trying to get the list of devices", error)
        raise SystemExit(error)
    return device_address_alias_map
def connect_and_run(command, device_address, device_alias, conn):
    "Creates a proxy connection to the device, gets details of host and port and runs command"
    url_path = "/apv/v27/device/connect"
    # wait="true" asks remote.it to block until the proxy is provisioned;
    # hostip 0.0.0.0 lets the proxy accept from any source address.
    payload = {"deviceaddress": device_address, "wait": "true", "hostip": "0.0.0.0"}
    body = json.dumps(payload)
    content_length = len(body)
    # content-length participates in the HTTP-signature, so it must match the body.
    headers = get_headers("POST", url_path, content_length)
    try:
        time_now = time.time()
        response = requests.post(
            "https://" + REMOTEIT_HOST + url_path, headers=headers, data=body
        )
        print("Got response: {}", response.text)
        connect_data = json.loads(response.text)
        # Status is the *string* "true"/"false" in the remote.it API.
        if connect_data["status"] == "true":
            proxy_server = connect_data["connection"]["proxyserver"]
            proxy_port = connect_data["connection"]["proxyport"]
            proxy_data = {
                "proxy_server": proxy_server,
                "proxy_port": proxy_port,
                "device_alias": device_alias,
            }
            print(
                "Time taken to get ip and port for "
                + device_alias
                + " is: "
                + str(time.time() - time_now)
            )
            # SSH through the proxy endpoint and execute the command.
            run_command_in_pi(command, proxy_data, conn)
        else:
            print("Status of " + device_alias + " is false, device might be inactive")
    except Exception as error:
        print("Error while trying to connect to the device to get host/port", error)
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/raspberry_helper.py
|
"""
This script contains the operations that can be performed on Raspberry
"""
import time
import traceback
import paramiko
import config
def run_command_in_pi(command, proxy_data, conn):
    """Connects to each device and executes a command.

    proxy_data holds the remote.it proxy endpoint (proxy_server/proxy_port)
    and the device_alias used for logging.  conn is the worker's pipe
    endpoint; it is closed when the work is done.
    """
    time_now = time.time()
    ssh = paramiko.SSHClient()
    # Proxy hosts are ephemeral, so accept unknown host keys.
    ssh.set_missing_host_key_policy(paramiko.AutoAddPolicy())
    device_alias = proxy_data["device_alias"]
    try:
        ssh.connect(
            proxy_data["proxy_server"],
            username=config.PI_USERNAME,
            password=config.PI_PASSWORD,
            port=proxy_data["proxy_port"],
            banner_timeout=20,
        )
        ssh_command = verify_command_and_pick_correct_audio(ssh, command)
        ssh_stdin, ssh_stdout, ssh_stderr = ssh.exec_command(ssh_command)
        print(
            "\nTime taken to ssh and run espeak for : "
            + device_alias
            + " : "
            + str(time.time() - time_now)
        )
        # Reading stdout/stderr also waits for the remote command to finish.
        stdout = ssh_stdout.readlines()
        if len(stdout) > 0:
            print("\nStdout from " + device_alias + ": " + str(stdout))
        stderr = ssh_stderr.readlines()
        if len(stderr) > 0:
            print("\nStderr from " + device_alias + ": " + str(stderr))
    except Exception as error:
        print(
            "\nError while running espeak for "
            + device_alias
            + ", time taken: "
            + str(time.time() - time_now)
        )
        print(error)
        print(traceback.format_exc())
    finally:
        # Always release the SSH session (it was previously leaked) and the
        # pipe endpoint, on both success and failure paths.
        ssh.close()
        conn.close()
def verify_command_and_pick_correct_audio(ssh, command):
    """Verify message and play correct audio.

    Picks the pre-recorded WAV matching a keyword in `command`, copies it
    to the Pi if needed, and returns the aplay command; with no keyword
    match, falls back to synthesizing the message with espeak.
    """
    # Keyword -> WAV file; replaces a 14-branch elif chain.  Insertion
    # order preserves the original match priority ("buddy" first).
    audio_map = {
        "buddy": "voiceDavid.wav",
        "0957": "0957.wav", "1057": "1057.wav", "1157": "1157.wav",
        "1357": "1357.wav", "1500": "1500.wav", "1557": "1557.wav",
        "1657": "1657.wav", "1757": "1757.wav", "1857": "1857.wav",
        "1957": "1957.wav", "2057": "2057.wav", "2157": "2157.wav",
        "2257": "2257.wav",
    }
    for keyword, wav_name in audio_map.items():
        if keyword in command:
            run_sftp(ssh, "./audio_files/" + wav_name)
            return "aplay /home/pi/" + wav_name
    # No known keyword: speak the message aloud instead.
    return (
        "espeak -ven-us+f3 -s 125 --stdout '" + command + "' | aplay 2>/dev/null"
    )
def run_sftp(ssh, file_to_copy):
    """Copy files to the device.

    file_to_copy is a local path like './audio_files/x.wav'; the file lands
    at /home/pi/x.wav on the device and is only uploaded when absent.
    """
    sftp = ssh.open_sftp()
    # BUG FIX: the existence check previously stat'ed
    # "/home/pi/" + "./audio_files/x.wav", a path that never exists, so the
    # file was re-uploaded on every call.  Check the real destination.
    file_name = file_to_copy.split("/")[2]
    try:
        sftp.stat("/home/pi/" + file_name)
        print("The file exists")
    except IOError:
        # stat raises IOError/FileNotFoundError when the remote file is
        # missing (narrowed from the old bare except).
        sftp.put(file_to_copy, "/home/pi/" + file_name)
        print("Copied the file")
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/skype_sender.py
|
"""
This script will let a user send messages on some Qxf2 Skype channels
"""
import json
import requests
import config
def send_skype_message(message):
    """Posts a message on set Skype channel.

    Raises Exception (chained) when the HTTP call itself fails; a non-200
    response is only logged.
    """
    try:
        headers = {
            "Content-Type": "application/json",
        }
        data = {
            "msg": message,
            "channel": config.SKYPE_CHANNEL,
            "API_KEY": config.API_KEY,
        }
        response = requests.post(config.SKYPE_URL, headers=headers, data=json.dumps(data))
        if response.status_code == 200:
            print(f'Successfully sent the Skype message - {message}')
        else:
            # BUG FIX: the old call was print(..., level='error') — print()
            # has no `level` kwarg, so the failure path raised TypeError.
            print('Failed to send Skype message')
    except Exception as err:
        raise Exception(f'Unable to post message to Skype channel, due to {err}') from err
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/raspberry_notifier/http_signature_helper.py
|
"""
This module provides the headers that are necessary to login to remote.it
"""
import datetime
from base64 import b64decode
import httpsig
import config
from config import REMOTEIT_HOST
DEVELOPER_KEY = config.DEVELOPER_KEY
ACCESS_KEY_ID = config.ACCESS_KEY_ID
SECRET_ACCESS_KEY = config.SECRET_ACCESS_KEY
CONTENT_TYPE = "application/json"
def get_token(request_method, path, headers):
    """Fetch token using Signature authentication.

    Signs the given headers (plus the request target) with HMAC-SHA256
    using the base64-decoded secret access key, per remote.it's HTTP
    Signature scheme, and returns the Authorization header value.
    """
    header_signer = httpsig.HeaderSigner(
        ACCESS_KEY_ID,
        b64decode(SECRET_ACCESS_KEY),
        algorithm="hmac-sha256",
        # These header names are covered by the signature; they must all be
        # present in `headers` when sign() is called.
        headers=[
            "(request-target)",
            "host",
            "date",
            "content-type",
            "content-length",
        ],
    )
    # sign() returns the header dict with the signature added; only the
    # computed authorization value is needed here.
    return header_signer.sign(headers, method=request_method, path=path)[
        "authorization"
    ]
def get_headers(request_method, path, content_length=0):
    "Set headers for the request including authentication token"
    # ISO-8601-ish UTC timestamp; the date header is part of the signature.
    timestamp = datetime.datetime.utcnow().strftime("%Y-%m-%dT%H:%M:%S%z")
    headers = {
        "host": REMOTEIT_HOST,
        "date": timestamp,
        "content-type": CONTENT_TYPE,
        "DeveloperKey": DEVELOPER_KEY,
        "content-length": str(content_length),
    }
    # Sign the headers above and attach the resulting Authorization value.
    headers["Authorization"] = get_token(request_method, path, headers)
    return headers
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_unit_pto_detector.py
|
"""
Unit test for the methods in pto_detector.py
"""
import os
import sys
import warnings
from unittest import mock
from unittest.mock import patch
from moto import mock_sqs
import boto3
from pto_detector import pto_detector
import tests.conf.channel_configuration_conf as channel_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Added following code to fix deprecation warning
with warnings.catch_warnings():
warnings.filterwarnings("ignore",category=DeprecationWarning)
import imp
def test_clean_message():
    "Test to check message get cleaned"
    # clean_message() sanitizes characters unsafe for downstream handling;
    # here a single quote becomes a hyphen.
    message = "T'est"
    result = pto_detector.clean_message(message)
    assert result == "T-est"
@patch('requests.post')
def test_get_is_pto_pto_message(mock_post):
    "Test to check message is pto"
    # Stub the is-pto web service: a score of 1 marks the message as PTO.
    mock_response = mock.Mock()
    mock_post.return_value = mock_response
    mock_response.json.return_value = {'score': 1}
    message = "I am on PTO today"
    result = pto_detector.get_is_pto(message)
    assert result is True
    # Also verify the exact endpoint and payload used for classification.
    mock_post.assert_called_with(url='https://practice-testing-ai-ml.qxf2.com/is-pto', data={'message': 'I am on PTO today'})
@patch('requests.post')
def test_get_is_pto_non_pto_message(mock_post):
    "Test to check message is non pto"
    # Stub the is-pto web service: a score of 0 marks the message as non-PTO.
    mock_response = mock.Mock()
    mock_post.return_value = mock_response
    mock_response.json.return_value = {'score': 0}
    message = "I am happy today"
    result = pto_detector.get_is_pto(message)
    assert result is False
    # Also verify the exact endpoint and payload used for classification.
    mock_post.assert_called_with(url='https://practice-testing-ai-ml.qxf2.com/is-pto', data={'message': 'I am happy today'})
@mock_sqs
def test_write_message():
    "Test to write message"
    # moto's mock_sqs intercepts boto3 calls, so no real AWS queue is used.
    sqs = boto3.resource('sqs')
    queue = sqs.create_queue(QueueName='test-skype-sender')
    # Redirect the module under test to the mocked queue before writing.
    pto_detector.QUEUE_URL = queue.url
    message = "I am on PTO today"
    channel = channel_conf.channel
    # write_message serialises the payload dict with str(), so the expected
    # body is the dict's repr rather than JSON.
    expected_message = str({'msg':f'{message}', 'channel':channel})
    pto_detector.write_message(message,channel)
    sqs_message = queue.receive_messages()
    assert len(sqs_message) == 1
    assert sqs_message[0].body == expected_message
def test_get_message_contents():
    "Test to check get message contents"
    # The SQS record body is a JSON-encoded SNS envelope whose 'Message'
    # field is itself a JSON string — hence the double escaping below.
    event = {"Records": [{"body": "{\"Message\":\"{\\\"msg\\\": \\\"Test: I am on PTO today\\\", \
\\\"chat_id\\\": \\\"blah\\\", \\\"user_id\\\":\\\"blah\\\"}\"}"}]}
    expected_message = {'msg': 'Test: I am on PTO today', 'chat_id': 'blah','user_id': 'blah'}
    message = pto_detector.get_message_contents(event)
    assert message == expected_message
    assert message['msg'] == 'Test: I am on PTO today'
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/docker-compose.yaml
|
# Compose file to start/run the LocalStack
version: '3.8'
services:
localstack:
container_name: "${LOCALSTACK_DOCKER_NAME-localstack_main}"
image: localstack/localstack:2.2
ports:
- "4566:4566" # LocalStack Gateway
- "4510-4559:4510-4559" # external services port range
environment:
- DEBUG=1
- SERVICES=lambda,serverless,sqs
- LAMBDA_REMOVE_CONTAINERS=false
- SQS_ENDPOINT_STRATEGY=path
- MAIN_DOCKER_NETWORK=ls
networks:
- ls
volumes:
- "${LOCALSTACK_VOLUME_DIR:-./volume}:/tmp/localstack"
- "/var/run/docker.sock:/var/run/docker.sock"
networks:
ls:
name: ls
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/requirements.txt
|
asyncio==3.4.3
boto3==1.14.59
botocore==1.17.59
coverage==5.3
mock==4.0.2
moto==1.3.16
pytest==6.1.1
python-json-logger==0.1.11
requests==2.24.0
parameterized==0.7.4
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_end_to_end_employee_skype_message.py
|
"""
This End to end test employee skype message covers following:
Setup- Purging SQS queue
Step 1: Trigger employee message lambda
Step 2: Print message from cloudwatch logs
Step 3: Verify message with skype-listner sqs queue and culture file
"""
import os
import sys
import time
import asyncio
import logging
import unittest
import pytest
from pythonjsonlogger import jsonlogger
import tests.helpers.asyncio_helper
import tests.helpers.cloudwatch_helper
import tests.helpers.filter_message_helper
import tests.helpers.lambda_helper
import tests.conf.cloudwatch_configuration_conf as cloudwatch_conf
import tests.conf.sqs_utilities_conf as queue_url_conf
import tests.conf.lambda_configuration_conf as lambda_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# logging
log_handler = logging.StreamHandler()
log_handler.setFormatter(jsonlogger.JsonFormatter())
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(log_handler)
# Declaring class for test object
class Skypemessagetest():
    """
    Helper object for the end-to-end Skype message test: wraps lambda,
    CloudWatch and SQS helper calls used by the test steps.
    """
    logger = logging.getLogger(__name__)
    def __init__(self):
        """
        Initialise class
        """
    def get_request_id(self):
        """
        get the response from lambda
        """
        request_id = tests.helpers.lambda_helper.get_request_id_from_lambda_response()
        return request_id
    def get_message_from_cloudwatch_log_ptr(self):
        """
        Method to get message from cloudwatch log pointer.
        Polls CloudWatch up to 5 times, one minute apart, and returns the
        first message found (None if nothing shows up).
        """
        message = None
        for i in range(1, 6):
            ptr_value = tests.helpers.cloudwatch_helper.get_ptr_value\
                (cloudwatch_conf.log_group_bot_sender,cloudwatch_conf.query_skype_sender)
            if ptr_value:
                message = tests.helpers.cloudwatch_helper.get_message(ptr_value)
                break
            time.sleep(60)
        return message
    def clear_queues(self):
        """
        Method to clear queues
        """
        # NOTE(review): tests.helpers.sqs_helper is used here, but this
        # module only imports the asyncio/cloudwatch/filter_message/lambda
        # helpers — confirm sqs_helper gets imported before this runs.
        for every_queue_url in queue_url_conf.QUEUE_URL_LIST:
            tests.helpers.sqs_helper.purge_sqs_queue(every_queue_url)
            time.sleep(1)
class TestEndtoEndSkypeMessage():
    """
    End-to-end test: purge the queues, trigger the employee-message lambda,
    pull the sent message back out of CloudWatch, then verify it against the
    skype-listener SQS queue and the culture file.
    """
    def test_end_to_end_skype_message(self):
        """
        Test case
        """
        Skypemessagetest_obj = Skypemessagetest()
        logger.info("Setup- Purge SQS queue")
        logger.info("---------------------------------------------------------------------------")
        Skypemessagetest_obj.clear_queues()
        logger.info("Step 1: Trigger employee message lambda--------------------------------")
        tests.helpers.lambda_helper.trigger_cron_lambda(lambda_conf.daily_message_lambda)
        logger.info("---------------------------------------------------------------------------")
        logger.info("Step 2: Print message from cloudwatch logs------------------------------")
        message = Skypemessagetest_obj.get_message_from_cloudwatch_log_ptr()
        logger.info("---------------------------------------------------------------------------")
        logger.info(message)
        logger.info("-------------------------------------------------------------------------- ")
        logger.info("Step 3: Verify message with skype-listner sqs queue and culture file----")
        # The verification helper signals completion via sys.exit, so the
        # expected outcome is a SystemExit raised out of asyncio.run().
        with pytest.raises(SystemExit) as system_exception:
            asyncio.run(tests.helpers.asyncio_helper.poll_message(message))
        assert system_exception.type == SystemExit
        logger.info("-------------------------------------------------------------------------- ")
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/testutils.py
|
"test utils for LocalStack tests"
import fnmatch
import json
import os
import shutil
import subprocess
import zipfile

import boto3
import botocore
# Specify the paths and configuration
LAMBDA_ZIP='lambda_with_dependency.zip'  # deployment artifact built by create_lambda_zip
LAMBDA_FOLDER_PATH = '../url_filtering_lambda_rohini'  # lambda sources, relative to tests/
REQUIREMENTS_FILE_PATH = '../url_filtering_lambda_rohini/requirements.txt'  # pip deps bundled into the zip
CONFIG = botocore.config.Config(retries={'max_attempts': 0})  # fail fast: no boto retries against LocalStack
LOCALSTACK_ENDPOINT = 'http://localhost.localstack.cloud:4566'  # default LocalStack edge endpoint
def get_lambda_client():
    "Return a boto3 Lambda client wired to the LocalStack endpoint."
    client = boto3.client(
        'lambda',
        aws_access_key_id='test',
        aws_secret_access_key='test',
        region_name='us-east-1',
        endpoint_url=LOCALSTACK_ENDPOINT,
        config=CONFIG,
    )
    return client
def get_sqs_client():
    "Return a boto3 SQS client wired to the LocalStack endpoint."
    client = boto3.client(
        'sqs',
        aws_access_key_id='test',
        aws_secret_access_key='test',
        region_name='us-east-1',
        endpoint_url=LOCALSTACK_ENDPOINT,
    )
    return client
def create_queue(queue_name):
    """
    Create a new SQS queue on LocalStack and return its URL.

    Bug fix: the original computed the queue URL but threw it away, forcing
    callers into an extra get_queue_url round-trip. Returning it is
    backward-compatible (existing callers ignore the return value).
    """
    sqs = get_sqs_client()
    response = sqs.create_queue(
        QueueName=queue_name,
        Attributes={
            'VisibilityTimeout': '300'  # seconds a received message stays hidden
        }
    )
    queue_url = response['QueueUrl']
    print("Queue URL:", queue_url)
    return queue_url
def get_queue_url(queue_name):
    "Look up and return the URL of an existing SQS queue."
    client = get_sqs_client()
    queue_url = client.get_queue_url(QueueName=queue_name)['QueueUrl']
    print("Queue URL:", queue_url)
    return queue_url
def delete_queue(queue_url):
    "Delete the SQS queue and report whether the call succeeded."
    client = get_sqs_client()
    result = client.delete_queue(QueueUrl=queue_url)
    status = result['ResponseMetadata']['HTTPStatusCode']
    if status == 200:
        print("Queue deleted successfully.")
    else:
        print("Failed to delete the queue.")
def create_zip_file_with_lambda_files_and_packages(lambda_zip, lambda_folder_path, temp_directory):
    "Create a zip file with lambda files and installed packages"
    def _is_wanted(path):
        # Skip previously built zips, __pycache__ residue and pytest caches.
        return (not path.endswith('.zip')
                and not fnmatch.fnmatch(path, '*_pycache_*')
                and not fnmatch.fnmatch(path, '*.pytest*'))

    with zipfile.ZipFile(lambda_zip, 'w', zipfile.ZIP_DEFLATED) as archive:
        # Lambda sources go in first, stored relative to the lambda folder.
        for root, _dirs, files in os.walk(lambda_folder_path):
            for name in files:
                full_path = os.path.join(root, name)
                if _is_wanted(full_path):
                    archive.write(full_path, os.path.relpath(full_path, lambda_folder_path))
        # Installed dependencies are added at the archive root.
        for root, _dirs, files in os.walk(temp_directory):
            for name in files:
                full_path = os.path.join(root, name)
                archive.write(full_path, os.path.relpath(full_path, temp_directory))
def delete_temp_file_and_its_content(temp_directory):
    """
    Delete the temporary directory and everything inside it.

    Uses shutil.rmtree instead of the previous hand-rolled bottom-up
    os.walk/os.remove/os.rmdir loop; the end state is identical and the
    stdlib call also handles deeply nested trees correctly.
    """
    shutil.rmtree(temp_directory)
def create_lambda_zip(lambda_zip, lambda_folder_path, requirements_file_path):
    "Build a deployable zip: lambda sources plus pip-installed dependencies."
    staging_dir = '../package'
    # Stage the dependencies in a scratch folder next to the repo checkout.
    os.makedirs(staging_dir, exist_ok=True)
    subprocess.check_call(['pip', 'install', '-r', requirements_file_path, '-t', staging_dir])
    # Bundle sources and staged packages into one archive, then clean up.
    create_zip_file_with_lambda_files_and_packages(lambda_zip, lambda_folder_path, staging_dir)
    delete_temp_file_and_its_content(staging_dir)
def create_lambda(function_name):
    """
    Package and deploy the Lambda to LocalStack.

    The handler is assumed to be <function_name>.lambda_handler; runtime
    secrets and ids come from the invoking shell's environment variables.
    """
    lambda_client = get_lambda_client()
    create_lambda_zip(LAMBDA_ZIP, LAMBDA_FOLDER_PATH, REQUIREMENTS_FILE_PATH)
    with open(LAMBDA_ZIP, 'rb') as zip_file:
        zipped_code = zip_file.read()
    lambda_client.create_function(
        FunctionName=function_name,
        Runtime='python3.8',
        # Dummy role ARN - LocalStack does not validate IAM roles.
        Role='arn:aws:iam::123456789012:role/test-role',
        Handler=function_name + '.lambda_handler',
        Code={"ZipFile": zipped_code},
        Timeout=180,
        Environment={
            'Variables': {
                'ETC_CHANNEL': 'dummy_chat@id.007',
                'Qxf2Bot_USER': 'dummy_user.id.006',
                'CHATGPT_API_KEY': os.environ.get('CHATGPT_API_KEY'),
                'CHATGPT_VERSION': os.environ.get('CHATGPT_VERSION'),
                'DEFAULT_CATEGORY': os.environ.get('DEFAULT_CATEGORY'),
                'API_KEY_VALUE': os.environ.get('API_KEY_VALUE'),
                'URL': os.environ.get('URL'),
                'SKYPE_SENDER_QUEUE_URL': get_queue_url('test-queue'),
                'employee_list': os.environ.get('employee_list'),
                'LOCALSTACK_ENV': 'true'
            }
        }
    )
def delete_lambda(function_name):
    "Remove the Lambda from LocalStack and delete the local zip artifact."
    client = get_lambda_client()
    client.delete_function(FunctionName=function_name)
    os.remove(LAMBDA_ZIP)
def invoke_function_and_get_message(function_name, event):
    "Invoke the Lambda synchronously with *event* and return its parsed reply."
    client = get_lambda_client()
    # Lambda expects the event as a JSON document.
    event_payload = json.dumps(event)
    response = client.invoke(
        FunctionName=function_name,
        InvocationType='RequestResponse',
        Payload=event_payload,
    )
    # Decode and parse the function's reply.
    raw_body = response['Payload'].read().decode('utf-8')
    response_data = json.loads(raw_body)
    print ("response data:", response_data)
    return response_data
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_integration_daily_messages_lambda.py
|
"""
Code level tests for the daily messages lambda
"""
import os
import sys
import boto3
import warnings
from unittest import mock
from unittest.mock import patch
from moto import mock_sqs
from daily_messages import daily_messages
import tests.conf.channel_configuration_conf as channel_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Added following code to fix deprecation warning
with warnings.catch_warnings():
warnings.filterwarnings("ignore",category=DeprecationWarning)
import imp
@mock_sqs
@patch('daily_messages.daily_messages.write_message')
@patch('daily_messages.daily_messages.get_message')
def test_daily_message_lambda_sqs_integration(mock_get_message, mock_write_message):
    """
    Integration test: lambda_handler should fetch one message for the event's
    endpoint and hand it to write_message for the configured channel.
    """
    event = {'endpoint':'/message', 'channel':channel_conf.channel}
    sqs = boto3.resource('sqs')
    queue = sqs.create_queue(QueueName='test-skype-sender')
    daily_messages.QUEUE_URL = queue.url
    mock_get_message.return_value = 'Did it mocked really?'
    daily_messages.lambda_handler(event=event, context=None)
    # get_message must be asked for the event's endpoint exactly once.
    mock_get_message.assert_called_with('/message')
    assert mock_get_message.call_count == 1
    # write_message must forward the fetched text to the test channel.
    mock_write_message.assert_called_with('Did it mocked really?', channel_conf.channel)
    assert mock_write_message.call_count == 1
    # write_message is mocked, so nothing should actually reach the queue.
    # (The original received messages into an unused variable and also built
    # an unused expected_message string; both removed.)
    assert queue.receive_messages() == []
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_end_to_end_pto_detector_non_pto_message.py
|
"""
This End to end test employee skype message covers following:
Step 1: Trigger non pto detector message from pto detector lambda
Step 2: Assert message from cloudwatch log with message sent
Step 3: Assert logstream from the record containing message and false flag
"""
import os
import sys
import time
import ast
import asyncio
from datetime import datetime, timedelta
import json
import logging
import boto3
import pytest
import unittest
import tests.helpers.cloudwatch_helper
import tests.helpers.generic_utils_helper as generic_utils
import tests.conf.cloudwatch_configuration_conf as cloudwatch_conf
from pythonjsonlogger import jsonlogger
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# logging
log_handler = logging.StreamHandler()
log_handler.setFormatter(jsonlogger.JsonFormatter())
logger = logging.getLogger()
logger.setLevel(logging.INFO)
logger.addHandler(log_handler)
class ptodetectortest():
    """
    Test-support object for the pto_detector end-to-end test.

    Wraps the lambda invocation and the CloudWatch Logs Insights queries the
    test needs to locate the processed message and its PTO-flag record.
    """
    logger = logging.getLogger(__name__)

    def __init__(self):
        """
        Initilalise class
        """

    def trigger_cron_lambda(self, lambda_name: str):
        """
        Invoke the pto_detector lambda asynchronously with a canned
        non-PTO skype message and return the AWS response.
        """
        _logger = logging.getLogger(__name__)
        _logger.setLevel(logging.DEBUG)
        client = boto3.client('lambda')
        # SQS-style event wrapping a skype message that is NOT a PTO message.
        # (The unused 'Event = {}' placeholder from the original was removed.)
        pyloadobj = json.dumps({"Records": [{
        "body": "{\"Message\":\"{\\\"msg\\\": \\\"Test: I am not on PTO today\\\", \\\"chat_id\\\": \\\"19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype\\\", \\\"user_id\\\":\\\"blah\\\"}\"}"}]})
        response = client.invoke(FunctionName=lambda_name,InvocationType='Event',Payload = pyloadobj,LogType='None')
        return response

    def get_query_id(self):
        """
        Start a Logs Insights query for the processed message over the last
        six minutes and return its query id.

        (The original wrapped this in a 'while query_id == None' loop that
        could only ever run once; start_query either returns an id or raises.)
        """
        client = boto3.client('logs')
        start_query_response = client.start_query(logGroupName=cloudwatch_conf.pto_log_group,\
            startTime=int((datetime.today() - timedelta(minutes=6)).timestamp()),\
            endTime=int(datetime.now().timestamp()),queryString=cloudwatch_conf.query_pto_detector)
        return start_query_response['queryId']

    def get_message_from_pto_detector_logs(self, query_id):
        """
        Fetch the query results and return the logged message text, or None
        when the result row is not (yet) present.
        """
        message = None
        response = None
        while response is None:  # 'is None', not '== None'
            generic_utils.wait(1)
            client = boto3.client('logs')
            response = client.get_query_results(queryId=query_id)
        response_dict = tests.helpers.cloudwatch_helper.get_data_structure(response)
        if "results_0_1_value" in response_dict.keys():
            message = response_dict['results_0_1_value']
        return message

    def get_logstream_from_ptr_value(self, query_id):
        """
        Resolve the @ptr of the query's first result row to its log record
        and return that record's log stream name (or None if no pointer).
        """
        logstream = None
        response = None
        while response is None:
            generic_utils.wait(1)
            client = boto3.client('logs')
            response = client.get_query_results(queryId=query_id)
        response_dict = tests.helpers.cloudwatch_helper.get_data_structure(response)
        logger.info(response_dict)
        ptr_value = response_dict['results_0_2_value']
        if ptr_value:
            record = client.get_log_record(logRecordPointer=ptr_value)
            record_dict = tests.helpers.cloudwatch_helper.get_data_structure(record)
            logstream = record_dict['logRecord_@logStream']
        return logstream

    def get_query_id_from_flag_message(self):
        """
        Start a Logs Insights query for the 'False' flag record over the
        last seven minutes and return its query id.
        """
        client = boto3.client('logs')
        start_query_response = client.start_query(logGroupName=cloudwatch_conf.pto_log_group,\
            startTime=int((datetime.today() - timedelta(minutes=7)).timestamp()),\
            endTime=int(datetime.now().timestamp()),queryString=cloudwatch_conf.query_pto_detector_flag)
        return start_query_response['queryId']

    def get_logstream_from_flag_value(self, query_id):
        """
        Fetch the flag query's results and return the log stream column of
        the first result row.
        """
        response = None
        while response is None:
            generic_utils.wait(1)
            client = boto3.client('logs')
            response = client.get_query_results(queryId=query_id)
        response_dict = tests.helpers.cloudwatch_helper.get_data_structure(response)
        return response_dict['results_0_2_value']
class TestPtoDetectorNonPtotest():
    """
    End-to-end scenario for a non-PTO message.

    Triggers the pto_detector lambda with a non-PTO message, then uses
    CloudWatch Logs Insights to (a) confirm the message was logged and
    (b) confirm the 'False' PTO flag was written to the same log stream.
    Code left byte-identical: steps are strictly ordered and timing-sensitive.
    """
    def test_end_to_end_pto_detector_non_pto_message(self):
        """
        Test method
        """
        logger.info("---------------------------------------------------------------------------")
        ptodetectortest_obj = ptodetectortest()
        logger.info("Step 1: Trigger pto detector lambda--------------------------------")
        pto_detector_lambda_name = 'pto_detector'
        ptodetectortest_obj.trigger_cron_lambda(pto_detector_lambda_name)
        logger.info("-------------waiting till we get query id--------------------------------------------------")
        # Fixed 6-minute wait so the lambda's logs propagate to CloudWatch.
        generic_utils.wait(360)
        logger.info('-------------printing query id fetched using query containing message----------------------')
        query_id = ptodetectortest_obj.get_query_id()
        logger.info(query_id)
        logger.info('-----------------fetching original message from cloudwatch logs------------------------------------------')
        message_from_logs = ptodetectortest_obj.get_message_from_pto_detector_logs(query_id)
        logger.info("Step 2: Assert message from cloudwatch log with message sent--------------------------------")
        assert message_from_logs == "Test: I am not on PTO today, blah, 19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype\n"
        logger.info('---------------------Getting log stream from log reord pointer of original message-------------------------------')
        logstream = ptodetectortest_obj.get_logstream_from_ptr_value(query_id)
        logger.info(logstream)
        logger.info('--------------Printing query_id from flag message-------------------------------------------------')
        query_id_from_flag = ptodetectortest_obj.get_query_id_from_flag_message()
        logger.info(query_id_from_flag)
        logger.info('---------------------Getting log stream for flag message-------------------------------')
        logstream_from_flag = ptodetectortest_obj.get_logstream_from_flag_value(query_id_from_flag)
        logger.info(logstream_from_flag)
        logger.info("Step 3: Assert logstream from the record containing message and false flag----------------------------------------------------")
        # Same log stream => the flag record came from the same invocation.
        assert logstream == logstream_from_flag
        logger.info("---------------------------------------------------------------------------")
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_pto_detector_get_is_pto.py
|
"""
Code level tests for get_is_pto method of pto_detector lambda
"""
import requests
from parameterized import parameterized_class
@parameterized_class(("url", "message", "score", "expected_status_code"), [
    ("https://practice-testing-ai-ml.qxf2.com/is-pto","I am sick out today", 1, 200),
    ("https://practice-testing-ai-ml.qxf2.com/is-pto","I am happy today", 0, 200),
])
class Testgetispto(object):
    """
    Parameterized checks for the practice app's is-pto scoring endpoint.
    """
    def test_get_is_pto_score(self):
        """
        POST the message and verify both the status code and the PTO score.
        """
        response = requests.post(self.url, data={"message": self.message})
        assert response.status_code == self.expected_status_code
        assert response.json()['score'] == self.score
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_url_filter.py
|
"""
Tests for the URL filtering lambda
> get_url logic
- correct url
- incorrect url
> lambda_handler
- correct url
- incorrect url
- multiple urls
- correct channel
- incorrect channel
- Qxf2Bot user
- Not bot user
> post_to_newsletter
- single url
- multiple url
"""
import json
import os
import pytest
import url_filtering_lambda_rohini.url_filtering_lambda_rohini as url_filter
from unittest import mock
from unittest.mock import patch
# (input sentence, URLs expected to be extracted) pairs shared by the tests below
TEST_DATA = [("This is a URL: https://qxf2.com",["https://qxf2.com"]),
("This is NOT a URL http:/qxf2.com",[]),
("Message with multiple urls https://qxf2.com and https://chessbase.com", ["https://qxf2.com", "https://chessbase.com"])]
@pytest.mark.parametrize("sentence,expected", TEST_DATA)
def test_get_url(sentence, expected):
    "get_url should extract exactly the expected list of URLs."
    extracted = url_filter.get_url(sentence)
    assert extracted == expected
@pytest.mark.parametrize("sentence,expected", TEST_DATA)
@patch('url_filtering_lambda_rohini.url_filtering_lambda_rohini.post_to_newsletter')
@patch('url_filtering_lambda_rohini.url_filtering_lambda_rohini.clean_message')
@patch('url_filtering_lambda_rohini.url_filtering_lambda_rohini.get_message_contents')
def test_url_filter(mock_message_contents, mock_clean_message, mock_post, sentence, expected):
    "Verify that filtered URLs are working ok within the lambda handler"
    result_status_code = "This is from the test"
    channel = "19:1941d15dada14943b5d742f2acdb99aa@thread.skype"
    os.environ['ETC_CHANNEL'] = channel
    mock_post.return_value = result_status_code
    mock_clean_message.return_value = sentence
    mock_message_contents.return_value = {'msg': sentence,
                                          'chat_id': channel,
                                          'user_id': 'blah'}
    result = url_filter.lambda_handler({}, {})
    # post_to_newsletter runs only when at least one URL was extracted.
    expected_status_code = result_status_code if expected else ''
    assert result['body'] == json.dumps(expected)
    assert result['statusCode'] == expected_status_code
@patch('url_filtering_lambda_rohini.url_filtering_lambda_rohini.get_message_contents')
def test_wrong_channel(mock_message_contents):
    "Verify that URLs are not filtered unless it is the etc channel"
    os.environ['ETC_CHANNEL'] = "etc channel"
    # The message arrives from a different chat, so the handler must ignore it.
    mock_message_contents.return_value = {'msg': "See https://qxf2.com",
                                          'chat_id': "NOT etc channel",
                                          'user_id': 'blah'}
    outcome = url_filter.lambda_handler({}, {})
    assert outcome['body'] == json.dumps([])
    assert outcome['statusCode'] == ''
@patch('url_filtering_lambda_rohini.url_filtering_lambda_rohini.get_message_contents')
def test_qxf2_bot_user(mock_message_contents):
    "Ensure messages sent by Qxf2 Bot are not being processed"
    bot_user = "Qxf2Bot"
    etc_channel = "etc channel"
    os.environ['ETC_CHANNEL'] = etc_channel
    os.environ['Qxf2Bot_USER'] = bot_user
    # Right channel, but the sender is the bot itself - must be skipped.
    mock_message_contents.return_value = {'msg': "See https://qxf2.com",
                                          'chat_id': etc_channel,
                                          'user_id': bot_user}
    outcome = url_filter.lambda_handler({}, {})
    assert outcome['body'] == json.dumps([])
    assert outcome['statusCode'] == ''
@pytest.mark.parametrize("sentence,expected", TEST_DATA)
@patch('requests.post')
def test_multiple_url_post(mock_post, sentence, expected):
    "Verify that post_to_newsletter is called the correct number of times"
    test_status_code = 'Via test!' if expected else ""
    mock_response = mock.MagicMock()
    mock_response.status_code = test_status_code
    mock_post.return_value = mock_response
    mock_article_editor = "qxf2_editor"
    result = url_filter.post_to_newsletter(expected, mock_article_editor)
    # One HTTP post per URL; the last status code is what gets returned.
    assert result == test_status_code
    assert mock_post.call_count == len(expected)
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_newsletter_sqs_batch_messages.py
|
"""
This is a reliability testing to test the impact of sending
tons of messages to the Newsletter automation SQS.
The test covers the following scenarios:
1. Send a batch of messages to the SQS and confirm all the messages are sent
2. Recieve messages from the SQS and make sure all the messages sent earlier are present in the retrieved messages
"""
import os
import boto3
import time
import unittest
import aws_sqs_batchlib
class TestSQS(unittest.TestCase):
    """Reliability checks for the newsletter SQS: batch send, then receive."""
    @classmethod
    def setUpClass(cls):
        # Queue URL comes from the environment; messages collects what we send.
        cls.queue_url = os.environ.get('newsletter_queue_url')
        cls.messages = []
    def test_send_batch_messages(self):
        "Send batch messages to the SQS and verify if all messages are sent"
        message_count = 50
        self.messages.extend(
            {'Id': f'{index}', 'MessageBody': f'https://www.test-qxf2.com/{index}'}
            for index in range(message_count)
        )
        # Send the whole batch in one helper call.
        res = aws_sqs_batchlib.send_message_batch(
            QueueUrl=self.queue_url,
            Entries=self.messages,
        )
        assert len(res['Successful']) == message_count
    def test_receive_messages(self):
        "Retreive messages from the SQS and verify if all the messages sent are retrieved"
        res = aws_sqs_batchlib.receive_message(
            QueueUrl = self.queue_url,
            MaxNumberOfMessages=100,
            WaitTimeSeconds=15,
        )
        # Every message sent earlier must be among the retrieved bodies.
        received_messages = {msg['Body'] for msg in res['Messages']}
        for sent in self.messages:
            self.assertIn(sent['MessageBody'], received_messages, f"Message {sent['MessageBody']} should be received")
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_pto_detector_write_message.py
|
"""
Code level tests for write_message method pf pto_detector lambda
"""
import os
import sys
import boto3
import warnings
from moto import mock_sqs
from parameterized import parameterized, parameterized_class
from pto_detector import pto_detector
import tests.conf.channel_configuration_conf as channel_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
# Added following code to fix deprecation warning
with warnings.catch_warnings():
warnings.filterwarnings("ignore",category=DeprecationWarning)
import imp
def get_class_name(cls, num, params_dict):
    """
    Build the generated test-class name from the base class name, the case
    number and the sanitised message parameters.
    """
    safe_sent = parameterized.to_safe_name(params_dict['message_to_send'])
    safe_expected = parameterized.to_safe_name(params_dict['expected_message'])
    return f"{cls.__name__}_{num}_{safe_sent}{safe_expected}"
@parameterized_class([
    { "message_to_send": "Detected PTO message Test: I am out sick today", "expected_message": str({'msg':f'Detected PTO message Test: I am out sick today', 'channel':channel_conf.channel})},
    { "message_to_send": "Detected PTO message Test: I am on PTO today", "expected_message": str({'msg':f'Detected PTO message Test: I am on PTO today', 'channel':channel_conf.channel})},
], class_name_func=get_class_name)
@mock_sqs
class TestWriteMessage(object):
    """
    Parameterized checks for pto_detector.write_message against a mocked SQS.
    """
    def test_pto_detector_write_message(self):
        """
        write_message should put exactly one message, matching the expected
        payload, onto the skype-sender queue.
        """
        mocked_sqs = boto3.resource('sqs')
        target_queue = mocked_sqs.create_queue(QueueName='test-skype-sender')
        pto_detector.QUEUE_URL = target_queue.url
        pto_detector.write_message(self.message_to_send, channel_conf.channel)
        received = target_queue.receive_messages()
        assert received[0].body == self.expected_message
        assert len(received) == 1
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_url_filter_lambda_on_localstack.py
|
"""
Test URL Filter Lambda on the LocalStack:
- Deploy Lambda along with its dependencies and SQS
- Run test
- Delete Lambda and SQS queue
"""
import time
from unittest import TestCase
import tests.testutils as testutils
class UrlFilterLambdaLocalStackTest(TestCase):
    "Deploy Lambda, SQS on LocalStack and run the test"
    @classmethod
    def setup_class(cls):
        "Create SQS and Lambda"
        print('\nCreating a SQS')
        testutils.create_queue('test-queue')
        print('\nCreating the Lambda')
        testutils.create_lambda('url_filtering_lambda_rohini')
        # Lambda creation is asynchronous on LocalStack; block until usable.
        cls.wait_for_function_active('url_filtering_lambda_rohini')
    @classmethod
    def teardown_class(cls):
        "Delete the Lambda, SQS and teardown the session"
        print('\nDeleting the Lambda')
        testutils.delete_lambda('url_filtering_lambda_rohini')
        print('\nDeleting the SQS test queue')
        queue_url = testutils.get_queue_url('test-queue')
        testutils.delete_queue(queue_url)
    @classmethod
    def wait_for_function_active(cls, function_name):
        "Wait till Lambda is up and active"
        lambda_client = testutils.get_lambda_client()
        # NOTE(review): no timeout - this loops forever if the function never
        # becomes Active; tolerable in CI where the job itself times out.
        while True:
            response = lambda_client.get_function(FunctionName=function_name)
            function_state = response['Configuration']['State']
            if function_state == 'Active':
                break
            time.sleep(1) # Wait for 1 second before checking again
    def test_that_lambda_returns_filtered_url(self):
        "Test Lambda's received message"
        print('\nInvoking the Lambda and verifying return message')
        # SQS-style event whose inner skype message contains exactly one URL.
        message = {
            "Records": [
                {
                    "body": "{\"Message\":\"{\\\"msg\\\": \\\"Checkout how we can test Lambda "
                    "locally using LocalStack "
                    "https://qxf2.com/blog/testing-aws-lambda-locally-using-localstack-and-pytest\\\","
                    "\\\"chat_id\\\": \\\"dummy_chat@id.007\\\", "
                    "\\\"user_id\\\":\\\"dummy_user.id.007\\\"}\"}"
                }
            ]
        }
        payload = testutils.invoke_function_and_get_message('url_filtering_lambda_rohini', message)
        self.assertEqual(payload['body'], '["https://qxf2.com/blog/testing-aws-lambda-locally-using-localstack-and-pytest"]')
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_integration_skype_lambda.py
|
"""
Code level tests for the skype sender lambda
"""
import os
import sys
import boto3
from unittest import mock
from unittest.mock import patch
from skype_sender import qxf2_skype_sender
import tests.conf.channel_configuration_conf as channel_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
@patch('skype_sender.qxf2_skype_sender.post_message')
def test_skype_sender_lambda(mock_post_message):
    "Test to write post messages"
    # NOTE(review): post_message is patched and then invoked directly below,
    # so this test only asserts that the mock was called with these args -
    # it never exercises the real qxf2_skype_sender.post_message code path.
    event = {"Records": [{"body": "{'msg': '<b>Test message</b> I am a test message from lambda-tester', 'channel':channel_conf.channel}"}]}
    mock_post_message.return_value = '<b>Test message</b> I am a test message from lambda-tester'
    # Triggering the response
    qxf2_skype_sender.post_message(event=event,context=None)
    #Assertion
    mock_post_message.assert_called_with(event=event,context=None,)
    assert mock_post_message.call_count == 1
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/tests/test_pto_detector_get_message_contents.py
|
"""
Code level tests for get_message_contents method pf pto_detector lambda
"""
import ast
import collections
from parameterized import parameterized, parameterized_class
from pto_detector import pto_detector
def get_class_name(cls, num, params_dict):
    """
    Compose the generated test-class name from the base name, case number
    and the sanitised event/expected-contents parameters.
    """
    event_part = parameterized.to_safe_name(params_dict['event'])
    expected_part = parameterized.to_safe_name(params_dict['expected_message_contents'])
    return f"{cls.__name__}_{num}_{event_part}{expected_part}"
@parameterized_class([
{ "event":str({"Records": [{"body": "{\"Message\":\"{\\\"msg\\\": \\\"Test: I am on PTO today\\\", \\\"chat_id\\\": \\\"19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype\\\", \\\"user_id\\\":\\\"blah\\\"}\"}"}]}), "expected_message_contents": str({'msg': 'Test: I am on PTO today', 'chat_id': '19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype','user_id': 'blah'})},
{ "event":str({"Records": [{"body": "{\"Message\":\"{\\\"msg\\\": \\\"Test: I am on PTO tomorrow\\\", \\\"chat_id\\\": \\\"19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype\\\", \\\"user_id\\\":\\\"blah\\\"}\"}"}]}), "expected_message_contents": str({'msg': 'Test: I am on PTO tomorrow', 'chat_id': '19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype','user_id': 'blah'})}
], class_name_func=get_class_name)
class TestWriteMessage(object):
    """
    Parameterized checks for pto_detector.get_message_contents.
    """
    def get_event_data_structure(self, event_string):
        """
        Flatten a nested dict/list structure into an OrderedDict whose keys
        are underscore-joined paths to each leaf value.
        """
        flattened = collections.OrderedDict()
        def walk(node, path=""):
            """
            Depth-first walk accumulating leaf values under their path keys.
            """
            if isinstance(node, list):
                for index, element in enumerate(node):
                    walk(element, path + "_" + str(index) if path else str(index))
            elif isinstance(node, dict):
                for key, value in node.items():
                    walk(value, path + "_" + key if path else key)
            else:
                # A plain string input lands here under the empty path key.
                flattened[path] = node
        walk(event_string)
        return flattened
    def extract_event_from_event_string(self):
        """
        Recover the event dict by literal-eval'ing the flattened string form.
        """
        flattened = self.get_event_data_structure(self.event)
        return ast.literal_eval(flattened[''])
    def test_pto_detector_get_message_contents(self):
        """
        get_message_contents should parse the SQS event into the expected
        msg/chat_id/user_id dict.
        """
        parsed_event = self.extract_event_from_event_string()
        actual_message = pto_detector.get_message_contents(parsed_event)
        assert str(actual_message) == self.expected_message_contents
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/conf/lambda_configuration_conf.py
|
"""
This configuration file contains details about lambda configuration
"""
daily_message_lambda = 'qxf2-employee-messages'
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/conf/cloudwatch_configuration_conf.py
|
"""
The config file will list config details for aws configuration
"""
# settings for qxf2-employee-messages
log_group = '/aws/lambda/qxf2-employee-messages'
#settings for qxf2-skype-sender
query_skype_sender = f"fields @timestamp, @message ,Records.0.body|filter Records.0.attributes.SenderId = 'AROAUFFUKR766EKQRFECO:qxf2-employee-messages'"
log_group_bot_sender='/aws/lambda/qxf2-bot-sender'
# cloudwatch log dictionary keys
ptr_value = 'results_0_3_value'
record_body = 'logRecord_Records.0.body'
record_messageid = 'logRecord_Records.0.messageId'
# cloodwatch configuration for pto detector
pto_log_group = '/aws/lambda/pto_detector'
query_pto_detector = f"fields @timestamp, @message|filter @message like 'Test: I am not on PTO today, blah, 19:f33e901e871d4c3c9ebbbbee66e59ebe@thread.skype'"
query_pto_detector_flag = f"fields @timestamp, @message, @logStream|filter @message like 'False'"
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/conf/sqs_utilities_conf.py
|
"""
The config file will list queue url detail for queues- skype-listener-daily-messages, skype-sender
"""
QUEUE_URL_LIST =['skype-listener-daily-messages']
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/conf/channel_configuration_conf.py
|
"""
The config file will list config details for aws configuration
"""
chat_id = "19:1941d15dada14943b5d742f2acdb99aa@thread.skype"
user_id = "live:.cid.92bd244e945d8335"
channel = "test"
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/helpers/filter_message_helper.py
|
"""
Helper module for filter message functionality
"""
import os
import sys
import json
import pytest
import requests
import tests.conf.channel_configuration_conf as channel_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
#Defining current file path
CURR_FILE_PATH = os.path.dirname(os.path.abspath(__file__))
MESSAGES_PATH = os.path.join(CURR_FILE_PATH, '')  # kept for callers that join message fixture paths
DAILY_MESSAGES_URL = 'https://daily-messages.qxf2.com'  # base URL of the daily-messages app
#setting environment variable
os.environ['chat_id']= channel_conf.chat_id  # test channel chat id, read by code under test
os.environ['user_id'] = channel_conf.user_id  # sender user id, read by code under test
# Declaring class Style
class Style():
    """
    ANSI terminal escape codes used to colour console output.
    """
    BLACK = '\033[30m'
    RED = '\033[31m'
    GREEN = '\033[32m'
    YELLOW = '\033[33m'
    BLUE = '\033[34m'
    MAGENTA = '\033[35m'
    CYAN = '\033[36m'
    WHITE = '\033[37m'
    UNDERLINE = '\033[4m'
    RESET = '\033[0m'  # restores the default terminal colour
def get_dict(body_string):
    """
    Generates dict from message body
    :param string
    :return dict object

    NOTE(review): the single->double quote replacement below corrupts any
    payload whose text itself contains an apostrophe - confirm upstream
    payloads never do before relying on this helper.
    """
    # Round-trip through dumps/loads, converting python-repr style quoting
    # to JSON quoting, then parse the actual payload.
    body_string = json.dumps(body_string)
    body_string = body_string.replace("'", "\"")
    body_string = json.loads(body_string)
    message_body_obj = json.loads(body_string)
    return message_body_obj
def get_message_body(message):
    """
    This method will return message body

    :param message: raw SQS receive_message response dict
    :return: parsed inner 'Message' dict, or "" when no messages were retrieved
    """
    msg_body = ""
    if 'Messages' in message:
        for message in message['Messages']:  # NOTE: rebinds the outer name 'message'
            if 'Body' in message.keys():
                message_body_obj = get_dict(message['Body'])
                if 'Message' in message_body_obj.keys():
                    msg_body = get_dict(message_body_obj['Message'])
                else:
                    print("Message key is not present in the Message Body")
                    sys.exit()
            else:
                print("Message does not contain Body")
                sys.exit()
    else:
        print("No messages are retrieved")
        # NOTE(review): pytest.raises swallows the SystemExit raised below,
        # so execution continues and "" is returned instead of exiting.
        with pytest.raises(SystemExit):
            sys.exit()
    return msg_body
def filter_message(message,chat_id,user_id):
    """
    Log whether *message* came from the given chat/user pair.

    :param message: raw SQS receive_message response
    :param chat_id: expected skype chat id
    :param user_id: expected sender user id
    :return: always True (kept for backward compatibility - callers use this
             for its logging side effect only)
    """
    message_body = get_message_body(message)
    if message_body is not None:
        if "chat_id" in message_body and "user_id" in message_body:
            if message_body['chat_id']==chat_id and message_body['user_id']==user_id:
                print('message is from test channel and sender is skype sender lambda')
            else:
                print('Neither message is from test channel nor sender is skype sender lambda')
        else:
            print('Message does not contain required keys')
    else:
        # Bug fix: this branch runs when the body IS None, but the original
        # printed the opposite ('Message body is not none').
        print('Message body is None')
    return True
def compare_message_cloudwatch_log(message_on_channel, message_cloudwatch):
    """
    Compare the message seen on the channel with the cloudwatch log message.
    :return: True when both messages are identical
    """
    return message_on_channel == message_cloudwatch
def get_message(message):
    """
    Fetch the 'msg' field from a polled SQS message.
    Returns None when the body is missing or lacks the key.
    """
    message_body = get_message_body(message)
    if message_body is None:
        print('Message body is none')
        return None
    if "msg" in message_body:
        return message_body['msg']
    print('Message body does not contain message key')
    return None
def compare_message_with_file(message, endpoint, base_url=DAILY_MESSAGES_URL):
    """
    Check whether the message appears in the daily-messages app response
    for the given endpoint.
    :return: True when the message is present in the app's 'msg' list
    """
    response = requests.get(base_url + endpoint)
    known_messages = response.json().get('msg', [])
    return message in known_messages
def validate_message_with_culture_file(message_on_channel):
    """
    Validate the message seen on the channel against the culture file
    served by the daily-messages app.
    :return: True when the message is present in /culture/all
    """
    result_flag = False
    if message_on_channel is not None:
        endpoint = '/culture/all'
        result_flag = compare_message_with_file(message_on_channel, endpoint)
        print(Style.CYAN + '-' * 75)
        print(Style.CYAN + 'Step 3b. Validating message with culture file')
        if result_flag is True:
            print(Style.GREEN + 'Message on channel does match with culture file')
        else:
            # Bug fix: this branch previously printed the green success
            # message even though the comparison failed
            print(Style.RED + 'Message on channel does not match with culture file')
        print(Style.CYAN + '-' * 75)
    else:
        print(Style.CYAN + 'There is no message on channel')
    return result_flag
def validate_message_with_cloudwatch_logs(message_on_channel,message_cloudwatch):
    """
    Validate the channel message against the CloudWatch log message and,
    on a match, further validate it against the culture file.
    :return: True only when both validations pass
    """
    result_flag = False
    if message_on_channel is not None:
        result_flag = compare_message_cloudwatch_log(message_on_channel,message_cloudwatch)
        if result_flag is True:
            # The backslash continuations are inside the string literals, so
            # the printed text includes the continuation-line whitespace
            print(Style.CYAN + '---------------\
            ------------------------------------------------------------')
            print(Style.CYAN + 'Step 3a. Validating \
            message with Skype listner SQS Queue-------------------')
            print(Style.GREEN + 'Message on channel \
            does match with the message from cloudwatch logs')
            print(Style.CYAN + '----------------------\
            -----------------------------------------------------')
            # Chain the culture-file validation only after the log match
            result_flag = validate_message_with_culture_file(message_on_channel)
        else:
            print(Style.CYAN + '-------------\
            --------------------------------------------------------------')
            print(Style.RED + 'Message on channel does not match with \
            the message from cloudwatch logs')
            print(Style.CYAN + '---------------------\
            ------------------------------------------------------')
    else:
        print(Style.CYAN + '---------------------------\
        ------------------------------------------------')
        print("No message on channel")
        print(Style.CYAN + '------------------------------\
        ---------------------------------------------')
    return result_flag
def publish_compare_result(message,message_cloudwatch):
    """
    Filter the polled SQS message and, when it is from the expected
    channel/sender, validate it against the CloudWatch log message.
    :return: True when the full validation chain passes
    """
    result_flag = filter_message\
    (message,channel_conf.chat_id,channel_conf.user_id)
    if result_flag is True:
        message_on_channel= get_message(message)
        result_flag = validate_message_with_cloudwatch_logs(message_on_channel,message_cloudwatch)
    else:
        print(Style.CYAN + '---------------------------------\
        ------------------------------------------')
        print(Style.CYAN + 'No message polled from the queue at this time')
        print(Style.CYAN + '----------------------------\
        -----------------------------------------------')
    return result_flag
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/helpers/lambda_helper.py
|
"""
Lambda helper
"""
import os
import sys
import logging
import boto3
import tests.conf.lambda_configuration_conf as lambda_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def trigger_cron_lambda(lambda_name: str):
    """
    Invoke the given lambda asynchronously (fire-and-forget).

    :param lambda_name: name of the lambda function to invoke
    :return: the AWS invoke response dict
    """
    # Removed an unused 'Event' local and an unused logger from the original
    client = boto3.client('lambda')
    # Payload mirrors the cron trigger used for the daily-messages lambda
    response = client.invoke(FunctionName=lambda_name,
                             InvocationType='Event',
                             LogType='None',
                             Payload=b'{"endpoint": "/message","channel": "test"}')
    return response
def get_request_id_from_lambda_response():
    """
    Trigger the daily-message lambda and return the AWS RequestId
    of the invocation response.
    """
    invoke_response = trigger_cron_lambda(lambda_conf.daily_message_lambda)
    return invoke_response.get('ResponseMetadata')['RequestId']
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/helpers/asyncio_helper.py
|
"""
Helper module for asyncio methods
"""
import sys
import asyncio
import logging
import tests.helpers.filter_message_helper
import tests.helpers.sqs_helper
import tests.conf.sqs_utilities_conf as queue_url_conf
# Declaring class Style
class Style():
    """
    ANSI escape codes used to colour console output.
    (Duplicated from filter_message_helper — candidate for a shared module.)
    """
    BLACK = '\033[30m'
    RED = '\033[31m'
    GREEN = '\033[32m'
    YELLOW = '\033[33m'
    BLUE = '\033[34m'
    MAGENTA = '\033[35m'
    CYAN = '\033[36m'
    WHITE = '\033[37m'
    UNDERLINE = '\033[4m'
    RESET = '\033[0m'
async def validate_message_with_sqs(queue_url, message_cloudwatch):
    """
    Validates message from sqs queue with cloudwatch logs.
    :param queue_url: URL of the SQS queue
    :param message_cloudwatch: Message received from cloudwatch logs
    :return: result flag (note: sys.exit is raised first on success)
    """
    # NOTE(review): logger is created but never used in this coroutine
    _logger = logging.getLogger(__name__)
    _logger.setLevel(logging.DEBUG)
    message = tests.helpers.sqs_helper.get_message_from_queue(queue_url)
    result_flag = tests.helpers.filter_message_helper.publish_compare_result\
    (message,message_cloudwatch)
    # NOTE(review): exiting on success is what stops poll_message's infinite
    # loop (SystemExit propagates through asyncio.gather) — confirm intended
    if result_flag is True:
        sys.exit()
    return result_flag
async def poll_message(message_cloudwatch):
    """
    Poll every configured queue concurrently, forever, until a matching
    message triggers sys.exit inside validate_message_with_sqs.
    """
    while True:
        tasks = []
        for every_queue_url in queue_url_conf.QUEUE_URL_LIST:
            tasks.append(validate_message_with_sqs(every_queue_url,message_cloudwatch))
        # gather re-raises SystemExit from any task, ending the loop
        result = await asyncio.gather(*tasks)
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/helpers/sqs_helper.py
|
"""
Helper module for sqs messages
"""
import os
import sys
import boto3
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def get_sqs_client():
    """
    Build and return a boto3 SQS client.
    :return: sqs client object
    """
    return boto3.client('sqs')
def get_sqs_queue(queue_url):
    """
    Return queue object from queue_url.
    :param queue_url: NOTE(review): despite the name, this value is passed
        to get_queue_by_name as a QueueName — confirm callers pass a name
    :return: boto3 SQS Queue resource
    """
    queue = boto3.resource('sqs').get_queue_by_name(QueueName=queue_url)
    return queue
def get_message_from_queue(queue_url):
    """
    Receive a single raw message batch from the given queue.
    :return: raw receive_message response dict
    """
    queue = get_sqs_queue(queue_url)
    sqs_client = get_sqs_client()
    return sqs_client.receive_message(QueueUrl=queue.url)
def purge_sqs_queue(queue_url):
    """
    Purge all messages from the given queue.
    :param queue_url: queue name/url resolved via get_sqs_queue
    """
    queue = get_sqs_queue(queue_url)
    client = get_sqs_client()
    client.purge_queue(QueueUrl=queue.url)
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/helpers/cloudwatch_helper.py
|
"""
Helper module for cloudwatch log
"""
import os
import sys
import ast
import collections
from datetime import datetime, timedelta
import time
import boto3
import tests.conf.cloudwatch_configuration_conf as cloudwatch_conf
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def get_data_structure(data):
    """
    Flatten a nested dict/list into an OrderedDict keyed by the path of
    keys/indexes joined with underscores (tabular form).

    :param data: arbitrarily nested combination of dicts, lists and scalars
    :return: OrderedDict mapping flattened path -> leaf value
    """
    obj = collections.OrderedDict()
    def recurse(dataobject, parent_key=""):
        "Walk the object depth-first, accumulating leaves into obj"
        if isinstance(dataobject, list):
            # enumerate instead of range(len(...)) — same visit order
            for index, element in enumerate(dataobject):
                recurse(element, f"{parent_key}_{index}" if parent_key else str(index))
        elif isinstance(dataobject, dict):
            for key, value in dataobject.items():
                recurse(value, f"{parent_key}_{key}" if parent_key else key)
        else:
            # Leaf node: record the value under its flattened path
            obj[parent_key] = dataobject
    recurse(data)
    return obj
def get_response_log_daily_messages(request_id,log_group,query):
    """
    Run a CloudWatch Logs Insights query over the last 5 minutes and
    return the query results.

    :param request_id: unused; kept for interface compatibility with callers
    :param log_group: CloudWatch log group to query
    :param query: Logs Insights query string
    :return: the 'results' list of the completed query
    """
    client = boto3.client('logs')
    start_query_response = client.start_query(logGroupName=log_group,\
    startTime=int((datetime.today() - timedelta(minutes=5)).timestamp()),\
    endTime=int(datetime.now().timestamp()),queryString=query)
    query_id = start_query_response['queryId']
    # Bug fix: get_query_results returns a dict immediately, so the old
    # 'while response is None' loop exited after one second and could read
    # partial results. Poll the query status until it leaves the
    # Scheduled/Running states instead.
    response = client.get_query_results(queryId=query_id)
    while response.get('status') in ('Scheduled', 'Running'):
        time.sleep(1)
        response = client.get_query_results(queryId=query_id)
    return response.get('results')
def get_response_log_skype_sender(log_group,query):
    """
    Run a CloudWatch Logs Insights query (last 5 minutes) against the
    skype_sender log group and return the raw query-results response.
    """
    client = boto3.client('logs')
    start_query_response = client.start_query(logGroupName=log_group,\
    startTime=int((datetime.today() - timedelta(minutes=5)).timestamp()),\
    endTime=int(datetime.now().timestamp()),queryString=query)
    query_id = start_query_response['queryId']
    response = None
    # NOTE(review): get_query_results returns a dict immediately, so this
    # loop runs exactly once; polling the response 'status' field until the
    # query completes would be more reliable — confirm before changing
    while response is None:
        time.sleep(1)
        response = client.get_query_results(queryId=query_id)
    return response
def get_ptr_value(log_group,query):
    """
    Run a Logs Insights query and return the log-record pointer value
    extracted from the flattened response.
    :return: pointer string, or None when the key is absent
    """
    client = boto3.client('logs')
    start_query_response = client.start_query(logGroupName=log_group,\
    startTime=int((datetime.today() - timedelta(minutes=5)).timestamp()),\
    endTime=int(datetime.now().timestamp()),queryString=query)
    query_id = start_query_response['queryId']
    response = None
    ptr_value = None
    # NOTE(review): same single-iteration polling pattern as the other
    # query helpers — the query may not be complete after one second
    while response is None:
        time.sleep(1)
        response = client.get_query_results(queryId=query_id)
    # Flatten the nested response so the pointer key can be looked up directly
    response_dict = get_data_structure(response)
    if cloudwatch_conf.ptr_value in response_dict.keys():
        ptr_value = response_dict[cloudwatch_conf.ptr_value]
    else:
        print(f'log pointer key could not be fetched from response dictionary.')
    return ptr_value
def get_message_id(ptr_value):
    """
    Fetch a log record by pointer and return its message-id field.
    """
    logs_client = boto3.client('logs')
    record = logs_client.get_log_record(logRecordPointer=ptr_value)
    flattened = get_data_structure(record)
    return flattened[cloudwatch_conf.record_messageid]
def get_full_message(ptr_value):
    """
    Fetch a log record by pointer and return it flattened to a
    single-level dict.
    """
    logs_client = boto3.client('logs')
    record = logs_client.get_log_record(logRecordPointer=ptr_value)
    return get_data_structure(record)
def get_message(ptr_value):
    """
    Fetch a log record by pointer and return the 'msg' field of its body.
    """
    logs_client = boto3.client('logs')
    record = logs_client.get_log_record(logRecordPointer=ptr_value)
    flattened = get_data_structure(record)
    # The body is stored as a Python-literal string, hence literal_eval
    body_dict = ast.literal_eval(flattened[cloudwatch_conf.record_body])
    return body_dict['msg']
| 0 |
qxf2_public_repos/qxf2-lambdas/tests
|
qxf2_public_repos/qxf2-lambdas/tests/helpers/generic_utils_helper.py
|
"""
Helper methods for generic utils
"""
import os
import sys
import time
import logging
import pytest
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
def wait(wait_time):
    "Pause execution for the given number of seconds"
    time.sleep(wait_time)
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/daily_messages/requirements.txt
|
requests==2.24.0
boto3==1.14.43
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/daily_messages/daily_messages.py
|
"""
Get messages for employees from daily-messages.qxf2.com
And post to Skype Sender
"""
import boto3
import requests
BASE_URL = 'http://daily-messages.qxf2.com'
QUEUE_URL = 'https://sqs.ap-south-1.amazonaws.com/285993504765/skype-sender'
def clean_message(message):
    "Clean up the message received: replace both quote styles with dashes"
    for quote_char in ("'", '"'):
        message = message.replace(quote_char, '-')
    return message
def get_message(endpoint):
    "Fetch a message for employees from the daily-messages app"
    response = requests.get(url=BASE_URL+endpoint)
    raw_message = response.json()['msg']
    return clean_message(raw_message)
def write_message(daily_message, channel):
    "Queue a message for the Skype Sender"
    # The payload is the str() of a dict, which the sender parses back
    payload = str({'msg':f'{daily_message}', 'channel':channel})
    boto3.client('sqs').send_message(QueueUrl=QUEUE_URL, MessageBody=payload)
def lambda_handler(event, context):
    """
    Lambda entry point.
    Expects an event of type {'endpoint':'/blah','channel':'blah'}.
    """
    #This lambda expects an event of type {'endpoint':'/blah','channel':'blah'}
    message = get_message(event.get('endpoint'))
    # Channel defaults to 'test' when not supplied
    write_message(message, event.get('channel','test'))
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/holiday_reminder/requirements.txt
|
beautifulsoup4==4.9.3
boto3==1.14.43
python-dateutil==2.8.1
requests==2.25.0
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/holiday_reminder/holiday_reminder.py
|
"""
Get upcoming holidays from Qxf2's holidays page
And post to Skype Sender
"""
import json
import requests
from datetime import datetime
import boto3
from bs4 import BeautifulSoup
QUEUE_URL = 'https://sqs.ap-south-1.amazonaws.com/285993504765/skype-sender'
qxf2_holidays = {'optional':{},'public':{}}
def fetch_holidays():
    """
    Scrape the Qxf2 holidays page and populate the module-level
    qxf2_holidays dict with 'public' and 'optional' holidays
    (keyed by the scraped date cells).
    """
    now = datetime.now()
    # On 25 Dec, switch to next year's holiday page
    if (now.month == 12 and now.day == 25):
        url = f'https://qxf2.com/{now.year+1}-holidays.html'
    else:
        url = f'https://qxf2.com/{now.year}-holidays.html'
    req = requests.get(url)
    soup = BeautifulSoup(req.text, 'html.parser')
    tbody = soup.find("tbody")
    rows = tbody.find_all('tr')
    # Public holidays come from the main table rows
    for row in rows:
        cells = row.find_all("td")
        qxf2_holidays['public'][cells[0].text] = ''.join(cells[2].text.split())
    # NOTE(review): the optional-holiday list is assumed to be the sixth
    # <ul> on the page — tightly coupled to the page layout
    ul = soup.find_all('ul')
    li = ul[5].find_all('li')
    for items in li:
        items = items.text.split(',')
        qxf2_holidays['optional'][items[0]] = ''.join(items[2].split())
def get_holidays():
    """
    Build a reminder message for holidays that are exactly 7 or 3 days away.
    :return: reminder text (empty string when nothing is due), with
        apostrophes stripped
    """
    fetch_holidays()
    msg = ''
    today = datetime.today().strftime("%d-%b-%Y")
    for key,value in qxf2_holidays.items():
        for date,name in qxf2_holidays[key].items():
            # Days remaining until the holiday (dates are %d-%b-%Y strings)
            delta = datetime.strptime(date, "%d-%b-%Y") - datetime.strptime(today, "%d-%b-%Y")
            if key == 'optional':
                end_string = 'Kindly take it if you have not taken one this year by marking on calendar and informing client about it'
            else:
                end_string = 'Kindly inform client about it right now'
            if delta.days == 7 or delta.days ==3:
                msg += f"\n Reminder - {delta.days} days from now {date} is a {name} {key} holiday. {end_string}"
    # Apostrophes are stripped, presumably so the str(dict) payload built by
    # write_message stays parseable downstream — confirm
    return msg.replace("'","")
def write_message(holiday_reminder_message, channel):
    "Queue the holiday reminder for the Skype Sender"
    payload = str({'msg':f'{holiday_reminder_message}', 'channel':channel})
    boto3.client('sqs').send_message(QueueUrl=QUEUE_URL, MessageBody=payload)
def lambda_handler(event, context):
    """
    Lambda entry point: build the holiday reminder and post it to the
    Skype sender queue (channel defaults to 'main').
    """
    message = get_holidays()
    write_message(message, event.get('channel','main'))
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/sync_neo4j_and_graphql/requirements.txt
|
requests==2.24.0
boto3==1.14.43
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/sync_neo4j_and_graphql/sync_neo4j_graphql.py
|
"""
Lambda to update neo4j database with respect to graphql data
"""
import json
import requests
import os
from urllib.parse import urljoin
API_KEY = os.environ.get('API_KEY')
URL = os.environ.get('URL')
def get_employees_graphql():
    "Fetch the list of employees from the graphql-backed admin API"
    get_employee_url = urljoin(URL, 'survey/admin/employees')
    response = requests.get(get_employee_url, headers={"User":API_KEY})
    return response.json()
def get_employees_neo4j(employee_email):
    """
    Get employee details from neo4j via email.
    :param employee_email: JSON string of the form '{"email": ...}'
    :return: parsed JSON response ("Employee does not exist" when absent,
        per the handling in lambda_handler)
    """
    get_employee_by_email_url = urljoin(URL, 'survey/admin/get_employee_by_email')
    employee_details = requests.post(get_employee_by_email_url, data=employee_email, headers={"User":API_KEY}).json()
    return employee_details
def add_new_employee(employee):
    """
    Add new employee to the neo4j database.
    :param employee: dict with firstName/lastName/email/fullName/status keys
    :return: raw requests.Response (status checked by employee_add_status)
    """
    add_employee_url = urljoin(URL, 'survey/admin/new_employee')
    emp_data = {"data":{"firstName":employee['firstName'],"lastName":employee['lastName'],"email":employee['email'],"fullName":employee['fullName'],"status":employee['status']}}
    emp_data = json.dumps(emp_data)
    add_employee = requests.post(add_employee_url, data=emp_data, headers={"User":API_KEY})
    return add_employee
def set_employee_status(employee_status_data):
    "Update the active status of an employee via the survey admin API"
    update_status_url = urljoin(URL, 'survey/admin/update_employee_status')
    response = requests.post(update_status_url, data=employee_status_data, headers={"User":API_KEY})
    return response.json()
def employee_add_status(add_employee,employee):
    """
    Build a human readable status string for an add-employee API call.

    :param add_employee: response object from the new_employee endpoint
    :param employee: full name of the employee that was added
    :return: success/failure message
    """
    # f-string replaces the old %-formatting; output is unchanged
    if add_employee.status_code == 200:
        add_status = f"Successfully added new employee {employee}"
    else:
        add_status = "Failed to add new employee"
    return add_status
def lambda_handler(event, context):
    """
    Method run when Lambda is triggered.
    Syncs neo4j with graphql: adds employees missing from neo4j and
    updates stale active-status flags.
    :return: (add_status message, list of status-update messages)
    """
    graphql_employees = get_employees_graphql()
    add_status = "No new employees to add"
    update_status = ["The active status of all the employees are up to date"]
    for employee in graphql_employees:
        employee_email = {"email":employee['email']}
        employee_email = json.dumps(employee_email)
        employee_details = get_employees_neo4j(employee_email)
        # Add employees present in graphql but absent in neo4j; the address
        # in the EMAIL env var is excluded from syncing
        if employee_details == "Employee does not exist" and employee['email'] != os.environ.get('EMAIL'):
            add_employee = add_new_employee(employee)
            add_status = employee_add_status(add_employee, employee['fullName'])
        if employee_details != "Employee does not exist":
            # Update status only when neo4j disagrees with graphql
            if employee_details[0]['employee_details']['status'] != employee['status']:
                employee_status_data = {"email":{"email":employee['email']},"status":{"employee_status":employee['status']}}
                employee_status_data = json.dumps(employee_status_data)
                update_employee_status = set_employee_status(employee_status_data)
                # Drop the default "up to date" placeholder on first real update
                if update_status[0] == "The active status of all the employees are up to date":
                    update_status.pop(0)
                update_status.append(update_employee_status[0])
    return add_status, update_status
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/qxf2_employees_data/requirements.txt
|
boto3==1.14.43
python-dateutil==2.8.1
requests==2.25.0
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/qxf2_employees_data/qxf2_employees_data.py
|
import json
import boto3
import os
import requests
BASE_URL = os.environ.get('BASE_URL')
PASSWORD = os.environ.get('PASSWORD')
USERNAME = os.environ.get('USERNAME')
def authenticate():
    """
    Obtain a GraphQL access token using the configured credentials.
    :return: access token string, or None when the auth mutation fails
    """
    # PASSWORD/USERNAME come from the lambda environment
    query = f"""mutation {{
    auth(password: "{PASSWORD}", username: "{USERNAME}") {{
        accessToken
        refreshToken
    }}
    }}
    """
    response = requests.post(url = BASE_URL, json = {'query': query})
    return response.json().get('data',{}).get('auth',{}).get('accessToken',None)
def get_all_employees():
    """
    Query allEmployees from the GraphQL endpoint.
    :return: list of employee edges (each wrapping a 'node' dict)
    """
    query = """query
    findAllEmployees{
    allEmployees{
        edges{
        node{
            email
            firstname
            lastname
            dateJoined
            skypeId
            isActive
        }
        }
    }
    }"""
    # The endpoint expects a Bearer token obtained via authenticate()
    access_token = authenticate()
    headers = {'Authorization': f'Bearer {access_token}'}
    response = requests.get(url = BASE_URL, json = {'query': query}, headers =\
    headers)
    all_employees = response.json().get('data', {}).get('allEmployees', {}).get('edges', [])
    return all_employees
def lambda_handler(event, context):
    "Lambda entry point: return all employees with a 200 status"
    response = {
        'statusCode': 200,
        'body': get_all_employees()
    }
    return response
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/pto_detector/requirements.txt
|
requests==2.24.0
boto3==1.14.43
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/pto_detector/pto_detector.py
|
"""
Get messages for employees from pto-detector SQS
And post to Skype Sender if the message is a PTO message
"""
import json
import os
import boto3
import requests
IS_PTO_URL = 'https://practice-testing-ai-ml.qxf2.com/is-pto'
QUEUE_URL = 'https://sqs.ap-south-1.amazonaws.com/285993504765/skype-sender'
def clean_message(message):
    "Clean up the message received: replace both quote styles with dashes"
    for quote_char in ("'", '"'):
        message = message.replace(quote_char, '-')
    return message
def get_is_pto(message):
    "Check if the message is a PTO message (classifier score of 1)"
    response = requests.post(url=IS_PTO_URL,data={'message':message})
    return response.json()['score'] == 1
def write_message(message, channel):
    "Queue a message for the Skype Sender"
    payload = str({'msg':f'{message}', 'channel':channel})
    boto3.client('sqs').send_message(QueueUrl=QUEUE_URL, MessageBody=payload)
def get_message_contents(event):
    "Retrieve the message contents from the SQS event"
    # SNS envelope: body is JSON whose 'Message' field is itself JSON
    body = event.get('Records')[0].get('body')
    inner = json.loads(body)['Message']
    return json.loads(inner)
def lambda_handler(event, context):
    """
    Lambda entry point: inspect a Skype message from the SQS event and, when
    it is a PTO message from the monitored channel not posted by the bot
    itself, forward a notification to the configured send channel.
    """
    message_contents = get_message_contents(event)
    message = message_contents['msg']
    channel = message_contents['chat_id']
    user = message_contents['user_id']
    print(f'{message}, {user}, {channel}')
    is_pto_flag = False
    # Only classify messages from the PTO channel that Qxf2Bot did not send
    if channel == os.environ.get('PTO_CHANNEL') and user != os.environ.get('Qxf2Bot_USER'):
        cleaned_message = clean_message(message)
        is_pto_flag = get_is_pto(cleaned_message)
        print(f'{is_pto_flag}')
    # cleaned_message is only referenced when is_pto_flag is True,
    # which implies the branch above ran
    if is_pto_flag and user != os.environ.get('Qxf2Bot_USER'):
        message_to_send = f'Detected PTO message {cleaned_message}'
        write_message(message_to_send, os.environ.get('SEND_CHANNEL'))
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/comment_reviewer/requirements.txt
|
boto3==1.17.90
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/comment_reviewer/comment_reviewer.py
|
"""
This Lambda will :
- Give a list of comment reviewers
"""
import json
import os
import boto3
QUEUE_URL = 'https://sqs.ap-south-1.amazonaws.com/285993504765/skype-sender'
at_Qxf2Bot = '<at id="8:live:.cid.92bd244e945d8335">qxf2bot</at>!'
at_Qxf2Bot_english = '@qxf2bot!'
COMMANDS = [f'comment reviewers, please {at_Qxf2Bot}',
f'i need comment reviewers {at_Qxf2Bot}',
f'comment reviewers please {at_Qxf2Bot}',
f'comment reviewers, please {at_Qxf2Bot_english}',
f'i need comment reviewers {at_Qxf2Bot_english}',
f'comment reviewers please {at_Qxf2Bot_english}']
FIRST_SSM = 'first_comment_reviewer_index'
SECOND_SSM = 'second_comment_reviewer_index'
RESET_COMMANDS = [f'reset primary code reviewer {at_Qxf2Bot}',
f'reset secondary code reviewer {at_Qxf2Bot}']
def read_parameter(client, parameter_name, decryption_flag = False):
    """
    Read a SSM parameter and return its value.
    :param client: boto3 SSM client
    :param parameter_name: name of the parameter to fetch
    :param decryption_flag: decrypt SecureString parameters when True
    """
    result = client.get_parameter(Name=parameter_name, WithDecryption=decryption_flag)
    return result['Parameter']['Value']
def write_parameter(client, parameter_name, value, decryption_flag = False):
    """
    Write to a SSM parameter.

    :param client: boto3 SSM client
    :param parameter_name: name of the parameter to write
    :param value: value to store
    :param decryption_flag: unused; kept for signature symmetry with read_parameter
    :return: True when the put succeeded (HTTP 200)
    """
    response = client.put_parameter(
        Name = parameter_name,
        Value = value,
        Overwrite = True
    )
    # The comparison already yields a bool; the old
    # 'True if ... else False' conditional was redundant
    return response['ResponseMetadata']['HTTPStatusCode'] == 200
def get_reviewer_index(reviewer_type):
    "Return the current reviewer rotation index stored in SSM"
    ssm_client = boto3.client("ssm")
    return int(read_parameter(ssm_client, reviewer_type))
def update_reviewer_index(reviewer_type,increment=1):
    """
    Increment the reviewer index by `increment`.
    Default advances the rotation by 1; callers pass -1 to undo a step.
    """
    client = boto3.client("ssm")
    reviewer_index = read_parameter(client, reviewer_type)
    write_parameter(client,
    reviewer_type,
    str(int(reviewer_index) + increment))
def get_comment_reviewers(reviewer_type):
    "Return the reviewer names configured in the named environment variable"
    configured = os.environ.get(reviewer_type, "")
    return configured.split(',')
def get_message_contents(event):
    "Retrieve the message contents from the SQS event"
    # SNS envelope: body is JSON whose 'Message' field is itself JSON
    body = event.get('Records')[0].get('body')
    inner = json.loads(body)['Message']
    return json.loads(inner)
def write_message(message, channel):
    """
    Send a message to the Skype Sender SQS queue.
    The payload is the str() of a dict, which the sender parses back.
    """
    sqs = boto3.client('sqs')
    print(channel)
    message = str({'msg':f'{message}', 'channel':channel})
    print(message)
    sqs.send_message(QueueUrl=QUEUE_URL, MessageBody=(message))
def in_correct_channel(channel):
    "Is the message in the configured reviewer channel?"
    expected_channel = os.environ.get('channel','')
    return channel == expected_channel
def is_code_reviewer_command(message):
    "Is this a code reviewer command?"
    normalized = message.lower()
    return normalized in COMMANDS
def is_reviewer_reset_command(message):
    "Is this a reviewer-index reset command?"
    normalized = message.lower()
    return normalized in RESET_COMMANDS
def get_reply():
    """
    Get the primary and secondary code reviewers.
    Indexes live in SSM and are taken modulo the list length, so the
    rotation cycles indefinitely.
    """
    first_comment_reviewers = get_comment_reviewers('first_comment_reviewers')
    second_comment_reviewers = get_comment_reviewers('second_comment_reviewers')
    primary_index = get_reviewer_index(FIRST_SSM)
    secondary_index = get_reviewer_index(SECOND_SSM)
    first_comment_reviewer = first_comment_reviewers[primary_index%len(first_comment_reviewers)]
    second_comment_reviewer = second_comment_reviewers[secondary_index%len(second_comment_reviewers)]
    reply = f'First comment reviewer: {first_comment_reviewer}\n\nSecond comment reviewer: {second_comment_reviewer}\n'
    return reply
def update_reviewer_indexes():
    "Advance both reviewer rotation indexes by one"
    for ssm_name in (FIRST_SSM, SECOND_SSM):
        update_reviewer_index(ssm_name)
def lambda_handler(event, context):
    """
    Code reviewer lambda.
    Replies to 'comment reviewers' commands with the current reviewer pair
    (then advances the rotation) and handles reviewer-index reset commands.
    """
    message_contents = get_message_contents(event)
    message = message_contents['msg'].strip()
    channel = message_contents['chat_id']
    # NOTE(review): 'user' is extracted but never used below
    user = message_contents['user_id']
    if in_correct_channel(channel):
        if is_code_reviewer_command(message):
            reply = get_reply()
            write_message(reply, os.environ.get('channel',''))
            # Advance the rotation only after replying
            update_reviewer_indexes()
        if is_reviewer_reset_command(message):
            prev = -1
            curr = -1
            if 'primary' in message.lower():
                prev = get_reviewer_index(FIRST_SSM)
                update_reviewer_index(FIRST_SSM, -1)
                curr = get_reviewer_index(FIRST_SSM)
            if 'secondary' in message.lower():
                prev = get_reviewer_index(SECOND_SSM)
                update_reviewer_index(SECOND_SSM, -1)
                curr = get_reviewer_index(SECOND_SSM)
            message = f'Reset index from {prev} to {curr}'
            write_message(message, os.environ.get('channel',''))
    return {
        'statusCode': 200,
        'body': json.dumps('Done!')
    }
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/skype_sender/requirements.txt
|
skpy==0.9.1
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/skype_sender/qxf2_skype_sender.py
|
"""
This script will let a user send messages on some Qxf2 Skype channels
Some of the commonly used channels are listed in the environment variable SKYPE_CHANNELS
"""
import json
import os
import requests
def get_dict(my_string):
    "Return a dict parsed from a (possibly single-quoted) JSON-like string"
    normalized = my_string.replace("'", "\"")
    return json.loads(normalized)
def get_channel_id(msg_body):
    """
    Return the channel id, defaulting to 'main' when absent.
    Raw Skype ids (ending in '.skype') pass through unchanged; other names
    are lower-cased and looked up in the SKYPE_CHANNELS mapping.
    """
    channel = msg_body.get('channel','main')
    # Only lower-case friendly names; '.skype' ids are used verbatim
    if channel[-6:].lower() != '.skype':
        channel = channel.lower()
    channels = get_dict(os.environ.get('SKYPE_CHANNELS'))
    # Unknown names fall through as-is (assumed to already be a channel id)
    channel_id = channels.get(channel,channel)
    return channel_id
def post_message(event, context=None):
    """
    Post a message: forward the SQS-triggered payload to the Skype sender
    HTTP endpoint configured in the environment.
    """
    print(f'The trigger event is: {event}')
    full_msg = get_dict(event['Records'][0]['body'])
    if 'msg' in full_msg.keys():
        msg = full_msg['msg']
        channel_id = get_channel_id(full_msg)
        # Endpoint and API token both come from the lambda environment
        url = os.environ['SKYPE_SENDER_ENDPOINT']
        data = {'API_KEY' : os.environ['API_TOKEN'],
                'msg' : msg,
                'channel' : channel_id}
        response = requests.post(url, json=data)
        print(f'Received {response.json()} for {msg}')
    else:
        print('The event had no key called msg in it\'s body')
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/dummy_lambda/requirements.txt
|
requests==2.24.0
boto3==1.14.43
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/dummy_lambda/lambda_function.py
|
"""
Simplest lambda to play with GitHub actions
"""
import json
import boto3
def lambda_handler(event, context):
    "Simplest lambda entry point: always report success"
    response = {
        'statusCode': 200,
        'body': json.dumps('Hello from Lambda!')
    }
    return response
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/url_filtering_lambda_rohini/requirements.txt
|
requests==2.24.0
boto3==1.14.43
validators==0.20.0
openai==0.27.2
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/url_filtering_lambda_rohini/url_filtering_lambda_rohini.py
|
"""
Lambda to to pull URL from ETC channel messages
And post any URL to the newsletter generator
"""
import json
import os
import re
import random
import boto3
import requests
import validators
import openai
EXCLUDE_URL_STRINGS = ['skype.com', 'meet.google.com', 'trello.com/b']
QUEUE_URL = os.environ.get('SKYPE_SENDER_QUEUE_URL')
def clean_message(message):
    "Clean up the message received: replace both quote styles with dashes"
    for quote_char in ("'", '"'):
        message = message.replace(quote_char, '-')
    return message
def get_message_contents(event):
    "Retrieve the message contents from the SQS event"
    # SNS envelope: body is JSON whose 'Message' field is itself JSON
    body = event.get('Records')[0].get('body')
    inner = json.loads(body)['Message']
    return json.loads(inner)
def process_reply(reply):
    """
    Strip the title and summary out of a ChatGPT reply.
    Falls back to FILL-THIS placeholders when the markers are absent.
    """
    title = "FILL THIS PLEASE!"
    summary = "Dear Human, FILL THIS PLEASE!"
    if 'TITLE:' in reply and 'SUMMARY:' in reply:
        after_title = reply.split('TITLE:')[-1]
        title = after_title.split('SUMMARY:')[0].strip()
        summary = reply.split('SUMMARY:')[-1].strip()
    return title, summary
def ask_the_all_knowing_one(input_message, max_tokens=512):
    """
    Return the ChatGPT response for the given article URL/text.
    The prompt instructs the model to reply with TITLE:/SUMMARY: markers
    that process_reply can parse. Returns '' when no API key is configured
    or the call fails.
    """
    openai.api_key = os.environ.get('CHATGPT_API_KEY', '')
    model_engine = os.environ.get('CHATGPT_VERSION', 'gpt-3.5-turbo')
    summary_and_title = ''
    # Wrap the article in a strict formatting instruction
    input_message = "I want you to format your reply in a specific manner to this request." \
    "I am going to send you an article (in quotes at the end of this message)." \
    "You tell me its title and summary." \
    "Use no more than 3 sentences for the summary." \
    "Preface the title with the exact string TITLE: " \
    "and preface the summary with the exact string SUMMARY:" \
    "If you do not know, then put TITLE: UNKNOWN and SUMMARY: UNKNOWN." \
    f"Ok, here is the article '{input_message}'"
    try:
        if openai.api_key.strip():
            response = openai.ChatCompletion.create(
                model=model_engine,
                messages=[
                    {"role": "user", "content": input_message},
                ],
                max_tokens=max_tokens
            )
            summary_and_title = response["choices"][0]["message"]["content"]
        else:
            print('ChatGPT skipped since no key is setup.')
    except Exception as e:
        # Best-effort: a ChatGPT failure must not break URL posting
        print('Unable to make call to ChatGPT.')
        print(f'Python says: {e}')
    return summary_and_title
def get_title_summary(article_url):
    "Ask ChatGPT for the title and summary of the article"
    chatgpt_reply = ask_the_all_knowing_one(article_url)
    return process_reply(chatgpt_reply)
def get_url(message):
    """
    Get the URL(s) from the message.
    Matches http(s)://, www., and bare-domain forms, drops matches from
    EXCLUDE_URL_STRINGS domains, and keeps only strings that pass
    validators.url.
    :return: list of validated URLs (possibly empty)
    """
    regex = r"(?i)\b((?:https?://|www\d{0,3}[.]|[a-z0-9.\-]+[.][a-z]{2,4}/)"
    regex += r"(?:[^\s()<>]+|\(([^\s()<>]+|(\([^\s()<>]+\)))*\))"
    regex += r"(?:\(([^\s()<>]+|(\([^\s()<>]+\)))*\)|[^\s`!()\[\]{};:'\".,<>?«»“”‘’]))"
    url_patterns = re.findall(regex,message)
    urls = []
    # findall returns tuples of capture groups; group 0 is the full match
    for url in url_patterns:
        # A trailing '-' is likely an artifact of clean_message replacing
        # adjacent quote characters with dashes — confirm
        if url[0][-1] != '-':
            present_flag = False
            for exclude_url in EXCLUDE_URL_STRINGS:
                if exclude_url in url[0]:
                    present_flag = True
                    break
            if not present_flag and validators.url(url[0]):
                urls.append(url[0])
    return urls
def post_to_newsletter(final_url, article_editor, category_id = '5'):
    """
    Post each URL to the newsletter API.
    :param final_url: list of validated article URLs
    :param article_editor: employee assigned to edit the article
    :param category_id: default category id. NOTE(review): the
        DEFAULT_CATEGORY env var overrides whatever the caller passes —
        confirm that is intended
    :return: HTTP status code of the last post, or "" when the list is empty
    """
    url = os.environ.get('URL', '')
    category_id = os.environ.get('DEFAULT_CATEGORY', category_id)
    headers = {'x-api-key' : os.environ.get('API_KEY_VALUE','')}
    response_status = ""
    if len(final_url) != 0:
        for article_url in final_url:
            # Each article gets a ChatGPT-generated title and summary
            title, summary = get_title_summary(article_url)
            data = {'url': article_url,
                    'title': title,
                    'description': summary,
                    'category_id': category_id,
                    'article_editor': article_editor}
            response = requests.post(url, data = data, headers = headers)
            response_status = response.status_code
            print(response_status)
    return response_status
def pick_random_user(article_editors_list):
    """
    Return one randomly chosen editor from the list.

    :param article_editors_list: candidate editor names (non-empty)
    :return: the chosen editor as a string
    """
    # random.choice replaces the previous copy/pop/join construction;
    # the result is still a uniformly chosen element rendered via str()
    return str(random.choice(article_editors_list))
def get_article_editor(employee_list):
    "Return the candidate article editors configured in the environment"
    configured = os.environ.get(employee_list, "")
    return configured.split(',')
def write_message(message, channel):
    """
    Send a message to Skype Sender via SQS.
    Uses the localstack endpoint when LOCALSTACK_ENV is 'true' so the
    lambda can be exercised locally.
    """
    # Check if running on localstack or production environment
    is_localstack = os.environ.get('LOCALSTACK_ENV') == 'true'
    if is_localstack:
        sqs = boto3.client('sqs',endpoint_url= 'http://localstack:4566')
    else:
        sqs = boto3.client('sqs')
    print(channel)
    # The payload is the str() of a dict, which the sender parses back
    message = str({'msg':f'{message}', 'channel':channel})
    print(message)
    sqs.send_message(QueueUrl=QUEUE_URL, MessageBody=(message))
def get_reply():
    "Pick the employee who will edit the article for the newsletter"
    editors = get_article_editor('employee_list')
    chosen_editor = pick_random_user(editors)
    return f'Article editor: {chosen_editor}', chosen_editor
def lambda_handler(event, context):
    """
    Entry point for the Lambda trigger.
    Extracts URLs from an incoming ETC-channel message, posts them to
    the newsletter endpoint and announces the assigned article editor.
    """
    content = get_message_contents(event)
    message = content['msg']
    channel = content['chat_id']
    user = content['user_id']
    print(f'{message}, {user}, {channel}')
    response = ""
    final_url = []
    # Only human messages from the ETC channel are processed.
    if channel != os.environ.get('ETC_CHANNEL') or user == os.environ.get('Qxf2Bot_USER'):
        print("Message not from ETC channel")
    else:
        print("Getting message posted on ETC ")
        final_url = get_url(clean_message(message))
        # Filtered URL is printed by lambda
        print("Final url is :", final_url)
        if not final_url:
            print("message does not contain any url")
        else:
            reply, article_editor = get_reply()
            response = post_to_newsletter(final_url, article_editor)
            write_message(reply, os.environ.get('ETC_CHANNEL', ''))
    return {
        'statusCode': response,
        'body': json.dumps(final_url)
    }
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/work-anniversary/requirements.txt
|
requests==2.25.0
python-dateutil==2.8.1
Pillow==7.2.0
inflect==5.0.2
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/work-anniversary/user_credentials.py
|
import os
USERNAME = os.environ['USERNAME']
PASSWORD = os.environ['PASSWORD']
CHANNEL_ID = os.environ['CHANNEL_ID']
API_KEY = os.environ['API_KEY']
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/work-anniversary/work_anniversary.py
|
"""
Get employees joining date and post work anniversary image to skype
"""
import datetime
from datetime import date
from dateutil.relativedelta import relativedelta
import json
import os
import requests
import user_credentials as credentials
import inflect
from PIL import Image, ImageDraw, ImageFont
import random,time
import textwrap
import time
SKYPE_URL = 'https://skype-sender.qxf2.com/send-image'
BASE_URL = 'https://qxf2-employees.qxf2.com/graphql'
inflect_obj = inflect.engine()
def authenticate():
    """Return an access token for the employees GraphQL API.

    Runs the `auth` mutation with credentials from user_credentials
    (which reads them from the environment) and returns accessToken,
    or None when the response carries no auth data.
    """
    # NOTE(review): credentials are interpolated straight into the query
    # string; acceptable for trusted env values, but worth confirming.
    query = f"""mutation {{
        auth(password: "{credentials.PASSWORD}", username: "{credentials.USERNAME}") {{
            accessToken
            refreshToken
        }}
    }}
    """
    response = requests.post(url = BASE_URL, json = {'query': query})
    # Defensive .get() chain: any missing level yields None instead of raising.
    return response.json().get('data',{}).get('auth',{}).get('accessToken',None)
def get_all_employees():
    """Fetch every employee record from the GraphQL endpoint.

    Returns the list of edges; each edge wraps a `node` dict carrying
    email, firstname, lastname, dateJoined and isActive. An empty list
    is returned when the response has no data.
    """
    query = """query
    findAllEmployees{
        allEmployees{
            edges{
                node{
                    email
                    firstname
                    lastname
                    dateJoined
                    isActive
                }
            }
        }
    }"""
    access_token = authenticate()
    headers = {'Authorization': f'Bearer {access_token}'}
    # NOTE(review): a GET carrying a JSON body is unusual but matches
    # what this server accepts - confirm before changing to POST.
    response = requests.get(url = BASE_URL, json = {'query': query}, headers =\
        headers)
    all_employees = response.json().get('data', {}).get('allEmployees', {}).get('edges', [])
    return all_employees
def is_active_employee(each_node):
    """Return True when the employee node is marked active ('Y').

    :param each_node: edge dict with a 'node' entry carrying 'isActive'
    """
    # Direct comparison replaces the redundant `True if ... else False`.
    return each_node['node']['isActive'] == 'Y'
def get_default_quotes(data, difference_in_years):
    "Pick a stock quote: the '1st' pool for first anniversaries, otherwise the experienced pool"
    pool = data['1st'] if difference_in_years == '1st' else data['experienced']
    return random.choice(pool)
def calculate_work_anniversary(emp_joined_date, current_date, emp_name):
    """Return (message, wrapped_quote) when today is emp_name's anniversary.

    On non-anniversary days returns (None, ''). Quotes come from
    anniversary_quotes.txt: a personal pool keyed by the employee's
    full name when present and non-empty, otherwise a shared default
    pool chosen by get_default_quotes.
    """
    msg,quote_string = None,None
    # Anniversary means the same month and day as the joining date.
    if (emp_joined_date.month == current_date.month and emp_joined_date.day == current_date.day):
        # e.g. 3 years -> '3rd' via inflect's ordinal helper.
        difference_in_years = inflect_obj.ordinal(relativedelta(current_date, emp_joined_date).years)
        msg = f'Happy {difference_in_years} work anniversary {emp_name}'
        with open('anniversary_quotes.txt',encoding="utf8") as json_file:
            data = json.load(json_file)
            if emp_name in data.keys() :
                if len(data[emp_name]) >= 1:
                    quote_string = random.choice(data[emp_name])
                else:
                    # Personal pool exists but is empty - fall back to defaults.
                    quote_string = get_default_quotes(data,difference_in_years)
            else:
                quote_string = get_default_quotes(data,difference_in_years)
    # Wrap long quotes so they fit on the greeting-card image.
    final_quote_string = '\n'.join(textwrap.wrap(quote_string, 70, break_long_words=False,subsequent_indent='\n')) if quote_string is not None else ''
    return msg,final_quote_string
def add_text_to_image(message, emp_name, quote_string):
    """Render the anniversary message and quote onto the card template.

    Saves the composed image under /tmp (the only writable path inside
    Lambda) and returns that file path.
    """
    image = Image.open('Work_anniversary_template.png')
    draw = ImageDraw.Draw(image)
    # Two sizes of the same font: large for the headline, smaller for the quote.
    font1 = ImageFont.truetype('Casanova_Font_Free.ttf', size=130)
    font2 = ImageFont.truetype('Casanova_Font_Free.ttf', size=95)
    # starting position of the message
    (x, y) = (650, 500)
    color = 'rgb(128, 0, 128)' # purple color
    draw.text((x, y), message, fill=color, font=font1)
    (x, y) = (400, 700)
    color = 'rgb(255, 69, 0)' # orange color
    draw.text((x, y), quote_string, fill=color, font=font2)
    filepath = '/tmp/' +emp_name+ '_greeting_card.png'
    image.save(filepath,quality=95)
    return filepath
def send_image(img, img_name, channel_id = credentials.CHANNEL_ID):
    """Upload an image to the Skype sender service.

    :param img: path of the image file to send
    :param img_name: name reported to the service
    :param channel_id: target Skype channel (defaults to the configured one)
    :return: HTTP status code of the upload request
    """
    data = {'API_KEY' : credentials.API_KEY,
            'img_name' : img_name,
            'channel' : channel_id}
    # `with` guarantees the file handle is closed even if the POST raises;
    # the original opened it inline and leaked it.
    with open(img, 'rb') as img_file:
        files = [
            ('document', (img_name, img_file, 'application/octet')),
            ('data', ('data', json.dumps(data), 'application/json')),
        ]
        response = requests.post(SKYPE_URL, files = files)
    # Brief pause so consecutive cards do not flood the channel.
    time.sleep(2)
    return response.status_code
def is_qxf2_anniversary(current_date):
    "Post a greeting card when today is Qxf2's founding anniversary (Feb 1)"
    # Guard clause: nothing to do on any other day.
    if current_date.month != 2 or current_date.day != 1:
        return
    founding_date = datetime.datetime.strptime('01-Feb-13', "%d-%b-%y")
    years_ordinal = inflect_obj.ordinal(relativedelta(current_date, founding_date).years)
    greeting = f'Qxf2 {years_ordinal} Year Anniversary'
    card_path = add_text_to_image(greeting, 'qxf2', 'Wishing a Great Success Ahead')
    send_image(card_path, 'work_anniversary')
def is_work_anniversary():
    """Check every active employee for a work anniversary today.

    Posts a greeting card per matching employee, deletes the temp image
    afterwards, and checks Qxf2's own founding anniversary exactly once
    per invocation (instead of depending on loop-local state).
    """
    emp_data = get_all_employees()
    # Single clock read so all comparisons use the same date.
    current_date = date.today()
    for each_node in emp_data:
        emp_joined_date = each_node['node']['dateJoined']
        # Skip inactive employees and records without a joining date.
        if not is_active_employee(each_node) or emp_joined_date is None:
            continue
        emp_name = each_node['node']['firstname'] + " " + each_node['node']['lastname']
        emp_joined_date = datetime.datetime.strptime(emp_joined_date, "%d-%b-%y")
        message, quote_string = calculate_work_anniversary(emp_joined_date, current_date, emp_name)
        if message is not None:
            file_path = add_text_to_image(message, emp_name, quote_string)
            send_image(file_path, 'work_anniversary')
            # Clean up /tmp so repeated runs don't accumulate images.
            os.remove(file_path)
    # Company anniversary check runs once, after the employee loop.
    is_qxf2_anniversary(current_date)
def lambda_handler(event, context):
    """Lambda entry point: event and context are unused; runs the
    daily work-anniversary check."""
    is_work_anniversary()
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/work-anniversary/anniversary_quotes.txt
|
{"1st":["Congratulations on your one year of work experience! Every company needs a hard-working and intelligent employee like you. Best of luck!","Today you’ve completed your first year in this workplace — congratulations! It has been lovely to work with you. We wish you all the best!","My colleague, congratulations to you for your one-year of work! You are an excellent colleague, and I wish you the best at this company.", "Congratulations on your first year of work! In just one year, you’ve shown just how capable you are. May the years ahead of you bring even more success!", "This company’s future is brighter because of employees like you. Well done! Enjoy the one-year anniversary of your career!","Your hard work and punctuality set the bar for others. Happy first work anniversary!"],
"experienced":["You are an asset to our company, and I hope you’ll continue on this path of hard work. We are very proud of you, and we wish you a wonderful work anniversary!","You are an ambitious worker and it’s always inspiring to work alongside you. Congratulations on an accomplished year of work!","You are one of a kind. Thank you for helping us grow. Have a happy work anniversary!","We are proud to be associated with you. Your contributions and commitment to our work are unparalleled. Have a happy work anniversary!","Employees like you are the pride and joy of this company. We are proud to have you with here.","Your attitude makes you what you are : a winner! You inspire and motivate the team like no other. Thank you for helping to push us forward. We wish you a happy work anniversary.","Everybody at work is lucky to have you here. We wish you a happy anniversary!","Congratulations on your anniversary with the company! Your high spirit and go-getter attitude is what makes you special. Working with you has been inspiring and motivating.","You are a source of inspiration, as your work ethic is exemplary. Thank you for being there every time we have needed you. Wishing you a happy work anniversary!","You are a dedicated worker and deserve all the success in the world. Keep up the good work, and cheers to your hard work!","We are grateful for your contribution and dedication to our company. Today, you complete a year with us. Wishing you a happy work anniversary.","You are a gem that will always shine. Have a happy work anniversary.","Every company should have someone like you at the forefront. You're talented in every sense of the word. Have a happy work anniversary.","Your dedication and passion for your work make you the success that you are. Keep raising the bar, and have a happy work anniversary.","We are lucky to have you on our team. Wishing you a happy work anniversary.","Your commitment to your job sets an example for everyone else on this team. 
Have a great work anniversary.","Having an employee like you is a matter of utmost pride and gratitude. Your loyalty and hard work have set an example for everyone in our organisation. Thank you for being a part of our organisation. Happy Work Anniversary!","Words are not enough to thank you for everything that you have done so far for us. However, I would like to extend my warmest greetings on the occasion of your Work Anniversary. Kudos to you!","A committed employee and an awesome human being. That’s what you are and we are glad to have someone like you in our workforce. We wish you a very happy work anniversary and good luck for all the upcoming amazing years of work.","It takes a person with strong character and will power to stay dedicated and loyal at his or her job even after so many years of service. And you are that person! Best wishes on your work anniversary."
],
"Avinash Shetty":["It’s tough to stay put in the same place for a very long time. However, maybe the word “Tough” wasn’t in your dictionary and you have sailed together with us through all the harsh and good times. I would like to congratulate you on achieving this amazing professional milestone. And also wish you the best of luck for times to come.","Happy work anniversary, Avinash! From the start, you have driven our testing along novel paths. We wish you a happy, interesting and creatively fulfilling road ahead.","Very calm and controlled in a conversation, a leader, embodies an informed QA at Qxf2 services","Thank you for being an essential part of Qxf2’s success, Happy work anniversary","You have set the standard for your work dedication. You have shown us how to learn and grow. Happy Anniversary !!!","Your cool head and ability to deal with pressure are huge assets to us and we’re so pleased to being wishing you a happy work anniversary. Hopefully there will be many more to follow !","Congrats on surviving another year at your job. Lesser mortals would have gone insane by now"],
"Annapoorani Gurusamy":["Wish you very happy work anniversary and thank you for keeping a close watch on our code through code review's and creating interview scheduler","Happy work anniversary, Annapoorani! Thank you for gracefully shouldering a heavy and varied workload - we are all the better for it. Our best wishes for the year ahead.","helping hand, good patience Wishing you a happy work anniversary.","Very dedicated in making freshers. interns & senior-interns life happy at Qxf2, you can take Annapoorani out of teaching, but you can never take teaching out of Annapoorani","Your patience and understanding nature make you the best person to work with whenever someone new joins at Qxf2. Thank you for all the hard work you have been doing for Qxf2 from so long and wish you many more successful years ahead!","Your down-to-earth character makes you exactly what you are. We always enjoy working with you. Happy Anniversary!!!"],
"Arunkumar Muralidharan":["Success never comes in a day; it comes with time and hard work, and you are an example of that congratulations on your work anniversary","knowledge hub and it flows to everyone, quick in understanding and resolving, always helpful .Wish you a happy work anniversary."],
"Rajeswari Gali":["Happy work anniversary, employee #1! We look forward to your continued contributions even as the rest of us build upon the foundation you laid.","Your commitment and dedication to your work and everything you are involved in is really motivating and encourages us to give our best. We are proud to have you! Wish you many more successful years ahead at Qxf2."],
"Smitha Rajesh":["You have set an exemplary standard for all of us with your work ethics and your dedication. Thank you for everything you have done for us all these years. Wish you a Happy Work Anniversary!","You are always ready to help, if it is a work problem or organizing games :), Wish you a very happy work anniversary.","helping nature, good friend, provide positive words, Wishing you a happy work anniversary.","Present's her work really well, socalised Qxf2, a leader.","The words that describe you are self-motivated and focused. The energy you bring is always very much appreciated. Happy Anniversary!!!"],
"Indira Nellutla":["A committed employee and an awesome human being. That’s what you are and we are glad to have someone like you in our workforce. We wish you a very happy work anniversary and good luck for all the upcoming amazing years of work","Filled with a positive vibe, confidence, and fearlessness we couldn’t have found anyone better than you for our team. Happy Work Anniversary!","You are a great listener and very approachable at all times. It’s a pleasure to be teaming along with you. Happy work anniversary!","Hard work always pay off. Happy work anniversary","A very positive co-worker, knows the game - have been in the industry for a long time, a pleasure to work with","We are grateful for your contribution and dedication to Qxf2. Hope you continue to be the friend and motivator you are. Wish you many more successful years ahead.","Your cool head and ability to deal with pressure are huge assets to us and we’re so pleased to being wishing you a happy work anniversary. Hopefully there will be many more to follow !","Thanks for being such a hilarious colleague with a great sense of humor. Happy work anniversary and all the best! ","We extend heartiest congratulations to you on setting yet another milestone in the journey of your professional life. Today is a good time to smell the roses you planted at Qxf2 Services. Wish you a very happy work anniversary!"],
"Shivahari Picthcaikannu":["Wish you very happy work anniversary to an ardent Arsenal Fan and Superstar of Qxf2 :)","You are never hesitant to make any suggestions and your willingness to share knowledge is very inspiring! Keep up the good work! Happy work anniversary!","Happy work anniversary, Gooner! Year after year, you have been generous with your time, knowledge and skills - and we have all benefited from it! Have a great year ahead!","Congratulations on achieving this anniversary with us. We know you have worked hard for the accomplishment and we truly appreciate your dedication.","Your skills add value every day, and we all enjoy having you around. Your creative thinking is always inspiring us. Happy Anniversary!!!","Congrats on surviving another year at your job. Lesser mortals would have gone insane by now","Thanks for being such a hilarious colleague with a great sense of humor. Happy work anniversary and all the best!","Your down-to-earth attitude makes you what you are, a winner. Victoria Concordia Crescit. Wishing you a happy work anniversary"],
"Rohan Dudam":["You are truly a valued associate. Thank you for your fervent efforts, creativity and IOT sessions. Wish you happy work anniversary- Rohan D","Happy work anniversary, Rohan! Your calm, steady and methodical contributions have made Qxf2 a much more interesting place. For many more years to come, we look forward to working with you and the robots you build.","Despite coming from an Electronic background working in IT dauntlessly, everyone's good friend, doesn't hesitate to speak about what he feels about a process at Qxf2","Wishing you many more years of success and innovation, a very happy work anniversary","Employees like you are the pride and joy of a company. We are proud to have you with us. On the way forward, you deserve all you have achieved and more. Wishing you a happy work anniversary."],
"Rohan Joshi":["You are an inspiring individual. Kudos to your amazing years of work!","You are a treasure trove of ideas and a very helpful colleague. You make Qxf2 a fun place to work. It is an absolute pleasure to be working with you and looking forward to more! Best wishes on your Work Anniversary!","May today be a day where you feel dedicated for all the work you do on a daily basis! Happy work Anniversary","Very organised with his work, plan's his work well and everyone's good friend","It’s always inspiring to work alongside you. Congratulations on an accomplished year of work","Your enthusiasm and passion for learning are always inspiring and you are a great inspiration to all the new joiners. Keep Rocking !! Happy Work Anniversary !!!","Your cool head and ability to deal with pressure are huge assets to us and we’re so pleased to being wishing you a happy work anniversary. Hopefully there will be many more to follow !","Congratulation on your work anniversary! You’ve turned the workplace and working strategy into great fun.","Thanks for being such a hilarious colleague with a great sense of humor. Happy work anniversary and all the best!"],
"Mohan Kumar":["May the journey of corporate success continue even in the coming years! Happy work Anniversary","Congratulation on your work anniversary! You’ve turned the workplace and working strategy into great fun."],
"Nilaya Indurkar":["With a team member like you, it’s easier for the company to achieve its goals, thank you and happy work anniversary","Wish you a happy work anniversary to one of the co-creator and tester of interview scheduler.","Very Congratulations on your Happy Work Anniversary!. May you accomplish more successful working years with this organization. Wish you a Good Luck"," Another year of excellence! Thanks for all the amazing work you do. Your effort and enthusiasm are much needed, and very much appreciated"],
"Rohini Gopal":["From all of us… happy anniversary! Thank you for your hard work, your generosity, and your contagious enthusiasm.","Thanks for being such a hilarious colleague with a great sense of humor. Happy work anniversary and all the best! ","You have set an ideal working and relationship standard for all of us with your work ethics and your dedication to see the prosperity of this company. Thank you for everything you have done for us all these years. Wishing you a Happy Work Anniversary today! We appreciate you a lot."],
"Sravanti Tatiraju":["You have achieved yet another milestone. Kudos to all these years of your great work."],
"Preedhi Vivek":["Congratulations on your accomplishments wherein you set an example of hard work and team spirit. Happy Work Anniversary!","A very supportive colleague, always there to help people, produces quality work with minimum fuss, present's her work very well, constantly thinks about doing the next task well","You are very talented, hardworking and give your best at whatever you do! Your thoughtfulness and empathetic nature make you the go-to-person for anyone. Congratulations on your anniversary with Qxf2 and wish you many more successful years ahead!","Thanks for being such a hilarious colleague with a great sense of humor. Happy work anniversary and all the best! "],
"Akkul DN":["A good learner, strives to improve always, an ever ready colleague to have any random conversation about gaming","Congratulation on your work anniversary! You’ve turned the workplace and working strategy into great fun.","Thanks for being such a hilarious colleague with a great sense of humor. Happy work anniversary and all the best! "],
"Kiran CV":["You add that absolute fun element at work! Stay the same and keep making Qxf2 more and more a fun place to work! Your technical prowess is equally inspiring! Happy anniversary!","Strives to produce quality work, likes to learn new stuff, ready to take responsibilities","It only feels like yesterday that you started working with us. Yet here we are, a year later. Congratulations on your work anniversary","Congratulation on your work anniversary! You’ve turned the workplace and working strategy into great fun."],
"Rahul Bhave":["You read and share a lot which has many a time been helpful to most of us. Continue to read, share knowledge and inspire people as always! Happy work anniversary!"],
"Drishya TM":["I extend my heartiest congratulations to you on setting yet another milestone in the journey of your professional life. Wish you a very happy work anniversary!","The energy and vibe you bring along with your positive attitude makes everyone working along with you inspire to give more. Congratulations on your anniversary with Qxf2 and wish you many more successful years ahead."],
"Raghava Nelabhotla":[]
}
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/chatgpt/requirements.txt
|
boto3
openai==0.27.0
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/chatgpt/chatgpt.py
|
"""
This Lambda will take a user message and pass it to ChatGPT.
It will then send back ChatGPT's reply to the user.
"""
import json
import os
import boto3
import openai
QUEUE_URL = os.getenv("SKYPE_SENDER_QUEUE", "INVALID SKYPE SENDER QUEUE")
at_Qxf2Bot = os.getenv("AT_QXF2BOT", "INVALID QXF2BOT SETTING")
at_Qxf2Bot_english = "@qxf2bot!"
COMMANDS = [
f"help {at_Qxf2Bot}",
f"help us {at_Qxf2Bot}",
f"help me {at_Qxf2Bot}",
f"help {at_Qxf2Bot_english}",
f"help me {at_Qxf2Bot_english}",
f"help us {at_Qxf2Bot_english}",
]
def get_message_contents(event):
    "Extract the doubly-JSON-encoded messages from each SQS record in the event"
    parsed_messages = []
    for record in event.get("Records"):
        # The SQS body wraps an SNS envelope whose "Message" field is
        # itself JSON - hence the two decode steps.
        envelope = json.loads(record.get("body"))
        parsed_messages.append(json.loads(envelope["Message"]))
    return parsed_messages
def write_message(message, channel):
    "Forward a reply to the Skype Sender SQS queue"
    sqs_client = boto3.client("sqs")
    print(channel)
    payload = str({"msg": f"{message}", "channel": channel})
    print(payload)
    sqs_client.send_message(QueueUrl=QUEUE_URL, MessageBody=(payload))
def is_help_command(message):
    "Is this a user wanting to talk to ChatGPT?"
    # True when any configured trigger phrase appears in the message.
    lowered = message.lower()
    return any(command in lowered for command in COMMANDS)
def get_reply(message):
    "Ask ChatGPT for a short reply and append token-usage stats"
    openai.api_key = os.getenv("OPENAI_API_KEY", "")
    model_engine = os.getenv("MODEL_ENGINE", "")
    chat_messages = [
        {"role": "system", "content": "Keep it short and simple."},
        {"role": "user", "content": message},
    ]
    response = openai.ChatCompletion.create(model=model_engine, messages=chat_messages)
    answer = response['choices'][0]['message']['content']
    total_tokens = response['usage']['total_tokens']
    return f"{answer}. Usage stats: {total_tokens}"
def clean_reply(reply):
    "Strip apostrophes and double quotes that break the Skype sender payload"
    for quote_char in ("'", '"'):
        reply = reply.replace(quote_char, "")
    return reply
def lambda_handler(event, context):
    "Entry point: reply via ChatGPT to every help-command message in the event"
    for content in get_message_contents(event):
        message = content["msg"].strip()
        channel = content["chat_id"]
        user = content["user_id"]
        if is_help_command(message):
            answer = get_reply(message)
            write_message(clean_reply(answer), channel)
    return {"statusCode": 200, "body": json.dumps("Done!")}
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/cloudwatch-alert-notifier/requirements.txt
|
skpy==0.9.1
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/cloudwatch-alert-notifier/cloudwatch_alert_notifier.py
|
"""
This script will send the Cloudwatch Alarm notification messages to skype
"""
import json
import os
import requests
def get_channel_id(channel_name='test'):
    "Look up the Skype channel id for the given channel name"
    # SKYPE_CHANNELS stores a dict literal with single quotes;
    # normalise to double quotes so json.loads accepts it.
    raw_mapping = os.environ.get('SKYPE_CHANNELS').replace("'", "\"")
    channels = json.loads(raw_mapping)
    return channels.get(channel_name, 'provide default channel id here')
def get_alert_message(full_msg):
    "Build a human-readable alert string from an SNS-wrapped CloudWatch alarm"
    # Two decodes: the SQS body wraps an SNS envelope whose Message is JSON.
    alarm = json.loads(json.loads(full_msg).get("Message", {}))
    name = alarm.get("AlarmName", "Unknown Alarm Name")
    region = alarm.get("Region", "Unknown Region")
    reason = alarm.get("NewStateReason", "Unknown State Reason")
    changed_at = alarm.get("StateChangeTime", "Unknown State Change Time")
    description = alarm.get("AlarmDescription", "Unknown Alarm Description")
    # Form meaningful alert message
    return (
        f"<b>ALERT:</b> {name} in {region}\n"
        f"<b>Description:</b> {description}\n"
        f"<b>State Change Time:</b> {changed_at}\n"
        f"<b>Reason:</b> {reason}\n"
    )
def post_message(event, context=None):
    "Send every alarm record in the trigger event to the Skype sender endpoint"
    print(f'The trigger event is: {event}')
    for record in event['Records']:
        alert_message = get_alert_message(record['body'])
        # Print the alert message
        print(alert_message)
        payload = {
            'API_KEY': os.environ['API_TOKEN'],
            'msg': alert_message,
            'channel': get_channel_id()
        }
        response = requests.post(os.environ['SKYPE_SENDER_ENDPOINT'],
                                 json=payload, timeout=10)
        print(f'Received {response.json()} for {alert_message}')
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/survey_reminder/survey_reminder.py
|
"""
This Lambda will :
- On every friday, lambda will pick up a list of employees who have not filled survey.
- Reminder will be sent individually to them.
- At the end of the day, lambda will post employee names who are yet to fill survey.
"""
import os
import json
import boto3
import requests
QUEUE_URL = 'https://sqs.ap-south-1.amazonaws.com/285993504765/skype-sender'
NOT_RESPONDED_USERS_URL = os.environ.get('NOT_RESPONDED_USERS_URL')
def get_all_employees():
    "Invoke the employees Lambda synchronously and return the decoded body"
    lambda_client = boto3.client('lambda', region_name=os.environ.get('REGION_NAME'))
    response = lambda_client.invoke(
        FunctionName=os.environ.get('EMPLOYEES_LAMBDA_NAME'),
        InvocationType="RequestResponse")
    return json.loads(response['Payload'].read())['body']
def get_non_responded_users():
    "Fetch the response listing people who have not filled the survey"
    request_headers = {'Accept': 'application/json',
                       'User': os.environ.get('AUTHORISED_USER')}
    return requests.get(NOT_RESPONDED_USERS_URL, headers=request_headers)
def write_message(survey_reminder_message, channel):
    "Queue a reminder message for the Skype Sender"
    payload = str({'msg': f'{survey_reminder_message}', 'channel': channel})
    boto3.client('sqs').send_message(QueueUrl=QUEUE_URL, MessageBody=(payload))
def get_individual_message():
    "Send a personal survey reminder to each employee who has not responded"
    emp_data = get_all_employees()
    # Set for O(1) membership tests while iterating employees.
    pending_emails = {user['email'] for user in get_non_responded_users().json()}
    survey_url = os.environ.get('HELP_SURVEY_URL')
    for each_node in emp_data:
        node = each_node['node']
        if node['email'] in pending_emails:
            msg = f"Hey {node['firstname']},\nReminder to take Help Survey: {survey_url}"
            # '8:' prefix is the Skype user-id scheme for direct messages.
            write_message(msg, '8:' + node['skypeId'])
def get_group_message():
    "Post the names of survey non-responders to the group channel"
    pending_names = [user['fullName'] for user in get_non_responded_users().json()]
    group_msg = "List of People who have not submitted help survey \n " + \
        "\n".join(pending_names)
    write_message(group_msg, os.environ.get('CHANNEL_ID'))
def survey_reminder(msg_type):
    "Remind Qxf2 employees to take survey"
    # 'individual' sends per-person DMs; anything else posts the group list.
    handler = get_individual_message if msg_type == 'individual' else get_group_message
    handler()
def lambda_handler(event, context):
    """Lambda entry point: dispatches on event['msg_type'] -
    'individual' for per-employee DMs, anything else for the group post."""
    survey_reminder(event['msg_type'])
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/survey_reminder/requirements.txt
|
boto3==1.17.90
python-dateutil==2.8.1
requests==2.25.1
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/unique_tech_notifier/requirements.txt
|
boto3==1.14.43
python-dateutil==2.8.1
requests==2.25.0
| 0 |
qxf2_public_repos/qxf2-lambdas
|
qxf2_public_repos/qxf2-lambdas/unique_tech_notifier/unique_tech_notifier.py
|
"""
This lambda will notify on skype channel about the unique tech learnt for the current week based on the survey data
"""
import os
import json
import requests
import boto3
from datetime import date,timedelta
QUEUE_URL = 'https://sqs.ap-south-1.amazonaws.com/285993504765/skype-sender'
ALL_TECH_URL = os.environ.get('ALL_TECH_URL')
WEEKLY_TECH_URL = os.environ.get('WEEKLY_TECH_URL')
AUTHORIZED_USER = os.environ.get('AUTHORIZED_USER')
def write_message(unique_tech_msg, channel):
    "Queue a message for the Skype Sender"
    payload = str({'msg': f'{unique_tech_msg}', 'channel': channel})
    boto3.client('sqs').send_message(QueueUrl=QUEUE_URL, MessageBody=(payload))
def get_all_techs():
    "Return technologies reported from 2014 up to five days ago"
    window_end = str(date.today() - timedelta(days=5))
    payload = json.dumps({"start_date": "2014-01-01", "end_date": window_end})
    request_headers = {"Accept": "application/json", "User": AUTHORIZED_USER}
    all_tech = requests.post(ALL_TECH_URL, headers=request_headers, data=payload)
    return [tech['technology'] for tech in all_tech.json()]
def get_weekly_tech():
    "Return a list of technology data for the current week"
    payload = json.dumps({"date": str(date.today())})
    request_headers = {"Accept": "application/json",
                       "Content-type": "application/json",
                       "User": AUTHORIZED_USER}
    weekly_tech = requests.post(WEEKLY_TECH_URL, data=payload, headers=request_headers)
    # Note: this endpoint capitalises the key ('Technology') unlike ALL_TECH_URL.
    return [tech['Technology'] for tech in weekly_tech.json()]
def get_unique_tech():
    """Return a message listing techs learnt this week that were never
    reported before (this week's set minus the historical set)."""
    unique_tech_list =set(get_weekly_tech()) - set(get_all_techs())
    if len(unique_tech_list) != 0:
        msg = "List of unique techs learnt this week:\n"+"\n".join(unique_tech_list)
    else:
        msg = "*No unique techs* learnt this week!! :("
    return msg
def lambda_handler(event, context):
    "Entry point: post the unique techs, then everything reported this week"
    target_channel = event.get('channel', 'main')
    write_message(get_unique_tech(), target_channel)
    weekly_techs = set(get_weekly_tech())
    if weekly_techs:
        summary = "List of techs reported this week:\n" + "\n".join(weekly_techs)
    else:
        summary = "*No techs* reported this week!! :("
    write_message(summary, target_channel)
| 0 |
qxf2_public_repos
|
qxf2_public_repos/chess-heatmap-PyPI-package/chess_util.py
|
"""
This module will handle logic pertaining to calculation of control of
white and black over each square in the board
"""
import glob
from chess import pgn
from chess import parse_square
from chess import Board
from chess import WHITE
from chess import BLACK
from chess import SQUARE_NAMES
from chess import Move
from chess import KING
class ChessUtil:
    """Class to publish (num_white_control,num_black_control) tuple
    for each square for each ply"""

    @staticmethod
    def generate_ply_info_list_for_game(game):
        """Return a dict with the per-ply task list for one game.

        Keys: 'game_tasks' (list of {'ply_no', 'board'} snapshots) and
        'ply_count' (total number of plies seen).
        """
        board = Board()
        ply_no = 0
        game_tasks = []
        for ply in game.mainline_moves():
            board.push(ply)
            # Copy the board so later pushes do not mutate stored snapshots.
            ply_info = {"ply_no": ply_no, "board": board.copy()}
            game_tasks.append(ply_info)
            ply_no = ply_no + 1
        return {"game_tasks": game_tasks, "ply_count": ply_no}

    @staticmethod
    def find_control_for_square_for_color(ply_info, color):
        """Calculate the number of attackers for each square for a ply.

        Counts "batteries" by repeatedly removing the current attacker
        from a scratch board and re-querying; a king is only counted
        when it is the last remaining attacker.
        """
        board = ply_info["board"]
        power_of_square_list = []
        ply_no = ply_info['ply_no']
        # chess.WHITE is truthy, chess.BLACK is falsy.
        color_key = "black"
        if color:
            color_key = "white"
        for square in SQUARE_NAMES:
            parsed_square = parse_square(square)
            power_of_square_dict = {"ply": ply_no}
            power_of_square_dict['square'] = parsed_square
            new_board = board.copy()
            attackers = board.attackers(color, parsed_square)
            if len(attackers) == 0:
                power_of_square_dict[color_key] = 0
                # NOTE(review): this dict is appended here and again after the
                # (skipped) while loop below, which looks like a duplicate
                # entry for unattacked squares - confirm intended.
                power_of_square_list.append(power_of_square_dict)
            power_of_square = 0
            new_board = board.copy()
            while len(attackers) != 0:
                attacker_list = list(attackers)
                attacking_square = attacker_list[0]
                # Prefer a non-king attacker when the king comes first in the list.
                if new_board.piece_type_at(attacking_square) == KING and len(attackers) > 1:
                    attacking_square = attacker_list[1]
                elif new_board.piece_type_at(attacking_square) == KING and len(attackers) == 1:
                    # King is the only attacker left: count it and stop.
                    power_of_square = power_of_square + 1
                    break
                new_board.remove_piece_at(attacking_square)
                power_of_square = power_of_square + 1
                # Removing a piece can reveal a new attacker behind it.
                attackers = new_board.attackers(color, parsed_square)
            power_of_square_dict[color_key] = power_of_square
            power_of_square_list.append(power_of_square_dict)
        return power_of_square_list

    @staticmethod
    def find_control_for_square(ply_info):
        "Find control for Black and White"
        power_of_square_list = ChessUtil.find_control_for_square_for_color(ply_info, WHITE)
        power_of_square_list.extend(ChessUtil.find_control_for_square_for_color(ply_info, BLACK))
        return power_of_square_list

    @staticmethod
    def get_games_from_pgn_files():
        "Parse PGN files in the current directory and return a list of parsed game objects"
        game_list = []
        for file in glob.glob("resources/input/*.pgn"):
            # NOTE(review): the file handle is never closed; consider `with`.
            file_handle = open(file)
            while True:
                game = pgn.read_game(file_handle)
                if game is None:
                    # read_game returns None at end of file.
                    break
                game_list.append(game)
        return game_list
| 0 |
qxf2_public_repos
|
qxf2_public_repos/chess-heatmap-PyPI-package/LICENSE
|
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.
| 0 |
qxf2_public_repos
|
qxf2_public_repos/chess-heatmap-PyPI-package/chess_dask_cluster.py
|
"""
This module is for parallelizing the calculation of heatmap for each ply in a game
using dask tasks and parallelizing multiple games in PGNs
"""
import os
import errno
import json
import base64
import yaml
from dask.distributed import LocalCluster
from dask.distributed import Client
from dask.distributed import get_client, as_completed
from dask.distributed import TimeoutError
from distributed import wait
from dask import config
import coiled
from .chess_util import ChessUtil
from .chess_image_generator import ChessImageGenerator
class ChessDaskCluster:
    "Class for using dask tasks to parallelize calculation of heatmap"

    @staticmethod
    def get_game_data(result_list, ply_count):
        """Aggregate per-square control results into per-ply 8x8 boards.

        result_list: list of dicts with keys 'ply', 'square' and either
                     'white' or 'black' holding that side's control power.
        ply_count:   number of plies in the game.
        Returns a dict with one 8x8 board per ply for each side, plus the
        maximum control value seen per side (used to scale heatmap colors).
        """
        game_data = {"white": [], "black": [],
                     "max_white_value": 0, "max_black_value": 0}
        # One empty 8x8 board per ply, per side.
        for _ in range(ply_count):
            game_data["white"].append([[0 for _ in range(8)] for _ in range(8)])
            game_data["black"].append([[0 for _ in range(8)] for _ in range(8)])
        for result_for_square in result_list:
            ply_no = result_for_square["ply"]
            square_index = result_for_square["square"]
            row = square_index // 8
            column = square_index % 8
            if "white" in result_for_square:
                power = result_for_square["white"]
                game_data["max_white_value"] = max(game_data["max_white_value"], power)
                # Board row 0 is rank 8, hence the 7-row vertical flip.
                game_data["white"][ply_no][7 - row][column] = power
            else:
                power = result_for_square["black"]
                game_data["max_black_value"] = max(game_data["max_black_value"], power)
                # Black's board is drawn from black's perspective: flip both axes.
                game_data["black"][ply_no][7 - row][7 - column] = power
        return game_data

    @staticmethod
    def analyse_game_in_worker(game_dict):
        """
        For a game, for every ply, generate tasks to be run in parallel in a
        dask cluster. One task is created per ply. A worker client is used here
        because this method itself is run inside a worker.

        Returns the aggregated game data dict, or None if the game timed out.
        """
        game = game_dict["game"]
        game_no = game_dict["game_no"]
        timeout = game_dict["timeout"]
        tasks_for_game = ChessUtil.generate_ply_info_list_for_game(game)
        worker_client = get_client()
        task_futures = worker_client.map(ChessUtil.find_control_for_square,
                                         tasks_for_game["game_tasks"])
        game_data = None
        try:
            wait(task_futures, timeout)
            control_list_for_game = worker_client.gather(task_futures)
            game_results = []
            for ply_list in control_list_for_game:
                game_results.extend(ply_list)
            game_data = ChessDaskCluster.get_game_data(game_results,
                                                       tasks_for_game["ply_count"])
            game_data["filename"] = str(game_no) + " " + game.headers["Event"]
        except TimeoutError:
            print("Game timed out: " + str(game_no))
        return game_data

    def __init__(self):
        """Read config/config.yaml, fill in defaults for missing settings and
        start the dask cluster (local or Coiled, per 'use_local_cluster')."""
        config_file = os.path.join(os.getcwd(), "config", "config.yaml")
        if not os.path.isfile(config_file):
            raise FileNotFoundError(
                errno.ENOENT, os.strerror(errno.ENOENT), config_file)
        with open(config_file) as file:
            self.config_values = yaml.full_load(file)
        # Apply defaults for any setting missing from the config file.
        defaults = {
            "cluster_name": "chess-cluster",
            "software_environment_name": "chess-env",
            "n_workers": 50,
            "worker_cpu": 1,
            "worker_memory": 8,
            "scheduler_memory": 16,
            "scheduler_cpu": 4,
            "game_batch_size": 30,
            "timeout_per_game": 60,
            "debug": False,
        }
        for key, value in defaults.items():
            self.config_values.setdefault(key, value)
        if self.config_values["use_local_cluster"]:
            cluster = LocalCluster(n_workers=self.config_values["n_workers"],
                                   threads_per_worker=1)
        else:
            coiled.create_software_environment(
                name=self.config_values["software_environment_name"],
                pip="requirements.txt")
            cluster = coiled.Cluster(
                name=self.config_values["cluster_name"],
                n_workers=self.config_values["n_workers"],
                worker_cpu=self.config_values["worker_cpu"],
                worker_memory=str(self.config_values["worker_memory"]) + "GiB",
                scheduler_memory=str(self.config_values["scheduler_memory"]) + "GiB",
                scheduler_cpu=self.config_values["scheduler_cpu"],
                software=self.config_values["software_environment_name"])
        self.client = Client(cluster)

    def analyse_games_in_cluster(self, game_list):
        """Find control heatmaps for all the games passed, in parallel in a
        dask cluster, then render one animated gif per game."""
        timeout = self.config_values["timeout_per_game"]
        game_master_list = [{"game": game, "game_no": game_no, "timeout": timeout}
                            for game_no, game in enumerate(game_list)]
        all_game_results = []
        batch = []
        # Submit games in fixed-size batches so we never hold more futures
        # than one batch's worth at a time.
        for index, game in enumerate(game_master_list, start=1):
            batch.append(game)
            if index % self.config_values["game_batch_size"] == 0:
                game_futures = self.client.map(ChessDaskCluster.analyse_game_in_worker, batch)
                all_game_results.extend(self.client.gather(game_futures))
                batch = []
                print("Completed games till " + str(index))
        if batch:
            game_futures = self.client.map(ChessDaskCluster.analyse_game_in_worker, batch)
            all_game_results.extend(self.client.gather(game_futures))
        if self.config_values["debug"]:
            print("Completed computation, starting image creation")
            with open('results.json', 'w', encoding='utf-8') as file_handle:
                json.dump(all_game_results, file_handle, ensure_ascii=False, indent=4)
        image_futures = self.client.map(ChessImageGenerator.create_gif, all_game_results)
        for image_future in as_completed(image_futures):
            image_data = image_future.result()
            # create_gif returns None for games with no data (e.g. timeouts);
            # previously this crashed on image_data["filename"].
            if image_data is None:
                continue
            # BUG FIX: "file_handle.close" (no parentheses) was a no-op that
            # left the handle open; the with-block closes it reliably.
            with open(image_data["filename"], 'wb') as file_handle:
                file_handle.write(base64.decodebytes(image_data["bytes"]))
            print("Successfully created: " + image_data["filename"])
| 0 |
qxf2_public_repos
|
qxf2_public_repos/chess-heatmap-PyPI-package/chess_control_heatmap.py
|
"""
This module produces a control heatmap for a chess game which shows which side
controls which squares how many times per ply/move in a Chess board
"""
from .chess_util import ChessUtil
from .chess_dask_cluster import ChessDaskCluster
class ChessControlHeatmap:
    "Class to generate the control heatmap based on the input PGN files"

    def generate_heatmap_images(self):
        "Read the games from the input PGN files and analyse them on the cluster"
        games = ChessUtil.get_games_from_pgn_files()
        ChessDaskCluster().analyse_games_in_cluster(games)
| 0 |
qxf2_public_repos
|
qxf2_public_repos/chess-heatmap-PyPI-package/README.md
|
# chess-heatmap-PyPI-package
This repository is part of the chess-heatmap project, which generates control heatmap images for given chess games. It contains the Python files (part of that project) which have been bundled as a PyPI package. The package is available under the name 'chess_heatmap_qxf2'.
The chess-heatmap project uses dask collections and runs on a Coiled cluster. Dask tasks are submitted by the scheduler to workers. When the workers start working on the tasks, they need some associated files that live on the client machine, so those files were bundled into this package. Workers install the package on their machines and can then use the files.
To run the project, follow the instructions in the https://github.com/qxf2/chess-heatmap repository.
| 0 |
qxf2_public_repos
|
qxf2_public_repos/chess-heatmap-PyPI-package/chess_image_generator.py
|
"""
This module is used to represent the control logic data in 2-dimensional form.
The data values are represented as colors in the graph.
"""
import base64
import seaborn as sns
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import matplotlib
class ChessImageGenerator:
    "Class to create the animated heatmap gif for a single game"

    @staticmethod
    def create_gif(game_data):
        """Render the per-ply control boards of one game as an animated gif.

        game_data: dict produced by the cluster (per-ply 'white'/'black'
                   boards, per-side max values and a 'filename'), or None
                   for games that produced no data (e.g. timeouts).
        Returns a dict with the base64-encoded gif bytes and the output
        filename, or None when there is nothing to render.
        """
        matplotlib.use('agg')
        if game_data is None or len(game_data["white"]) == 0:
            print("Doing nothing for this game")
            return None
        gamedata_mapwhite = game_data["white"]
        gamedata_mapblack = game_data["black"]
        filename = game_data["filename"]
        gamemax_controlwhite = game_data["max_white_value"]
        gamemax_controlblack = game_data["max_black_value"]
        xlabels = ["a", "b", "c", "d", "e", "f", "g", "h"]
        ylabels = ["8", "7", "6", "5", "4", "3", "2", "1"]
        fig, (ax1, ax2) = plt.subplots(1, 2)
        # Draw the first ply up front so the figure (and its color bars)
        # exist before the animation starts.
        sns.heatmap(gamedata_mapwhite[0], cmap="YlGn", vmax=gamemax_controlwhite,
                    xticklabels=xlabels, yticklabels=ylabels, ax=ax1)
        sns.heatmap(gamedata_mapblack[0], cmap="YlGn", vmax=gamemax_controlblack,
                    xticklabels=xlabels, yticklabels=ylabels, ax=ax2)

        def animate(i):
            "Redraw both heatmaps for ply i (one frame of the gif)"
            ax1.cla()
            ax2.cla()
            sns.heatmap(gamedata_mapwhite[i], cmap="YlGn", vmax=gamemax_controlwhite,
                        xticklabels=xlabels, yticklabels=ylabels, ax=ax1, cbar=None)
            sns.heatmap(gamedata_mapblack[i], cmap="YlGn", vmax=gamemax_controlblack,
                        xticklabels=xlabels, yticklabels=ylabels, ax=ax2, cbar=None)

        anim = animation.FuncAnimation(fig, animate, interval=1000,
                                       save_count=len(gamedata_mapwhite))
        anim.save(filename + ".gif", dpi=80, writer='imagemagick')
        # Release the figure: workers render many games and leaked figures
        # accumulate memory across calls.
        plt.close(fig)
        # BUG FIX: the gif file was previously opened and never closed;
        # read it inside a with-block so the handle is always released.
        with open(filename + ".gif", 'rb') as file_handle:
            encoded_gif = base64.b64encode(file_handle.read())
        return {"bytes": encoded_gif,
                "filename": "resources/output/" + filename + ".gif"}
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Nav_Menu.py
|
"""
Page object model for the login page
"""
from Page import Page
class Nav_Menu(Page):
    "Page object for the side menu"

    def start(self):
        "Xpath of all the field"
        # Navigation Menu: xpaths of the folder links in GMail's left-hand pane
        self.inbox = "//a[contains(@href, '#inbox')]"
        self.sent_mail = "//a[contains(@href, '#sent')]"
        self.drafts= "//a[contains(@href, '#drafts')]"

    def select_menu_item(self,menu_item):
        "select menu item"
        # Click the requested folder link ('inbox', 'sent_mail' or 'drafts')
        # and log the navigation; anything else only prints a console message.
        if menu_item=="inbox":
            if self.click_element(self.inbox):
                self.write ("Opened the inbox")
        elif menu_item=="sent_mail":
            if self.click_element(self.sent_mail):
                self.write ("Opened the sent mail folder")
        elif menu_item=="drafts":
            if self.click_element(self.drafts):
                self.write ("Opened the drafts folder")
        else:
            # NOTE: Python 2 print statement - this module targets Python 2
            print "cannot find given menu_item"
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Test_Rail.py
|
"""
TestRail integration
"""
import dotenv,os,testrail,Conf_Reader
def get_testrail_conf():
    "Get the testrail account credentials from the testrail.env file"
    env_file = os.path.join(os.path.dirname(__file__),'testrail.env')
    # Build the API client from the TestRail URL in the env file
    api_client = testrail.APIClient(Conf_Reader.get_value(env_file,'TESTRAIL_URL'))
    # Attach the TestRail user and password
    api_client.user = Conf_Reader.get_value(env_file,'TESTRAIL_USER')
    api_client.password = Conf_Reader.get_value(env_file,'TESTRAIL_PASSWORD')
    return api_client
def update_testrail(case_id,run_id,result_flag,msg=""):
    """Update TestRail for a given run_id and case_id.

    case_id:     TestRail case id to report against.
    run_id:      TestRail run id; when None no update is attempted.
    result_flag: True marks the case Passed, anything else Failed.
    msg:         optional comment stored with the result.
    Returns True when the update was posted successfully, False otherwise.
    """
    update_flag = False
    #Get the TestRail client account details
    client = get_testrail_conf()
    #Update the result in TestRail using send_post function.
    #Parameters for add_result_for_case is the combination of runid and case id.
    #status_id is 1 for Passed, 2 For Blocked, 4 for Retest and 5 for Failed
    status_id = 1 if result_flag is True else 5
    if run_id is not None:
        try:
            client.send_post(
                'add_result_for_case/%s/%s'%(run_id,case_id),
                {'status_id': status_id, 'comment': msg })
        except Exception as e:
            print('Exception in update_testrail() updating TestRail.')
            print('PYTHON SAYS: ')
            print(e)
        else:
            # BUG FIX: update_flag was never set, so the function always
            # returned False even after a successful update.
            update_flag = True
            print('Updated test result for case: %s in test run: %s with msg:%s'%(case_id,run_id,msg))
    return update_flag
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/REQUIREMENTS.txt
|
This is sample code to support Qxf2 Service's tutorial on Page Object Model with Selenium and Python.
URL: http://qxf2.com/blog/page-object-model-selenium-python/
-----------------
1. REQUIREMENTS:
-----------------
a. Python 2.x
b. Selenium webdriver with Python binding
c. Python modules: selenium, python-dotenv, logging
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Login_Page.py
|
"""
Page object model for the login page
"""
from Page import Page
class Login_Page(Page):
    "Page object for the Login page"

    def start(self):
        "Open the login page and record the xpaths of its form fields"
        self.url = ""
        self.open(self.url)
        # Assert Title of the Login Page and Login
        self.assertIn("Gmail", self.driver.title)
        # Xpaths of the login form fields
        self.login_email = "//input[@name='Email']"
        self.login_next_button = "//input[@id='next']"
        self.login_password = "//input[@placeholder='Password']"
        self.login_signin_button = "//input[@id='signIn']"

    def login(self,username,password):
        "Login using credentials provided"
        self.set_login_email(username)
        self.submit_next()
        self.set_login_password(password)
        self.submit_login()
        # A successful login puts the username in the page title
        page_title = self.driver.title
        if username not in page_title:
            self.write("FAIL: Login error",level='debug')
            self.write(" Obtained driver title: "+page_title,level='debug')
            self.write(" Expected the string %s in the title"%username,level='debug')
            return False
        self.write("Login Success",level='debug')
        return True

    def set_login_email(self,username):
        "Set the username on the login screen"
        self.set_text(self.login_email,username)

    def submit_next(self):
        "Click 'next' - GMail sometimes splits username and password across steps"
        self.click_element(self.login_next_button)
        self.wait(3)

    def set_login_password(self,password):
        "Set the password on the login screen"
        self.set_text(self.login_password,password)

    def submit_login(self):
        "Submit the login form"
        self.click_element(self.login_signin_button)
        self.wait(7)
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Search_Inbox_Test.py
|
"""
Test case for login and search
"""
import dotenv,os,sys,PageFactory,Test_Rail,Conf_Reader
from optparse import OptionParser
from DriverFactory import DriverFactory
def check_options(options):
    "Check if the command line options are valid"
    # Returns True only when the provided config file path exists and is a
    # regular file; prints a framed diagnostic and returns False otherwise.
    options_flag = True
    options.config_file = os.path.abspath(options.config_file)
    #Check if the config file exists and is a file
    if os.path.exists(options.config_file):
        if not os.path.isfile(options.config_file):
            # Path exists but is a directory (or some other non-file)
            print '\n****'
            print 'Config file provided is not a file: '
            print options.config_file
            print '****'
            options_flag = False
    else:
        # Path does not exist at all
        print '\n****'
        print 'Unable to locate the provided config file: '
        print options.config_file
        print '****'
        options_flag = False
    return options_flag
def run_search_inbox_test(browser,conf,base_url,sauce_flag,browser_version,platform,testrail_run_id):
    """Login to Gmail using the page object model, search the inbox for a
    message with subject 'POM', open the inbox folder and (optionally)
    report the combined result to TestRail.

    NOTE(review): the conf and base_url parameters are accepted but never
    read in this function body - confirm whether they are still needed.
    """
    # get the test account credentials from the .credentials file
    credentials_file = os.path.join(os.path.dirname(__file__),'login.credentials')
    username = Conf_Reader.get_value(credentials_file,'LOGIN_USER')
    password = Conf_Reader.get_value(credentials_file,'LOGIN_PASSWORD')
    #Result flag used by TestRail
    result_flag = False
    #Setup a driver
    #create object of driver factory
    driver_obj = DriverFactory()
    driver = driver_obj.get_web_driver(browser,sauce_flag,browser_version,platform)
    driver.maximize_window()
    #Create a login page object
    login_obj = PageFactory.get_page_object("login",driver)
    if (login_obj.login(username,password)):
        msg = "Login was successful"
        result_flag = True
        login_obj.write(msg)
    else:
        msg = "Login failed"
        login_obj.write(msg)
    #Create an object for main page with header and menu
    main_obj = PageFactory.get_page_object("main",driver)
    main_obj.wait(3)
    #Search the inbox for message by subject 'POM' and open the message
    # The search outcome overwrites result_flag; only this flag (with the
    # login msg) is what gets reported to TestRail below.
    if main_obj.header_obj.search_by_subject('POM'):
        main_obj.write("Search successful")
        result_flag = True
    else:
        main_obj.write("Search text was not found")
        result_flag = False
    #Go to inbox
    main_obj.menu_obj.select_menu_item('inbox')
    #Update TestRail
    if testrail_run_id is not None:
        login_obj.write('About to update TestRail')
        # NOTE(review): case_id is hardcoded to TestRail case 67
        case_id = 67
        Test_Rail.update_testrail(case_id,testrail_run_id,result_flag,msg=msg)
    main_obj.teardown()
#---START OF SCRIPT
if __name__=='__main__':
    #Accept command line parameters
    usage = "\n----\n%prog -b <OPTIONAL: Browser> -c <OPTIONAL: configuration_file> -u <OPTIONAL: APP URL> -v <OPTIONAL: Browser version> -p <OPTIONAL: Platform> -s <OPTIONAL: sauce lab flag>\n----\nE.g.:%prog -b FF -c .conf -u https://basecamp.com -s Y -v 26 -p \"Windows 8\"\n---"
    parser = OptionParser(usage=usage)
    # Browser to drive locally or on SauceLabs
    parser.add_option("-b","--browser",
                      dest="browser",
                      default="firefox",
                      help="Browser. Valid options are firefox, ie and chrome")
    # Test configuration file (validated by check_options below)
    parser.add_option("-c","--config",
                      dest="config_file",
                      default=os.path.join(os.path.dirname(__file__),'data.conf'),
                      help="The full or relative path of the test configuration file")
    parser.add_option("-u","--app_url",
                      dest="url",
                      default="https://gmail.com",
                      help="The url of the application")
    # 'Y' routes the run through SauceLabs, 'N' runs locally
    parser.add_option("-s","--sauce_flag",
                      dest="sauce_flag",
                      default="N",
                      help="Run the test in Sauce labs: Y or N")
    parser.add_option("-v","--version",
                      dest="browser_version",
                      help="The version of the browser: a whole number",
                      default=None)
    parser.add_option("-p","--platform",
                      dest="platform",
                      help="The operating system: Windows 7, Linux",
                      default="Windows 7")
    # When provided, results are pushed to this TestRail run
    parser.add_option("-r","--test_run_id",
                      dest="testrail_run_id",
                      default=None,
                      help="The test run id in TestRail")
    (options,args) = parser.parse_args()
    if check_options(options):
        #Run the test only if the options provided are valid
        run_search_inbox_test(browser=options.browser,conf=os.path.abspath(options.config_file),base_url=options.url,sauce_flag=options.sauce_flag,browser_version=options.browser_version,platform=options.platform,testrail_run_id=options.testrail_run_id)
    else:
        print 'ERROR: Received incorrect input arguments'
        # NOTE(review): print_usage() prints by itself and returns None, so
        # this line also prints 'None' after the usage text - confirm intent.
        print parser.print_usage()
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Page.py
|
"""
Page class that all page models can inherit from
There are useful wrappers for common Selenium operations
"""
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
import unittest,time,logging
from Base_Logging import Base_Logging
class Page(unittest.TestCase):
    """Base page class that all page models inherit from.

    Wraps common Selenium operations (find, click, type, read text) with
    logging and defensive error handling.
    """

    def __init__(self,selenium_driver,base_url='https://mail.google.com/'):
        "Constructor"
        #We assume relative URLs start without a / in the beginning
        if base_url[-1] != '/':
            base_url += '/'
        self.base_url = base_url
        self.driver = selenium_driver
        self.start() #Visit and initialize xpaths for the appropriate page
        self.log_obj = Base_Logging(level=logging.DEBUG)

    def open(self,url):
        "Visit the page base_url + url"
        url = self.base_url + url
        if self.driver.current_url != url:
            self.driver.get(url)

    def get_page_xpaths(self,section):
        "open configurations file,go to right sections,return section obj"
        # Placeholder: xpaths are currently hardcoded in each page's start()
        pass

    def get_xpath(self,xpath):
        """Return the DOM element for the given xpath.

        NOTE(review): despite the historical docstring claiming a None
        return, Selenium raises NoSuchElementException when the element is
        missing; check_element_present() relies on that exception.
        """
        return self.driver.find_element_by_xpath(xpath)

    def click_element(self,xpath):
        "Click the element with the given xpath; return True on success"
        link = self.get_xpath(xpath)
        if link is not None:
            try:
                link.click()
            except Exception as e:
                self.write('Exception when clicking link with xpath: %s'%xpath)
                self.write(e)
            else:
                return True
        return False

    def set_text(self,xpath,value):
        "Set the value of the text field"
        text_field = self.get_xpath(xpath)
        try:
            text_field.clear()
        except Exception:
            self.write('ERROR: Could not clear the text field: %s'%xpath)
        if value is not None:
            text_field.send_keys(value)

    def get_text(self,xpath):
        "Return the text for a given xpath or None on any lookup error"
        try:
            text = self.get_xpath(xpath).text
        except Exception as e:
            self.write(e)
            return None
        else:
            return text.encode('utf-8')

    def get_dom_text(self,dom_element):
        "Return the text of a given DOM element or None if it has no text attribute"
        try:
            text = dom_element.text
        except Exception as e:
            self.write(e)
            return None
        else:
            return text.encode('utf-8')

    def select_dropdown_option(self, select_locator, option_text):
        "Selects the option in the drop-down"
        dropdown = self.driver.find_element_by_xpath(select_locator)
        for option in dropdown.find_elements_by_tag_name('option'):
            if option.text == option_text:
                option.click()
                break

    def check_element_present(self,xpath):
        "Return True if the web element is present in the page, False otherwise"
        try:
            self.get_xpath(xpath)
        except NoSuchElementException:
            return False
        return True

    def submit_search(self):
        " Clicks on Search button"
        # BUG FIX: this used to call self.click_button(), which does not exist
        # anywhere in this class; click_element() is the real helper.
        # NOTE(review): self.search_button is expected to be defined by the
        # subclass's start() - confirm against the page objects using this.
        self.click_element(self.search_button)

    def teardown(self):
        " Tears down the driver"
        self.driver.close()

    def write(self,msg,level='info'):
        " Log the message via Base_Logging at the given level"
        self.log_obj.write(msg,level)

    def wait(self,wait_seconds=5):
        " Sleep for the given number of seconds"
        time.sleep(wait_seconds)
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Conf_Reader.py
|
"""
A simple conf reader.
For now, we just use dotenv and return a key.
In future, you can make this a class and extend get_value()
"""
import dotenv,os
def get_value(conf,key):
    """Return the value for *key* from the dotenv file *conf*.

    Falls back to None (after printing a diagnostic) when the file cannot
    be loaded or the key is not present in the environment.
    FIX: replaced the Python-2-only 'except Exception,e' syntax (and print
    statements) with forms that also run under Python 3; the bound
    exception was never used, so the binding is dropped.
    """
    value = None
    try:
        dotenv.load_dotenv(conf)
        value = os.environ[key]
    except Exception:
        print('Exception in get_value')
        print('file: ', conf)
        print('key: ', key)
    return value
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/DriverFactory.py
|
"""
DriverFactory class
NOTE: Change this class as you add support for:
1. SauceLabs/BrowserStack
2. More browsers like Opera
"""
from selenium import webdriver
class DriverFactory():
    "Factory that hands out local or SauceLabs remote webdriver instances"

    def __init__(self,browser='ff',sauce_flag='N',browser_version=None,platform=None):
        # Store the requested configuration; run_local() reads self.browser
        # while get_web_driver() works from its own arguments.
        self.browser=browser
        self.sauce_flag=sauce_flag
        self.browser_version=browser_version
        self.platform=platform

    def get_web_driver(self,browser,sauce_flag,browser_version,platform):
        "Return a webdriver: remote (SauceLabs) for sauce_flag 'Y', local for 'N'"
        if (sauce_flag == 'Y'):
            web_driver = self.run_sauce(browser,sauce_flag,browser_version,platform)
        elif (sauce_flag == 'N'):
            web_driver = self.run_local(browser,sauce_flag,browser_version,platform)
        else:
            # NOTE(review): this branch is reached for an invalid sauce_flag,
            # not an unknown browser - the message below is misleading.
            print "DriverFactory does not know the browser: ",browser
            web_driver = None
        return web_driver

    def run_sauce(self,browser,sauce_flag,browser_version,platform):
        "Build desired capabilities and return a SauceLabs remote webdriver"
        if browser.lower() == 'ff' or browser.lower() == 'firefox':
            desired_capabilities = webdriver.DesiredCapabilities.FIREFOX
            desired_capabilities['version'] = browser_version
            desired_capabilities['platform'] = platform
        elif browser.lower() == 'ie':
            desired_capabilities = webdriver.DesiredCapabilities.INTERNETEXPLORER
            desired_capabilities['version'] = browser_version
            desired_capabilities['platform'] = platform
        elif browser.lower() == 'chrome':
            desired_capabilities = webdriver.DesiredCapabilities.CHROME
            desired_capabilities['version'] = browser_version
            desired_capabilities['platform'] = platform
        desired_capabilities['name'] = 'Testing End to END Basecamp Test'
        # SECURITY NOTE(review): SauceLabs username and access key are
        # hardcoded in this URL; move them to a credentials/env file and
        # rotate the exposed key.
        return webdriver.Remote(
            desired_capabilities=desired_capabilities,
            command_executor="http://avinash010:13174916-ad64-4642-b0cd-fdcc29dc3b2c@ondemand.saucelabs.com:80/wd/hub"
        )

    def run_local(self,browser,sauce_flag,browser_version,platform):
        "Return a local webdriver for the browser stored on the factory"
        if self.browser.lower() == "ff" or self.browser.lower() == 'firefox':
            return webdriver.Firefox()
        elif self.browser.lower() == "ie":
            return webdriver.Ie()
        elif self.browser.lower() == "chrome":
            return webdriver.Chrome()
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/README.md
|
This is sample code to support Qxf2 Service's tutorial on Page Object Model with Selenium and Python.
URL: http://qxf2.com/blog/page-object-model-selenium-python/
---------
1. SETUP
---------
a. Install Python 2.x
b. Install Selenium
c. Add both to your PATH environment variable
d. If you do not have it already, get pip
e. 'pip install python-dotenv'
f. Update 'login.credentials' with your credentials
-------
2. RUN
-------
a. python Search_Inbox_Test.py
b. For more options: python Search_Inbox_Test.py -h
-----------
3. ISSUES?
-----------
a. If Python complains about an Import exception, please 'pip install $module_name'
b. If you are not setup with the drivers for the web browsers, you will see a helpful error from Selenium telling you where to go and get them
c. If login fails, its likely that you forgot to update the login.credentials file
d. Exception? 'module object has no attribute load_dotenv'? You have the wrong dotenv module. So first 'pip uninstall dotenv' and then 'pip install python-dotenv'
e. Others: Contact mak@qxf2.com
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/PageFactory.py
|
"""
PageFactory uses the factory design pattern.
get_page_object() returns the appropriate page object.
Add elif clauses as and when you implement new pages.
Pages implemented so far:
1. Login
2. Main
"""
from selenium import webdriver
from Login_Page import Login_Page
from Main_Page import Main_Page
def get_page_object(page_name,driver,base_url='https://gmail.com/'):
    "Return the appropriate page object based on page_name (None if unknown)"
    requested = page_name.lower()
    page_obj = None
    if requested == "login":
        page_obj = Login_Page(driver,base_url=base_url)
    elif requested == "main":
        page_obj = Main_Page(driver,base_url=base_url)
    return page_obj
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/login.credentials
|
#Fill out your credentials here (no quotes needed)
LOGIN_USER=
LOGIN_PASSWORD=
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Base_Logging.py
|
"""
Qxf2 Services: A plug-n-play class for logging.
This class wraps around Python's logging module.
"""
import logging, os, inspect
import datetime
import sys
class Base_Logging():
    """Qxf2 Services: a plug-n-play wrapper around Python's logging module.

    Creates one logger with a stream handler and a file handler; write()
    stamps each message with the name of the calling function.
    """

    def __init__(self,log_file_name=None,level=logging.DEBUG,format='%(asctime)s|%(caller_func)s|%(levelname)s| %(message)s'):
        "Constructor for the logging class"
        self.log_file_name=log_file_name
        self.level=level
        self.format=format
        self.log = self.set_log(self.log_file_name,self.level,self.format)

    def set_log(self,log_file_name,level,format,test_module_name=None):
        "Set logging: 1 stream handler, one file handler"
        if test_module_name is None:
            test_module_name = self.get_calling_module()
        log = logging.getLogger(test_module_name)
        self.reset_log(log)
        self.set_log_level(log,level)
        self.add_stream_handler(log,level,format)
        # Default the log file name to <module>.log when not supplied
        if log_file_name is None:
            log_file_name = test_module_name + '.log'
        self.add_file_handler(log,level,format,log_file_name)
        return log

    def get_calling_module(self):
        "Get the name of the module that started the program (bottom stack frame)"
        self.calling_module = inspect.stack()[-1][1].split(os.sep)[-1].split('.')[0]
        return self.calling_module

    def reset_log(self,log):
        "Reset the log handlers if they exist"
        try:
            log.handlers = []
        except Exception as e:
            print('Failed to close the logger object')
            print('Exception', e)

    def set_log_level(self,log,level=logging.INFO):
        "Set the logger's threshold level"
        log.setLevel(level)

    def set_stream_handler_level(self,streamHandler,level):
        "Set the console handler's threshold level"
        streamHandler.setLevel(level)

    def set_stream_handler_formatter(self,streamHandler,formatter):
        # BUG FIX: this previously ignored the formatter argument and set an
        # empty string, so console output never used the configured format.
        # It now mirrors set_file_handler_formatter and expects a
        # logging.Formatter instance.
        streamHandler.setFormatter(formatter)

    def add_stream_handler(self,log,handlerLevel,handlerFormat):
        "Attach a console handler using the configured level and format"
        streamHandler = logging.StreamHandler()
        self.set_stream_handler_level(streamHandler,handlerLevel)
        self.set_stream_handler_formatter(streamHandler,logging.Formatter(handlerFormat))
        log.addHandler(streamHandler)

    def set_file_handler_level(self,fileHandler,level):
        "Set the file handler's threshold level"
        fileHandler.setLevel(level)

    def set_file_handler_formatter(self,fileHandler,formatter):
        "Attach a logging.Formatter to the file handler"
        fileHandler.setFormatter(formatter)

    def add_file_handler(self,log,handlerLevel,handlerFormat,log_file_name):
        "Attach a file handler writing to log_file_name with the configured format"
        fileHandler = logging.FileHandler(log_file_name)
        self.set_file_handler_level(fileHandler,handlerLevel)
        formatter = logging.Formatter(handlerFormat)
        self.set_file_handler_formatter(fileHandler,formatter)
        log.addHandler(fileHandler)

    def write(self,msg,level='info'):
        "Log msg at the given level, tagging it with the calling function's name"
        fname = inspect.stack()[2][3] #May be use a entry-exit decorator instead
        d = {'caller_func': fname}
        if level.lower()== 'debug':
            self.log.debug(msg, extra=d)
        elif level.lower()== 'info':
            self.log.info(msg, extra=d)
        elif level.lower()== 'warn' or level.lower()=='warning':
            self.log.warn(msg, extra=d)
        elif level.lower()== 'error':
            self.log.error(msg, extra=d)
        elif level.lower()== 'critical':
            self.log.critical(msg, extra=d)
        else:
            self.log.critical("Unknown level passed for the msg: %s", msg, extra=d)
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/testrail.py
|
#
# TestRail API binding for Python 2.x (API v2, available since
# TestRail 3.0)
#
# Learn more:
#
# http://docs.gurock.com/testrail-api2/start
# http://docs.gurock.com/testrail-api2/accessing
#
# Copyright Gurock Software GmbH. See license.md for details.
#
import urllib2, json, base64
class APIClient:
    """
    TestRail API binding (API v2).
    Ported to Python 3: uses urllib.request/urllib.error in place of the
    Python-2-only urllib2 module and base64.b64encode in place of
    base64.encodestring (removed in Python 3.9).
    Reference: http://docs.gurock.com/testrail-api2/start
    """
    def __init__(self, base_url):
        self.user = ''
        self.password = ''
        if not base_url.endswith('/'):
            base_url += '/'
        self.__url = base_url + 'index.php?/api/v2/'

    def send_get(self, uri):
        """
        Issue a GET request (read) against the API and return the result
        as a Python dict.
        uri: the API method to call including parameters (e.g. get_case/1)
        """
        return self.__send_request('GET', uri, None)

    def send_post(self, uri, data):
        """
        Issue a POST request (write) against the API and return the result
        as a Python dict.
        uri: the API method to call including parameters (e.g. add_case/1)
        data: the data to submit as part of the request (a Python dict)
        """
        return self.__send_request('POST', uri, data)

    def __send_request(self, method, uri, data):
        #Local imports keep this file importable without editing the
        #top-of-file import line (urllib2 does not exist on Python 3)
        import urllib.request
        import urllib.error
        url = self.__url + uri
        request = urllib.request.Request(url)
        if method == 'POST':
            request.data = json.dumps(data).encode('utf-8')
        credentials = '%s:%s' % (self.user, self.password)
        auth = base64.b64encode(credentials.encode('utf-8')).decode('ascii').strip()
        request.add_header('Authorization', 'Basic %s' % auth)
        request.add_header('Content-Type', 'application/json')
        http_error = None
        try:
            response = urllib.request.urlopen(request).read()
        except urllib.error.HTTPError as e:
            #Python 3 unbinds 'e' when the except block ends, so stash the
            #exception in a name that survives for the error check below
            response = e.read()
            http_error = e
        if response:
            result = json.loads(response)
        else:
            result = {}
        if http_error is not None:
            if result and 'error' in result:
                error = '"' + result['error'] + '"'
            else:
                error = 'No additional error message received'
            raise APIError('TestRail API returned HTTP %s (%s)' %
                (http_error.code, error))
        return result
class APIError(Exception):
    """Raised when the TestRail API responds with an HTTP error status."""
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Common_Objects_Template.py
|
"""
Page object model for the Main
"""
from selenium import webdriver
from selenium.common.exceptions import NoSuchElementException
from selenium.webdriver.common.keys import Keys
from Page import Page
from Header_Section import Header_Section
from Nav_Menu import Nav_Menu
class Common_Objects_Template(Header_Section,Nav_Menu):
    "Template page object aggregating the header and nav-menu sections"
    def start(self):
        "Initialize the locators of the fields common to all pages"
        #NOTE(review): the original body held only stray string literals
        #(commented-out code disguised as a docstring); they were no-ops,
        #so removing them preserves behavior
        #TODO: add the xpaths of the common objects here
        pass
qxf2_public_repos
|
qxf2_public_repos/GMail/testrail.env
|
#FILL OUT YOUR TESTRAIL DETAILS HERE
TESTRAIL_URL=
TESTRAIL_USER=
TESTRAIL_PASSWORD=
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Main_Page.py
|
"""
Page object model for the main page
"""
from Page import Page
from Header_Section import Header_Section
from Nav_Menu import Nav_Menu
class Main_Page(Page):
    "Page object for the Main page"
    def start(self):
        #Relative url: the page opens at the base url configured in Page
        self.url = ""
        self.open(self.url)
        #Create a Header Section object
        self.header_obj = Header_Section(self.driver)
        #Create a Menu object
        self.menu_obj = Nav_Menu(self.driver)
| 0 |
qxf2_public_repos
|
qxf2_public_repos/GMail/Header_Section.py
|
"""
Page object model for the page header
"""
from Page import Page
class Header_Section(Page):
    "Page object for the page header"
    def start(self):
        "Locators for the header fields"
        #Search and profile
        self.search_textbox = "//input[@id='gbqfq']"
        self.search_button = "//button[@id='gbqfb']"
        self.account_dropdown = "//a[@title='Google Account: test@qxf2.com']"
        self.signout_button = "//a[text()='Sign out']"
        self.search_result = "//span[contains(text(),'%s')]"

    def search_by_subject(self,searchtext):
        "Search the mailbox by subject and open the first matching message"
        self.set_text(self.search_textbox,'subject:'+searchtext)
        self.click_element(self.search_button)
        self.wait(3)
        self.driver.refresh()
        self.wait(3)
        result_locator = self.search_result%searchtext
        #Guard clause: bail out early when no result shows up
        if not self.check_element_present(result_locator):
            self.write("No search result")
            return False
        self.click_element(result_locator)
        self.write("Message for query '%s'"%searchtext)
        return True
| 0 |
qxf2_public_repos
|
qxf2_public_repos/makemework/conftest.py
|
import pytest
from conf import browser_os_name_conf
from utils import post_test_reports_to_slack
from utils.email_pytest_report import Email_Pytest_Report
#Thin fixtures exposing the command-line options declared in pytest_addoption
@pytest.fixture
def browser(request):
    "pytest fixture for browser"
    return request.config.getoption("-B")
@pytest.fixture
def base_url(request):
    "pytest fixture for base url"
    return request.config.getoption("-U")
@pytest.fixture
def api_url(request):
    "pytest fixture for api url"
    return request.config.getoption("-A")
@pytest.fixture
def test_run_id(request):
    "pytest fixture for test run id"
    return request.config.getoption("-R")
@pytest.fixture
def testrail_flag(request):
    "pytest fixture for test rail flag"
    return request.config.getoption("-X")
@pytest.fixture
def remote_flag(request):
    "pytest fixture for browserstack/sauce flag"
    return request.config.getoption("-M")
@pytest.fixture
def browser_version(request):
    "pytest fixture for browser version"
    return request.config.getoption("--ver")
@pytest.fixture
def os_name(request):
    "pytest fixture for os_name"
    return request.config.getoption("-P")
@pytest.fixture
def os_version(request):
    "pytest fixture for os version"
    return request.config.getoption("-O")
@pytest.fixture
def remote_project_name(request):
    "pytest fixture for browserStack project name"
    return request.config.getoption("--remote_project_name")
@pytest.fixture
def remote_build_name(request):
    "pytest fixture for browserStack build name"
    return request.config.getoption("--remote_build_name")
@pytest.fixture
def slack_flag(request):
    "pytest fixture for sending reports on slack"
    return request.config.getoption("-S")
@pytest.fixture
def tesults_flag(request):
    "pytest fixture for sending results to tesults"
    return request.config.getoption("--tesults")
@pytest.fixture
def mobile_os_name(request):
    "pytest fixture for mobile os name"
    return request.config.getoption("-G")
@pytest.fixture
def mobile_os_version(request):
    "pytest fixture for mobile os version"
    return request.config.getoption("-H")
@pytest.fixture
def device_name(request):
    "pytest fixture for device name"
    return request.config.getoption("-I")
@pytest.fixture
def app_package(request):
    "pytest fixture for app package"
    return request.config.getoption("-J")
@pytest.fixture
def app_activity(request):
    "pytest fixture for app activity"
    return request.config.getoption("-K")
@pytest.fixture
def device_flag(request):
    "pytest fixture for device flag"
    return request.config.getoption("-Q")
@pytest.fixture
def email_pytest_report(request):
    "pytest fixture for the email pytest report flag"
    return request.config.getoption("--email_pytest_report")
@pytest.fixture
def app_name(request):
    "pytest fixture for app name"
    return request.config.getoption("-D")
@pytest.fixture
def app_path(request):
    "pytest fixture for app path"
    return request.config.getoption("-N")
def pytest_terminal_summary(terminalreporter, exitstatus):
    "add additional section in terminal summary reporting."
    #Bug fix: the email branch used to be an 'elif', so asking for both the
    #slack post and the email report silently skipped the email
    if terminalreporter.config.getoption("-S").lower() == 'y':
        post_test_reports_to_slack.post_reports_to_slack()
    if terminalreporter.config.getoption("--email_pytest_report").lower() == 'y':
        #Initialize the Email_Pytest_Report object
        email_obj = Email_Pytest_Report()
        # Send html formatted email body message with pytest report as an attachment
        email_obj.send_test_report_email(html_body_flag=True,attachment_flag=True,report_file_path= 'default')
    if terminalreporter.config.getoption("--tesults").lower() == 'y':
        #NOTE(review): 'Tesults' is never imported in this file, so this
        #line raises NameError when the flag is set - import the Tesults
        #helper at the top of the file before relying on this branch
        Tesults.post_results_to_tesults()
def pytest_generate_tests(metafunc):
    "test generator function to run tests across different parameters"
    if 'browser' in metafunc.fixturenames:
        #Remote run (-M y): parametrize browser + version + OS combinations
        if metafunc.config.getoption("-M").lower() == 'y':
            #-B all: run against every cross browser/platform combination
            if metafunc.config.getoption("-B") == ["all"]:
                metafunc.parametrize("browser,browser_version,os_name,os_version",
                                     browser_os_name_conf.cross_browser_cross_platform_config)
            #No -B given: fall back to the default remote configuration
            elif metafunc.config.getoption("-B") == []:
                metafunc.parametrize("browser,browser_version,os_name,os_version",
                                     browser_os_name_conf.default_config_list)
            #Specific browser given: use the first value of each option
            else:
                config_list = [(metafunc.config.getoption("-B")[0],metafunc.config.getoption("--ver")[0],metafunc.config.getoption("-P")[0],metafunc.config.getoption("-O")[0])]
                metafunc.parametrize("browser,browser_version,os_name,os_version",
                                     config_list)
        #Local run: only the browser is parametrized
        if metafunc.config.getoption("-M").lower() !='y':
            if metafunc.config.getoption("-B") == ["all"]:
                metafunc.config.option.browser = browser_os_name_conf.local_browsers
                metafunc.parametrize("browser", metafunc.config.option.browser)
            elif metafunc.config.getoption("-B") == []:
                metafunc.parametrize("browser",browser_os_name_conf.default_browser)
            else:
                config_list_local = [(metafunc.config.getoption("-B")[0])]
                metafunc.parametrize("browser", config_list_local)
def pytest_addoption(parser):
    "Register the framework's command line options with pytest"
    parser.addoption("-B","--browser",
                      dest="browser",
                      action="append",
                      default=[],
                      help="Browser. Valid options are firefox, ie and chrome")
    parser.addoption("-U","--app_url",
                      dest="url",
                      default="https://weathershopper.pythonanywhere.com",
                      help="The url of the application")
    #Bug fix: this option previously declared dest="url" too, which
    #collided with -U/--app_url and clobbered the base url default
    parser.addoption("-A","--api_url",
                      dest="api_url",
                      default="http://35.167.62.251",
                      help="The url of the api")
    parser.addoption("-X","--testrail_flag",
                      dest="testrail_flag",
                      default='N',
                      help="Y or N. 'Y' if you want to report to TestRail")
    parser.addoption("-R","--test_run_id",
                      dest="test_run_id",
                      default=None,
                      help="The test run id in TestRail")
    parser.addoption("-M","--remote_flag",
                      dest="remote_flag",
                      default="N",
                      help="Run the test in Browserstack/Sauce Lab: Y or N")
    parser.addoption("-O","--os_version",
                      dest="os_version",
                      action="append",
                      help="The operating system: xp, 7",
                      default=[])
    parser.addoption("--ver",
                      dest="browser_version",
                      action="append",
                      help="The version of the browser: a whole number",
                      default=[])
    parser.addoption("-P","--os_name",
                      dest="os_name",
                      action="append",
                      help="The operating system: Windows 7, Linux",
                      default=[])
    parser.addoption("--remote_project_name",
                      dest="remote_project_name",
                      help="The project name if its run in BrowserStack",
                      default=None)
    parser.addoption("--remote_build_name",
                      dest="remote_build_name",
                      help="The build name if its run in BrowserStack",
                      default=None)
    parser.addoption("-S","--slack_flag",
                      dest="slack_flag",
                      default="N",
                      help="Post the test report on slack channel: Y or N")
    parser.addoption("-G","--mobile_os_name",
                      dest="mobile_os_name",
                      help="Enter operating system of mobile. Ex: Android, iOS",
                      default="Android")
    parser.addoption("-H","--mobile_os_version",
                      dest="mobile_os_version",
                      help="Enter version of operating system of mobile: 8.1.0",
                      default="8.0")
    parser.addoption("-I","--device_name",
                      dest="device_name",
                      help="Enter device name. Ex: Emulator, physical device name",
                      default="Google Pixel")
    parser.addoption("-J","--app_package",
                      dest="app_package",
                      help="Enter name of app package. Ex: bitcoininfo",
                      default="com.dudam.rohan.bitcoininfo")
    parser.addoption("-K","--app_activity",
                      dest="app_activity",
                      help="Enter name of app activity. Ex: .MainActivity",
                      default=".MainActivity")
    parser.addoption("-Q","--device_flag",
                      dest="device_flag",
                      help="Enter Y or N. 'Y' if you want to run the test on device. 'N' if you want to run the test on emulator.",
                      default="N")
    parser.addoption("--email_pytest_report",
                      dest="email_pytest_report",
                      help="Email pytest report: Y or N",
                      default="N")
    parser.addoption("--tesults",
                      dest="tesults_flag",
                      default='N',
                      help="Y or N. 'Y' if you want to report results with Tesults")
    parser.addoption("-D","--app_name",
                      dest="app_name",
                      help="Enter application name to be uploaded.Ex:Bitcoin Info_com.dudam.rohan.bitcoininfo.apk.",
                      default="Bitcoin Info_com.dudam.rohan.bitcoininfo.apk")
    #NOTE(review): -N has no default, so getoption("-N") is None unless supplied
    parser.addoption("-N","--app_path",
                      dest="app_path",
                      help="Enter app path")
| 0 |
qxf2_public_repos
|
qxf2_public_repos/makemework/pytest.ini
|
[pytest]
addopts = -v -s -rsxX --continue-on-collection-errors --tb=short --ignore=utils/Test_Rail.py --ignore=utils/Test_Runner_Class.py -p no:cacheprovider
norecursedirs = .svn _build tmp* log .vscode .git
| 0 |
qxf2_public_repos
|
qxf2_public_repos/makemework/LICENSE
|
MIT License
Copyright (c) 2019 qxf2
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
| 0 |
qxf2_public_repos
|
qxf2_public_repos/makemework/requirements.txt
|
ansimarkup==1.4.0
apipkg==1.5
Appium-Python-Client==0.28
atomicwrites==1.3.0
attrs==22.1.0
better-exceptions-fork==0.2.1.post6
boto3==1.25.1
botocore==1.28.1
certifi==2022.12.7
chardet==3.0.4
colorama==0.4.1
docutils==0.14
execnet==1.6.0
idna==2.7
importlib-metadata==0.18
jmespath==0.9.4
loguru==0.2.5
more-itertools==7.0.0
packaging==19.0
pillow>=6.2.0
pluggy==1.0.0
py==1.11.0
Pygments==2.7.4
pyparsing==2.4.0
pytest==7.2.0
pytest-forked==1.0.2
pytest-xdist==1.22.0
python-dateutil==2.8.0
python-dotenv==0.8.2
requests==2.28.1
s3transfer==0.6.0
selenium==3.13.0
six==1.12.0
urllib3==1.26.12
wcwidth==0.1.7
zipp==0.5.1
| 0 |
qxf2_public_repos
|
qxf2_public_repos/makemework/README.md
|
# Make me work!
Fix the issues in this repo and make this program work. This repository is aimed at folks who have already learnt to *write* basic Python but are looking for more realistic challenges that involve reading a large enough codebase, exploring file structures and making changes to an existing codebase.
The code you are going to run is a Selenium test for the [Weather Shopper](http://weathershopper.pythonanywhere.com/) application. The automated test itself completes the [weather shopper exercise](https://github.com/qxf2/weather-shopper). Your job is to fix the problems in the automated test and make it run successfully.
# Setup
0. This codebase uses Python 3.10.0
1. Fork this repository
2. Clone your forked repository
3. Create a virtualenv and activate it
4. `pip install -r requirements.txt`
5. Install Chrome driver. If you don't know how to, please try:
> [Chrome driver](https://sites.google.com/a/chromium.org/chromedriver/getting-started)
6. Run the test using the command `pytest -k e2e`
The setup instructions are intentionally high-level since this repository is aimed at people who have already written Python before. If you are a beginner, you will find our [other repository](https://github.com/qxf2/wtfiswronghere) a better place to start.
# Your assignment
The [weather shopper exercise](https://github.com/qxf2/weather-shopper) has been partially completed using the code provided to you. Your assignment is to:
1. fix the errors in the existing code
2. complete the exercise on the payment page
3. use the same design patterns and programming style when solving the exercises
# How to proceed?
1. Run the test using the command `pytest -k e2e`
2. Observe, debug and fix the error
3. Test your fix
4. Commit your change and push
5. Repeat steps 1-4 for the next error
# Example working test
If you fix all the bugs in this code, your test should perform like the gif below:

Remember, you should not stop at just fixing the existing code. You should also complete the instructions on the cart page too!
# Debugging tips
Here are some useful debugging tips that do not involve the use of debugger:
1. Search for strings in all files
2. Search for sub-strings in all files if the exact string does not exist
3. F12 to follow the definition of a method in Visual Studio Code
4. Add debug messages to figure out the flow
5. if True: trick (to get exact error messages, in the test, replace `try:` with `if True:` and comment out the `except` portion)
6. Read the log messages backwards
7. Sometimes the error happens in the line before the failure!
# Notes
1. Use Python 3.10.0
2. We recommend using Visual Studio code as your IDE
3. We recommend using a virtualenv
4. You need to have Chrome driver installed
# About
This repository is created and maintained by [Qxf2 Services](https://qxf2.com/?utm_source=github&utm_medium=click&utm_campaign=Make%20me%20word). Qxf2 provides software testing services for startups.
If your team is working on an early stage product and needs QA, you can hire Qxf2 Services to help. Contact Arun at mak@qxf2.com
| 0 |
qxf2_public_repos/makemework
|
qxf2_public_repos/makemework/tests/test_e2e_purchase_product.py
|
"""
This is a broadstack test for Weather Shopper.
This test will:
a) visit the main page
b) get the temperature
c) based on temperature choose to buy sunscreen or moisturizer
d) add products based on some specified logic
e) verify the cart
f) make a payment
"""
import os,sys,time
sys.path.append(os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from page_objects.PageFactory import PageFactory
from utils.Option_Parser import Option_Parser
import conf.e2e_weather_shopper_conf as conf
def test_e2e_weather_shopper(base_url,browser,browser_version,os_version,os_name,remote_flag,testrail_flag,tesults_flag,test_run_id,remote_project_name,remote_build_name):
    "Run the end to end Weather Shopper purchase test"
    try:
        #Initalize flags for tests summary
        expected_pass = 0
        actual_pass = -1

        #Create a test object and fill the example form.
        test_obj = PageFactory.get_page_object("Main Page",base_url=base_url)

        #Setup and register a driver
        start_time = int(time.time()) #Set start_time with current time
        test_obj.register_driver(remote_flag,os_name,os_version,browser,browser_version,remote_project_name,remote_build_name)

        #Read the temperature
        temperature = test_obj.get_temperature()
        #Bug fix: isinstance is the correct/idiomatic type check
        #(type(x) == int also rejects int subclasses)
        result_flag = isinstance(temperature, int)
        test_obj.log_result(result_flag,
                            positive="Obtained the temperature from the landing page",
                            negative="Could not to parse the temperature on the landing page.",
                            level="critical")

        #Choose the right product type
        #NOTE(review): temperatures between 19 and 33 leave product_type
        #empty - the app is expected to always show an extreme temperature;
        #confirm before relying on this in other environments
        product_type = ""
        if temperature <= 18:
            product_type = "moisturizers"
        if temperature >= 34:
            product_type = "sunscreens"
        result_flag = test_obj.click_buy_button(product_type)
        test_obj.log_result(result_flag,
                            positive="Landed on the %s page after clicking the buy button"%product_type,
                            negative="Could not land on the %s page after clicking the buy button"%product_type,
                            level="critical")

        #Add the cheapest product for every filter condition in the conf
        product_filter_list = conf.PURCHASE_LOGIC[product_type]
        product_list = []
        for filter_condition in product_filter_list:
            cheapest_product = test_obj.get_minimum_priced_product(filter_condition)
            product_list.append(cheapest_product)
            result_flag = test_obj.add_product(cheapest_product.name)
            test_obj.log_result(result_flag,
                                positive="Added the cheapest product '%s' with '%s'"%(cheapest_product.name,filter_condition),
                                negative="Could not add the cheapest product '%s' with '%s'"%(cheapest_product.name,filter_condition))

        #Go to the cart
        result_flag = test_obj.go_to_cart()
        test_obj.log_result(result_flag,
                            positive="Automation is now on the cart page",
                            negative="Automation is not on the cart page",
                            level="critical")

        #Verify the products displayed on the cart page
        result_flag = test_obj.verify_cart(product_list)
        #Bug fix: the positive message was a copy-paste of the negative one
        test_obj.log_result(result_flag,
                            positive="The cart contents match the products that were added",
                            negative="Something wrong with the cart. The log messages above will have the details",
                            level="critical")

        #Print out the results
        test_obj.write_test_summary()

        #Teardown
        test_obj.wait(3)
        expected_pass = test_obj.result_counter
        actual_pass = test_obj.past_counter
        test_obj.teardown()

    except Exception as e:
        print("Exception when trying to run test:%s"%__file__)
        print("Python says:%s"%repr(e))

    assert expected_pass == actual_pass, "Test failed: %s"%__file__
#---START OF SCRIPT
if __name__=='__main__':
    print("Start of %s"%__file__)

    #Creating an instance of the option parser class
    options_obj = Option_Parser()
    options = options_obj.get_options()

    #Run the test only if the options provided are valid
    if options_obj.check_options(options):
        test_e2e_weather_shopper(base_url=options.url,
                                browser=options.browser,
                                browser_version=options.browser_version,
                                os_version=options.os_version,
                                os_name=options.os_name,
                                remote_flag=options.remote_flag,
                                testrail_flag=options.testrail_flag,
                                tesults_flag=options.tesults_flag,
                                test_run_id=options.test_run_id,
                                remote_project_name=options.remote_project_name,
                                remote_build_name=options.remote_build_name)
    else:
        print('ERROR: Received incorrect comand line input arguments')
        #Bug fix: 'option_obj' was a NameError - the variable is 'options_obj'
        print(options_obj.print_usage())
| 0 |
qxf2_public_repos/makemework
|
qxf2_public_repos/makemework/utils/Test_Rail.py
|
"""
TestRail integration:
* limited to what we need at this time
* we assume TestRail operates in single suite mode
i.e., the default, reccomended mode
API reference: http://docs.gurock.com/testrail-api2/start
"""
import dotenv,os
from utils import testrail
import conf.testrailenv_conf as conf_file
class Test_Rail:
    """
    Wrapper around TestRail's API.
    Assumes TestRail operates in single suite mode (the default,
    recommended mode). API reference: http://docs.gurock.com/testrail-api2/start
    """
    def __init__(self):
        "Initialize the TestRail objects"
        self.set_testrail_conf()

    def set_testrail_conf(self):
        "Set the TestRail URL and username, password"
        #Set the TestRail URL
        self.testrail_url = conf_file.testrail_url
        self.client = testrail.APIClient(self.testrail_url)
        #TestRail User and Password
        self.client.user = conf_file.testrail_user
        self.client.password = conf_file.testrail_password

    def get_project_id(self,project_name):
        "Get the project ID using project name; returns None when not found"
        project_id=None
        projects = self.client.send_get('get_projects')
        for project in projects:
            if project['name'] == project_name:
                project_id = project['id']
                break
        return project_id

    def get_suite_id(self,project_name,suite_name):
        "Get the suite ID using project name and suite name; None when not found"
        suite_id=None
        project_id = self.get_project_id(project_name)
        suites = self.client.send_get('get_suites/%s'%(project_id))
        for suite in suites:
            if suite['name'] == suite_name:
                suite_id = suite['id']
                break
        return suite_id

    def get_milestone_id(self,project_name,milestone_name):
        "Get the milestone ID using project name and milestone name"
        milestone_id = None
        project_id = self.get_project_id(project_name)
        milestones = self.client.send_get('get_milestones/%s'%(project_id))
        for milestone in milestones:
            if milestone['name'] == milestone_name:
                milestone_id = milestone['id']
                break
        return milestone_id

    def get_user_id(self,user_name):
        "Get the user ID using user name; returns None when not found"
        user_id=None
        users = self.client.send_get('get_users')
        for user in users:
            if user['name'] == user_name:
                user_id = user['id']
                break
        return user_id

    def get_run_id(self,project_name,test_run_name):
        "Get the run ID using project name and test run name"
        run_id=None
        project_id = self.get_project_id(project_name)
        try:
            test_runs = self.client.send_get('get_runs/%s'%(project_id))
        except Exception as e:
            print('Exception in get_run_id() fetching test runs.')
            print('PYTHON SAYS: ')
            print(e)
        else:
            for test_run in test_runs:
                if test_run['name'] == test_run_name:
                    run_id = test_run['id']
                    break
        return run_id

    def create_milestone(self,project_name,milestone_name,milestone_description=""):
        "Create a new milestone if it does not already exist"
        milestone_id = self.get_milestone_id(project_name,milestone_name)
        if milestone_id is None:
            project_id = self.get_project_id(project_name)
            if project_id is not None:
                try:
                    data = {'name':milestone_name,
                            'description':milestone_description}
                    result = self.client.send_post('add_milestone/%s'%str(project_id),
                                                   data)
                except Exception as e:
                    #Bug fix: the message used to mention create_new_project()
                    print('Exception in create_milestone() creating new milestone.')
                    print('PYTHON SAYS: ')
                    print(e)
                else:
                    print('Created the milestone: %s'%milestone_name)
        else:
            print("Milestone '%s' already exists"%milestone_name)

    def create_new_project(self,new_project_name,project_description,show_announcement,suite_mode):
        "Create a new project if it does not already exist"
        project_id = self.get_project_id(new_project_name)
        if project_id is None:
            try:
                result = self.client.send_post('add_project',
                                               {'name': new_project_name,
                                                'announcement': project_description,
                                                'show_announcement': show_announcement,
                                                'suite_mode': suite_mode,})
            except Exception as e:
                print('Exception in create_new_project() creating new project.')
                print('PYTHON SAYS: ')
                print(e)
        else:
            print("Project already exists %s"%new_project_name)

    def create_test_run(self,project_name,test_run_name,milestone_name=None,description="",suite_name=None,case_ids=[],assigned_to=None):
        "Create a new test run if it does not already exist"
        #reference: http://docs.gurock.com/testrail-api2/reference-runs
        project_id = self.get_project_id(project_name)
        test_run_id = self.get_run_id(project_name,test_run_name)
        if project_id is not None and test_run_id is None:
            data = {}
            if suite_name is not None:
                suite_id = self.get_suite_id(project_name,suite_name)
                if suite_id is not None:
                    data['suite_id'] = suite_id
            data['name'] = test_run_name
            data['description'] = description
            if milestone_name is not None:
                milestone_id = self.get_milestone_id(project_name,milestone_name)
                if milestone_id is not None:
                    data['milestone_id'] = milestone_id
            if assigned_to is not None:
                assignedto_id = self.get_user_id(assigned_to)
                if assignedto_id is not None:
                    data['assignedto_id'] = assignedto_id
            if len(case_ids) > 0:
                data['case_ids'] = case_ids
                data['include_all'] = False
            try:
                result = self.client.send_post('add_run/%s'%(project_id),data)
            except Exception as e:
                print('Exception in create_test_run() Creating Test Run.')
                print('PYTHON SAYS: ')
                print(e)
            else:
                print('Created the test run: %s'%test_run_name)
        else:
            if project_id is None:
                print("Cannot add test run %s because Project %s was not found"%(test_run_name,project_name))
            elif test_run_id is not None:
                print("Test run '%s' already exists"%test_run_name)

    def delete_project(self,new_project_name,project_description):
        "Delete an existing project"
        project_id = self.get_project_id(new_project_name)
        if project_id is not None:
            try:
                result = self.client.send_post('delete_project/%s'%(project_id),project_description)
            except Exception as e:
                print('Exception in delete_project() deleting project.')
                print('PYTHON SAYS: ')
                print(e)
        else:
            print('Cant delete the project given project name: %s'%(new_project_name))

    def delete_test_run(self,test_run_name,project_name):
        "Delete an existing test run"
        #Bug fix: get_run_id takes (project_name,test_run_name); the
        #arguments were previously passed in swapped order, so the lookup
        #always failed
        run_id = self.get_run_id(project_name,test_run_name)
        if run_id is not None:
            try:
                result = self.client.send_post('delete_run/%s'%(run_id),test_run_name)
            except Exception as e:
                print('Exception in delete_test_run() deleting test run.')
                print('PYTHON SAYS: ')
                print(e)
        else:
            print('Cant delete the test run for given project and test run name: %s , %s'%(project_name,test_run_name))

    def update_testrail(self,case_id,run_id,result_flag,msg=""):
        "Update TestRail for a given run_id and case_id; returns True on success"
        update_flag = False
        #Update the result in TestRail using send_post function.
        #Parameters for add_result_for_case is the combination of runid and case id.
        #status_id is 1 for Passed, 2 For Blocked, 4 for Retest and 5 for Failed
        status_id = 1 if result_flag is True else 5
        if ((run_id is not None) and (case_id != 'None')) :
            try:
                result = self.client.send_post(
                    'add_result_for_case/%s/%s'%(run_id,case_id),
                    {'status_id': status_id, 'comment': msg })
            except Exception as e:
                print('Exception in update_testrail() updating TestRail.')
                print('PYTHON SAYS: ')
                print(e)
            else:
                #Bug fix: the flag was never set, so callers always saw False
                update_flag = True
                print('Updated test result for case: %s in test run: %s\n'%(case_id,run_id))
        return update_flag
| 0 |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.