| file_name<br>large_string · lengths 4–140 | prefix<br>large_string · lengths 0–12.1k | suffix<br>large_string · lengths 0–12k | middle<br>large_string · lengths 0–7.51k | fim_type<br>large_string · classes (4 values) |
|---|---|---|---|---|
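Each row is one fill-in-the-middle (FIM) training example: a source file is cut into three contiguous pieces, so concatenating the columns in `prefix + middle + suffix` order restores the file, and `fim_type` records how the hole was chosen (`identifier_body`, `identifier_name`, `conditional_block`, or `random_line_split`). Long cells appear clipped at the length limits above, which is why the listings below start or end mid-line. A minimal sketch of the recombination, assuming rows are loaded as plain dicts:

```python
# Minimal sketch: rebuild a source file from one FIM row.
# `row` is assumed to be a dict with the five columns shown above.
def reassemble(row: dict) -> str:
    # A FIM example splits a file into three contiguous pieces; concatenating
    # them in prefix-middle-suffix order restores the original text.
    return row["prefix"] + row["middle"] + row["suffix"]

row = {
    "file_name": "kmp.py",
    "prefix": "def computeGamma(self, data):\n",
    "middle": "    L = np.zeros((self.num_states, data.shape[1]))\n",
    "suffix": "    # ...\n",
    "fim_type": "identifier_body",
}
print(reassemble(row))
```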
**kmp.py** — rows with `fim_type`: `identifier_body`, `random_line_split`, `identifier_name`

```python
# kmp.py — Kernelized Movement Primitives (KMP) on top of a GMM/GMR reference
# trajectory. The top of the file is truncated in this dump; the imports below
# are inferred from usage, and reconstructed fragments are marked as such.
import copy
import math
import os

import matplotlib.pyplot as plt
import numpy as np


class GMM:
    # ... (constructor and the start of the EM loop are truncated in this
    # dump; the method and loop headers below are reconstructed from context,
    # and LL holds the per-iteration average log-likelihood) ...
    def EM(self, data):
        # ...
        for iter in range(self.em_num_max_steps):
            # ... (E/M updates truncated) ...
            if iter >= self.em_num_min_steps:
                if LL[iter] - LL[iter - 1] < self.em_max_diffLL or iter == self.em_num_max_steps - 1:
                    print('EM converged after', iter, 'iterations.')
                    return
        print('Max no. of iterations reached')
        return

    def computeGamma(self, data):
        # Per-state likelihoods L and normalized responsibilities gamma.
        L = np.zeros((self.num_states, data.shape[1]))
        for i in range(self.num_states):
            L[i, :] = self.priors[i] * gaussPDF(data, self.mu[:, i], self.sigma[i, :, :])
        L_axis0_sum = np.sum(L, axis=0)
        gamma = np.divide(L, np.repeat(L_axis0_sum.reshape(1, L_axis0_sum.shape[0]), self.num_states, axis=0))
        return L, gamma


class ReferenceTrajectoryPoint:
    # Reconstructed: the original definition is truncated in this dump. Usage
    # implies a plain holder; kmpPredictMean builds points without a sigma.
    def __init__(self, t, mu, sigma=None):
        self.t = t
        self.mu = mu
        self.sigma = sigma


#######################################################################################################################
class KMP:  # Assumptions: input is only time; all output DOFs are continuous. TODO: generalize
    def __init__(self, input_dofs, robot_dofs, demo_dt, ndemos, data_address):
        self.input_dofs = input_dofs
        self.robot_dofs = robot_dofs
        self.ndemos = ndemos
        self.demo_dt = demo_dt
        self.norm_data = self.loadData(addr=data_address)  # Fetch the data from the saved location
        # self.norm_data = self.normalizeData(self.training_data)  # Make all demos the same length
        self.demo_duration = 200 * self.demo_dt  # self.training_data[0].shape[0] * self.demo_dt
        self.data = self.combineData(self.norm_data)
        self.gmm_model = GMM(num_vars=(self.input_dofs + self.robot_dofs), data=self.data)
        self.model_num_datapts = int(self.demo_duration / self.gmm_model.dt)
        self.data_out, self.sigma_out, _ = self.GMR(np.arange(1, self.model_num_datapts + 1) * self.gmm_model.dt)
        ####### DEBUGGING ##############
        # plt.scatter(self.data[1, :], self.data[2, :])
        # for i in range(self.gmm_model.num_states):
        #     plt.plot(self.gmm_model.mu[1, i], self.gmm_model.mu[2, i], 'ro')
        # plt.show()
        ################################
        self.ref_traj = []
        for i in range(self.model_num_datapts):
            self.ref_traj.append(ReferenceTrajectoryPoint(t=(i + 1) * self.gmm_model.dt,
                                                          mu=self.data_out[:, i],
                                                          sigma=self.sigma_out[i, :, :]))
        ####### DEBUGGING ##############
        # ref_path = extractPath(self.ref_traj)
        # plt.scatter(ref_path[:, 0], ref_path[:, 1])
        # plt.title('Reference Path')
        # plt.show()
        ################################
        print('KMP Initialized with Reference Trajectory')

    def loadData(self, addr):
        # Data is saved as a list of arrays: [np.ndarray(Demo1), np.ndarray(Demo2), ...]
        data = []
        for i in range(self.ndemos):
            data.append(np.loadtxt(open(addr + str(i + 1) + ""), delimiter=","))
        return data

    def normalizeData(self, data):
        # Resample every demo to the mean demo length (linear interpolation),
        # so that all demos contain the same number of data points.
        dofs = self.input_dofs + self.robot_dofs
        dt = self.demo_dt
        total = 0  # renamed from `sum`, which shadowed the builtin
        for i in range(len(data)):
            total += len(data[i])
        mean = total / len(data)
        alpha = []
        for i in range(len(data)):
            alpha.append(len(data[i]) / mean)
        mean_t = int(mean)
        ndata = []
        for i in range(len(alpha)):
            demo_ndata = np.empty((0, dofs))
            # The number of phase steps equals the number of time steps of the
            # nominal trajectory, because for the nominal trajectory alpha is 1.
            for j in range(mean_t):
                z = j * alpha[i] * dt
                corr_timestep = z / dt
                whole = int(corr_timestep)
                frac = corr_timestep - whole
                row = []
                for k in range(dofs):
                    if whole == (data[i].shape[0] - 1):
                        row.append(data[i][whole][k])
                    else:
                        row.append(data[i][whole][k] + frac * (data[i][whole + 1][k] - data[i][whole][k]))
                demo_ndata = np.append(demo_ndata, [row], axis=0)
            ndata.append(demo_ndata)
        return ndata

    def combineData(self, data):
        # Stack all demos side by side: shape (dofs, total_time_steps).
        positions = data[0].T
        for i in range(1, len(data)):
            positions = np.append(positions, data[i].T, axis=1)
        return positions

    def GMR(self, data_in_raw):
        # Gaussian Mixture Regression: condition the joint (time, pose) GMM on
        # the input time to get an output mean and covariance per time step.
        # The plain '/' on sigma blocks is scalar division, valid because
        # input_dofs == 1 (time is the only input).
        data_in = data_in_raw.reshape((1, data_in_raw.shape[0]))
        num_datapts = data_in.shape[1]
        num_varout = self.robot_dofs
        diag_reg_factor = 1e-8
        i0, i1 = self.input_dofs, self.input_dofs + self.robot_dofs  # slice bounds: input | output
        mu_tmp = np.zeros((num_varout, self.gmm_model.num_states))
        exp_data = np.zeros((num_varout, num_datapts))
        exp_sigma = np.zeros((num_datapts, num_varout, num_varout))
        H = np.empty((self.gmm_model.num_states, num_datapts))
        for t in range(num_datapts):
            # Compute activation weights
            for i in range(self.gmm_model.num_states):
                H[i, t] = self.gmm_model.priors[i] * gaussPDF(data_in[:, t].reshape((-1, 1)),
                                                              self.gmm_model.mu[0:i0, i],
                                                              self.gmm_model.sigma[i, 0:i0, 0:i0])
            H[:, t] = H[:, t] / (np.sum(H[:, t]) + 1e-10)
            # Compute conditional means
            for i in range(self.gmm_model.num_states):
                mu_tmp[:, i] = self.gmm_model.mu[i0:i1, i] + (
                    self.gmm_model.sigma[i, 0:i0, i0:i1]
                    / self.gmm_model.sigma[i, 0:i0, 0:i0]
                    * (data_in[:, t].reshape((-1, 1)) - self.gmm_model.mu[0:i0, i])
                ).flatten()  # .flatten() added: the (1, robot_dofs) correction must be 1-D here
                exp_data[:, t] = exp_data[:, t] + H[i, t] * mu_tmp[:, i]
            # Compute conditional covariances. np.outer replaces the original
            # mu_tmp[:, i] * mu_tmp[:, i].T, which is elementwise (not an outer
            # product) for 1-D arrays.
            for i in range(self.gmm_model.num_states):
                sigma_tmp = (self.gmm_model.sigma[i, i0:i1, i0:i1]
                             - self.gmm_model.sigma[i, i0:i1, 0:i0]
                             / self.gmm_model.sigma[i, 0:i0, 0:i0]
                             * self.gmm_model.sigma[i, 0:i0, i0:i1])
                exp_sigma[t, :, :] = exp_sigma[t, :, :] + H[i, t] * (sigma_tmp + np.outer(mu_tmp[:, i], mu_tmp[:, i]))
            exp_sigma[t, :, :] = exp_sigma[t, :, :] - np.outer(exp_data[:, t], exp_data[:, t]) + np.eye(num_varout) * diag_reg_factor
        return exp_data, exp_sigma, H

    def setParams(self, dt, lamda, kh):
        # This dt is KMP's own time step (it may differ from demo_dt).
        self.dt = dt
        self.len = int(self.demo_duration / dt)
        self.lamda = lamda
        self.kh = kh

    def addViaPts(self, via_pts, via_pt_var):
        # For each via-point, search its phase window of the reference
        # trajectory for the closest point and replace that point.
        self.new_ref_traj = copy.deepcopy(self.ref_traj)
        replace_ind = 0
        num_phases = len(via_pts)
        phase_size = len(self.ref_traj) / num_phases
        for via_pt_ind, via_pt in enumerate(via_pts):
            min_dist = float('Inf')
            for i in range(math.ceil(via_pt_ind * phase_size), math.floor((via_pt_ind + 1) * phase_size)):
                dist = distBWPts(self.ref_traj[i].mu[0:2], via_pt)
                if dist < min_dist:
                    min_dist = dist
                    replace_ind = i
            if len(via_pt) == 2:  # Assumption: the first and last points are always the start and end points
                if via_pt_ind == 0 or via_pt_ind == len(via_pts) - 1:
                    via_pt = np.append(np.array(via_pt), self.ref_traj[replace_ind].mu[2:4])
                else:
                    # Velocity direction from the neighbouring via-points,
                    # scaled to the speed of the replaced reference point.
                    d = distBWPts(via_pts[via_pt_ind - 1], via_pts[via_pt_ind + 1])
                    vel = [(via_pts[via_pt_ind + 1][0] - via_pts[via_pt_ind - 1][0]) / d,
                           (via_pts[via_pt_ind + 1][1] - via_pts[via_pt_ind - 1][1]) / d]
                    curr_speed = distBWPts((0, 0), self.ref_traj[replace_ind].mu[2:4])
                    vel = [v * curr_speed for v in vel]  # loop variable renamed: the original reused i
                    via_pt = np.append(np.array(via_pt), vel)
            self.new_ref_traj[replace_ind] = ReferenceTrajectoryPoint(t=self.ref_traj[replace_ind].t,
                                                                      mu=np.array(via_pt), sigma=via_pt_var)

    def estimateMatrixMean(self):
        # Build the block kernel matrix K + lamda * Sigma and invert it.
        D = self.robot_dofs
        N = self.len
        kc = np.empty((D * N, D * N))
        for i in range(N):
            for j in range(N):
                kc[i * D:(i + 1) * D, j * D:(j + 1) * D] = kernelExtend(self.new_ref_traj[i].t,
                                                                        self.new_ref_traj[j].t,
                                                                        self.kh, self.robot_dofs)
                if i == j:
                    c_temp = self.new_ref_traj[i].sigma
                    kc[i * D:(i + 1) * D, j * D:(j + 1) * D] = kc[i * D:(i + 1) * D, j * D:(j + 1) * D] + self.lamda * c_temp
        Kinv = np.linalg.inv(kc)
        return Kinv

    def prediction(self, via_pts, via_pt_var):
        print("Starting Prediction")
        self.addViaPts(via_pts, via_pt_var)
        ####### DEBUGGING ##############
        new_ref_path = extractPath(self.new_ref_traj)
        plt.plot(new_ref_path[:, 0], new_ref_path[:, 1])
        plt.show()
        ################################
        Kinv = self.estimateMatrixMean()
        self.kmp_pred_traj = []
        for index in range(self.len):
            t = index * self.dt
            mu = self.kmpPredictMean(t, Kinv)
            self.kmp_pred_traj.append(ReferenceTrajectoryPoint(t=index * self.dt, mu=mu))
        return self.kmp_pred_traj

    def kmpPredictMean(self, t, Kinv):
        # mu(t) = k(t) @ Kinv @ Y: k stacks the kernel against every reference
        # time; Y stacks the (via-point-adapted) reference means.
        D = self.robot_dofs
        N = self.len
        k = np.empty((D, N * D))
        Y = np.empty((N * D, 1))
        for i in range(N):
            k[0:D, i * D:(i + 1) * D] = kernelExtend(t, self.new_ref_traj[i].t, self.kh, D)
            for h in range(D):
                Y[i * D + h] = self.new_ref_traj[i].mu[h]
        return np.matmul(np.matmul(k, Kinv), Y)


###############################################################################
# Functions
def gaussPDF(data, mu, sigma):
    num_vars, num_datapts = data.shape
    data = data.T - np.repeat(mu.reshape((1, mu.shape[0])), [num_datapts], axis=0)
    if num_vars == 1 and num_datapts == 1:
        prob = data ** 2 / sigma
        prob = np.e ** (-0.5 * prob) / np.sqrt((2 * np.pi) ** num_vars * sigma)
    else:
        prob = np.sum(np.multiply(np.matmul(data, np.linalg.inv(sigma)), data), axis=1)
        prob = np.e ** (-0.5 * prob) / np.sqrt((2 * np.pi) ** num_vars * abs(np.linalg.det(sigma)))
    return prob


def distBWPts(pt1, pt2):
    x = pt1[0] - pt2[0]
    y = pt1[1] - pt2[1]
    return abs(np.linalg.norm([x, y], 2))


def kernelExtend(ta, tb, h, dim):
    # 2x2 block kernel over [position; velocity]: the off-diagonal and
    # velocity blocks are finite-difference derivatives of the RBF kernel.
    dt = 0.001
    tadt = ta + dt
    tbdt = tb + dt
    kt_t = np.e ** (-h * (ta - tb) * (ta - tb))
    kt_dt_temp = np.e ** (-h * (ta - tbdt) * (ta - tbdt))
    kt_dt = (kt_dt_temp - kt_t) / dt
    kdt_t_temp = np.e ** (-h * (tadt - tb) * (tadt - tb))
    kdt_t = (kdt_t_temp - kt_t) / dt
    kdt_dt_temp = np.e ** (-h * (tadt - tbdt) * (tadt - tbdt))
    kdt_dt = (kdt_dt_temp - kt_dt_temp - kdt_t_temp + kt_t) / dt / dt
    kernel_matrix = np.zeros((dim, dim))
    for i in range(int(dim / 2)):
        kernel_matrix[i, i] = kt_t
        kernel_matrix[i, i + int(dim / 2)] = kt_dt
        kernel_matrix[i + int(dim / 2), i] = kdt_t
        kernel_matrix[i + int(dim / 2), i + int(dim / 2)] = kdt_dt
    return kernel_matrix


def extractPath(traj):
    path = np.empty((len(traj), 4))
    for i in range(len(traj)):
        path[i, :] = traj[i].mu.flatten()
    return path


###############################################################################
if __name__ == "__main__":
    data_addr = '../../training_data/KMP_div100_dt0.01_dynamic/'
    ndemos = len(os.listdir(data_addr))
    # ndemos = 10
    kmp = KMP(input_dofs=1, robot_dofs=4, demo_dt=0.01, ndemos=ndemos, data_address=data_addr)
    # Set KMP params (this dt is KMP's dt)
    kmp.setParams(dt=0.005, lamda=1, kh=6)
    # Desired via-points
    # via_pts = [[11, 15, -50, 0], [-5, 6, -25, -40], [8, -4, 30, 10], [2, 5, -10, 3]]
    # via_pts = [[8, 10, -50, 0], [-1, 6, -25, -40], [8, -4, 30, 10], [-3, 1, -10, 3]]
    via_pts = [[8.9, 3.7], [7, 9], [7, 12], [17, 12], [13, 8.75], [10.6, 3.7]]
    via_pt_var = 1e-6 * np.eye(kmp.ref_traj[0].sigma.shape[0])
    # KMP prediction
    pred_traj = kmp.prediction(via_pts, via_pt_var)
    pred_path = extractPath(pred_traj)
    # Plotting
    ref_path = extractPath(kmp.ref_traj)
    plt.figure(1)
    plt.plot(pred_path[:, 0], pred_path[:, 1], label="KMP Generated Trajectory")
    plt.plot(ref_path[:, 0], ref_path[:, 1], 'g', label="Demonstrated Trajectory")
    for via_pt in via_pts:
        plt.plot(via_pt[0], via_pt[1], 'bo')
    plt.legend(loc='lower left')
    plt.title('KMP generalization over new via points')
```
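For reference, the conditioning that `GMR` performs and the mean predictor in `kmpPredictMean` are the standard GMR and KMP expressions; with scalar time $s$ as the only input,

$$
h_i(s) = \frac{\pi_i\,\mathcal{N}(s \mid \mu_i^{s}, \Sigma_i^{ss})}{\sum_k \pi_k\,\mathcal{N}(s \mid \mu_k^{s}, \Sigma_k^{ss})}, \qquad
\hat{\mu}_i(s) = \mu_i^{\xi} + \Sigma_i^{\xi s} (\Sigma_i^{ss})^{-1} (s - \mu_i^{s}),
$$

$$
\hat{\xi}(s) = \sum_i h_i(s)\, \hat{\mu}_i(s), \qquad
\hat{\Sigma}(s) = \sum_i h_i(s) \left( \Sigma_i^{\xi\xi} - \Sigma_i^{\xi s} (\Sigma_i^{ss})^{-1} \Sigma_i^{s\xi} + \hat{\mu}_i(s)\hat{\mu}_i(s)^{\top} \right) - \hat{\xi}(s)\hat{\xi}(s)^{\top},
$$

and for a query time $t^{*}$ the KMP mean is

$$
\mathbb{E}\!\left[\xi(t^{*})\right] = \mathbf{k}^{*} \left( \mathbf{K} + \lambda \boldsymbol{\Sigma} \right)^{-1} \boldsymbol{\mu},
$$

where $\mathbf{K}$ stacks `kernelExtend` blocks over all pairs of reference times, $\boldsymbol{\Sigma}$ is block-diagonal with the via-point-adapted reference covariances (the `i == j` branch of `estimateMatrixMean`), and $\boldsymbol{\mu}$ stacks the reference means (the vector `Y` in `kmpPredictMean`).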
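`kernelExtend` extends the scalar RBF kernel $k(t_a, t_b) = \exp(-h (t_a - t_b)^2)$ to position/velocity pairs by forward finite differences with step `dt = 0.001`, filling the four blocks $k$, $\partial k/\partial t_b$, $\partial k/\partial t_a$, and $\partial^2 k / \partial t_a \partial t_b$. A standalone sanity check of the off-diagonal block against the closed-form derivative (the sample values are arbitrary):

```python
import numpy as np

def rbf(ta, tb, h):
    # Same kernel kernelExtend differentiates: exp(-h * (ta - tb)^2)
    return np.exp(-h * (ta - tb) ** 2)

h, ta, tb, dt = 6.0, 0.40, 0.55, 1e-3
kt_t = rbf(ta, tb, h)
kt_dt = (rbf(ta, tb + dt, h) - kt_t) / dt  # forward difference, as in kernelExtend
analytic = 2 * h * (ta - tb) * kt_t        # closed-form d k / d tb
print(kt_dt, analytic)                     # ~ -1.576 vs -1.573: agreement to O(dt)
```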
**main_model.py** — rows with `fim_type`: `conditional_block`, `identifier_body`, `identifier_name`, `random_line_split`

```python
# main_model.py — curriculum-style training of a small CNN that regresses
# 512-d target arrays from face sketches. The top of the file (imports and the
# argument handling around the commented transforms block) is truncated in
# this dump; the imports below are inferred from usage.
import os

import numpy as np
import PIL.Image
import torch
import torch.nn as nn
from numpy.random import choice
from torch.utils.data import Dataset, Sampler
from torchvision import transforms

# ...transforms_list.append(transforms.ToTensor())  # (line clipped in the dump)
# if args.frozen:
#     transforms_list.append(lambda x: x.repeat(3, 1, 1))
#     transforms_list.append(transforms.Normalize(mean=[0.485, 0.456, 0.406],
#                                                 std=[0.229, 0.224, 0.225]))
# transform = transforms.Compose(transforms_list)
transform = transforms.ToTensor()
# transform = lambda x: torch.unsqueeze(tnsr(x), 0)
##################################################################
if not os.path.exists('./models'):
    os.mkdir('./models')
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
print('DEVICE: {}'.format(device))
#####################################################################
class trainSketchDataset(Dataset):
    """
    __getitem__ returns a sketch, array, and filename.
    idx will be a list of [sketch_level, filename].
    """
    def __init__(self, sketch_dir, array_dir):
        self.sketch_dir = sketch_dir
        self.array_dir = array_dir
        self.num_sketch_levels = len(os.listdir(sketch_dir))

    def __len__(self):
        return len(os.listdir(self.array_dir)) * self.num_sketch_levels

    def __getitem__(self, sampler):
        idx = next(sampler)
        sketch_fp = os.path.join(self.sketch_dir, idx[0], idx[1])
        fname = os.path.splitext(idx[1])[0]
        array_fp = os.path.join(self.array_dir, fname + '.npy')
        img = PIL.Image.open(sketch_fp)
        array = np.load(array_fp)
        return transform((np.array(img) / 255).astype(np.float32)), array.astype(np.float32), idx[0], fname


class trainSketchSampler(Sampler):
    """
    Samples according to a schedule decided by the `weight` argument: earlier
    (simpler) sketch levels start with proportionally higher weight.
    """
    def __init__(self, sketch_dir, weight=2):
        self.sketch_dir = sketch_dir
        self.sketch_levels = sorted(os.listdir(sketch_dir))
        self.num_levels = len(self.sketch_levels)
        self.samples = [os.listdir(os.path.join(sketch_dir, level)) for level in self.sketch_levels]
        self.num_samples = np.array([len(level) for level in self.samples])
        self.weight_vector = np.linspace(weight, 1, num=self.num_levels)
        self.product = self.num_samples * self.weight_vector
        self.sum_ = sum(self.product)

    def __len__(self):
        return sum(self.num_samples)

    def __iter__(self):
        return self

    def __next__(self):
        if self.sum_ < 1:
            raise StopIteration
        prob = self.product / self.sum_
        prob[prob < 0] = 0
        idx = choice(self.num_levels, p=prob)
        self.product[idx] = self.product[idx] - self.weight_vector[idx]
        self.sum_ = self.sum_ - self.weight_vector[idx]
        idx2 = choice(len(self.samples[idx]))
        ret2 = self.samples[idx].pop(idx2)
        ret1 = self.sketch_levels[idx]
        self.num_samples[idx] -= 1
        return ret1, ret2  # bug fix: was `yield`, which made __next__ a generator factory


class testSketchDataset(Dataset):
    """
    Meant to sample from only one level of sketch.
    """
    def __init__(self, sketch_dir, array_dir):
        self.sketch_dir = sketch_dir
        self.array_dir = array_dir
        self.main_dir = os.listdir(sketch_dir)

    def __len__(self):
        return len(self.main_dir)

    def __getitem__(self, idx):
        img_loc = self.main_dir[idx]
        arr_loc = os.path.splitext(img_loc)[0] + '.npy'
        sketch_fp = os.path.join(self.sketch_dir, img_loc)
        array_fp = os.path.join(self.array_dir, arr_loc)
        img = PIL.Image.open(sketch_fp)
        array = np.load(array_fp)
        return transform((np.array(img) / 255).astype(np.float32)), array


class evalDataset(Dataset):
    """
    Meant to sample from only one level of sketch.
    """
    def __init__(self, sketch_dir):
        self.sketch_dir = sketch_dir
        self.main_dir = os.listdir(sketch_dir)

    def __len__(self):
        return len(self.main_dir)

    def __getitem__(self, idx):
        img_loc = self.main_dir[idx]
        arr_loc = os.path.splitext(img_loc)[0] + '.npy'
        sketch_fp = os.path.join(self.sketch_dir, img_loc)
        img = PIL.Image.open(sketch_fp).convert('L')
        return transform(np.array(img.resize((256, 256)))), arr_loc  # bug fix: missing closing parenthesis


###########################################################################
class BasicConvnet(nn.Module):
    def __init__(self):
        super(BasicConvnet, self).__init__()
        self.conv1 = nn.Conv2d(1, 16, 7, 4)
        # 64
        self.conv2 = nn.Conv2d(16, 64, 5, 2)
        # 32
        self.conv3 = nn.Conv2d(64, 128, 5, 2)
        # 16
        self.pool1 = nn.MaxPool2d(2, 2)
        # 8
        self.relu = nn.ReLU()
        self.fc1 = nn.Linear(128 * 6 * 6, 1028)
        # self.fc2 = nn.Linear

    def forward(self, x):
        x = self.relu(self.conv1(x))
        x = self.relu(self.conv2(x))
        x = self.relu(self.conv3(x))
        x = self.pool1(x)
        x = x.view(x.size(0), -1)  # was hard-coded to batch size 16
        x = self.relu(self.fc1(x))
        return x


class FaceSketchModel(nn.Module):
    def __init__(self, frozen=False):
        """
        vgg_load: specify if you want to load pretrained VGG or if you plan on
        loading this model from a state_dict
        frozen: specify if the backbone weights will be frozen
        """
        super(FaceSketchModel, self).__init__()
        self.frozen = frozen
        self.BC = BasicConvnet()
        if frozen:
            for param in self.BC.parameters():
                param.requires_grad = False
        self.last_layer = torch.nn.Linear(1028, 512)
        if not frozen:
            self.first_layer = nn.Conv2d(1, 3, kernel_size=(5, 5), stride=(1, 1), padding=(1, 1))
            self.relu = nn.ReLU()
            self.tform = transforms.Normalize(mean=[0.485, 0.456, 0.406],
                                              std=[0.229, 0.224, 0.225])

    def forward(self, x):
        if self.frozen:
            x = self.BC(x)
            x = self.last_layer(x)
            return x
        else:
            # x = self.first_layer(x)
            # x = self.relu(x)
            # x = self.tform(x)
            x = self.BC(x)
            x = self.last_layer(self.relu(x))
            return x


############################################################################
def train(model, dataloader, optimizer):
    """
    Train the model. val_dataloaders should be a list of dataloaders in
    descending order of sketch complexity.
    """
    criterion = nn.MSELoss()
    model.to(device)
    all_losses = []
    num_batches = 0
    train_running_loss = 0.0
    batches_loss = 0
    listt = np.array(0)   # running per-batch losses (reset every 200 batches)
    listtt = np.array(0)  # running per-batch prediction variances
    for i, data in enumerate(dataloader):
        num_batches += 1
        data, target = data[0].to(device), data[1].to(device)
        optimizer.zero_grad()
        output = model(data)
        loss = criterion(output, target)
        train_running_loss += loss.item()
        listt = np.append(listt, loss.item())
        batches_loss += loss.item()
        all_losses.append(loss.item())
        loss.backward()
        optimizer.step()
        listtt = np.append(listtt, np.var(np.array(output.detach().cpu()), axis=0).mean())
        if i % 199 == 0 and i != 0:
            print('loss over 200 batches: {}'.format(batches_loss / 200))
            print('loss std over 200 batches: {}'.format(listt.var() ** (1 / 2)))
            print('mean prediction std over 200 batches {}'.format(listtt.mean()))
            print('')
            batches_loss = 0
            listt = np.array(0)  # bug fix: was `lisst`, so the running array never reset
    return all_losses, train_running_loss / num_batches


def test(model, dataloader, j=0):
    """
    Test the model; output a different loss for each category.
    """
    model.float()
    model.to(device)
    model.eval()
    criterion = nn.MSELoss()
    val_loss = 0
    num_batches = 0
    variances = np.array(0)
    for i, data in enumerate(dataloader):
        num_batches += 1
        data, target = data[0].to(device), data[1].to(device)
        output = model(data)
        # ... (the remainder of test() and the rest of the file are truncated in this dump)
```
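The visible portion never shows these classes being driven, but `trainSketchDataset.__getitem__` calls `next()` on whatever is passed as its index, so the sampler is evidently meant to be handed to the dataset directly. A hypothetical wiring; the paths and the `sketch_dir/<level>/<file>` layout with matching `.npy` arrays are assumptions, not from the source:

```python
# Hypothetical usage sketch (paths are placeholders).
sketch_dir, array_dir = './data/sketches/train', './data/arrays/train'
dataset = trainSketchDataset(sketch_dir, array_dir)
sampler = trainSketchSampler(sketch_dir, weight=2)

# Each draw favours earlier (higher-weighted) sketch levels; a level's weight
# mass shrinks as its files are consumed, so the mix flattens over time.
for _ in range(4):
    sketch, target, level, fname = dataset[sampler]
    print(level, fname, tuple(sketch.shape), target.shape)
```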
**row.component.ts** — rows with `fim_type`: `random_line_split`, `identifier_body`

```typescript
// row.component.ts — ng-zorro CRUD page for system interface column settings.
// The top of the file (imports, @Component decorator, class declaration, and
// the first modalFormData entries) is truncated in this dump; Chinese inline
// comments below are translated, user-facing string literals are kept as-is.

      // patternStr: '[\u4e00-\u9fa5]+[0-9]*',
      patternStr: '[a-zA-Z0-9]+',
      // Message: "invalid table name format; must be a Chinese name, optionally with digits"
      // (note: the active pattern above matches [a-zA-Z0-9]+ instead)
      patternErr: '表格名称格式不正确,只能为中文名称或中文名称加数字'
    }
  },
  { name: '列名称', eName: 'colCname', type: 'text', validateCon: '请输入列名称', require: true,  // column name
    validators: { require: true, pattern: false } },
  { name: '列表/视图', eName: 'tableName', type: 'text', validateCon: '请输入列表/视图', require: true,  // table/view
    validators: { require: true, pattern: false } },
  { name: '列表字段', eName: 'colEname', type: 'text', validateCon: '请输入列表字段', require: true,  // table field
    validators: { require: true, pattern: false } },
  { name: '列是否显示', eName: 'visible', type: 'radio', validateCon: '请选择列是否显示', radioArr: this.radioArr, require: true,  // column visible?
    validators: { require: true, pattern: false } },
  { name: '是否是必备条件', eName: 'prerequisite', type: 'radio', validateCon: '请选择列是否是必备条件', radioArr: this.prerequisiteArr, require: true,  // prerequisite?
    validators: { require: true, pattern: false } },
  { name: '类型', eName: 'type', type: 'select', validateCon: '请选择类型', require: true,  // type
    validators: { require: true, pattern: false } },
  { name: '列宽度', eName: 'width', type: 'number', validateCon: '请输入列宽度', require: true,  // column width
    validators: { require: true, pattern: false } },
  { name: '排序', eName: 'sortId', type: 'text', validateCon: '请输入排序', require: true,  // sort order
    validators: { require: true, pattern: false } },
  { name: '格式化', eName: 'format', type: 'text', validateCon: '请输入格式', require: false,  // format
    validators: { require: false, pattern: false } },
  { name: '用户类型设置', eName: 'userType', type: 'checkbox', validateCon: '请输入格式', require: false,  // user-type settings
    validators: { require: false, pattern: false } },
  { name: '备注', eName: 'remark', type: 'text', require: false,  // remark
    validators: { require: false, pattern: false } },
];
checkOptions = [
  {label: '托运人', value: 'isConsignee', checked: true},  // shipper
  {label: '承运人', value: 'isCarrier'}                    // carrier
];
// Data modal
modalFormVisible = false;            // form modal visibility
modalValidateForm: FormGroup;
typeDataArr: Array<any> = [];        // options for the "type" select
// Confirmation dialog
modalTitle: string;                  // modal title
deleteCon: string;
deleteVisible: boolean = false;      // confirm-delete modal visibility
// Table / pagination
pageSize: number = 30;               // rows per page
totalPage: number;                   // total row count
listLoading: boolean = true;         // table loading state
dataSet: Array<any> = [];            // table data
selectedData: Array<any> = [];       // selected rows
validateForm: FormGroup;
private rowid: number;
private status: string;
searchData: any;                     // stores the current query
selectData: Array<any> = [];
private tplModal: NzModalRef;

constructor(private httpUtilService: HttpUtilService,
            private fb: FormBuilder,
            private nm: NzModalService,
            private nn: NzNotificationService,
            private http: HttpClient) {
}

ngOnInit() {
  // Initialize the modal form: one FormControl per modalFormData entry.
  this.modalValidateForm = this.fb.group({});
  this.modalFormData = this.modalFormData ? this.modalFormData : [];
  for (let i = 0; i < this.modalFormData.length; i++) {
    let validatorOrOpts: Array<any> = [];
    if (this.modalFormData[i].validators.require) {
      validatorOrOpts.push(Validators.required);
    }
    if (this.modalFormData[i].validators.pattern) {
      validatorOrOpts.push(Validators.pattern(this.modalFormData[i].validators.patternStr));
    }
    this.modalValidateForm.addControl(this.modalFormData[i].eName, new FormControl('', validatorOrOpts));
  }
  this.listSearch({page: 1, length: this.pageSize});
  this.getStatic(this.typeDataArr, 'columnType');
  this.getStatic(this.radioArr, 'XSBJ');
  this.getStatic(this.prerequisiteArr, 'BBTJ');
}

// Fetch the list data
getListSearch(data: any): void {
  this.listLoading = true;
  const params = {url: '', data: {}, method: 'POST'};
  params.url = `${environment.baseUrl}column/selectColumnList`;
  params.data = data;
  this.httpUtilService.request(params).then(
    (res: any) => {
      this.listLoading = false;
      if (res.success) {
        this.dataSet = res.data.data.data;
        this.totalPage = res.data.data.total;
      }
    }
  );
}

// List query
listSearch(data: any) {
  data.page = data.page || 1;                  // should always be present
  data.length = data.length || this.pageSize;  // should always be present
  this.searchData = data;
  this.listLoading = true;
  this.getListSearch(data);
}

btnClick(data: any): void {
  switch (data.type.buttonId) {
    case 'Export': {  // export
      this.btnExport();
    }
      break;
  }
}

/**
 * Export button: download the current query results as an Excel file.
 */
btnExport(): void {
  console.log(this.searchData);
  let url = `${environment.baseUrlSystem}column/selectColumnExport`;
  this.http.post(url, this.searchData, {responseType: 'blob'}).subscribe(
    res => {
      let blob = new Blob([res], {type: 'application/vnd.ms-excel'});
      let objectUrl = URL.createObjectURL(blob);
      let a = document.createElement('a');
      a.href = objectUrl;
      a.target = '_blank';
      a.download = `界面字段设置.xlsx`;
      document.body.appendChild(a);
      a.click();
      document.body.removeChild(a);
    }
  );
}

// Add a record
addData(data: any) {
  const params = {url: '', data: {}, method: 'POST'};
  params.url = `${environment.baseUrl}column/insertList`;
  params.data = data;
  Object.assign(params.data, this.processCheckData());
  this.httpUtilService.request(params).then(
    (res: any) => {
      if (res.success) {
        this.listSearch(this.searchData);
        this.modalFormVisible = false;
        this.nn.success('提示消息', '添加成功!');  // "added successfully"
      } else {
        this.nn.error('提示消息', '添加失败!');    // "add failed"
      }
    }
  );
}

// Delete the selected records
deleteData() {
  const params = {url: '', data: {tColumns: []}, method: 'POST'};
  params.url = `${environment.baseUrl}column/deleteList`;
  for (const selectedDatum of this.selectedData) {
    params.data.tColumns.push({rowid: selectedDatum.rowid});
  }
  this.httpUtilService.request(params).then(
    (res: any) => {
      if (res.success) {
        this.selectedData = [];
        this.listSearch(this.searchData);
        this.nn.success('提示消息', '删除成功!');  // "deleted successfully"
      } else {
        this.nn.error('提示消息', '删除失败!');    // "delete failed"
      }
    }
  );
}

// Update a record
updateData(data: any) {
  const params = {url: '', data: {}, method: 'POST'};
  params.url = `${environment.baseUrl}column/updateList`;
  data.rowid = this.rowid;
  params.data = data;
  Object.assign(params.data, this.processCheckData());
  this.httpUtilService.request(params).then(
    (res: any) => {
      if (res.success) {
        this.listSearch(this.searchData);
        this.modalFormVisible = false;
        this.nn.success('提示消息', '修改成功!');  // "updated successfully"
      } else {
        this.nn.error('提示消息', '修改失败!');    // "update failed"
      }
    }
  );
}

// Add button
btnAdd(): void {
  this.modalFormVisible = true;
  this.modalTitle = `系统界面列设置 > 新增`;  // "System interface column settings > Add"
  this.status = 'add';
  this.modalValidateForm.get('userType').setValue(this.checkOptions);
}

// Update button
btnUpdate(data: any): void {
  if (!data || data.data.length < 1) {
    this.tplModal = this.nm.warning({
      nzTitle: '提示信息',           // "Notice"
      nzContent: '请选择数据后修改!'  // "Select a row before updating!"
    });
    // ... (remainder of btnUpdate and the rest of the file are truncated in this dump)
```
row.component.ts
|
patternStr:'[\u4e00-\u9fa5]+[0-9]*',
patternStr: '[a-zA-Z0-9]+',
patternErr: '表格名称格式不正确,只能为中文名称或中文名称加数字'
}
},
{
name: '列名称', eName: 'colCname', type: 'text', validateCon: '请输入列名称', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列表/视图', eName: 'tableName', type: 'text', validateCon: '请输入列表/视图', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列表字段', eName: 'colEname', type: 'text', validateCon: '请输入列表字段', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列是否显示', eName: 'visible', type: 'radio', validateCon: '请选择列是否显示', radioArr: this.radioArr, require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '是否是必备条件', eName: 'prerequisite', type: 'radio', validateCon: '请选择列是否是必备条件', radioArr: this.prerequisiteArr, require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '类型', eName: 'type', type: 'select', validateCon: '请选择类型', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列宽度', eName: 'width', type: 'number', validateCon: '请输入列宽度', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '排序', eName: 'sortId', type: 'text', validateCon: '请输入排序', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '格式化', eName: 'format', type: 'text', validateCon: '请输入格式', require: false,
validators: {
require: false,
pattern: false,
}
},
{
name: '用户类型设置', eName: 'userType', type: 'checkbox', validateCon: '请输入格式', require: false,
validators: {
require: false,
pattern: false,
}
},
{
name: '备注', eName: 'remark', type: 'text', require: false,
validators: {
require: false,
pattern: false,
}
},
];
checkOptions = [
{label: '托运人', value: 'isConsignee', checked: true},
{label: '承运人', value: 'isCarrier'}
];
// 数据弹出框
modalFormVisible = false; // 表单弹窗
modalValidateForm: FormGroup;
typeDataArr: Array<any> = []; // 类型下拉数据
// 确认框
modalTitle: string; // 弹出框标题
deleteCon: string;
deleteVisible: boolean = false; // 确认弹窗
// 表格
// 页数控制
pageSize: number = 30;//条数
totalPage: number;//数据总条数
listLoading: boolean = true;// 表单加载状态
dataSet: Array<any> = []; // 表单数据
selectedData: Array<any> = []; // 选中的数据
validateForm: FormGroup;
private rowid: number;
private status: string;
searchData: any; //存储查询的数据
selectData: Array<any> = [];
private tplModal: NzModalRef;
constructor(private httpUtilService: HttpUtilService,
private fb: FormBuilder,
private nm: NzModalService,
private nn: NzNotificationService,
private http: HttpClient,) {
}
ngOnInit() {
// 数据弹出框初始化
this.modalValidateForm = this.fb.group({});
this.modalFormData = this.modalFormData ? this.modalFormData : [];
for (let i = 0; i < this.modalFormData.length; i++) {
let validatorOrOpts: Array<any> = [];
if (this.modalFormData[i].validators.require) {
validatorOrOpts.push(Validators.required);
}
if (this.modalFormData[i].validators.pattern) {
validatorOrOpts.push(Validators.pattern(this.modalFormData[i].validators.patternStr));
}
this.modalValidateForm.addControl(this.modalFormData[i].eName, new FormControl(
'', validatorOrOpts
));
}
this.listSearch({page: 1, length: this.pageSize});
this.getStatic(this.typeDataArr, 'columnType');
this.getStatic(this.radioArr, 'XSBJ');
this.getStatic(this.prerequisiteArr, 'BBTJ');
}
// 列表查询数据获取
getListSearch(data: any): void {
this.listLoading = true;
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/selectColumnList`;
params.data = data;
this.httpUtilService.request(params).then(
(res: any) => {
this.listLoading = false;
if (res.success) {
this.dataSet = res.data.data.data;
this.totalPage = res.data.data.total;
}
}
);
}
// 列表查询
listSearch(data: any) {
data.page = data.page || 1; //最好有
dat
|
xport`;
this.http.post(url, this.searchData, {responseType: 'blob'}).subscribe(
res => {
let blob = new Blob([res], {type: 'application/vnd.ms-excel'});
let objectUrl = URL.createObjectURL(blob);
let a = document.createElement('a');
a.href = objectUrl;
a.target = '_blank';
a.download = `界面字段设置.xlsx`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}
);
}
// 添加数据
addData(data: any) {
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/insertList`;
params.data = data;
Object.assign(params.data, this.processCheckData());
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.listSearch(this.searchData);
this.modalFormVisible = false;
this.nn.success('提示消息', '添加成功!');
} else {
this.nn.error('提示消息', '添加失败!');
}
}
);
}
// 删除数据
deleteData() {
const params = {url: '', data: {tColumns: []}, method: 'POST'};
params.url = `${environment.baseUrl}column/deleteList`;
for (const selectedDatum of this.selectedData) {
params.data.tColumns.push({rowid: selectedDatum.rowid});
}
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.selectedData = [];
this.listSearch(this.searchData);
this.nn.success('提示消息', '删除成功!');
} else {
this.nn.error('提示消息', '删除失败!');
}
}
);
}
// 修改数据
updateData(data: any) {
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/updateList`;
data.rowid = this.rowid;
params.data = data;
Object.assign(params.data, this.processCheckData());
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.listSearch(this.searchData);
this.modalFormVisible = false;
this.nn.success('提示消息', '修改成功!');
} else {
this.nn.error('提示消息', '修改失败!');
}
}
);
}
// add button handler
btnAdd(): void {
this.modalFormVisible = true;
this.modalTitle = `系统界面列设置 > 新增`;
this.status = 'add';
this.modalValidateForm.get('userType').setValue(this.checkOptions);
}
// update button handler
btnUpdate(data: any): void {
if (!data || data.data.length < 1) {
this.tplModal = this.nm.warning({
nzTitle: '提示信息',
nzContent: '请选择数据后修改!'
});
|
a.length = data.length || this.pageSize; // sensible default
this.searchData = data;
this.listLoading = true;
this.getListSearch(data);
}
btnClick(data: any): void {
switch (data.type.buttonId) {
case 'Export': { // export
this.btnExport();
}
break;
}
}
/**
* Export button
*/
btnExport(): void {
console.log(this.searchData);
let url=`${environment.baseUrlSystem}column/selectColumnE
|
identifier_body
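A note on the ngOnInit pattern in the row above: one FormControl is registered per modalFormData entry, and Validators.required / Validators.pattern are attached only when that entry's validators flags ask for them. Below is a minimal sketch of the same opt-in validator assembly in Python; the field specs and the validate() helper are illustrative assumptions, not part of the component.

import re

# Hypothetical field specs mirroring modalFormData: each entry opts in to validators.
field_specs = [
    {"name": "colCname", "require": True, "pattern": None},
    {"name": "colEname", "require": True, "pattern": r"[a-zA-Z0-9]+"},
    {"name": "remark", "require": False, "pattern": None},
]

def build_validators(spec):
    """Assemble the list of validator callables one field opts in to."""
    validators = []
    if spec["require"]:
        validators.append(lambda v: len(v.strip()) > 0)
    if spec["pattern"]:
        compiled = re.compile(spec["pattern"])
        validators.append(lambda v, p=compiled: p.fullmatch(v) is not None)
    return validators

form = {spec["name"]: build_validators(spec) for spec in field_specs}

def validate(values):
    """Return the names of fields whose value fails any of their validators."""
    return [name for name, checks in form.items()
            if any(not check(values.get(name, "")) for check in checks)]

print(validate({"colCname": "width", "colEname": "col width"}))  # ['colEname']: the pattern rejects the space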
|
row.component.ts
|
validators: {
require: true,
pattern: false,
}
},
{
name: '列表字段', eName: 'colEname', type: 'text', validateCon: '请输入列表字段', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列是否显示', eName: 'visible', type: 'radio', validateCon: '请选择列是否显示', radioArr: this.radioArr, require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '是否是必备条件', eName: 'prerequisite', type: 'radio', validateCon: '请选择列是否是必备条件', radioArr: this.prerequisiteArr, require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '类型', eName: 'type', type: 'select', validateCon: '请选择类型', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列宽度', eName: 'width', type: 'number', validateCon: '请输入列宽度', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '排序', eName: 'sortId', type: 'text', validateCon: '请输入排序', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '格式化', eName: 'format', type: 'text', validateCon: '请输入格式', require: false,
validators: {
require: false,
pattern: false,
}
},
{
name: '用户类型设置', eName: 'userType', type: 'checkbox', validateCon: '请输入格式', require: false,
validators: {
require: false,
pattern: false,
}
},
{
name: '备注', eName: 'remark', type: 'text', require: false,
validators: {
require: false,
pattern: false,
}
},
];
checkOptions = [
{label: '托运人', value: 'isConsignee', checked: true},
{label: '承运人', value: 'isCarrier'}
];
// data modal
modalFormVisible = false; // form modal visibility
modalValidateForm: FormGroup;
typeDataArr: Array<any> = []; // options for the type dropdown
// confirmation modal
modalTitle: string; // modal title
deleteCon: string;
deleteVisible: boolean = false; // delete-confirmation modal visibility
// table
// pagination
pageSize: number = 30; // rows per page
totalPage: number; // total number of records
listLoading: boolean = true; // table loading state
dataSet: Array<any> = []; // table data
selectedData: Array<any> = []; // currently selected rows
validateForm: FormGroup;
private rowid: number;
private status: string;
searchData: any; // stores the current query parameters
selectData: Array<any> = [];
private tplModal: NzModalRef;
constructor(private httpUtilService: HttpUtilService,
private fb: FormBuilder,
private nm: NzModalService,
private nn: NzNotificationService,
private http: HttpClient) {
}
ngOnInit() {
// initialize the data modal form
this.modalValidateForm = this.fb.group({});
this.modalFormData = this.modalFormData ? this.modalFormData : [];
for (let i = 0; i < this.modalFormData.length; i++) {
let validatorOrOpts: Array<any> = [];
if (this.modalFormData[i].validators.require) {
validatorOrOpts.push(Validators.required);
}
if (this.modalFormData[i].validators.pattern) {
validatorOrOpts.push(Validators.pattern(this.modalFormData[i].validators.patternStr));
}
this.modalValidateForm.addControl(this.modalFormData[i].eName, new FormControl(
'', validatorOrOpts
));
}
this.listSearch({page: 1, length: this.pageSize});
this.getStatic(this.typeDataArr, 'columnType');
this.getStatic(this.radioArr, 'XSBJ');
this.getStatic(this.prerequisiteArr, 'BBTJ');
}
// fetch list data for the current query
getListSearch(data: any): void {
this.listLoading = true;
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/selectColumnList`;
params.data = data;
this.httpUtilService.request(params).then(
(res: any) => {
this.listLoading = false;
if (res.success) {
this.dataSet = res.data.data.data;
this.totalPage = res.data.data.total;
}
}
);
}
// list query
listSearch(data: any) {
data.page = data.page || 1; // sensible default
data.length = data.length || this.pageSize; // sensible default
this.searchData = data;
this.listLoading = true;
this.getListSearch(data);
}
btnClick(data: any): void {
switch (data.type.buttonId) {
case 'Export': { // export
this.btnExport();
}
break;
}
}
/**
* Export button
*/
btnExport(): void {
console.log(this.searchData);
let url = `${environment.baseUrlSystem}column/selectColumnExport`;
this.http.post(url, this.searchData, {responseType: 'blob'}).subscribe(
res => {
let blob = new Blob([res], {type: 'application/vnd.ms-excel'});
let objectUrl = URL.createObjectURL(blob);
let a = document.createElement('a');
a.href = objectUrl;
a.target = '_blank';
a.download = `界面字段设置.xlsx`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}
);
}
// add a record
addData(data: any) {
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/insertList`;
params.data = data;
Object.assign(params.data, this.processCheckData());
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.listSearch(this.searchData);
this.modalFormVisible = false;
this.nn.success('提示消息', '添加成功!');
} else {
this.nn.error('提示消息', '添加失败!');
}
}
);
}
// delete the selected records
deleteData() {
const params = {url: '', data: {tColumns: []}, method: 'POST'};
params.url = `${environment.baseUrl}column/deleteList`;
for (const selectedDatum of this.selectedData) {
params.data.tColumns.push({rowid: selectedDatum.rowid});
}
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.selectedData = [];
this.listSearch(this.searchData);
this.nn.success('提示消息', '删除成功!');
} else {
this.nn.error('提示消息', '删除失败!');
}
}
);
}
// update a record
updateData(data: any) {
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/updateList`;
data.rowid = this.rowid;
params.data = data;
Object.assign(params.data, this.processCheckData());
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.listSearch(this.searchData);
this.modalFormVisible = false;
this.nn.success('提示消息', '修改成功!');
} else {
this.nn.error('提示消息', '修改失败!');
}
}
);
}
// add button handler
btnAdd(): void {
this.modalFormVisible = true;
this.modalTitle = `系统界面列设置 > 新增`;
this.status = 'add';
this.modalValidateForm.get('userType').setValue(this.checkOptions);
}
// update button handler
btnUpdate(data: any): void {
if (!data || data.data.length < 1) {
this.tplModal = this.nm.warning({
nzTitle: '提示信息',
nzContent: '请选择数据后修改!'
});
this.destroyTplModal();
return;
}
if (!data || data.data.length > 1) {
this.tplModal = this.nm.warning({
nzTitle: '提示信息',
nzContent: '请选择一条数据进行修改!'
});
this.destroyTplModal();
return;
}
this.modalFormVisible = true;
this.modalTitle = '系统界面列设置 > 修改';
this.status = 'update';
this.rowid = data.data[0].rowid;
this.modalValidateForm.patchValue(data.data[0]);
const checkOptions = [
{label: '托运人', value: 'isConsignee', checked: Boolean(Number(data
|
.data[0].
|
identifier_name
|
|
row.component.ts
|
// patternStr:'[\u4e00-\u9fa5]+[0-9]*',
patternStr: '[a-zA-Z0-9]+',
patternErr: '表格名称格式不正确,只能为中文名称或中文名称加数字'
}
},
{
name: '列名称', eName: 'colCname', type: 'text', validateCon: '请输入列名称', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列表/视图', eName: 'tableName', type: 'text', validateCon: '请输入列表/视图', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列表字段', eName: 'colEname', type: 'text', validateCon: '请输入列表字段', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列是否显示', eName: 'visible', type: 'radio', validateCon: '请选择列是否显示', radioArr: this.radioArr, require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '是否是必备条件', eName: 'prerequisite', type: 'radio', validateCon: '请选择列是否是必备条件', radioArr: this.prerequisiteArr, require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '类型', eName: 'type', type: 'select', validateCon: '请选择类型', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '列宽度', eName: 'width', type: 'number', validateCon: '请输入列宽度', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '排序', eName: 'sortId', type: 'text', validateCon: '请输入排序', require: true,
validators: {
require: true,
pattern: false,
}
},
{
name: '格式化', eName: 'format', type: 'text', validateCon: '请输入格式', require: false,
validators: {
require: false,
pattern: false,
}
},
{
name: '用户类型设置', eName: 'userType', type: 'checkbox', validateCon: '请输入格式', require: false,
validators: {
require: false,
pattern: false,
}
},
{
name: '备注', eName: 'remark', type: 'text', require: false,
validators: {
require: false,
pattern: false,
}
},
];
checkOptions = [
{label: '托运人', value: 'isConsignee', checked: true},
{label: '承运人', value: 'isCarrier'}
];
// data modal
modalFormVisible = false; // form modal visibility
modalValidateForm: FormGroup;
typeDataArr: Array<any> = []; // options for the type dropdown
// confirmation modal
modalTitle: string; // modal title
deleteCon: string;
deleteVisible: boolean = false; // delete-confirmation modal visibility
// table
// pagination
pageSize: number = 30; // rows per page
totalPage: number; // total number of records
listLoading: boolean = true; // table loading state
dataSet: Array<any> = []; // table data
selectedData: Array<any> = []; // currently selected rows
validateForm: FormGroup;
private rowid: number;
private status: string;
searchData: any; // stores the current query parameters
selectData: Array<any> = [];
private tplModal: NzModalRef;
constructor(private httpUtilService: HttpUtilService,
private fb: FormBuilder,
private nm: NzModalService,
private nn: NzNotificationService,
private http: HttpClient) {
}
ngOnInit() {
// initialize the data modal form
this.modalValidateForm = this.fb.group({});
this.modalFormData = this.modalFormData ? this.modalFormData : [];
for (let i = 0; i < this.modalFormData.length; i++) {
let validatorOrOpts: Array<any> = [];
if (this.modalFormData[i].validators.require) {
validatorOrOpts.push(Validators.required);
}
if (this.modalFormData[i].validators.pattern) {
validatorOrOpts.push(Validators.pattern(this.modalFormData[i].validators.patternStr));
}
this.modalValidateForm.addControl(this.modalFormData[i].eName, new FormControl(
'', validatorOrOpts
));
}
this.listSearch({page: 1, length: this.pageSize});
this.getStatic(this.typeDataArr, 'columnType');
this.getStatic(this.radioArr, 'XSBJ');
this.getStatic(this.prerequisiteArr, 'BBTJ');
}
// fetch list data for the current query
getListSearch(data: any): void {
|
ironment.baseUrl}column/selectColumnList`;
params.data = data;
this.httpUtilService.request(params).then(
(res: any) => {
this.listLoading = false;
if (res.success) {
this.dataSet = res.data.data.data;
this.totalPage = res.data.data.total;
}
}
);
}
// list query
listSearch(data: any) {
data.page = data.page || 1; // sensible default
data.length = data.length || this.pageSize; // sensible default
this.searchData = data;
this.listLoading = true;
this.getListSearch(data);
}
btnClick(data: any): void {
switch (data.type.buttonId) {
case 'Export': { // export
this.btnExport();
}
break;
}
}
/**
* Export button
*/
btnExport(): void {
console.log(this.searchData);
let url = `${environment.baseUrlSystem}column/selectColumnExport`;
this.http.post(url, this.searchData, {responseType: 'blob'}).subscribe(
res => {
let blob = new Blob([res], {type: 'application/vnd.ms-excel'});
let objectUrl = URL.createObjectURL(blob);
let a = document.createElement('a');
a.href = objectUrl;
a.target = '_blank';
a.download = `界面字段设置.xlsx`;
document.body.appendChild(a);
a.click();
document.body.removeChild(a);
}
);
}
// add a record
addData(data: any) {
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/insertList`;
params.data = data;
Object.assign(params.data, this.processCheckData());
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.listSearch(this.searchData);
this.modalFormVisible = false;
this.nn.success('提示消息', '添加成功!');
} else {
this.nn.error('提示消息', '添加失败!');
}
}
);
}
// delete the selected records
deleteData() {
const params = {url: '', data: {tColumns: []}, method: 'POST'};
params.url = `${environment.baseUrl}column/deleteList`;
for (const selectedDatum of this.selectedData) {
params.data.tColumns.push({rowid: selectedDatum.rowid});
}
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.selectedData = [];
this.listSearch(this.searchData);
this.nn.success('提示消息', '删除成功!');
} else {
this.nn.error('提示消息', '删除失败!');
}
}
);
}
// update a record
updateData(data: any) {
const params = {url: '', data: {}, method: 'POST'};
params.url = `${environment.baseUrl}column/updateList`;
data.rowid = this.rowid;
params.data = data;
Object.assign(params.data, this.processCheckData());
this.httpUtilService.request(params).then(
(res: any) => {
if (res.success) {
this.listSearch(this.searchData);
this.modalFormVisible = false;
this.nn.success('提示消息', '修改成功!');
} else {
this.nn.error('提示消息', '修改失败!');
}
}
);
}
// add button handler
btnAdd(): void {
this.modalFormVisible = true;
this.modalTitle = `系统界面列设置 > 新增`;
this.status = 'add';
this.modalValidateForm.get('userType').setValue(this.checkOptions);
}
// update button handler
btnUpdate(data: any): void {
if (!data || data.data.length < 1) {
this.tplModal = this.nm.warning({
nzTitle: '提示信息',
nzContent: '请选择数据后修改!'
});
|
this.listLoading = true;
const params = {url: '', data: {}, method: 'POST'};
params.url = `${env
|
conditional_block
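btnExport in the rows above posts the current query and saves the binary response as an .xlsx file through a temporary anchor element. A rough sketch of the same export-download flow in Python; the endpoint URL and payload here are placeholder assumptions, not the real service.

import requests

def download_export(url, query, out_path="export.xlsx"):
    """POST the current query and stream the binary spreadsheet to disk."""
    resp = requests.post(url, json=query, timeout=30)
    resp.raise_for_status()
    with open(out_path, "wb") as f:
        for chunk in resp.iter_content(chunk_size=8192):
            f.write(chunk)
    return out_path

# Example call (placeholder endpoint, payload mirroring searchData):
# download_export("https://example.com/column/selectColumnExport", {"page": 1, "length": 30})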
|
ingredients.go
|
ParseTextIngredients parses a list of ingredients and
// returns an ingredient list back
func ParseTextIngredients(text string) (ingredientList IngredientList, err error) {
r := &Recipe{FileName: "lines"}
r.FileContent = text
lines := strings.Split(text, "\n")
i := 0
goodLines := make([]string, len(lines))
for _, line := range lines {
line = strings.TrimSpace(line)
if len(line) == 0 {
continue
}
goodLines[i] = line
i++
}
_, r.Lines = scoreLines(goodLines)
err = r.parseRecipe()
if err != nil {
return
}
ingredientList = r.IngredientList()
return
}
// NewFromFile generates a new parser from an HTML file
func NewFromFile(fname string) (r *Recipe, err error) {
r = &Recipe{FileName: fname}
b, err := ioutil.ReadFile(fname)
r.FileContent = string(b)
err = r.parseHTML()
return
}
// NewFromString generates a new parser from an HTML string
func NewFromString(htmlString string) (r *Recipe, err error) {
r = &Recipe{FileName: "string"}
r.FileContent = htmlString
err = r.parseHTML()
return
}
// NewFromURL generates a new parser from a URL
func NewFromURL(url string) (r *Recipe, err error) {
client := http.Client{
Timeout: 10 * time.Second,
}
resp, err := client.Get(url)
if err != nil {
return
}
defer resp.Body.Close()
html, err := ioutil.ReadAll(resp.Body)
if err != nil {
return
}
return NewFromHTML(url, string(html))
}
// NewFromHTML generates a new parser from an HTML text
func NewFromHTML(name, htmlstring string) (r *Recipe, err error) {
r = &Recipe{FileName: name}
r.FileContent = htmlstring
err = r.parseHTML()
return
}
func IngredientsFromURL(url string) (ingredients []Ingredient, err error) {
r, err := NewFromURL(url)
if err != nil {
return
}
ingredients = r.Ingredients
return
}
// parseHTML is the main parser for a given recipe.
func (r *Recipe) parseHTML() (rerr error) {
if r == nil {
r = &Recipe{}
}
if r.FileContent == "" || r.FileName == "" {
rerr = fmt.Errorf("no file loaded")
return
}
r.Lines, rerr = getIngredientLinesInHTML(r.FileContent)
return r.parseRecipe()
}
func (r *Recipe) parseRecipe() (rerr error) {
goodLines := make([]LineInfo, len(r.Lines))
j := 0
for _, lineInfo := range r.Lines {
if len(strings.TrimSpace(lineInfo.Line)) < 3 || len(strings.TrimSpace(lineInfo.Line)) > 150 {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "serving size") {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "yield") {
continue
}
// singularize
lineInfo.Ingredient.Measure = Measure{}
// get amount, continue if there is an error
err := lineInfo.getTotalAmount()
if err != nil {
log.Tracef("[%s]: %s (%+v)", lineInfo.Line, err.Error(), lineInfo.AmountInString)
continue
}
// get ingredient, continue if it's not found
err = lineInfo.getIngredient()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
continue
}
// get measure
err = lineInfo.getMeasure()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
}
// get comment
if len(lineInfo.MeasureInString) > 0 && len(lineInfo.IngredientsInString) > 0 {
lineInfo.Ingredient.Comment = getOtherInBetweenPositions(lineInfo.Line, lineInfo.MeasureInString[0], lineInfo.IngredientsInString[0])
}
// normalize into cups
lineInfo.Ingredient.Measure.Cups, err = normalizeIngredient(
lineInfo.Ingredient.Name,
lineInfo.Ingredient.Measure.Name,
lineInfo.Ingredient.Measure.Amount,
)
if err != nil {
log.Tracef("[%s]: %s", lineInfo.LineOriginal, err.Error())
} else {
log.Tracef("[%s]: %+v", lineInfo.LineOriginal, lineInfo)
}
goodLines[j] = lineInfo
j++
}
r.Lines = goodLines[:j]
rerr = r.ConvertIngredients()
if rerr != nil {
return
}
// consolidate ingredients
ingredients := make(map[string]Ingredient)
ingredientList := []string{}
for _, line := range r.Lines {
if _, ok := ingredients[line.Ingredient.Name]; ok {
if ingredients[line.Ingredient.Name].Measure.Name == line.Ingredient.Measure.Name {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount + line.Ingredient.Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
} else {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
}
} else {
ingredientList = append(ingredientList, line.Ingredient.Name)
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: line.Ingredient.Comment,
Measure: Measure{
Name: line.Ingredient.Measure.Name,
Amount: line.Ingredient.Measure.Amount,
Cups: line.Ingredient.Measure.Cups,
},
}
}
}
r.Ingredients = make([]Ingredient, len(ingredients))
for i, ing := range ingredientList {
r.Ingredients[i] = ingredients[ing]
}
return
}
func getIngredientLinesInHTML(htmlS string) (lineInfos []LineInfo, err error) {
doc, err := html.Parse(bytes.NewReader([]byte(htmlS)))
if err != nil {
return
}
var f func(n *html.Node, lineInfos *[]LineInfo) (s string, done bool)
f = func(n *html.Node, lineInfos *[]LineInfo) (s string, done bool) {
childrenLineInfo := []LineInfo{}
// log.Tracef("%+v", n)
score := 0
isScript := n.DataAtom == atom.Script
for c := n.FirstChild; c != nil; c = c.NextSibling {
if isScript {
// try to capture JSON and if successful, do a hard exit
lis, errJSON := extractLinesFromJavascript(c.Data)
if errJSON == nil && len(lis) > 2 {
log.Trace("got ingredients from JSON")
*lineInfos = lis
done = true
return
}
}
var childText string
childText, done = f(c, lineInfos)
if done {
return
}
if childText != "" {
scoreOfLine, lineInfo := scoreLine(childText)
childrenLineInfo = append(childrenLineInfo, lineInfo)
score += scoreOfLine
}
}
if score > 2 && len(childrenLineInfo) < 25 && len(childrenLineInfo) > 2 {
*lineInfos = append(*lineInfos, childrenLineInfo...)
for _, child := range childrenLineInfo {
log.Tracef("[%s]", child.LineOriginal)
}
}
if len(childrenLineInfo) > 0 {
// fmt.Println(childrenLineInfo)
childrenText := make([]string, len(childrenLineInfo))
for i := range childrenLineInfo {
childrenText[i] = childrenLineInfo[i].LineOriginal
}
s = strings.Join(childrenText, " ")
} else if n.DataAtom == 0 && strings.TrimSpace(n.Data) != "" {
s = strings.TrimSpace(n.Data)
}
return
}
f(doc, &lineInfos)
return
}
func extractLinesFromJavascript(jsString string) (lineInfo []LineInfo, err error) {
var arrayMap = []map[string]interface{}{}
var regMap = make(map[string]interface{})
err = json.Unmarshal([]byte(jsString), &regMap)
if err != nil {
err = json.Unmarshal([]byte(jsString), &arrayMap)
if err != nil
|
{
return
}
|
conditional_block
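extractLinesFromJavascript in the row above first tries to unmarshal the script text as a JSON object and, when that fails, retries as a JSON array, since pages embed JSON-LD in either shape. A small Python sketch of the same object-or-array fallback; json.loads accepts both shapes in one call, so the two Go attempts collapse into one parse plus a type check.

import json

def parse_json_ld(text):
    """Parse embedded JSON-LD that may be a single object or an array of objects."""
    try:
        data = json.loads(text)
    except json.JSONDecodeError:
        return None
    if isinstance(data, dict):
        return [data]
    if isinstance(data, list) and data:
        return [d for d in data if isinstance(d, dict)]
    return None

print(parse_json_ld('{"@type": "Recipe"}'))    # [{'@type': 'Recipe'}]
print(parse_json_ld('[{"@type": "Recipe"}]'))  # [{'@type': 'Recipe'}]
print(parse_json_ld('not json'))               # None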
|
|
ingredients.go
|
() {
inflection.AddSingular("(clove)(s)?$", "${1}")
inflection.AddSingular("(potato)(es)?$", "${1}")
inflection.AddSingular("(tomato)(es)?$", "${1}")
inflection.AddUncountable("molasses")
inflection.AddUncountable("bacon")
}
// Recipe contains the info for the file and the lines
type Recipe struct {
FileName string `json:"filename"`
FileContent string `json:"file_content"`
Lines []LineInfo `json:"lines"`
Ingredients []Ingredient `json:"ingredients"`
}
// LineInfo has all the information for the parsing of a given line
type LineInfo struct {
LineOriginal string
Line string `json:",omitempty"`
IngredientsInString []WordPosition `json:",omitempty"`
AmountInString []WordPosition `json:",omitempty"`
MeasureInString []WordPosition `json:",omitempty"`
Ingredient Ingredient `json:",omitempty"`
}
// Ingredient is the basic struct for ingredients
type Ingredient struct {
Name string `json:"name,omitempty"`
Comment string `json:"comment,omitempty"`
Measure Measure `json:"measure,omitempty"`
Line string `json:"line,omitempty"`
}
// Measure includes the amount, name and the cups for conversions
type Measure struct {
Amount float64 `json:"amount"`
Name string `json:"name"`
Cups float64 `json:"cups"`
Weight float64 `json:"weight,omitempty"`
}
// IngredientList is a list of ingredients
type IngredientList struct {
Ingredients []Ingredient `json:"ingredients"`
}
func (il IngredientList) String() string {
s := ""
for _, ing := range il.Ingredients {
name := ing.Name
if ing.Measure.Amount > 1 && ing.Measure.Name == "whole" {
name = inflection.Plural(name)
}
s += fmt.Sprintf("%s %s %s", AmountToString(ing.Measure.Amount), ing.Measure.Name, name)
if ing.Comment != "" {
s += " (" + ing.Comment + ")"
}
s += "\n"
}
return s
}
// Save saves the recipe to a file
func (r *Recipe) Save(fname string) (err error) {
b, err := json.MarshalIndent(r, "", " ")
if err != nil {
return
}
err = ioutil.WriteFile(fname, b, 0644)
return
}
// Load will load a recipe file
func Load(fname string) (r *Recipe, err error) {
b, err := ioutil.ReadFile(fname)
if err != nil {
return
}
r = new(Recipe)
err = json.Unmarshal(b, r)
return
}
// ParseTextIngredients parses a list of ingredients and
// returns an ingredient list back
func ParseTextIngredients(text string) (ingredientList IngredientList, err error) {
r := &Recipe{FileName: "lines"}
r.FileContent = text
lines := strings.Split(text, "\n")
i := 0
goodLines := make([]string, len(lines))
for _, line := range lines {
line = strings.TrimSpace(line)
if len(line) == 0 {
continue
}
goodLines[i] = line
i++
}
_, r.Lines = scoreLines(goodLines)
err = r.parseRecipe()
if err != nil {
return
}
ingredientList = r.IngredientList()
return
}
// NewFromFile generates a new parser from an HTML file
func NewFromFile(fname string) (r *Recipe, err error) {
r = &Recipe{FileName: fname}
b, err := ioutil.ReadFile(fname)
r.FileContent = string(b)
err = r.parseHTML()
return
}
// NewFromString generates a new parser from an HTML string
func NewFromString(htmlString string) (r *Recipe, err error) {
r = &Recipe{FileName: "string"}
r.FileContent = htmlString
err = r.parseHTML()
return
}
// NewFromURL generates a new parser from a URL
func NewFromURL(url string) (r *Recipe, err error) {
client := http.Client{
Timeout: 10 * time.Second,
}
resp, err := client.Get(url)
if err != nil {
return
}
defer resp.Body.Close()
html, err := ioutil.ReadAll(resp.Body)
if err != nil {
return
}
return NewFromHTML(url, string(html))
}
// NewFromHTML generates a new parser from an HTML text
func NewFromHTML(name, htmlstring string) (r *Recipe, err error) {
r = &Recipe{FileName: name}
r.FileContent = htmlstring
err = r.parseHTML()
return
}
func IngredientsFromURL(url string) (ingredients []Ingredient, err error) {
r, err := NewFromURL(url)
if err != nil {
return
}
ingredients = r.Ingredients
return
}
// parseHTML is the main parser for a given recipe.
func (r *Recipe) parseHTML() (rerr error) {
if r == nil {
r = &Recipe{}
}
if r.FileContent == "" || r.FileName == "" {
rerr = fmt.Errorf("no file loaded")
return
}
r.Lines, rerr = getIngredientLinesInHTML(r.FileContent)
return r.parseRecipe()
}
func (r *Recipe) parseRecipe() (rerr error) {
goodLines := make([]LineInfo, len(r.Lines))
j := 0
for _, lineInfo := range r.Lines {
if len(strings.TrimSpace(lineInfo.Line)) < 3 || len(strings.TrimSpace(lineInfo.Line)) > 150 {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "serving size") {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "yield") {
continue
}
// singularize
lineInfo.Ingredient.Measure = Measure{}
// get amount, continue if there is an error
err := lineInfo.getTotalAmount()
if err != nil {
log.Tracef("[%s]: %s (%+v)", lineInfo.Line, err.Error(), lineInfo.AmountInString)
continue
}
// get ingredient, continue if it's not found
err = lineInfo.getIngredient()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
continue
}
// get measure
err = lineInfo.getMeasure()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
}
// get comment
if len(lineInfo.MeasureInString) > 0 && len(lineInfo.IngredientsInString) > 0 {
lineInfo.Ingredient.Comment = getOtherInBetweenPositions(lineInfo.Line, lineInfo.MeasureInString[0], lineInfo.IngredientsInString[0])
}
// normalize into cups
lineInfo.Ingredient.Measure.Cups, err = normalizeIngredient(
lineInfo.Ingredient.Name,
lineInfo.Ingredient.Measure.Name,
lineInfo.Ingredient.Measure.Amount,
)
if err != nil {
log.Tracef("[%s]: %s", lineInfo.LineOriginal, err.Error())
} else {
log.Tracef("[%s]: %+v", lineInfo.LineOriginal, lineInfo)
}
goodLines[j] = lineInfo
j++
}
r.Lines = goodLines[:j]
rerr = r.ConvertIngredients()
if rerr != nil {
return
}
// consolidate ingredients
ingredients := make(map[string]Ingredient)
ingredientList := []string{}
for _, line := range r.Lines {
if _, ok := ingredients[line.Ingredient.Name]; ok {
if ingredients[line.Ingredient.Name].Measure.Name == line.Ingredient.Measure.Name {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount + line.Ingredient.Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
} else {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
}
} else {
ingredientList = append(ingredientList, line.Ingredient.Name)
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: line.Ingredient.Comment,
Measure: Measure{
Name: line.Ingredient.Measure.Name,
Amount: line.Ingredient.Measure.Amount,
Cups: line.Ingredient.Measure.Cups,
},
}
}
}
r.Ingredients = make([]Ingredient, len(ingredients))
for
|
init
|
identifier_name
|
|
ingredients.go
|
Recipe{FileName: "lines"}
r.FileContent = text
lines := strings.Split(text, "\n")
i := 0
goodLines := make([]string, len(lines))
for _, line := range lines {
line = strings.TrimSpace(line)
if len(line) == 0 {
continue
}
goodLines[i] = line
i++
}
_, r.Lines = scoreLines(goodLines)
err = r.parseRecipe()
if err != nil {
return
}
ingredientList = r.IngredientList()
return
}
// NewFromFile generates a new parser from an HTML file
func NewFromFile(fname string) (r *Recipe, err error) {
r = &Recipe{FileName: fname}
b, err := ioutil.ReadFile(fname)
r.FileContent = string(b)
err = r.parseHTML()
return
}
// NewFromString generates a new parser from an HTML string
func NewFromString(htmlString string) (r *Recipe, err error) {
r = &Recipe{FileName: "string"}
r.FileContent = htmlString
err = r.parseHTML()
return
}
// NewFromURL generates a new parser from a URL
func NewFromURL(url string) (r *Recipe, err error) {
client := http.Client{
Timeout: 10 * time.Second,
}
resp, err := client.Get(url)
if err != nil {
return
}
defer resp.Body.Close()
html, err := ioutil.ReadAll(resp.Body)
if err != nil {
return
}
return NewFromHTML(url, string(html))
}
// NewFromHTML generates a new parser from an HTML text
func NewFromHTML(name, htmlstring string) (r *Recipe, err error) {
r = &Recipe{FileName: name}
r.FileContent = htmlstring
err = r.parseHTML()
return
}
func IngredientsFromURL(url string) (ingredients []Ingredient, err error) {
r, err := NewFromURL(url)
if err != nil {
return
}
ingredients = r.Ingredients
return
}
// parseHTML is the main parser for a given recipe.
func (r *Recipe) parseHTML() (rerr error) {
if r == nil {
r = &Recipe{}
}
if r.FileContent == "" || r.FileName == "" {
rerr = fmt.Errorf("no file loaded")
return
}
r.Lines, rerr = getIngredientLinesInHTML(r.FileContent)
return r.parseRecipe()
}
func (r *Recipe) parseRecipe() (rerr error) {
goodLines := make([]LineInfo, len(r.Lines))
j := 0
for _, lineInfo := range r.Lines {
if len(strings.TrimSpace(lineInfo.Line)) < 3 || len(strings.TrimSpace(lineInfo.Line)) > 150 {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "serving size") {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "yield") {
continue
}
// singularize
lineInfo.Ingredient.Measure = Measure{}
// get amount, continue if there is an error
err := lineInfo.getTotalAmount()
if err != nil {
log.Tracef("[%s]: %s (%+v)", lineInfo.Line, err.Error(), lineInfo.AmountInString)
continue
}
// get ingredient, continue if it's not found
err = lineInfo.getIngredient()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
continue
}
// get measure
err = lineInfo.getMeasure()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
}
// get comment
if len(lineInfo.MeasureInString) > 0 && len(lineInfo.IngredientsInString) > 0 {
lineInfo.Ingredient.Comment = getOtherInBetweenPositions(lineInfo.Line, lineInfo.MeasureInString[0], lineInfo.IngredientsInString[0])
}
// normalize into cups
lineInfo.Ingredient.Measure.Cups, err = normalizeIngredient(
lineInfo.Ingredient.Name,
lineInfo.Ingredient.Measure.Name,
lineInfo.Ingredient.Measure.Amount,
)
if err != nil {
log.Tracef("[%s]: %s", lineInfo.LineOriginal, err.Error())
} else {
log.Tracef("[%s]: %+v", lineInfo.LineOriginal, lineInfo)
}
goodLines[j] = lineInfo
j++
}
r.Lines = goodLines[:j]
rerr = r.ConvertIngredients()
if rerr != nil {
return
}
// consolidate ingredients
ingredients := make(map[string]Ingredient)
ingredientList := []string{}
for _, line := range r.Lines {
if _, ok := ingredients[line.Ingredient.Name]; ok {
if ingredients[line.Ingredient.Name].Measure.Name == line.Ingredient.Measure.Name {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount + line.Ingredient.Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
} else {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
}
} else {
ingredientList = append(ingredientList, line.Ingredient.Name)
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: line.Ingredient.Comment,
Measure: Measure{
Name: line.Ingredient.Measure.Name,
Amount: line.Ingredient.Measure.Amount,
Cups: line.Ingredient.Measure.Cups,
},
}
}
}
r.Ingredients = make([]Ingredient, len(ingredients))
for i, ing := range ingredientList {
r.Ingredients[i] = ingredients[ing]
}
return
}
func getIngredientLinesInHTML(htmlS string) (lineInfos []LineInfo, err error) {
doc, err := html.Parse(bytes.NewReader([]byte(htmlS)))
if err != nil {
return
}
var f func(n *html.Node, lineInfos *[]LineInfo) (s string, done bool)
f = func(n *html.Node, lineInfos *[]LineInfo) (s string, done bool) {
childrenLineInfo := []LineInfo{}
// log.Tracef("%+v", n)
score := 0
isScript := n.DataAtom == atom.Script
for c := n.FirstChild; c != nil; c = c.NextSibling {
if isScript {
// try to capture JSON and if successful, do a hard exit
lis, errJSON := extractLinesFromJavascript(c.Data)
if errJSON == nil && len(lis) > 2 {
log.Trace("got ingredients from JSON")
*lineInfos = lis
done = true
return
}
}
var childText string
childText, done = f(c, lineInfos)
if done {
return
}
if childText != "" {
scoreOfLine, lineInfo := scoreLine(childText)
childrenLineInfo = append(childrenLineInfo, lineInfo)
score += scoreOfLine
}
}
if score > 2 && len(childrenLineInfo) < 25 && len(childrenLineInfo) > 2 {
*lineInfos = append(*lineInfos, childrenLineInfo...)
for _, child := range childrenLineInfo {
log.Tracef("[%s]", child.LineOriginal)
}
}
if len(childrenLineInfo) > 0 {
// fmt.Println(childrenLineInfo)
childrenText := make([]string, len(childrenLineInfo))
for i := range childrenLineInfo {
childrenText[i] = childrenLineInfo[i].LineOriginal
}
s = strings.Join(childrenText, " ")
} else if n.DataAtom == 0 && strings.TrimSpace(n.Data) != "" {
s = strings.TrimSpace(n.Data)
}
return
}
f(doc, &lineInfos)
return
}
func extractLinesFromJavascript(jsString string) (lineInfo []LineInfo, err error) {
var arrayMap = []map[string]interface{}{}
var regMap = make(map[string]interface{})
err = json.Unmarshal([]byte(jsString), &regMap)
if err != nil {
err = json.Unmarshal([]byte(jsString), &arrayMap)
if err != nil {
return
}
|
if len(arrayMap) == 0 {
err = fmt.Errorf("nothing to parse")
return
}
parseMap(arrayMap[0], &lineInfo)
|
random_line_split
|
|
ingredients.go
|
{
b, err := ioutil.ReadFile(fname)
if err != nil {
return
}
r = new(Recipe)
err = json.Unmarshal(b, r)
return
}
// ParseTextIngredients parses a list of ingredients and
// returns an ingredient list back
func ParseTextIngredients(text string) (ingredientList IngredientList, err error) {
r := &Recipe{FileName: "lines"}
r.FileContent = text
lines := strings.Split(text, "\n")
i := 0
goodLines := make([]string, len(lines))
for _, line := range lines {
line = strings.TrimSpace(line)
if len(line) == 0 {
continue
}
goodLines[i] = line
i++
}
_, r.Lines = scoreLines(goodLines)
err = r.parseRecipe()
if err != nil {
return
}
ingredientList = r.IngredientList()
return
}
// NewFromFile generates a new parser from an HTML file
func NewFromFile(fname string) (r *Recipe, err error) {
r = &Recipe{FileName: fname}
b, err := ioutil.ReadFile(fname)
r.FileContent = string(b)
err = r.parseHTML()
return
}
// NewFromString generates a new parser from an HTML string
func NewFromString(htmlString string) (r *Recipe, err error) {
r = &Recipe{FileName: "string"}
r.FileContent = htmlString
err = r.parseHTML()
return
}
// NewFromURL generates a new parser from a URL
func NewFromURL(url string) (r *Recipe, err error)
|
// NewFromHTML generates a new parser from an HTML text
func NewFromHTML(name, htmlstring string) (r *Recipe, err error) {
r = &Recipe{FileName: name}
r.FileContent = htmlstring
err = r.parseHTML()
return
}
func IngredientsFromURL(url string) (ingredients []Ingredient, err error) {
r, err := NewFromURL(url)
if err != nil {
return
}
ingredients = r.Ingredients
return
}
// parseHTML is the main parser for a given recipe.
func (r *Recipe) parseHTML() (rerr error) {
if r == nil {
r = &Recipe{}
}
if r.FileContent == "" || r.FileName == "" {
rerr = fmt.Errorf("no file loaded")
return
}
r.Lines, rerr = getIngredientLinesInHTML(r.FileContent)
return r.parseRecipe()
}
func (r *Recipe) parseRecipe() (rerr error) {
goodLines := make([]LineInfo, len(r.Lines))
j := 0
for _, lineInfo := range r.Lines {
if len(strings.TrimSpace(lineInfo.Line)) < 3 || len(strings.TrimSpace(lineInfo.Line)) > 150 {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "serving size") {
continue
}
if strings.Contains(strings.ToLower(lineInfo.Line), "yield") {
continue
}
// singularize
lineInfo.Ingredient.Measure = Measure{}
// get amount, continue if there is an error
err := lineInfo.getTotalAmount()
if err != nil {
log.Tracef("[%s]: %s (%+v)", lineInfo.Line, err.Error(), lineInfo.AmountInString)
continue
}
// get ingredient, continue if it's not found
err = lineInfo.getIngredient()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
continue
}
// get measure
err = lineInfo.getMeasure()
if err != nil {
log.Tracef("[%s]: %s", lineInfo.Line, err.Error())
}
// get comment
if len(lineInfo.MeasureInString) > 0 && len(lineInfo.IngredientsInString) > 0 {
lineInfo.Ingredient.Comment = getOtherInBetweenPositions(lineInfo.Line, lineInfo.MeasureInString[0], lineInfo.IngredientsInString[0])
}
// normalize into cups
lineInfo.Ingredient.Measure.Cups, err = normalizeIngredient(
lineInfo.Ingredient.Name,
lineInfo.Ingredient.Measure.Name,
lineInfo.Ingredient.Measure.Amount,
)
if err != nil {
log.Tracef("[%s]: %s", lineInfo.LineOriginal, err.Error())
} else {
log.Tracef("[%s]: %+v", lineInfo.LineOriginal, lineInfo)
}
goodLines[j] = lineInfo
j++
}
r.Lines = goodLines[:j]
rerr = r.ConvertIngredients()
if rerr != nil {
return
}
// consolidate ingredients
ingredients := make(map[string]Ingredient)
ingredientList := []string{}
for _, line := range r.Lines {
if _, ok := ingredients[line.Ingredient.Name]; ok {
if ingredients[line.Ingredient.Name].Measure.Name == line.Ingredient.Measure.Name {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount + line.Ingredient.Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
} else {
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: ingredients[line.Ingredient.Name].Comment,
Measure: Measure{
Name: ingredients[line.Ingredient.Name].Measure.Name,
Amount: ingredients[line.Ingredient.Name].Measure.Amount,
Cups: ingredients[line.Ingredient.Name].Measure.Cups + line.Ingredient.Measure.Cups,
},
}
}
} else {
ingredientList = append(ingredientList, line.Ingredient.Name)
ingredients[line.Ingredient.Name] = Ingredient{
Name: line.Ingredient.Name,
Comment: line.Ingredient.Comment,
Measure: Measure{
Name: line.Ingredient.Measure.Name,
Amount: line.Ingredient.Measure.Amount,
Cups: line.Ingredient.Measure.Cups,
},
}
}
}
r.Ingredients = make([]Ingredient, len(ingredients))
for i, ing := range ingredientList {
r.Ingredients[i] = ingredients[ing]
}
return
}
func getIngredientLinesInHTML(htmlS string) (lineInfos []LineInfo, err error) {
doc, err := html.Parse(bytes.NewReader([]byte(htmlS)))
if err != nil {
return
}
var f func(n *html.Node, lineInfos *[]LineInfo) (s string, done bool)
f = func(n *html.Node, lineInfos *[]LineInfo) (s string, done bool) {
childrenLineInfo := []LineInfo{}
// log.Tracef("%+v", n)
score := 0
isScript := n.DataAtom == atom.Script
for c := n.FirstChild; c != nil; c = c.NextSibling {
if isScript {
// try to capture JSON and if successful, do a hard exit
lis, errJSON := extractLinesFromJavascript(c.Data)
if errJSON == nil && len(lis) > 2 {
log.Trace("got ingredients from JSON")
*lineInfos = lis
done = true
return
}
}
var childText string
childText, done = f(c, lineInfos)
if done {
return
}
if childText != "" {
scoreOfLine, lineInfo := scoreLine(childText)
childrenLineInfo = append(childrenLineInfo, lineInfo)
score += scoreOfLine
}
}
if score > 2 && len(childrenLineInfo) < 25 && len(childrenLineInfo) > 2 {
*lineInfos = append(*lineInfos, childrenLineInfo...)
for _, child := range childrenLineInfo {
log.Tracef("[%s]", child.LineOriginal)
}
}
if len(childrenLineInfo) > 0 {
// fmt.Println(childrenLineInfo)
childrenText := make([]string, len(childrenLineInfo))
for i := range childrenLineInfo {
childrenText[i] = childrenLineInfo[i].LineOriginal
}
s = strings.Join(childrenText, " ")
} else if n.DataAtom == 0 && strings.TrimSpace(n.Data) != "" {
s = strings.TrimSpace(n.Data)
}
return
}
f(doc, &lineInfos)
return
}
func extractLinesFromJavascript(jsString string) (lineInfo []LineInfo, err error) {
var arrayMap = []map[string]interface{}{}
var regMap = make(map[string]interface{})
err = json.Unmarshal([]byte
|
{
client := http.Client{
Timeout: 10 * time.Second,
}
resp, err := client.Get(url)
if err != nil {
return
}
defer resp.Body.Close()
html, err := ioutil.ReadAll(resp.Body)
if err != nil {
return
}
return NewFromHTML(url, string(html))
}
|
identifier_body
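The consolidation loop in parseRecipe merges repeated ingredient names: when the measure name matches an earlier entry, both amount and cups are summed; when it differs, only the unit-independent cups value is. A compact Python sketch of that merge policy, with the Ingredient struct simplified to a tuple; this illustrates the policy rather than the package's API.

def consolidate(lines):
    """Merge duplicate ingredients, summing amounts only when measures agree."""
    order, merged = [], {}
    for name, measure, amount, cups in lines:
        if name not in merged:
            order.append(name)
            merged[name] = {"measure": measure, "amount": amount, "cups": cups}
        elif merged[name]["measure"] == measure:
            merged[name]["amount"] += amount
            merged[name]["cups"] += cups
        else:
            # Different measure: the raw amounts are not comparable, so only cups are summed.
            merged[name]["cups"] += cups
    return [(name, merged[name]) for name in order]

print(consolidate([("flour", "cup", 1, 1.0),
                   ("flour", "cup", 2, 2.0),
                   ("flour", "tbsp", 3, 0.1875)]))
# [('flour', {'measure': 'cup', 'amount': 3, 'cups': 3.1875})]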
|
tidy_toys.py
|
_width
global image_height
global toys
global frame
# declare global variables
global driveLeft, driveRight
global toys_collected
# define variables
Known_Distance = 100 # 100 cm
Known_Width = 5 # 5 cm
image_width = 640
image_height = 480
toys = ["blue", "green", "red"]
target_toy = None # no target selected yet
toys_collected = [] # starts as an empty list
#debugging = False #set to False to run in normal mode
# Setup the ThunderBorg Motor Driver board
TB = ThunderBorg3.ThunderBorg()
TB.Init()
if not TB.foundChip:
boards = ThunderBorg3.ScanForThunderBorg()
if len(boards) == 0:
print('No ThunderBorg found, check you are attached :)')
else:
print('No ThunderBorg at address %02X, but we did find boards:' % (TB.i2cAddress))
for board in boards:
print(" %02X (%d) " % (board, board))
print('If you need to change the I²C address change the setup line so it is correct, e.g.')
print('TB.i2cAddress = 0x%02X' % (boards[0]))
sys.exit()
# Ensure the communications failsafe has been enabled!
failsafe = False
for i in range(5):
TB.SetCommsFailsafe(True)
failsafe = TB.GetCommsFailsafe()
if failsafe:
break
if not failsafe:
print('Board %02X failed to report in failsafe mode!' % (TB.i2cAddress))
def startup():
print("Waiting for start command")
# Check for controller
# Check for start command - this will be a button press on the controller
# Check external ultrasonic sensors if fitted to ensure safe to move - return distances
# If not safe move to a safe position?
# If safe to move carry out initial move (reverse, spin 180 degrees) to face the toys
# move to start position, reverse and turn 180 degrees
#TB.SetMotor1(-0.5)
#TB.SetMotor2(-0.5)
#sleep(0.5)
#TB.SetMotor1(-0.5)
#TB.SetMotor2(0.5)
#sleep(0.5)
# Return to main()
def start_position():
print("Start Position")
# will be used if we can identify a safe position to go to before searching for each toy
# maybe an ArUco marker inside the front wall
def select_target():
# could be used to select any target dependent upon state?
global toys
#global target_toy
#global toys_collected
if len(toys)==0:
print("No more toys to collect")
target_toy = None
else:
for item in toys:
if item == "blue":
target_toy = "blue"
print("Target selected =", target_toy)
return target_toy
elif item == "green":
target_toy = "green"
print("Target selected =", target_toy)
return target_toy
elif item == "red":
target_toy = "red"
print("Target selected =", target_toy)
return target_toy
#print("current target toy is")
#print(target_toy)
def dri
|
# movement
print("Movement Control")
def grabber(): # grabber control
print("Grabber Control")
# HSV COLOURSPACE START
def find_toy(frame, target_toy):
# convert captured image to HSV colour space to detect colours
toy = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
#cv2.imshow("toy", toy)
#key = cv2.waitKey(0)
if target_toy == "blue":
print("searching for blue toy")
#define range of colour to detect
lower_hsv = np.array([88, 131, 0], dtype=np.uint8)
upper_hsv = np.array([145, 255, 255], dtype=np.uint8)
elif target_toy == "green":
print("searching for green toy")
#define range of colour to detect
lower_hsv = np.array([33, 75, 0], dtype=np.uint8)
upper_hsv = np.array([91, 255, 255], dtype=np.uint8)
elif target_toy == "red":
print("searching for red toy")
# define range of colour to detect
lower_hsv = np.array([156, 162, 0], dtype=np.uint8)
upper_hsv = np.array([255, 255, 255], dtype=np.uint8)
# set up the mask to detect only the specified colour
mask = cv2.inRange(toy, lower_hsv, upper_hsv)
#cv2.imshow("mask", mask)
#key = cv2.waitKey(0)
# setup the results to display
colour_res = cv2.bitwise_and(frame, frame, mask=mask)
#colour_res = cv2.bitwise_and(frame, frame, mask=mask)
# detect the contours of the shapes and keep the largest
contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
#was_, blue_contours, hierarchy = cv2.findContours(blue_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contour_sizes = [(cv2.contourArea(contours), contours) for contours in contours]
if len(contour_sizes) > 0:
biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
# draw a green bounding box around the detected object
x, y, w, h = cv2.boundingRect(biggest_contour)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
#print(w, h)
# HSV COLOURSPACE END
# the binary mask computed above is passed on to position() below
#distance(w, frame) # calls distance function and passes 'w' and 'frame'
#position(mask, frame) # calls position function and passes 'mask' and 'frame'
#show frames
#show mask
Z = distance(w, frame)
cx, cy = position(mask, frame)
print("we got here")
print("Z = ", Z)
print("cx, cy = ", cx, cy)
return cx, cy, Z
def distance(w, frame):
#global Z
print("distance, z") #Debugging
# DISTANCE (z) BEGIN
# initialise the known calibration distance from the camera to the object
KNOWN_DISTANCE = 100
Z = KNOWN_DISTANCE
# initialise the known object width used for the calibration
KNOWN_WIDTH = 0.5
D = KNOWN_WIDTH
# d = width in pixels at 100cm = 30 - recheck if camera position changes
d = 30
f = d*Z/D # f = focal length derived from the calibration values
d = w # w is the perceived width in pixels calculated by OpenCV Contours
Z = D*f/d
print("pixel width =", w)
cv2.putText(frame, "%.1fcm" % (Z), (frame.shape[1] - 400, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
# %.1f formats the distance to one decimal place
# overlays the estimated distance Z (in cm) on the frame
return Z
# DISTANCE (z) END
def position(mask, frame):
#global cx
#global cy
print("position x, y")
# POSITION (x, y) BEGIN
# convert image to binary
ret, thresh = cv2.threshold(mask, 127,255,0)
# calculate moments of the binary image
M = cv2.moments(thresh)
# calculate the x, y coordinates of the centre; an empty mask gives M["m00"] == 0, so guard the division
cx = int(M["m10"] / M["m00"]) if M["m00"] else 0
cy = int(M["m01"] / M["m00"]) if M["m00"] else 0
# put text and highlight the centre
cv2.circle(frame, (cx, cy), 5, (255, 255, 255), -1)
cv2.putText(frame, "centroid", (cx - 25, cy - 25),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 2
|
ving():
|
identifier_name
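distance() in the row above is the classic one-point pinhole calibration: the focal length is recovered as f = d*Z/D from an object of known width D that appears d pixels wide at a known distance Z, then the relation is inverted as Z = D*f/d for new frames. A worked Python sketch using the constants from the function; the helper name is an assumption.

def estimate_distance(pixel_width, known_distance=100.0, known_width=0.5, calib_pixel_width=30.0):
    """Pinhole-camera distance estimate from a one-point calibration."""
    # Focal length in pixel units, from the calibration frame: f = d * Z / D
    f = calib_pixel_width * known_distance / known_width
    # Distance to the object in the current frame: Z = D * f / d
    return known_width * f / pixel_width

print(estimate_distance(30.0))  # 100.0, reproduces the calibration distance
print(estimate_distance(60.0))  # 50.0, a wider apparent object is closer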
|
tidy_toys.py
|
# Ensure the communications failsafe has been enabled!
failsafe = False
for i in range(5):
TB.SetCommsFailsafe(True)
failsafe = TB.GetCommsFailsafe()
if failsafe:
break
if not failsafe:
print('Board %02X failed to report in failsafe mode!' % (TB.i2cAddress))
def startup():
print("Waiting for start command")
# Check for controller
# Check for start command - this will be a button press on the controller
# Check external ultrasonic sensors if fitted to ensure safe to move - return distances
# If not safe move to a safe position?
# If safe to move carry out initial move (reverse, spin 180 degrees) to face the toys
# move to start position, reverse and turn 180 degrees
#TB.SetMotor1(-0.5)
#TB.SetMotor2(-0.5)
#sleep(0.5)
#TB.SetMotor1(-0.5)
#TB.SetMotor2(0.5)
#sleep(0.5)
# Return to main()
def start_position():
print("Start Position")
# will be used if we can identify a safe position to go to before searching for each toy
# maybe an ArUco marker inside the front wall
def select_target():
# could be used to select any target dependent upon state?
global toys
#global target_toy
#global toys_collected
if len(toys)==0:
print("No more toys to collect")
target_toy = None
else:
for item in toys:
if item == "blue":
target_toy = "blue"
print("Target selected =", target_toy)
return target_toy
elif item == "green":
target_toy = "green"
print("Target selected =", target_toy)
return target_toy
elif item == "red":
target_toy = "red"
print("Target selected =", target_toy)
return target_toy
#print("current target toy is")
#print(target_toy)
def driving(): # movement
print("Movement Control")
def grabber(): # grabber control
print("Grabber Control")
# HSV COLOURSPACE START
def find_toy(frame, target_toy):
# convert captured image to HSV colour space to detect colours
toy = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
#cv2.imshow("toy", toy)
#key = cv2.waitKey(0)
if target_toy == "blue":
print("searching for blue toy")
#define range of colour to detect
lower_hsv = np.array([88, 131, 0], dtype=np.uint8)
upper_hsv = np.array([145, 255, 255], dtype=np.uint8)
elif target_toy == "green":
print("searching for green toy")
#define range of colour to detect
lower_hsv = np.array([33, 75, 0], dtype=np.uint8)
upper_hsv = np.array([91, 255, 255], dtype=np.uint8)
elif target_toy == "red":
print("searching for red toy")
# define range of colour to detect
lower_hsv = np.array([156, 162, 0], dtype=np.uint8)
upper_hsv = np.array([255, 255, 255], dtype=np.uint8)
# set up the mask to detect only the specified colour
mask = cv2.inRange(toy, lower_hsv, upper_hsv)
#cv2.imshow("mask", mask)
#key = cv2.waitKey(0)
# setup the results to display
colour_res = cv2.bitwise_and(frame, frame, mask=mask)
#colour_res = cv2.bitwise_and(frame, frame, mask=mask)
# detect the contours of the shapes and keep the largest
contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
#was_, blue_contours, hierarchy = cv2.findContours(blue_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
contour_sizes = [(cv2.contourArea(contours), contours) for contours in contours]
if len(contour_sizes) > 0:
biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
# draw a green bounding box around the detected object
x, y, w, h = cv2.boundingRect(biggest_contour)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
#print(w, h)
# HSV COLOURSPACE END
# the binary mask computed above is passed on to position() below
#distance(w, frame) # calls distance function and passes 'w' and 'frame'
#position(mask, frame) # calls position function and passes 'mask' and 'frame'
#show frames
#show mask
Z = distance(w, frame)
cx, cy = position(mask, frame)
print("we got here")
print("Z = ", Z)
print("cx, cy = ", cx, cy)
return cx, cy, Z
def distance(w, frame):
#global Z
print("distance, z") #Debugging
# DISTANCE (z) BEGIN
# initialise the known calibration distance from the camera to the object
KNOWN_DISTANCE = 100
Z = KNOWN_DISTANCE
# initialise the known object width used for the calibration
KNOWN_WIDTH = 0.5
D = KNOWN_WIDTH
# d = width in pixels at 100cm = 30 - recheck if camera position changes
d = 30
f = d*Z/D # f = focal length derived from the calibration values
d = w # w is the perceived width in pixels calculated by OpenCV Contours
Z = D*f/d
print("pixel width =", w)
cv2.putText(frame, "%.1fcm" % (Z), (frame.shape[1] - 400, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
# %.1f formats the distance to one decimal place
# overlays the estimated distance Z (in cm) on the frame
return Z
# DISTANCE (z) END
def position(mask, frame):
#global cx
#global cy
print("position x, y")
# POSITION (x, y) BEGIN
# convert image to binary
ret, thresh = cv2.threshold(mask, 127,255,0)
# calculate moments of the binary image
M = cv2.moments(thresh)
# calculate the x, y coordinates of the centre; an empty mask gives M["m00"] == 0, so guard the division
cx = int(M["m10"] / M["m00"]) if M["m00"] else 0
cy = int(M["m01"] / M["m00"]) if M["m00"] else 0
# put text and highlight the centre
cv2.circle(frame, (cx, cy), 5, (255, 255, 255), -1)
cv2.putText(frame, "centroid", (cx - 25, cy - 25),cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
return cx, cy
# POSITION (x, y) END
def drive_to_toy(frame, target_toy, cx, cy, Z):
#global target_toy
#global toys_collected
print("Driving to toy")
print("Distance = ", Z)
print("Position (x, y) = ", cx, cy)
drive = ""
# check distance to target
if Z >= 30:
print("Navigating to target")
# insert driving forward
if cx > 320:
pri
|
nt("steering left")
drive = "steering left"
#Enter motor controls here
#TB.SetMotor1(0.25)
#TB.SetMotor2(0.5)
driveLeft = 0.25
driveRight = 0.50
cv2.putText(frame, drive, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
cv2.putText(frame, "%.1fpx" % (cx), (frame.shape[1] - 200, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
return driveLeft, driveRight
|
conditional_block
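position() in the rows above takes the centroid of the thresholded mask from its image moments, cx = M10/M00 and cy = M01/M00, which divides by zero on an empty mask. A guarded NumPy sketch of the same computation; raw pixel sums stand in for cv2.moments and the function name is an assumption.

import numpy as np

def centroid(mask):
    """Centroid (cx, cy) of a binary mask via raw image moments; None if the mask is empty."""
    ys, xs = np.nonzero(mask)
    m00 = xs.size              # M00: number of foreground pixels
    if m00 == 0:
        return None            # an empty mask would divide by zero
    cx = int(xs.sum() / m00)   # M10 / M00
    cy = int(ys.sum() / m00)   # M01 / M00
    return cx, cy

mask = np.zeros((480, 640), dtype=np.uint8)
mask[100:120, 300:340] = 255
print(centroid(mask))  # (319, 109)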
|
|
tidy_toys.py
|
image_width
global image_height
global toys
global frame
# declare global variables
global driveLeft, driveRight
global toys_collected
# define variables
Known_Distance = 100 # 100 cm
Known_Width = 5 # 5 cm
image_width = 640
image_height = 480
toys = ["blue", "green", "red"]
target_toy = None # no target selected yet
toys_collected = [] # starts as an empty list
#debugging = False #set to False to run in normal mode
# Setup the ThunderBorg Motor Driver board
TB = ThunderBorg3.ThunderBorg()
TB.Init()
if not TB.foundChip:
boards = ThunderBorg3.ScanForThunderBorg()
if len(boards) == 0:
print('No ThunderBorg found, check you are attached :)')
else:
print('No ThunderBorg at address %02X, but we did find boards:' % (TB.i2cAddress))
for board in boards:
print(" %02X (%d) " % (board, board))
print('If you need to change the I²C address change the setup line so it is correct, e.g.')
print('TB.i2cAddress = 0x%02X' % (boards[0]))
sys.exit()
# Ensure the communications failsafe has been enabled!
failsafe = False
for i in range(5):
TB.SetCommsFailsafe(True)
failsafe = TB.GetCommsFailsafe()
if failsafe:
break
if not failsafe:
print('Board %02X failed to report in failsafe mode!' % (TB.i2cAddress))
def startup():
print("Waiting for start command")
# Check for controller
# Check for start command - this will be a button press on the controller
# Check external ultrasonic sensors if fitted to ensure safe to move - return distances
# If not safe move to a safe position?
# If safe to move carry out initial move (reverse, spin 180 degrees) to face the toys
# move to start position, reverse and turn 180 degrees
#TB.SetMotor1(-0.5)
#TB.SetMotor2(-0.5)
#sleep(0.5)
#TB.SetMotor1(-0.5)
#TB.SetMotor2(0.5)
#sleep(0.5)
# Return to main()
def start_position():
print("Start Position")
# will be used if we can identify a safe position to go to before searching for each toy
# maybe an ArUco marker inside the front wall
def select_target():
# could be used to select any target dependent upon state?
global toys
#global target_toy
#global toys_collected
if len(toys)==0:
print("No more toys to collect")
target_toy = None
else:
for item in toys:
if item == "blue":
target_toy = "blue"
print("Target selected =", target_toy)
return target_toy
elif item == "green":
target_toy = "green"
print("Target selected =", target_toy)
return target_toy
elif item == "red":
target_toy = "red"
print("Target selected =", target_toy)
return target_toy
#print("current target toy is")
#print(target_toy)
def driving(): # movement
print("Movement Control")
def grabber(): # grabber control
print("Grabber Control")
# HSV COLOURSPACE START
def find_toy(frame, target_toy):
# convert captured image to HSV colour space to detect colours
toy = cv2.cvtColor(frame, cv2.COLOR_BGR2HSV)
#cv2.imshow("toy", toy)
#key = cv2.waitKey(0)
if target_toy == "blue":
print("searching for blue toy")
#define range of colour to detect
lower_hsv = np.array([88, 131, 0], dtype=np.uint8)
upper_hsv = np.array([145, 255, 255], dtype=np.uint8)
elif target_toy == "green":
print("searching for green toy")
#define range of colour to detect
lower_hsv = np.array([33, 75, 0], dtype=np.uint8)
upper_hsv = np.array([91, 255, 255], dtype=np.uint8)
elif target_toy == "red":
print("searching for red toy")
# define range of colour to detect
lower_hsv = np.array([156, 162, 0], dtype=np.uint8)
upper_hsv = np.array([255, 255, 255], dtype=np.uint8)
# set up the mask to detect only the specified colour
mask = cv2.inRange(toy, lower_hsv, upper_hsv)
#cv2.imshow("mask", mask)
#key = cv2.waitKey(0)
# setup the results to display
colour_res = cv2.bitwise_and(frame, frame, mask=mask)
#colour_res = cv2.bitwise_and(frame, frame, mask=mask)
# detect the contours of the shapes and keep the largest
contours, hierarchy = cv2.findContours(mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
#was_, blue_contours, hierarchy = cv2.findContours(blue_mask, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
    contour_sizes = [(cv2.contourArea(contour), contour) for contour in contours]
if len(contour_sizes) > 0:
biggest_contour = max(contour_sizes, key=lambda x: x[0])[1]
# draw a green bounding box around the detected object
x, y, w, h = cv2.boundingRect(biggest_contour)
cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
#print(w, h)
# HSV COLOURSPACE END
    # pass the mask and perceived width on to the helper functions below
    # (note: w is only defined when a contour was found above)
#distance(w, frame) # calls distance function and passes 'w' and 'frame'
#position(mask, frame) # calls position function and passes 'mask' and 'frame'
#show frames
#show mask
Z = distance(w, frame)
cx, cy = position(mask, frame)
print("we got here")
print("Z = ", Z)
print("cx, cy = ", cx, cy)
return cx, cy, Z
def distance(w, frame):
    #global Z
    print("distance, z") #Debugging
    # DISTANCE (z) BEGIN
    # known calibration distance from the camera to the object, in cm
    KNOWN_DISTANCE = 100
    Z = KNOWN_DISTANCE
    # known object width (calibration value, in the same units used when calibrating)
    KNOWN_WIDTH = 0.5
    D = KNOWN_WIDTH
    # d = width in pixels at 100cm = 30 - recheck if camera position changes
    d = 30
    f = d * Z / D # f = focal length in pixel units
    d = w # w is the perceived width in pixels calculated by the OpenCV contours
    Z = D * f / d
    print("pixel width =", w)
    # overlay the estimated distance on the frame (%.1f = 1 decimal place)
    cv2.putText(frame, "%.1fcm" % (Z), (frame.shape[1] - 400, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
    return Z
    # DISTANCE (z) END
def position(mask, frame):
    #global cx
    #global cy
    print("position x, y")
    # POSITION (x, y) BEGIN
    # convert image to binary
    ret, thresh = cv2.threshold(mask, 127, 255, 0)
    # calculate moments of the binary image
    M = cv2.moments(thresh)
    # calculate the x, y coordinates of the centre
    cx = int(M["m10"] / M["m00"])
    cy = int(M["m01"] / M["m00"])
    # put text and highlight the centre
    cv2.circle(frame, (cx, cy), 5, (255, 255, 255), -1)
    cv2.putText(frame, "centroid", (cx - 25, cy - 25), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (255, 255, 255), 2)
    return cx, cy
    # POSITION (x, y) END
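# The distance() estimate above is the standard pinhole-camera similar-triangles
# model: f = d * Z / D from a calibration shot, then Z = D * f / d for a new
# perceived width d. A minimal standalone sketch (the calibration numbers are
# illustrative assumptions, not measured values for this robot):
def estimate_distance(perceived_px, real_width=5.0, calib_distance=100.0, calib_px=30.0):
    focal_px = calib_px * calib_distance / real_width # focal length in pixel units
    return real_width * focal_px / perceived_px # similar triangles
# e.g. estimate_distance(60.0) == 50.0: an object twice as wide in pixels is half as far away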
def drive_to_toy(frame, target_toy, cx, cy, Z):
#global target_toy
#global toys_collected
print("Driving to toy")
print("Distance = ", Z)
print("Position (x, y) = ", cx, cy)
drive = ""
# check distance to target
if Z >= 30:
print("Navigating to target")
# insert driving forward
if cx > 320:
print("steering left")
drive = "steering left"
#Enter motor controls here
#TB.SetMotor1(0.25)
#TB.SetMotor2(0.5)
driveLeft = 0.25
driveRight = 0.50
cv2.putText(frame, drive, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
cv2.putText(frame, "%.1fpx" % (cx), (frame.shape[1] - 200, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
return driveLeft, driveRight
elif cx < 320:
print("steering right")
drive = "steering right"
# Enter motor controls here
#TB.SetMotor1(0.5)
#TB.SetMotor2(0.25)
driveLeft = 0.50
driveRight = 0.25
cv2.putText(frame, drive, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
cv2.putText(frame, "%.1fpx" % (cx), (frame.shape[1] - 200, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
return driveLeft, driveRight
else:
print("straight ahead")
drive = "straight ahead"
# Enter motor controls here
#TB.SetMotor1(0.5)
#TB.SetMotor2(0.5)
driveLeft = 0.50
driveRight = 0.50
cv2.putText(frame, drive, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
cv2.putText(frame, "%.1fpx" % (cx), (frame.shape[1] - 200, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
return driveLeft, driveRight
else: #was elif Z <= 29:
print("target in range")
drive = "target in range"
driveLeft = 0
driveRight = 0
cv2.putText(frame, drive, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
#cv2.putText(frame, "%.1fpx" % (cx), (frame.shape[1] - 200, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 0.75, (0, 255, 0), 2)
return driveLeft, driveRight
#if cx > 300 and cx < 340:
# TB.SetMotor1(0)
# TB.SetMotor2(0)
# driveLeft = 0
# driveRight = 0
# pick_up_toy(target_toy, toys_collected)
#cv2.putText(frame, drive, (50, 50), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 255, 0), 2)
#cv2.putText(frame, "%.1fcm" % (cx), (frame.shape[1] - 400, frame.shape[0] - 100), cv2.FONT_HERSHEY_SIMPLEX, 1.0, (0, 255, 0), 2)
#return driveLeft, driveRight
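# A hedged alternative to the bang-bang steering above: proportional control.
# It keeps the same turn-direction convention as drive_to_toy (cx > centre
# slows the left motor). Gain, base speed and centre are illustrative assumptions.
def steer_proportional(cx, base_speed=0.5, gain=0.001, frame_centre=320):
    error = cx - frame_centre # positive when the target is right of centre
    driveLeft = max(0.0, min(1.0, base_speed - gain * error))
    driveRight = max(0.0, min(1.0, base_speed + gain * error))
    return driveLeft, driveRight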
def search_mode():
    # placeholder search behaviour; these values are not yet sent to the
    # motors, so return them for the caller to pass to drive_motors()
    driveLeft = 0.5
    sleep(1)
    driveRight = 0.5
    sleep(1)
    return driveLeft, driveRight
def pick_up_toy(target_toy, toys_collected):
print("Picking up toy")
sleep(2)
print(target_toy) # test to see if target_toy contains a value?
toys_collected.append(target_toy)
toys.remove(target_toy)
print("Toys collected so far", toys_collected)
if not toys:
print("All toys picked up")
        target_toy = None # note: only clears the local name; the caller's value is unchanged
else:
print("Toys remaining", toys)
find_drop_zone()
sleep(2)
def find_drop_zone():
print("Searching for Drop Zone")
    # Need OpenCV code here to find Drop Zone markers, possibly two arrows
    # Insert ArUco marker finding routine here, probably start by reversing and turning left, to face the Drop Zone
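# A minimal marker-detection sketch (assumes the opencv-contrib-python build,
# which provides cv2.aruco; the dictionary choice is an assumption):
def find_markers(frame):
    aruco_dict = cv2.aruco.getPredefinedDictionary(cv2.aruco.DICT_4X4_50)
    corners, ids, _ = cv2.aruco.detectMarkers(frame, aruco_dict)
    return corners, ids # ids is None when no marker is visible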
def put_down_toy():
print("Put down toy")
def drive_motors(driveLeft, driveRight):
    TB.SetMotor1(driveLeft)
    TB.SetMotor2(driveRight)
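# A sketch of how these pieces could compose into a main loop; the camera
# capture and the pick-up threshold below are assumptions, not from this file:
def main_loop():
    cap = cv2.VideoCapture(0)
    while toys:
        ok, frame = cap.read()
        if not ok:
            break
        target = select_target()
        cx, cy, Z = find_toy(frame, target) # assumes the target is in view
        driveLeft, driveRight = drive_to_toy(frame, target, cx, cy, Z)
        drive_motors(driveLeft, driveRight)
        if Z < 30: # matches the "target in range" threshold in drive_to_toy
            pick_up_toy(target, toys_collected)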
TkUtil.py
|
import re
import sys
import traceback
import tkinter
import RO.OS
# (re, sys and traceback are added here because the code below uses them;
# _getTkWdg() is assumed to be defined elsewhere in the original module)
# windowing system constants
WSysAqua = "aqua"
WSysX11 = "x11"
WSysWin = "win32"
# internal globals
g_tkWdg = None
g_winSys = None
g_tkVersion = None
def addColors(*colorMultPairs):
    """Add colors or scale a color.
    Inputs:
    - A list of one or more (color, mult) pairs.
    Returns sum of (R, G, B) * mult for each (color, mult) pair,
    with R, G, and B individually limited to range [0, 0xFFFF].
    """
    netRGB = [0, 0, 0]
    for color, mult in colorMultPairs:
        colorRGB = _getTkWdg().winfo_rgb(color)
        netRGB = [netRGB[ii] + (mult * colorRGB[ii]) for ii in range(3)]
    truncRGB = [max(min(int(val), 0xFFFF), 0) for val in netRGB]
    retColor = "#%04x%04x%04x" % tuple(truncRGB)
    #print "mixColors(%r); netRGB=%s; truncRGB=%s; retColor=%r" % (colorMultPairs, netRGB, truncRGB, retColor)
    return retColor
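# Illustrative usage (return values assume the usual 16-bit-per-channel
# winfo_rgb results, e.g. "red" -> (0xFFFF, 0, 0)):
#   addColors(("red", 0.5)) # "#7fff00000000", half-intensity red
#   addColors(("red", 0.5), ("blue", 0.5)) # an even red/blue mix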
def colorOK(colorStr):
    """Return True if colorStr is a valid tk color, False otherwise.
    """
    tkWdg = _getTkWdg()
    try:
        tkWdg.winfo_rgb(colorStr)
    except tkinter.TclError:
        return False
    return True
class EvtNoProp(object):
    """Function wrapper that prevents event propagation.
    Input: function to bind
    """
    def __init__(self, func):
        self.func = func
    def __call__(self, *args, **kargs):
        self.func(*args, **kargs)
        return "break"
def getButtonNumbers():
    """Return the button numbers corresponding to
    the left, middle and right buttons.
    """
    winSys = getWindowingSystem()
    if winSys == WSysAqua:
        return (1, 3, 2)
    else:
        return (1, 2, 3)
def getTclVersion():
    """Return the Tcl/Tk version as a string
    Returns the result of tcl command "info patchlevel". Some representative return values
    (from tcl documentation for tcl_patchLevel):
    8.4.16
    8.5b3
    """
    global g_tkVersion
    if g_tkVersion is None:
        tkWdg = _getTkWdg()
        g_tkVersion = tkWdg.tk.call("info", "patchlevel")
    return g_tkVersion
def getWindowingSystem():
    """Return the Tk window system.
    Returns one of:
    - WSysAqua: the MacOS X native system
    - WSysX11: the unix windowing system
    - WSysWin: the Windows windowing system
    Other values might also be possible.
    Please don't call this until you have started Tkinter with Tkinter.Tk().
    Warning: windowingsystem is a fairly recent tk command;
    if it is not available then this code does its best to guess
    but will not guess aqua.
    """
global g_winSys
if not g_winSys:
tkWdg = _getTkWdg()
try:
g_winSys = tkWdg.tk.call("tk", "windowingsystem")
except tkinter.TclError:
# windowingsystem not supported; take a best guess
if RO.OS.PlatformName == "win":
g_winSys = "win32"
else:
g_winSys = "x11"
return g_winSys
#class TkAdapter:
#_tkWdg = None
#def __init__(self):
#if self._tkWdg is None:
#self._tkWdg = self._getTkWdg()
#self.funcDict = {}
#def after(*args):
#self._tkWdg.after(*args)
#def register(self, func):
#"""Register a function as a tcl function.
#Returns the name of the tcl function.
#Be sure to deregister the function when done
#or delete the TkAdapter
#"""
#funcObj = TclFunc(func)
#funcName = funcObj.tclFuncName
#self.funcDict[funcName] = funcObj
#return funcName
#def deregister(self, funcName):
#"""Deregister a tcl function.
#Raise KeyError if function not found.
#"""
#func = self.funcDict.pop(funcName)
#func.deregister()
#def eval(self, *args):
#"""Evaluate an arbitrary tcl expression and return the result"""
#return self._tkWdg.tk.eval(*args)
#def call(self, *args):
#"""Call a tcl function"""
#return self._tkWdg.tk.call(*args)
class TclFunc(object):
"""Register a python function as a tcl function.
Based on Tkinter's _register method (which, being private,
I prefer not to use explicitly).
If the function call fails, a traceback is printed.
Please call deregister when you no longer
want the tcl function to exist.
"""
tkApp = None
def __init__(self, func, debug=False):
if self.tkApp is None:
self.tkApp = _getTkWdg().tk
self.func = func
self.tclFuncName = "pyfunc%s" % (id(self),)
self.debug = bool(debug)
try:
self.tclFuncName += str(func.__name__)
except AttributeError:
pass
if self.debug:
print("registering tcl function %s for python function %s" % (self.tclFuncName, func))
self.tkApp.createcommand(self.tclFuncName, self)
def __call__(self, *args):
try:
self.func(*args)
except Exception as e:
sys.stderr.write("tcl function %s failed: %s\n" % (self.tclFuncName, e))
traceback.print_exc(file=sys.stderr)
def deregister(self):
"""Deregister callback and delete reference to python function.
Safe to call if already deregistered.
"""
if self.debug:
print("%r.deregister()" % (self,))
if not self.func:
if self.debug:
print("already deregistered")
return
try:
self.tkApp.deletecommand(self.tclFuncName)
except tkinter.TclError as e:
if self.debug:
print("deregistering failed: %r" % (e,))
pass
self.func = None
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, self.tclFuncName)
def __str__(self):
return self.tclFuncName
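# Illustrative TclFunc usage (assumes a Tk root already exists; the trace
# command shown is just one place a named tcl callback is useful):
#   f = TclFunc(myCallback)
#   someWidget.tk.call("trace", "add", "variable", varName, "write", str(f))
#   ...
#   f.deregister() # when the callback is no longer wanted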
class Geometry(object):
"""A class representing a tk geometry
Fields include the following two-element tuples:
- offset: x,y offset of window relative to screen; see also offsetFlipped
- offsetFlipped: is the meaning of x,y offset flipped?
if False (unflipped) then offset is the distance from screen top/left to window top/left
if True (flipped) offset is the distance from window bottom/right to screen bottom/right
- extent: x,y extent; always positive or (None, None) if extent is unknown
System constants:
- minCorner: minimum visible offset position (platform-dependent)
- screenExtent: x,y extent of all screens put together
(if the screens are not the same size and arranged side by side
then the area will include pixels that are not visible)
WARNING: on some platforms offsetFlipped < 0 is not handled properly.
In particular on Mac OS X with Tk 8.4:
- the offset is actually relative to the top or right offset of the window,
which is dead wrong
- setting the geometry for a window with negative offset may simply not work,
resulting in a geometry that is not what you asked for
(I have particularly seen this for windows nearly as large as the screen)
That is why the constrainToGeomStr method always returns a tk geometry string with positive corners.
"""
if RO.OS.PlatformName == "mac":
minCorner = (0, 22)
else:
minCorner = (0, 0)
_root = None
_geomRE = re.compile(
r"((?P<width>\d+)x(?P<height>\d+))?(?P<xsign>[+-])(?P<x>[-]?\d+)(?P<ysign>[+-])(?P<y>[-]?\d+)$",
re.IGNORECASE)
def __init__(self, offset, offsetFlipped, extent):
"""Create a new Geometry
Inputs (each is a sequence of two values):
- offset: x,y offset of window relative to screen; see also offsetFlipped
- offsetFlipped: is the meaning of x,y offset flipped?
if False (unflipped) then offset is the distance from screen top/left to window top/left
if True (flipped) offset is the distance from window bottom/right to screen bottom/right
- extent: x,y extent; you may specify None or (None, None) if the extent is unknown;
however, you may not specify an integer for one axis and None for the other
raise RuntimeError if any input does not have two elements (except that extent may be None)
"""
if len(offset) != 2:
raise RuntimeError("offset=%r does not have two values" % (offset,))
self.offset = tuple(int(val) for val in offset)
if len(offsetFlipped) != 2:
raise RuntimeError("offsetFlipped=%r does not have two values" % (offsetFlipped,))
self.offsetFlipped = tuple(bool(val) for val in offsetFlipped)
if extent is None:
self.extent = (None, None)
else:
if len(extent) != 2:
raise RuntimeError("extent=%r does not have two values" % (extent,))
if None in extent:
self.extent = (None, None)
else:
self.extent = tuple(int(val) for val in extent)
@classmethod
def fromTkStr(cls, geomStr):
"""Create a Geometry from a tk geometry string
Inputs:
- geomStr: tk geometry string
"""
match = cls._geomRE.match(geomStr)
if not match:
raise RuntimeError("Could not parse geomStr string %r" % (geomStr,))
groupDict = match.groupdict()
return cls(
offset = tuple(groupDict[name] for name in ("x", "y")),
offsetFlipped = tuple(cls._flippedFromChar(groupDict[name]) for name in ("xsign", "ysign")),
extent = tuple(groupDict[name] for name in ("width", "height")),
)
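    # Example (assuming _flippedFromChar maps "-" to True): "200x100+30-40"
    # parses to extent=(200, 100), offset=(30, 40), offsetFlipped=(False, True),
    # i.e. the y offset is measured from the bottom edge of the screen.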
def constrained(self, constrainExtent=True, defExtent=50):
"""Return a geometry that is constrain to lie entirely within the screen(s)
Inputs:
- constrainExtent: if True then the extent and offset position are both constrained
else only the offset position is constrained
- defExtent: the extent to assume if the extent is not known; ignored if the extent is known
Returns:
- a geometry string (not a Geometry, but you can trivially convert it to one)
Warnings:
- If the user has multiple screens and they are not the same size or lined up side by side
then the resulting geometry may not be entirely visible, or even partially visible.
"""
constrainedOffset = []
constrainedExtent = []
for ii in range(2):
extent_ii = self.extent[ii]
if extent_ii is None:
extent_ii = defExtent
corner_ii = self.offset[ii]
minCorner_ii = self.minCorner[ii]
usableScreenExtent_ii = self.screenExtent[ii] - minCorner_ii
tooLarge_ii = extent_ii > usableScreenExtent_ii
if tooLarge_ii and constrainExtent:
                extent_ii = usableScreenExtent_ii
if self.offsetFlipped[ii]:
# offset is distance from bottom/right of window to bottom/right of screen
# to avoid tk bugs, the constrained result will NOT use this convention
corner_ii = usableScreenExtent_ii - (corner_ii + extent_ii)
if tooLarge_ii:
corner_ii = minCorner_ii
elif corner_ii < minCorner_ii:
corner_ii = minCorner_ii
elif extent_ii + corner_ii > usableScreenExtent_ii:
# off lower or right edge
corner_ii = usableScreenExtent_ii - extent_ii
constrainedOffset.append(corner_ii)
constrainedExtent.append(extent_ii)
if not self.hasExtent:
constrainedExtent = (None, None)
return type(self)(offset=constrainedOffset, offsetFlipped=(False, False), extent=constrainedExtent)
@property
def hasExtent(self):
return None not in self.extent
    @property
    def screenExtent(self):
        """x,y extent of all screens put together.
        The body was elided in the source; a minimal sketch (an assumption,
        not the original implementation) is to ask Tk for the screen size.
        """
        tkWdg = _getTkWdg()
        return (tkWdg.winfo_screenwidth(), tkWdg.winfo_screenheight())
cleaner.go
|
// NOTE: the leading alternatives of REMOVENODES_RE were truncated in this excerpt.
var REMOVENODES_RE = regexp.MustCompile("" +
	"detail_new_|" +
"detail_related_|" +
"figcaption|" +
"footnote|" +
"foot|" +
"header|" +
"img_popup_single|" +
"js_replies|" +
"[Kk]ona[Ff]ilter|" +
"leading|" +
"legende|" +
"links|" +
"mediaarticlerelated|" +
"menucontainer|" +
"meta$|" +
"navbar|" +
"pagetools|" +
"popup|" +
"post-attributes|" +
"post-title|" +
"relacionado|" +
"retweet|" +
"runaroundLeft|" +
"shoutbox|" +
"site_nav|" +
"socialNetworking|" +
"social_|" +
"socialnetworking|" +
"socialtools|" +
"sponsor|" +
"sub_nav|" +
"subscribe|" +
"tag_|" +
"tags|" +
"the_answers|" +
"timestamp|" +
"tools|" +
"vcard|" +
"welcome_form|" +
"wp-caption-text")
var CAPTIONS_RE = regexp.MustCompile("^caption$")
var GOOGLE_RE = regexp.MustCompile(" google ")
var MORE_RE = regexp.MustCompile("^[^entry-]more.*$")
var FACEBOOK_RE = regexp.MustCompile("[^-]facebook")
var FACEBOOK_BROADCASTING_RE = regexp.MustCompile("facebook-broadcasting")
var TWITTER_RE = regexp.MustCompile("[^-]twitter")
func (this *cleaner) clean(article *Article) *goquery.Document {
if this.config.debug {
log.Println("Starting cleaning phase with Cleaner")
}
docToClean := article.Doc
docToClean = this.cleanArticleTags(docToClean)
docToClean = this.cleanEMTags(docToClean)
docToClean = this.dropCaps(docToClean)
docToClean = this.removeScriptsStyle(docToClean)
docToClean = this.cleanBadTags(docToClean)
docToClean = this.cleanFooter(docToClean)
docToClean = this.cleanAside(docToClean)
docToClean = this.removeNodesRegEx(docToClean, CAPTIONS_RE)
docToClean = this.removeNodesRegEx(docToClean, GOOGLE_RE)
docToClean = this.removeNodesRegEx(docToClean, MORE_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_BROADCASTING_RE)
docToClean = this.removeNodesRegEx(docToClean, TWITTER_RE)
docToClean = this.cleanParaSpans(docToClean)
docToClean = this.convertDivsToParagraphs(docToClean, "div")
docToClean = this.convertDivsToParagraphs(docToClean, "span")
docToClean = this.convertDivsToParagraphs(docToClean, "article")
docToClean = this.convertDivsToParagraphs(docToClean, "pre")
return docToClean
}
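// Illustrative usage of the pipeline above (assumes an Article whose Doc
// field holds a parsed goquery document and a configured cleaner value):
//   c := cleaner{config: cfg}
//   cleanedDoc := c.clean(article)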
func (this *cleaner) cleanArticleTags(doc *goquery.Document) *goquery.Document {
	tags := [3]string{"id", "name", "class"}
	articles := doc.Find("article")
	articles.Each(func(i int, s *goquery.Selection) {
		for _, tag := range tags {
			this.config.parser.delAttr(s, tag)
		}
	})
	return doc
}
func (this *cleaner) cleanEMTags(doc *goquery.Document) *goquery.Document {
ems := doc.Find("em")
ems.Each(func(i int, s *goquery.Selection) {
images := s.Find("img")
if images.Length() == 0 {
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d EM tags\n", ems.Size())
}
return doc
}
func (this *cleaner) cleanFooter(doc *goquery.Document) *goquery.Document {
footer := doc.Find("footer")
footer.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanAside(doc *goquery.Document) *goquery.Document {
aside := doc.Find("aside")
aside.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanCites(doc *goquery.Document) *goquery.Document {
cites := doc.Find("cite")
cites.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanDivs(doc *goquery.Document) *goquery.Document {
frames := make(map[string]int)
framesNodes := make(map[string]*list.List)
divs := doc.Find("div")
divs.Each(func(i int, s *goquery.Selection) {
children := s.Children()
if children.Size() == 0 {
text := s.Text()
text = strings.Trim(text, " ")
text = strings.Trim(text, "\t")
text = strings.ToLower(text)
frames[text]++
if framesNodes[text] == nil {
framesNodes[text] = list.New()
}
framesNodes[text].PushBack(s)
}
})
for text, freq := range frames {
if freq > 1 {
selections := framesNodes[text]
for s := selections.Front(); s != nil; s = s.Next() {
selection := s.Value.(*goquery.Selection)
this.config.parser.removeNode(selection)
}
}
}
return doc
}
func (this *cleaner) dropCaps(doc *goquery.Document) *goquery.Document {
items := doc.Find("span")
count := 0 //remove
items.Each(func(i int, s *goquery.Selection) {
attribute, exists := s.Attr("class")
if exists && (strings.Contains(attribute, "dropcap") || strings.Contains(attribute, "drop_cap")) {
count++
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d dropcap tags\n", count)
}
return doc
}
func (this *cleaner) removeScriptsStyle(doc *goquery.Document) *goquery.Document {
if this.config.debug {
log.Println("Starting to remove script tags")
}
scripts := doc.Find("script,noscript,style")
scripts.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
if this.config.debug {
log.Printf("Removed %d script and style tags\n", scripts.Size())
}
	// remove comments: walk the node tree and delete nodes with Type == html.CommentNode
return doc
}
func (this *cleaner) matchNodeRegEx(attribute string, pattern *regexp.Regexp) bool {
return pattern.MatchString(attribute)
}
func (this *cleaner) removeNodesRegEx(doc *goquery.Document, pattern *regexp.Regexp) *goquery.Document {
selectors := [3]string{"id", "class", "name"}
for _, selector := range selectors {
naughtyList := doc.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(i int, s *goquery.Selection) {
attribute, _ := s.Attr(selector)
if this.matchNodeRegEx(attribute, pattern) {
cont++
this.config.parser.removeNode(s)
}
})
if this.config.debug {
log.Printf("regExRemoveNodes %d %s elements found against pattern %s\n", cont, selector, pattern.String())
}
}
return doc
}
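// Example: with pattern GOOGLE_RE (" google "), a node such as
//   <div class="left google ad">...</div>
// is removed, because its class attribute contains " google ".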
func (this *cleaner) cleanBadTags(doc *goquery.Document) *goquery.Document {
body := doc.Find("body")
children := body.Children()
selectors := []string{"id", "class", "name"}
for _, selector := range selectors {
children.Each(func(i int, s *goquery.Selection) {
naughtyList := s.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(j int, e *goquery.Selection) {
attribute, _ := e.Attr(selector)
if this.matchNodeRegEx(attribute, REMOVENODES_RE) {
if this.config.debug {
log.Printf("Cleaning: Removing node with %s: %s\n", selector, this.config.parser.name(selector, e))
}
this.config.parser.removeNode(e)
cont++
}
})
if this.config.debug && cont > 0 {
log.Printf("%d naughty %s elements found", cont, selector)
}
})
}
return doc
}
func (this *cleaner) cleanParaSpans(doc *goquery.Document) *goquery.Document {
spans := doc.Find("span")
spans.Each(func(i int, s *goquery.Selection) {
parent := s.Parent()
if parent != nil && parent.Length() > 0 && parent.Get(0).DataAtom == atom.P {
node := s.Get(0)
node.Data = s.Text()
node.Type = html.TextNode
}
})
return doc
}
func (this *cleaner) getFlushedBuffer(fragment string) []*html.Node {
output := make([]*html.Node, 0)
reader := strings.NewReader(fragment)
document, _ := html.Parse(reader)
body := document.FirstChild.LastChild
	for c := body.FirstChild; c != nil; c = c.NextSibling {
		output = append(output, c)
		c.Parent = nil
		c.PrevSibling = nil
	}
	for _, o := range output {
		o.NextSibling = nil
	}
	return output // (return restored; it was elided in the source)
}
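// getFlushedBuffer re-parses an HTML fragment and detaches the resulting body
// children so they can be re-attached elsewhere. A sketch of re-use
// (parentNode is a hypothetical destination node, not from this file):
//   nodes := this.getFlushedBuffer("<p>hello</p>")
//   for _, n := range nodes {
//       parentNode.AppendChild(n)
//   }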
cleaner.go
|
detail_new_|" +
"detail_related_|" +
"figcaption|" +
"footnote|" +
"foot|" +
"header|" +
"img_popup_single|" +
"js_replies|" +
"[Kk]ona[Ff]ilter|" +
"leading|" +
"legende|" +
"links|" +
"mediaarticlerelated|" +
"menucontainer|" +
"meta$|" +
"navbar|" +
"pagetools|" +
"popup|" +
"post-attributes|" +
"post-title|" +
"relacionado|" +
"retweet|" +
"runaroundLeft|" +
"shoutbox|" +
"site_nav|" +
"socialNetworking|" +
"social_|" +
"socialnetworking|" +
"socialtools|" +
"sponsor|" +
"sub_nav|" +
"subscribe|" +
"tag_|" +
"tags|" +
"the_answers|" +
"timestamp|" +
"tools|" +
"vcard|" +
"welcome_form|" +
"wp-caption-text")
var CAPTIONS_RE = regexp.MustCompile("^caption$")
var GOOGLE_RE = regexp.MustCompile(" google ")
var MORE_RE = regexp.MustCompile("^[^entry-]more.*$")
var FACEBOOK_RE = regexp.MustCompile("[^-]facebook")
var FACEBOOK_BROADCASTING_RE = regexp.MustCompile("facebook-broadcasting")
var TWITTER_RE = regexp.MustCompile("[^-]twitter")
func (this *cleaner) clean(article *Article) *goquery.Document {
if this.config.debug {
log.Println("Starting cleaning phase with Cleaner")
}
docToClean := article.Doc
docToClean = this.cleanArticleTags(docToClean)
docToClean = this.cleanEMTags(docToClean)
docToClean = this.dropCaps(docToClean)
docToClean = this.removeScriptsStyle(docToClean)
docToClean = this.cleanBadTags(docToClean)
docToClean = this.cleanFooter(docToClean)
docToClean = this.cleanAside(docToClean)
docToClean = this.removeNodesRegEx(docToClean, CAPTIONS_RE)
docToClean = this.removeNodesRegEx(docToClean, GOOGLE_RE)
docToClean = this.removeNodesRegEx(docToClean, MORE_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_BROADCASTING_RE)
docToClean = this.removeNodesRegEx(docToClean, TWITTER_RE)
docToClean = this.cleanParaSpans(docToClean)
docToClean = this.convertDivsToParagraphs(docToClean, "div")
docToClean = this.convertDivsToParagraphs(docToClean, "span")
docToClean = this.convertDivsToParagraphs(docToClean, "article")
docToClean = this.convertDivsToParagraphs(docToClean, "pre")
return docToClean
}
func (this *cleaner) cleanArticleTags(doc *goquery.Document) *goquery.Document {
tags := [3]string{"id", "name", "class"}
articles := doc.Find("article")
articles.Each(func(i int, s *goquery.Selection) {
for _, tag := range tags {
this.config.parser.delAttr(s, tag)
}
})
return doc
}
func (this *cleaner) cleanEMTags(doc *goquery.Document) *goquery.Document {
ems := doc.Find("em")
ems.Each(func(i int, s *goquery.Selection) {
images := s.Find("img")
if images.Length() == 0 {
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d EM tags\n", ems.Size())
}
return doc
}
func (this *cleaner) cleanFooter(doc *goquery.Document) *goquery.Document {
footer := doc.Find("footer")
footer.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanAside(doc *goquery.Document) *goquery.Document {
aside := doc.Find("aside")
aside.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanCites(doc *goquery.Document) *goquery.Document {
cites := doc.Find("cite")
cites.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanDivs(doc *goquery.Document) *goquery.Document {
frames := make(map[string]int)
framesNodes := make(map[string]*list.List)
divs := doc.Find("div")
divs.Each(func(i int, s *goquery.Selection) {
children := s.Children()
if children.Size() == 0 {
text := s.Text()
text = strings.Trim(text, " ")
text = strings.Trim(text, "\t")
text = strings.ToLower(text)
frames[text]++
if framesNodes[text] == nil {
framesNodes[text] = list.New()
}
framesNodes[text].PushBack(s)
}
})
for text, freq := range frames {
if freq > 1 {
selections := framesNodes[text]
for s := selections.Front(); s != nil; s = s.Next() {
selection := s.Value.(*goquery.Selection)
this.config.parser.removeNode(selection)
}
}
}
return doc
}
func (this *cleaner) dropCaps(doc *goquery.Document) *goquery.Document {
items := doc.Find("span")
count := 0 //remove
items.Each(func(i int, s *goquery.Selection) {
attribute, exists := s.Attr("class")
if exists && (strings.Contains(attribute, "dropcap") || strings.Contains(attribute, "drop_cap")) {
count++
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d dropcap tags\n", count)
}
return doc
}
func (this *cleaner) removeScriptsStyle(doc *goquery.Document) *goquery.Document {
if this.config.debug {
log.Println("Starting to remove script tags")
}
scripts := doc.Find("script,noscript,style")
scripts.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
if this.config.debug {
log.Printf("Removed %d script and style tags\n", scripts.Size())
}
//remove comments :) How????
return doc
}
func (this *cleaner) matchNodeRegEx(attribute string, pattern *regexp.Regexp) bool {
return pattern.MatchString(attribute)
}
func (this *cleaner) removeNodesRegEx(doc *goquery.Document, pattern *regexp.Regexp) *goquery.Document {
selectors := [3]string{"id", "class", "name"}
for _, selector := range selectors {
naughtyList := doc.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(i int, s *goquery.Selection) {
attribute, _ := s.Attr(selector)
if this.matchNodeRegEx(attribute, pattern) {
cont++
this.config.parser.removeNode(s)
}
})
if this.config.debug {
log.Printf("regExRemoveNodes %d %s elements found against pattern %s\n", cont, selector, pattern.String())
}
}
return doc
}
func (this *cleaner) cleanBadTags(doc *goquery.Document) *goquery.Document {
body := doc.Find("body")
children := body.Children()
selectors := []string{"id", "class", "name"}
for _, selector := range selectors {
children.Each(func(i int, s *goquery.Selection) {
naughtyList := s.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(j int, e *goquery.Selection) {
attribute, _ := e.Attr(selector)
if this.matchNodeRegEx(attribute, REMOVENODES_RE) {
if this.config.debug {
log.Printf("Cleaning: Removing node with %s: %s\n", selector, this.config.parser.name(selector, e))
}
this.config.parser.removeNode(e)
cont++
}
})
if this.config.debug && cont > 0 {
log.Printf("%d naughty %s elements found", cont, selector)
}
})
}
return doc
}
func (this *cleaner)
|
(doc *goquery.Document) *goquery.Document {
spans := doc.Find("span")
spans.Each(func(i int, s *goquery.Selection) {
parent := s.Parent()
if parent != nil && parent.Length() > 0 && parent.Get(0).DataAtom == atom.P {
node := s.Get(0)
node.Data = s.Text()
node.Type = html.TextNode
}
})
return doc
}
func (this *cleaner) getFlushedBuffer(fragment string) []*html.Node {
output := make([]*html.Node, 0)
reader := strings.NewReader(fragment)
document, _ := html.Parse(reader)
body := document.FirstChild.LastChild
for c := body.FirstChild;
|
cleanParaSpans
|
identifier_name
|
cleaner.go
|
k]ona[Ff]ilter|" +
"leading|" +
"legende|" +
"links|" +
"mediaarticlerelated|" +
"menucontainer|" +
"meta$|" +
"navbar|" +
"pagetools|" +
"popup|" +
"post-attributes|" +
"post-title|" +
"relacionado|" +
"retweet|" +
"runaroundLeft|" +
"shoutbox|" +
"site_nav|" +
"socialNetworking|" +
"social_|" +
"socialnetworking|" +
"socialtools|" +
"sponsor|" +
"sub_nav|" +
"subscribe|" +
"tag_|" +
"tags|" +
"the_answers|" +
"timestamp|" +
"tools|" +
"vcard|" +
"welcome_form|" +
"wp-caption-text")
var CAPTIONS_RE = regexp.MustCompile("^caption$")
var GOOGLE_RE = regexp.MustCompile(" google ")
var MORE_RE = regexp.MustCompile("^[^entry-]more.*$")
var FACEBOOK_RE = regexp.MustCompile("[^-]facebook")
var FACEBOOK_BROADCASTING_RE = regexp.MustCompile("facebook-broadcasting")
var TWITTER_RE = regexp.MustCompile("[^-]twitter")
func (this *cleaner) clean(article *Article) *goquery.Document {
if this.config.debug {
log.Println("Starting cleaning phase with Cleaner")
}
docToClean := article.Doc
docToClean = this.cleanArticleTags(docToClean)
docToClean = this.cleanEMTags(docToClean)
docToClean = this.dropCaps(docToClean)
docToClean = this.removeScriptsStyle(docToClean)
docToClean = this.cleanBadTags(docToClean)
docToClean = this.cleanFooter(docToClean)
docToClean = this.cleanAside(docToClean)
docToClean = this.removeNodesRegEx(docToClean, CAPTIONS_RE)
docToClean = this.removeNodesRegEx(docToClean, GOOGLE_RE)
docToClean = this.removeNodesRegEx(docToClean, MORE_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_BROADCASTING_RE)
docToClean = this.removeNodesRegEx(docToClean, TWITTER_RE)
docToClean = this.cleanParaSpans(docToClean)
docToClean = this.convertDivsToParagraphs(docToClean, "div")
docToClean = this.convertDivsToParagraphs(docToClean, "span")
docToClean = this.convertDivsToParagraphs(docToClean, "article")
docToClean = this.convertDivsToParagraphs(docToClean, "pre")
return docToClean
}
func (this *cleaner) cleanArticleTags(doc *goquery.Document) *goquery.Document {
tags := [3]string{"id", "name", "class"}
articles := doc.Find("article")
articles.Each(func(i int, s *goquery.Selection) {
for _, tag := range tags {
this.config.parser.delAttr(s, tag)
}
})
return doc
}
func (this *cleaner) cleanEMTags(doc *goquery.Document) *goquery.Document {
ems := doc.Find("em")
ems.Each(func(i int, s *goquery.Selection) {
images := s.Find("img")
if images.Length() == 0 {
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d EM tags\n", ems.Size())
}
return doc
}
func (this *cleaner) cleanFooter(doc *goquery.Document) *goquery.Document {
footer := doc.Find("footer")
footer.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanAside(doc *goquery.Document) *goquery.Document {
aside := doc.Find("aside")
aside.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanCites(doc *goquery.Document) *goquery.Document {
cites := doc.Find("cite")
cites.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanDivs(doc *goquery.Document) *goquery.Document {
frames := make(map[string]int)
framesNodes := make(map[string]*list.List)
divs := doc.Find("div")
divs.Each(func(i int, s *goquery.Selection) {
children := s.Children()
if children.Size() == 0 {
text := s.Text()
text = strings.Trim(text, " ")
text = strings.Trim(text, "\t")
text = strings.ToLower(text)
frames[text]++
if framesNodes[text] == nil {
framesNodes[text] = list.New()
}
framesNodes[text].PushBack(s)
}
})
for text, freq := range frames {
if freq > 1 {
selections := framesNodes[text]
for s := selections.Front(); s != nil; s = s.Next() {
selection := s.Value.(*goquery.Selection)
this.config.parser.removeNode(selection)
}
}
}
return doc
}
func (this *cleaner) dropCaps(doc *goquery.Document) *goquery.Document {
items := doc.Find("span")
count := 0 //remove
items.Each(func(i int, s *goquery.Selection) {
attribute, exists := s.Attr("class")
if exists && (strings.Contains(attribute, "dropcap") || strings.Contains(attribute, "drop_cap")) {
count++
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d dropcap tags\n", count)
}
return doc
}
func (this *cleaner) removeScriptsStyle(doc *goquery.Document) *goquery.Document {
if this.config.debug {
log.Println("Starting to remove script tags")
}
scripts := doc.Find("script,noscript,style")
scripts.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
if this.config.debug {
log.Printf("Removed %d script and style tags\n", scripts.Size())
}
//remove comments :) How????
return doc
}
func (this *cleaner) matchNodeRegEx(attribute string, pattern *regexp.Regexp) bool {
return pattern.MatchString(attribute)
}
func (this *cleaner) removeNodesRegEx(doc *goquery.Document, pattern *regexp.Regexp) *goquery.Document {
selectors := [3]string{"id", "class", "name"}
for _, selector := range selectors {
naughtyList := doc.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(i int, s *goquery.Selection) {
attribute, _ := s.Attr(selector)
if this.matchNodeRegEx(attribute, pattern) {
cont++
this.config.parser.removeNode(s)
}
})
if this.config.debug {
log.Printf("regExRemoveNodes %d %s elements found against pattern %s\n", cont, selector, pattern.String())
}
}
return doc
}
func (this *cleaner) cleanBadTags(doc *goquery.Document) *goquery.Document {
body := doc.Find("body")
children := body.Children()
selectors := []string{"id", "class", "name"}
for _, selector := range selectors {
children.Each(func(i int, s *goquery.Selection) {
naughtyList := s.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(j int, e *goquery.Selection) {
attribute, _ := e.Attr(selector)
if this.matchNodeRegEx(attribute, REMOVENODES_RE) {
if this.config.debug {
log.Printf("Cleaning: Removing node with %s: %s\n", selector, this.config.parser.name(selector, e))
}
this.config.parser.removeNode(e)
cont++
}
})
if this.config.debug && cont > 0 {
log.Printf("%d naughty %s elements found", cont, selector)
}
})
}
return doc
}
func (this *cleaner) cleanParaSpans(doc *goquery.Document) *goquery.Document {
spans := doc.Find("span")
spans.Each(func(i int, s *goquery.Selection) {
parent := s.Parent()
if parent != nil && parent.Length() > 0 && parent.Get(0).DataAtom == atom.P {
node := s.Get(0)
node.Data = s.Text()
node.Type = html.TextNode
}
})
return doc
}
func (this *cleaner) getFlushedBuffer(fragment string) []*html.Node {
output := make([]*html.Node, 0)
reader := strings.NewReader(fragment)
document, _ := html.Parse(reader)
body := document.FirstChild.LastChild
for c := body.FirstChild; c != nil; c = c.NextSibling {
output = append(output, c)
c.Parent = nil
c.PrevSibling = nil
}
|
for _, o := range output {
o.NextSibling = nil
}
|
random_line_split
|
|
cleaner.go
|
detail_new_|" +
"detail_related_|" +
"figcaption|" +
"footnote|" +
"foot|" +
"header|" +
"img_popup_single|" +
"js_replies|" +
"[Kk]ona[Ff]ilter|" +
"leading|" +
"legende|" +
"links|" +
"mediaarticlerelated|" +
"menucontainer|" +
"meta$|" +
"navbar|" +
"pagetools|" +
"popup|" +
"post-attributes|" +
"post-title|" +
"relacionado|" +
"retweet|" +
"runaroundLeft|" +
"shoutbox|" +
"site_nav|" +
"socialNetworking|" +
"social_|" +
"socialnetworking|" +
"socialtools|" +
"sponsor|" +
"sub_nav|" +
"subscribe|" +
"tag_|" +
"tags|" +
"the_answers|" +
"timestamp|" +
"tools|" +
"vcard|" +
"welcome_form|" +
"wp-caption-text")
var CAPTIONS_RE = regexp.MustCompile("^caption$")
var GOOGLE_RE = regexp.MustCompile(" google ")
var MORE_RE = regexp.MustCompile("^[^entry-]more.*$")
var FACEBOOK_RE = regexp.MustCompile("[^-]facebook")
var FACEBOOK_BROADCASTING_RE = regexp.MustCompile("facebook-broadcasting")
var TWITTER_RE = regexp.MustCompile("[^-]twitter")
func (this *cleaner) clean(article *Article) *goquery.Document {
if this.config.debug {
log.Println("Starting cleaning phase with Cleaner")
}
docToClean := article.Doc
docToClean = this.cleanArticleTags(docToClean)
docToClean = this.cleanEMTags(docToClean)
docToClean = this.dropCaps(docToClean)
docToClean = this.removeScriptsStyle(docToClean)
docToClean = this.cleanBadTags(docToClean)
docToClean = this.cleanFooter(docToClean)
docToClean = this.cleanAside(docToClean)
docToClean = this.removeNodesRegEx(docToClean, CAPTIONS_RE)
docToClean = this.removeNodesRegEx(docToClean, GOOGLE_RE)
docToClean = this.removeNodesRegEx(docToClean, MORE_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_RE)
docToClean = this.removeNodesRegEx(docToClean, FACEBOOK_BROADCASTING_RE)
docToClean = this.removeNodesRegEx(docToClean, TWITTER_RE)
docToClean = this.cleanParaSpans(docToClean)
docToClean = this.convertDivsToParagraphs(docToClean, "div")
docToClean = this.convertDivsToParagraphs(docToClean, "span")
docToClean = this.convertDivsToParagraphs(docToClean, "article")
docToClean = this.convertDivsToParagraphs(docToClean, "pre")
return docToClean
}
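// For orientation, a sketch of how this pipeline is typically driven; the
// construction of the cleaner and Article below is illustrative, not this
// package's verbatim public API:
//
//	article := &Article{Doc: doc}     // doc: a parsed *goquery.Document
//	cleaned := cleaner.clean(article) // passes run in the fixed order above
//	rawHTML, _ := cleaned.Html()
//
// Order matters: scripts, styles, and bad tags are dropped before spans are
// flattened and divs are promoted to paragraphs, so later passes see a
// smaller, saner tree.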
func (this *cleaner) cleanArticleTags(doc *goquery.Document) *goquery.Document {
tags := [3]string{"id", "name", "class"}
articles := doc.Find("article")
articles.Each(func(i int, s *goquery.Selection) {
for _, tag := range tags {
this.config.parser.delAttr(s, tag)
}
})
return doc
}
func (this *cleaner) cleanEMTags(doc *goquery.Document) *goquery.Document {
ems := doc.Find("em")
ems.Each(func(i int, s *goquery.Selection) {
images := s.Find("img")
if images.Length() == 0 {
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d EM tags\n", ems.Size())
}
return doc
}
func (this *cleaner) cleanFooter(doc *goquery.Document) *goquery.Document {
footer := doc.Find("footer")
footer.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanAside(doc *goquery.Document) *goquery.Document {
aside := doc.Find("aside")
aside.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanCites(doc *goquery.Document) *goquery.Document {
cites := doc.Find("cite")
cites.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
return doc
}
func (this *cleaner) cleanDivs(doc *goquery.Document) *goquery.Document {
frames := make(map[string]int)
framesNodes := make(map[string]*list.List)
divs := doc.Find("div")
divs.Each(func(i int, s *goquery.Selection) {
children := s.Children()
if children.Size() == 0 {
text := s.Text()
text = strings.Trim(text, " ")
text = strings.Trim(text, "\t")
text = strings.ToLower(text)
frames[text]++
if framesNodes[text] == nil {
framesNodes[text] = list.New()
}
framesNodes[text].PushBack(s)
}
})
for text, freq := range frames {
if freq > 1 {
selections := framesNodes[text]
for s := selections.Front(); s != nil; s = s.Next() {
selection := s.Value.(*goquery.Selection)
this.config.parser.removeNode(selection)
}
}
}
return doc
}
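// Example of what cleanDivs removes: repeated childless boilerplate such as
//
//	<div> Advertisement </div> ... <div>advertisement</div>
//
// Both normalize (trim + lower-case) to the key "advertisement", so freq > 1
// and every occurrence is deleted; a div whose text appears only once is kept.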
func (this *cleaner) dropCaps(doc *goquery.Document) *goquery.Document {
items := doc.Find("span")
count := 0 // number of dropcap spans removed, reported by the debug log below
items.Each(func(i int, s *goquery.Selection) {
attribute, exists := s.Attr("class")
if exists && (strings.Contains(attribute, "dropcap") || strings.Contains(attribute, "drop_cap")) {
count++
this.config.parser.dropTag(s)
}
})
if this.config.debug {
log.Printf("Cleaning %d dropcap tags\n", count)
}
return doc
}
func (this *cleaner) removeScriptsStyle(doc *goquery.Document) *goquery.Document {
if this.config.debug {
log.Println("Starting to remove script tags")
}
scripts := doc.Find("script,noscript,style")
scripts.Each(func(i int, s *goquery.Selection) {
this.config.parser.removeNode(s)
})
if this.config.debug {
log.Printf("Removed %d script and style tags\n", scripts.Size())
}
//TODO: remove HTML comment nodes as well; a sketch follows this function.
return doc
}
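// The TODO above asks how comment nodes could be stripped. A minimal sketch on
// the raw tree (goquery exposes the root node via doc.Get(0)); the helper name
// removeCommentNodes is illustrative, not goose's API:
func removeCommentNodes(n *html.Node) {
	for c := n.FirstChild; c != nil; {
		next := c.NextSibling // remember the sibling before unlinking c
		if c.Type == html.CommentNode {
			n.RemoveChild(c)
		} else {
			removeCommentNodes(c)
		}
		c = next
	}
}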
func (this *cleaner) matchNodeRegEx(attribute string, pattern *regexp.Regexp) bool {
return pattern.MatchString(attribute)
}
func (this *cleaner) removeNodesRegEx(doc *goquery.Document, pattern *regexp.Regexp) *goquery.Document {
selectors := [3]string{"id", "class", "name"}
for _, selector := range selectors {
naughtyList := doc.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(i int, s *goquery.Selection) {
attribute, _ := s.Attr(selector)
if this.matchNodeRegEx(attribute, pattern) {
cont++
this.config.parser.removeNode(s)
}
})
if this.config.debug {
log.Printf("regExRemoveNodes %d %s elements found against pattern %s\n", cont, selector, pattern.String())
}
}
return doc
}
func (this *cleaner) cleanBadTags(doc *goquery.Document) *goquery.Document {
body := doc.Find("body")
children := body.Children()
selectors := []string{"id", "class", "name"}
for _, selector := range selectors {
children.Each(func(i int, s *goquery.Selection) {
naughtyList := s.Find("*[" + selector + "]")
cont := 0
naughtyList.Each(func(j int, e *goquery.Selection) {
attribute, _ := e.Attr(selector)
if this.matchNodeRegEx(attribute, REMOVENODES_RE) {
if this.config.debug {
log.Printf("Cleaning: Removing node with %s: %s\n", selector, this.config.parser.name(selector, e))
}
this.config.parser.removeNode(e)
cont++
}
})
if this.config.debug && cont > 0 {
log.Printf("%d naughty %s elements found", cont, selector)
}
})
}
return doc
}
func (this *cleaner) cleanParaSpans(doc *goquery.Document) *goquery.Document {
spans := doc.Find("span")
spans.Each(func(i int, s *goquery.Selection) {
parent := s.Parent()
if parent != nil && parent.Length() > 0 && parent.Get(0).DataAtom == atom.P {
node := s.Get(0)
node.Data = s.Text()
node.Type = html.TextNode
}
})
return doc
}
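// cleanParaSpans retypes a <span> inside a <p> into a bare text node in place.
// A sketch of the same idea on a raw *html.Node (flattenSpan is an illustrative
// helper); note that clearing the children avoids rendering the old text twice:
func flattenSpan(span *html.Node, text string) {
	span.Type = html.TextNode // the element node now renders as plain text
	span.Data = text          // Data holds the text content instead of the tag name
	span.DataAtom = 0
	span.FirstChild = nil
	span.LastChild = nil
}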
func (this *cleaner) getFlushedBuffer(fragment string) []*html.Node {
output := make([]*html.Node, 0)
reader := strings.NewReader(fragment)
document, _ := html.Parse(reader)
body := document.FirstChild.LastChild
for c := body.FirstChild; c != nil; c = c.NextSibling {
output = append(output, c)
c.Parent = nil
c.PrevSibling = nil
}
for _, o := range output {
o.NextSibling = nil
}
return output
}
新的分钟数据整合.py
ct_kind:str, main_code_path:str, rar_data_file_path:str, clean_data_path:str, time_range_path:str, end_date:str, commodity_bool=True):
    '''
    Updates one month of futures minute data.
    year_month: year+month string, e.g. '201911'; matches the suffix of FutAC_Min1_Std_, as in FutAC_Min1_Std_201911
    contract_kind: list of product codes, e.g. ['A','B']
    main_code_path: directory holding the main-contract files
    rar_data_file_path: directory holding rar archives such as FutAC_Min1_Std_201911.rar (path only, no file name)
    clean_data_path: directory holding the processed minute data; new data is appended under the matching product
    time_range_path: path of the trading-time file, including the file name, e.g. D:/统一所有品种时间范围.csv
    end_date: today's date, e.g. '20200103'; used to request the tushare trading calendar, which drives all reading and merging
    commodity_bool: True for commodity futures, False for financial futures; defaults to commodity
    '''
    # modules used below: os, numpy as np, pandas as pd, rarfile, tushare as ts
    month = year_month
    if commodity_bool:
        file_name = rar_data_file_path + 'FutAC_Min1_Std_' + month + '.rar'
    else:
        file_name = rar_data_file_path + 'FutSF_Min1_Std_' + month + '.rar'
    orignial_path = main_code_path
    specifi_path = orignial_path + contract_kind + '_1day_main.npy'
    rar = rarfile.RarFile(file_name, pwd='www.jinshuyuan.net')
    # location of the already-processed data
    orignal_clean_csv_path = clean_data_path
    pwd = 'www.jinshuyuan.net'
    data = np.load(specifi_path)
    time_0931_15 = pd.read_csv(time_range_path)['date'].values.tolist()
    rar.extractall(path=file_name.split('.')[0])
    # end_date must be supplied first so the extracted window lines up with the main-contract series
    # locate the month on the trading calendar
    pro = ts.pro_api('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')
    ts.set_token('3d832df2966f27c20e6ff243ab1d53a35a4adc1c64b353cc370ac7d6')
    date_df = pro.trade_cal(exchange='DCE', start_date='20100101', end_date=end_date)
    date_df = date_df.loc[date_df['is_open'] == 1]
    date_list = date_df['cal_date'].tolist()
    # ==========================================================================
    # e.g. for 2019-11 data, the matching contract positions are kept in target_date_index
    date_df = pd.DataFrame({'date': date_list})
    date_df['month'] = date_df['date'].str[:6]
    target_date = date_df.loc[date_df['month'] == month]
    target_date_index = target_date.index.values
    target_date = target_date['date'].values
    # pick the matching contracts
    data = data.reshape(-1)
    contract_main_pool = data[target_date_index]
    # strip the exchange code from the contract id
    contract_main_pool = (pd.Series(contract_main_pool).str.split('.').str[0] + '.csv').values
    file_pools = os.listdir(file_name.split('.')[0])
    # Zhengzhou exchange file names are upper-case, all others lower-case, so try both
    if contract_main_pool[0] not in file_pools:
        contract_main_pool = [contract_file.lower() for contract_file in contract_main_pool]
        if contract_main_pool[0] not in file_pools:
            print(f'cannot find {contract_main_pool[0]}')
    # build the full list of file paths
    contract_main_pool = (file_name.split('.')[0] + '/' + pd.Series(contract_main_pool)).values
    # (len(target_date), contract_main_pool.shape[0])
    row_1 = ['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量']
    orignal_data = []
    orignal_data.append(row_1)
    for index in range(len(target_date)):
        date = target_date[index]
        one_file_path = contract_main_pool[index]
        df = pd.read_csv(one_file_path, encoding='gbk')
        df['date'] = df['时间'].str[:10]
        df['date2'] = df['date'].str.replace('-', '')
        result = df.loc[df['date2'] == date]
        if result.shape[0] > 0:
            for row_index in range(len(result)):
                target_row = result.iloc[row_index].tolist()
                clean_row = target_row[:-2]
                orignal_data.append(clean_row)
            print(f'{contract_kind} {date} finished!')
        else:
            print(f'no data for contract {contract_kind} on {date}')
    print(f'main-contract data for {contract_kind} in month {month} loaded')
    final_df = pd.DataFrame(orignal_data[1:], columns=orignal_data[0])
    final_df['date'] = final_df['时间'].str[:10]
    final_df_date = final_df['date'].unique()
    final_df['time'] = final_df['时间'].str[10:].str.strip()
    final_df['时间'] = final_df['date'] + ' ' + final_df['time']
    final_df['合约代码'] = final_df['合约代码'].str.upper()
    final_df = final_df.sort_values('时间')
    # =============================== added: slicing from constant_time ================================
    final_df['transf_date'] = pd.to_datetime(final_df['date'])
    final_df.set_index('transf_date', inplace=True)
    combine_all_df = pd.DataFrame()
    final_df['date2'] = final_df['date'].str.replace('-', '')
    # fill month by month
    # (a dedicated path stores the monthly filled output)
    for date_index in range(len(target_date)):
        # slice out one trading day
        target_df = final_df.loc[final_df['date2'] == target_date[date_index]]
        # number of rows actually present that day
        target_num = len(target_df)
        # theoretical number of rows
        theory_num = len(time_0931_15)
        # three cases in practice: 1. trading day with no data at all; 2. trading day with
        # partial data; 3. trading day with complete data
        if target_num > 0:
            # distinguish cases 2 and 3
            have_time = target_df['time'].values.tolist()
            lack_time = [x for x in time_0931_15 if x not in have_time]
            # case 2: some minutes are missing
            if lack_time:
                print(f'{target_date[date_index]} not continuous')
                # 12 columns: fill everything with NaN first, then write in the known values
                insert_array = np.empty(shape=(len(lack_time), 12))
                insert_array.fill(np.nan)
                insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])
                insert_df['date'] = target_date[date_index]
                insert_df['time'] = lack_time
                # fewer missing minutes than the full grid means only part of the day is
                # missing, so the contract code can be filled in
                if len(lack_time) < len(time_0931_15):
                    insert_df['合约代码'] = target_df['合约代码'].unique()[-1]
                # assemble one complete day
                combine_insert_df = pd.concat([target_df, insert_df])
                # append it to the container
                combine_all_df = pd.concat([combine_all_df, combine_insert_df])
        # case 1: no data at all, fill the whole day
        else:
            print(f'{target_date[date_index]} empty')
            lack_time = [x for x in time_0931_15]
            # 12 columns: fill everything with NaN first, then write in the known values
            insert_array = np.empty(shape=(len(lack_time), 12))
            insert_array.fill(np.nan)
            insert_df = pd.DataFrame(insert_array, columns=['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time'])
            insert_df['date'] = target_date[date_index]
            insert_df['time'] = lack_time
            # append it to the container
            combine_all_df = pd.concat([combine_all_df, insert_df])
    combine_all_df['时间'] = combine_all_df['date'] + ' ' + combine_all_df['time']
    # order by timestamp
    combine_all_df = combine_all_df.sort_values('时间')
    combine_all_df.reset_index(inplace=True)
    # output the columns in the fixed order
    combine_all_df = combine_all_df[['市场代码', '合约代码', '时间', '开', '高', '低', '收', '成交量', '成交额', '持仓量', 'date', 'time']]
    combine_all_df['时间'] = combine_all_df['时间'].str.replace('-', '')
    combine_all_df['date'] = combine_all_df['date'].str.replace('-', '')
    # combine_all_df.to_csv(save_month_fill_data_path, index=False, encoding='utf-8-sig')
    # ========================== persist the data =================================================
    combine_df = combine_all_df.copy()
    contract_type = contract_kind
    combine_df = combine_df.sort_values('时间')
    # ==================== start cutting the sessions ====================
    # the position after each end_time row can serve both as the start of the next slice
    # (with 0 prepended) and as the slice end, since Python slicing excludes the endpoint,
    # so [start:end] reaches exactly the end_time row
    # cut uniformly at 15:15 in the afternoon
    end_time = '15:15:00'
    end_index = np.where(combine_df['time'] == end_time)[0] + 1
    end_index = np.hstack(([0], end_index))
    start = end_index[:-1]
    end = end_index[1:]
    # ============== the night session before the first trading day is missing ==============
    # construct a synthetic range of timestamps to stand in for the missing night session;
    # with the cutting scheme above, the first trading day lacks the previous day's night data
    last_day = date_df['date'].iloc[target_date_index[0] - 1]
    last_day = last_day[:4] + '-' + last_day[4:6] + '-' + last_day[6:]
    first_day_have = combine_df[start[0]:end[0]]['time'].values
    full_time = combine_df['time'].unique()
    full_time.sort()
    first_day_lack = [x for x in full_time[-179:]]
    first_day_lack.sort()
    lack_array = np.empty(shape=(len(first_day_lack), 12))
    lack_array.fill(np.nan)
    # ============== build the DataFrame for the missing part ==============
    first_day
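# The per-day NaN-filling loop above can be written more compactly with pandas
# reindex. A sketch under the same assumptions (time_0931_15 holds the canonical
# minute grid; fill_one_day is an illustrative name, not part of the script):
import pandas as pd

def fill_one_day(day_df: pd.DataFrame, date: str, minute_grid: list) -> pd.DataFrame:
    # Reindex the day onto the full grid; minutes without trades become NaN rows.
    full = day_df.set_index('time').reindex(minute_grid)
    full['date'] = date
    if len(day_df) > 0:
        # Partially missing day: carry the contract code across the gaps.
        full['合约代码'] = full['合约代码'].ffill().bfill()
    return full.reset_index().rename(columns={'index': 'time'})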
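# Worked example of the 15:15 cut used above: the position just after each
# end_time row begins the next trading day, so consecutive (start, end) pairs
# partition the rows into day-long slices.
import numpy as np

times = np.array(['21:01:00', '09:31:00', '15:15:00', '21:01:00', '15:15:00'])
end_index = np.where(times == '15:15:00')[0] + 1  # -> [3 5]
end_index = np.hstack(([0], end_index))           # -> [0 3 5]
start, end = end_index[:-1], end_index[1:]        # day slices [0:3] and [3:5]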
conf.go
"strconv"
"strings"
"time"
)
type urlHolder struct {
scheme string
host string
port int
}
type config struct {
securityProviders []securityProvider
urlHolder *urlHolder
pathStyle bool
cname bool
sslVerify bool
endpoint string
signature SignatureType
region string
connectTimeout int
socketTimeout int
headerTimeout int
idleConnTimeout int
finalTimeout int
maxRetryCount int
proxyURL string
maxConnsPerHost int
pemCerts []byte
transport *http.Transport
roundTripper http.RoundTripper
httpClient *http.Client
ctx context.Context
maxRedirectCount int
userAgent string
enableCompression bool
}
func (conf config) String() string {
return fmt.Sprintf("[endpoint:%s, signature:%s, pathStyle:%v, region:%s"+
"\nconnectTimeout:%d, socketTimeout:%dheaderTimeout:%d, idleConnTimeout:%d"+
"\nmaxRetryCount:%d, maxConnsPerHost:%d, sslVerify:%v, maxRedirectCount:%d]",
conf.endpoint, conf.signature, conf.pathStyle, conf.region,
conf.connectTimeout, conf.socketTimeout, conf.headerTimeout, conf.idleConnTimeout,
conf.maxRetryCount, conf.maxConnsPerHost, conf.sslVerify, conf.maxRedirectCount,
)
}
type configurer func(conf *config)
func WithSecurityProviders(sps ...securityProvider) configurer {
return func(conf *config) {
for _, sp := range sps {
if sp != nil {
conf.securityProviders = append(conf.securityProviders, sp)
}
}
}
}
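// These functional options compose by mutating a shared *config. A minimal
// sketch of how a set of them is applied inside this package (the
// applyConfigurers helper is illustrative, not part of this file):
func applyConfigurers(conf *config, configurers ...configurer) {
	for _, c := range configurers {
		c(conf) // each option sets one or more fields on conf
	}
}
// Usage, e.g.: applyConfigurers(conf, WithRegion("cn-north-1"), WithConnectTimeout(30))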
// WithSslVerify is a wrapper for WithSslVerifyAndPemCerts.
func WithSslVerify(sslVerify bool) configurer {
return WithSslVerifyAndPemCerts(sslVerify, nil)
}
// WithSslVerifyAndPemCerts is a configurer for ObsClient to set conf.sslVerify and conf.pemCerts.
func WithSslVerifyAndPemCerts(sslVerify bool, pemCerts []byte) configurer {
return func(conf *config) {
conf.sslVerify = sslVerify
conf.pemCerts = pemCerts
}
}
// WithHeaderTimeout is a configurer for ObsClient to set the timeout period of obtaining the response headers.
func WithHeaderTimeout(headerTimeout int) configurer {
return func(conf *config) {
conf.headerTimeout = headerTimeout
}
}
// WithProxyUrl is a configurer for ObsClient to set HTTP proxy.
func WithProxyUrl(proxyURL string) configurer {
return func(conf *config) {
conf.proxyURL = proxyURL
}
}
// WithMaxConnections is a configurer for ObsClient to set the maximum number of idle HTTP connections.
func WithMaxConnections(maxConnsPerHost int) configurer {
return func(conf *config) {
conf.maxConnsPerHost = maxConnsPerHost
}
}
// WithPathStyle is a configurer for ObsClient.
func WithPathStyle(pathStyle bool) configurer {
return func(conf *config) {
conf.pathStyle = pathStyle
}
}
// WithSignature is a configurer for ObsClient.
func WithSignature(signature SignatureType) configurer {
return func(conf *config) {
conf.signature = signature
}
}
// WithRegion is a configurer for ObsClient.
func WithRegion(region string) configurer {
return func(conf *config) {
conf.region = region
}
}
// WithConnectTimeout is a configurer for ObsClient to set timeout period for establishing
// an http/https connection, in seconds.
func WithConnectTimeout(connectTimeout int) configurer {
return func(conf *config) {
conf.connectTimeout = connectTimeout
}
}
// WithSocketTimeout is a configurer for ObsClient to set the timeout duration for transmitting data at
// the socket layer, in seconds.
func WithSocketTimeout(socketTimeout int) configurer {
return func(conf *config) {
conf.socketTimeout = socketTimeout
}
}
// WithIdleConnTimeout is a configurer for ObsClient to set the timeout period of an idle HTTP connection
// in the connection pool, in seconds.
func WithIdleConnTimeout(idleConnTimeout int) configurer {
return func(conf *config) {
conf.idleConnTimeout = idleConnTimeout
}
}
// WithMaxRetryCount is a configurer for ObsClient to set the maximum number of retries when an HTTP/HTTPS connection is abnormal.
func WithMaxRetryCount(maxRetryCount int) configurer {
return func(conf *config) {
conf.maxRetryCount = maxRetryCount
}
}
// WithSecurityToken is a configurer for ObsClient to set the security token in the temporary access keys.
func WithSecurityToken(securityToken string) configurer {
return func(conf *config) {
for _, sp := range conf.securityProviders {
if bsp, ok := sp.(*BasicSecurityProvider); ok {
sh := bsp.getSecurity()
bsp.refresh(sh.ak, sh.sk, securityToken)
break
}
}
}
}
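// Example: WithSecurityToken only has an effect when a *BasicSecurityProvider
// is already registered via WithSecurityProviders; it re-issues that provider's
// existing ak/sk pair together with the new token and leaves any other
// provider types untouched.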
// WithHttpTransport is a configurer for ObsClient to set the customized http Transport.
func WithHttpTransport(transport *http.Transport) configurer {
return func(conf *config) {
conf.transport = transport
}
}
// WithHttpClient is a configurer for ObsClient to set a customized http.Client.
func WithHttpClient(httpClient *http.Client) configurer {
return func(conf *config) {
conf.httpClient = httpClient
}
}
// WithRequestContext is a configurer for ObsClient to set the context for each HTTP request.
func WithRequestContext(ctx context.Context) configurer {
return func(conf *config) {
conf.ctx = ctx
}
}
// WithCustomDomainName is a configurer for ObsClient.
func WithCustomDomainName(cname bool) configurer {
return func(conf *config) {
conf.cname = cname
}
}
// WithMaxRedirectCount is a configurer for ObsClient to set the maximum number of times that the request is redirected.
func WithMaxRedirectCount(maxRedirectCount int) configurer {
return func(conf *config) {
conf.maxRedirectCount = maxRedirectCount
}
}
// WithUserAgent is a configurer for ObsClient to set the User-Agent.
func WithUserAgent(userAgent string) configurer {
return func(conf *config) {
conf.userAgent = userAgent
}
}
// WithEnableCompression is a configurer for ObsClient to set the Transport.DisableCompression.
func WithEnableCompression(enableCompression bool) configurer {
return func(conf *config) {
conf.enableCompression = enableCompression
}
}
func (conf *config) prepareConfig() {
if conf.connectTimeout <= 0 {
conf.connectTimeout = DEFAULT_CONNECT_TIMEOUT
}
if conf.socketTimeout <= 0 {
conf.socketTimeout = DEFAULT_SOCKET_TIMEOUT
}
conf.finalTimeout = conf.socketTimeout * 10
if conf.headerTimeout <= 0 {
conf.headerTimeout = DEFAULT_HEADER_TIMEOUT
}
if conf.idleConnTimeout < 0 {
conf.idleConnTimeout = DEFAULT_IDLE_CONN_TIMEOUT
}
if conf.maxRetryCount < 0 {
conf.maxRetryCount = DEFAULT_MAX_RETRY_COUNT
}
if conf.maxConnsPerHost <= 0 {
conf.maxConnsPerHost = DEFAULT_MAX_CONN_PER_HOST
}
if conf.maxRedirectCount < 0 {
conf.maxRedirectCount = DEFAULT_MAX_REDIRECT_COUNT
}
if conf.pathStyle && conf.signature == SignatureObs {
conf.signature = SignatureV2
}
}
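// Example: a zero-valued config passes through prepareConfig with every
// timeout reset to its DEFAULT_* constant, finalTimeout derived as
// socketTimeout*10, and an obs-style signature downgraded to V2 whenever
// path-style addressing is requested.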
func (conf *config) initConfigWithDefault() error {
conf.endpoint = strings.TrimSpace(conf.endpoint)
if conf.endpoint == "" {
return errors.New("endpoint is not set")
}
if index := strings.Index(conf.endpoint, "?"); index > 0 {
conf.endpoint = conf.endpoint[:index]
}
for strings.LastIndex(conf.endpoint, "/") == len(conf.endpoint)-1 {
conf.endpoint = conf.endpoint[:len(conf.endpoint)-1]
}
if conf.signature == "" {
conf.signature = DEFAULT_SIGNATURE
}
urlHolder := &urlHolder{}
var address string
if strings.HasPrefix(conf.endpoint, "https://") {
urlHolder.scheme = "https"
address = conf.endpoint[len("https://"):]
} else if strings.HasPrefix(conf.endpoint, "http://") {
urlHolder.scheme = "http"
address = conf.endpoint[len("http://"):]
} else {
urlHolder.scheme = "https"
address = conf.endpoint
}
addr := strings.Split(address, ":")
if len(addr) == 2 {
if port, err := strconv.Atoi(addr[1]); err == nil {
urlHolder.port = port
}
}
urlHolder.host = addr[0]
if urlHolder.port == 0 {
if urlHolder.scheme == "https" {
urlHolder.port = 443
} else {
urlHolder.port = 80
}
}
if IsIP(urlHolder.host) {
conf.pathStyle = true
}
conf.urlHolder = urlHolder
conf.region = strings.TrimSpace(conf.region)
if conf.region == "" {
conf.region = DEFAULT_REGION
}
conf.prepareConfig()
conf.proxyURL = strings.TrimSpace(conf.proxyURL)
return nil
}
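// Example: endpoint "http://192.168.0.1:9090/?x=1" is normalized by dropping
// the query string and the trailing slash, then parsed into scheme "http",
// host "192.168.0.1", port 9090; because the host is an IP literal,
// path-style addressing is forced on.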
func (conf *config) getTransport() error {
if conf.transport == nil {
conf.transport = &http.Transport{
Dial: func(network, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(network, addr, time.Second*time.Duration(conf.connectTimeout))
if err != nil {
return nil, err
}
return getConnDelegate(conn, conf.socketTimeout, conf.finalTimeout), nil
},
MaxIdleConns: conf.maxConnsPerHost,
MaxIdleConnsPerHost: conf.maxConnsPerHost,
ResponseHeaderTimeout: time.Second * time.Duration(conf.headerTimeout),
IdleConnTimeout: time.Second * time.Duration(conf.idleConnTimeout),
}
if conf.proxyURL != "" {
proxyURL, err := url.Parse(conf.proxyURL)
if err != nil {
return err
}
conf.transport.Proxy = http.ProxyURL(proxyURL)
}
tlsConfig := &tls.Config{InsecureSkipVerify: !conf.sslVerify}
if conf.sslVerify && conf.pemCerts != nil {
pool := x509.NewCertPool()
pool.AppendCertsFromPEM(conf.pemCerts)
tlsConfig.RootCAs = pool
}
conf.transport.TLSClientConfig = tlsConfig
conf.transport.DisableCompression = !conf.enableCompression
}
return nil
}
conf.go
|
d]",
conf.endpoint, conf.signature, conf.pathStyle, conf.region,
conf.connectTimeout, conf.socketTimeout, conf.headerTimeout, conf.idleConnTimeout,
conf.maxRetryCount, conf.maxConnsPerHost, conf.sslVerify, conf.maxRedirectCount,
)
}
type configurer func(conf *config)
func WithSecurityProviders(sps ...securityProvider) configurer {
return func(conf *config) {
for _, sp := range sps {
if sp != nil {
conf.securityProviders = append(conf.securityProviders, sp)
}
}
}
}
// WithSslVerify is a wrapper for WithSslVerifyAndPemCerts.
func WithSslVerify(sslVerify bool) configurer {
return WithSslVerifyAndPemCerts(sslVerify, nil)
}
// WithSslVerifyAndPemCerts is a configurer for ObsClient to set conf.sslVerify and conf.pemCerts.
func WithSslVerifyAndPemCerts(sslVerify bool, pemCerts []byte) configurer {
return func(conf *config) {
conf.sslVerify = sslVerify
conf.pemCerts = pemCerts
}
}
// WithHeaderTimeout is a configurer for ObsClient to set the timeout period of obtaining the response headers.
func
|
(headerTimeout int) configurer {
return func(conf *config) {
conf.headerTimeout = headerTimeout
}
}
// WithProxyUrl is a configurer for ObsClient to set HTTP proxy.
func WithProxyUrl(proxyURL string) configurer {
return func(conf *config) {
conf.proxyURL = proxyURL
}
}
// WithMaxConnections is a configurer for ObsClient to set the maximum number of idle HTTP connections.
func WithMaxConnections(maxConnsPerHost int) configurer {
return func(conf *config) {
conf.maxConnsPerHost = maxConnsPerHost
}
}
// WithPathStyle is a configurer for ObsClient.
func WithPathStyle(pathStyle bool) configurer {
return func(conf *config) {
conf.pathStyle = pathStyle
}
}
// WithSignature is a configurer for ObsClient.
func WithSignature(signature SignatureType) configurer {
return func(conf *config) {
conf.signature = signature
}
}
// WithRegion is a configurer for ObsClient.
func WithRegion(region string) configurer {
return func(conf *config) {
conf.region = region
}
}
// WithConnectTimeout is a configurer for ObsClient to set timeout period for establishing
// an http/https connection, in seconds.
func WithConnectTimeout(connectTimeout int) configurer {
return func(conf *config) {
conf.connectTimeout = connectTimeout
}
}
// WithSocketTimeout is a configurer for ObsClient to set the timeout duration for transmitting data at
// the socket layer, in seconds.
func WithSocketTimeout(socketTimeout int) configurer {
return func(conf *config) {
conf.socketTimeout = socketTimeout
}
}
// WithIdleConnTimeout is a configurer for ObsClient to set the timeout period of an idle HTTP connection
// in the connection pool, in seconds.
func WithIdleConnTimeout(idleConnTimeout int) configurer {
return func(conf *config) {
conf.idleConnTimeout = idleConnTimeout
}
}
// WithMaxRetryCount is a configurer for ObsClient to set the maximum number of retries when an HTTP/HTTPS connection is abnormal.
func WithMaxRetryCount(maxRetryCount int) configurer {
return func(conf *config) {
conf.maxRetryCount = maxRetryCount
}
}
// WithSecurityToken is a configurer for ObsClient to set the security token in the temporary access keys.
func WithSecurityToken(securityToken string) configurer {
return func(conf *config) {
for _, sp := range conf.securityProviders {
if bsp, ok := sp.(*BasicSecurityProvider); ok {
sh := bsp.getSecurity()
bsp.refresh(sh.ak, sh.sk, securityToken)
break
}
}
}
}
// WithHttpTransport is a configurer for ObsClient to set the customized http Transport.
func WithHttpTransport(transport *http.Transport) configurer {
return func(conf *config) {
conf.transport = transport
}
}
func WithHttpClient(httpClient *http.Client) configurer {
return func(conf *config) {
conf.httpClient = httpClient
}
}
// WithRequestContext is a configurer for ObsClient to set the context for each HTTP request.
func WithRequestContext(ctx context.Context) configurer {
return func(conf *config) {
conf.ctx = ctx
}
}
// WithCustomDomainName is a configurer for ObsClient.
func WithCustomDomainName(cname bool) configurer {
return func(conf *config) {
conf.cname = cname
}
}
// WithMaxRedirectCount is a configurer for ObsClient to set the maximum number of times that the request is redirected.
func WithMaxRedirectCount(maxRedirectCount int) configurer {
return func(conf *config) {
conf.maxRedirectCount = maxRedirectCount
}
}
// WithUserAgent is a configurer for ObsClient to set the User-Agent.
func WithUserAgent(userAgent string) configurer {
return func(conf *config) {
conf.userAgent = userAgent
}
}
// WithEnableCompression is a configurer for ObsClient to set the Transport.DisableCompression.
func WithEnableCompression(enableCompression bool) configurer {
return func(conf *config) {
conf.enableCompression = enableCompression
}
}
func (conf *config) prepareConfig() {
if conf.connectTimeout <= 0 {
conf.connectTimeout = DEFAULT_CONNECT_TIMEOUT
}
if conf.socketTimeout <= 0 {
conf.socketTimeout = DEFAULT_SOCKET_TIMEOUT
}
conf.finalTimeout = conf.socketTimeout * 10
if conf.headerTimeout <= 0 {
conf.headerTimeout = DEFAULT_HEADER_TIMEOUT
}
if conf.idleConnTimeout < 0 {
conf.idleConnTimeout = DEFAULT_IDLE_CONN_TIMEOUT
}
if conf.maxRetryCount < 0 {
conf.maxRetryCount = DEFAULT_MAX_RETRY_COUNT
}
if conf.maxConnsPerHost <= 0 {
conf.maxConnsPerHost = DEFAULT_MAX_CONN_PER_HOST
}
if conf.maxRedirectCount < 0 {
conf.maxRedirectCount = DEFAULT_MAX_REDIRECT_COUNT
}
if conf.pathStyle && conf.signature == SignatureObs {
conf.signature = SignatureV2
}
}
func (conf *config) initConfigWithDefault() error {
conf.endpoint = strings.TrimSpace(conf.endpoint)
if conf.endpoint == "" {
return errors.New("endpoint is not set")
}
if index := strings.Index(conf.endpoint, "?"); index > 0 {
conf.endpoint = conf.endpoint[:index]
}
for strings.LastIndex(conf.endpoint, "/") == len(conf.endpoint)-1 {
conf.endpoint = conf.endpoint[:len(conf.endpoint)-1]
}
if conf.signature == "" {
conf.signature = DEFAULT_SIGNATURE
}
urlHolder := &urlHolder{}
var address string
if strings.HasPrefix(conf.endpoint, "https://") {
urlHolder.scheme = "https"
address = conf.endpoint[len("https://"):]
} else if strings.HasPrefix(conf.endpoint, "http://") {
urlHolder.scheme = "http"
address = conf.endpoint[len("http://"):]
} else {
urlHolder.scheme = "https"
address = conf.endpoint
}
addr := strings.Split(address, ":")
if len(addr) == 2 {
if port, err := strconv.Atoi(addr[1]); err == nil {
urlHolder.port = port
}
}
urlHolder.host = addr[0]
if urlHolder.port == 0 {
if urlHolder.scheme == "https" {
urlHolder.port = 443
} else {
urlHolder.port = 80
}
}
if IsIP(urlHolder.host) {
conf.pathStyle = true
}
conf.urlHolder = urlHolder
conf.region = strings.TrimSpace(conf.region)
if conf.region == "" {
conf.region = DEFAULT_REGION
}
conf.prepareConfig()
conf.proxyURL = strings.TrimSpace(conf.proxyURL)
return nil
}
func (conf *config) getTransport() error {
if conf.transport == nil {
conf.transport = &http.Transport{
Dial: func(network, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(network, addr, time.Second*time.Duration(conf.connectTimeout))
if err != nil {
return nil, err
}
return getConnDelegate(conn, conf.socketTimeout, conf.finalTimeout), nil
},
MaxIdleConns: conf.maxConnsPerHost,
MaxIdleConnsPerHost: conf.maxConnsPerHost,
ResponseHeaderTimeout: time.Second * time.Duration(conf.headerTimeout),
IdleConnTimeout: time.Second * time.Duration(conf.idleConnTimeout),
}
if conf.proxyURL != "" {
proxyURL, err := url.Parse(conf.proxyURL)
if err != nil {
return err
}
conf.transport.Proxy = http.ProxyURL(proxyURL)
}
tlsConfig := &tls.Config{InsecureSkipVerify: !conf.sslVerify}
if conf.sslVerify && conf.pemCerts != nil {
pool := x509.NewCertPool()
pool.AppendCertsFromPEM(conf.pemCerts)
tlsConfig.RootCAs = pool
}
conf.transport.TLSClientConfig = tlsConfig
conf.transport.DisableCompression = !conf.enableCompression
|
WithHeaderTimeout
|
identifier_name
|
conf.go
|
]",
conf.endpoint, conf.signature, conf.pathStyle, conf.region,
conf.connectTimeout, conf.socketTimeout, conf.headerTimeout, conf.idleConnTimeout,
conf.maxRetryCount, conf.maxConnsPerHost, conf.sslVerify, conf.maxRedirectCount,
)
}
type configurer func(conf *config)
func WithSecurityProviders(sps ...securityProvider) configurer {
return func(conf *config) {
for _, sp := range sps {
if sp != nil {
conf.securityProviders = append(conf.securityProviders, sp)
}
}
}
}
// WithSslVerify is a wrapper for WithSslVerifyAndPemCerts.
func WithSslVerify(sslVerify bool) configurer {
return WithSslVerifyAndPemCerts(sslVerify, nil)
}
// WithSslVerifyAndPemCerts is a configurer for ObsClient to set conf.sslVerify and conf.pemCerts.
func WithSslVerifyAndPemCerts(sslVerify bool, pemCerts []byte) configurer {
return func(conf *config) {
conf.sslVerify = sslVerify
conf.pemCerts = pemCerts
}
}
// WithHeaderTimeout is a configurer for ObsClient to set the timeout period of obtaining the response headers.
func WithHeaderTimeout(headerTimeout int) configurer {
return func(conf *config) {
conf.headerTimeout = headerTimeout
}
}
// WithProxyUrl is a configurer for ObsClient to set HTTP proxy.
func WithProxyUrl(proxyURL string) configurer {
return func(conf *config) {
conf.proxyURL = proxyURL
}
}
// WithMaxConnections is a configurer for ObsClient to set the maximum number of idle HTTP connections.
func WithMaxConnections(maxConnsPerHost int) configurer {
return func(conf *config) {
conf.maxConnsPerHost = maxConnsPerHost
}
}
// WithPathStyle is a configurer for ObsClient.
func WithPathStyle(pathStyle bool) configurer {
return func(conf *config) {
conf.pathStyle = pathStyle
}
}
// WithSignature is a configurer for ObsClient.
func WithSignature(signature SignatureType) configurer {
return func(conf *config) {
conf.signature = signature
}
}
// WithRegion is a configurer for ObsClient.
func WithRegion(region string) configurer {
return func(conf *config) {
conf.region = region
}
}
// WithConnectTimeout is a configurer for ObsClient to set timeout period for establishing
// an http/https connection, in seconds.
func WithConnectTimeout(connectTimeout int) configurer {
return func(conf *config) {
conf.connectTimeout = connectTimeout
}
}
// WithSocketTimeout is a configurer for ObsClient to set the timeout duration for transmitting data at
// the socket layer, in seconds.
func WithSocketTimeout(socketTimeout int) configurer {
return func(conf *config) {
conf.socketTimeout = socketTimeout
}
}
// WithIdleConnTimeout is a configurer for ObsClient to set the timeout period of an idle HTTP connection
// in the connection pool, in seconds.
func WithIdleConnTimeout(idleConnTimeout int) configurer {
return func(conf *config) {
conf.idleConnTimeout = idleConnTimeout
}
}
// WithMaxRetryCount is a configurer for ObsClient to set the maximum number of retries when an HTTP/HTTPS connection is abnormal.
func WithMaxRetryCount(maxRetryCount int) configurer {
return func(conf *config) {
conf.maxRetryCount = maxRetryCount
}
}
// WithSecurityToken is a configurer for ObsClient to set the security token in the temporary access keys.
func WithSecurityToken(securityToken string) configurer {
return func(conf *config) {
for _, sp := range conf.securityProviders {
if bsp, ok := sp.(*BasicSecurityProvider); ok {
sh := bsp.getSecurity()
bsp.refresh(sh.ak, sh.sk, securityToken)
break
}
}
}
}
// WithHttpTransport is a configurer for ObsClient to set the customized http Transport.
func WithHttpTransport(transport *http.Transport) configurer {
return func(conf *config) {
conf.transport = transport
}
}
func WithHttpClient(httpClient *http.Client) configurer {
return func(conf *config) {
conf.httpClient = httpClient
}
}
// WithRequestContext is a configurer for ObsClient to set the context for each HTTP request.
func WithRequestContext(ctx context.Context) configurer {
return func(conf *config) {
conf.ctx = ctx
}
}
// WithCustomDomainName is a configurer for ObsClient.
func WithCustomDomainName(cname bool) configurer {
return func(conf *config) {
conf.cname = cname
}
}
// WithMaxRedirectCount is a configurer for ObsClient to set the maximum number of times that the request is redirected.
func WithMaxRedirectCount(maxRedirectCount int) configurer {
return func(conf *config) {
conf.maxRedirectCount = maxRedirectCount
}
}
// WithUserAgent is a configurer for ObsClient to set the User-Agent.
func WithUserAgent(userAgent string) configurer {
return func(conf *config) {
conf.userAgent = userAgent
}
}
// WithEnableCompression is a configurer for ObsClient to set the Transport.DisableCompression.
func WithEnableCompression(enableCompression bool) configurer
|
func (conf *config) prepareConfig() {
if conf.connectTimeout <= 0 {
conf.connectTimeout = DEFAULT_CONNECT_TIMEOUT
}
if conf.socketTimeout <= 0 {
conf.socketTimeout = DEFAULT_SOCKET_TIMEOUT
}
conf.finalTimeout = conf.socketTimeout * 10
if conf.headerTimeout <= 0 {
conf.headerTimeout = DEFAULT_HEADER_TIMEOUT
}
if conf.idleConnTimeout < 0 {
conf.idleConnTimeout = DEFAULT_IDLE_CONN_TIMEOUT
}
if conf.maxRetryCount < 0 {
conf.maxRetryCount = DEFAULT_MAX_RETRY_COUNT
}
if conf.maxConnsPerHost <= 0 {
conf.maxConnsPerHost = DEFAULT_MAX_CONN_PER_HOST
}
if conf.maxRedirectCount < 0 {
conf.maxRedirectCount = DEFAULT_MAX_REDIRECT_COUNT
}
if conf.pathStyle && conf.signature == SignatureObs {
conf.signature = SignatureV2
}
}
func (conf *config) initConfigWithDefault() error {
conf.endpoint = strings.TrimSpace(conf.endpoint)
if conf.endpoint == "" {
return errors.New("endpoint is not set")
}
if index := strings.Index(conf.endpoint, "?"); index > 0 {
conf.endpoint = conf.endpoint[:index]
}
for strings.LastIndex(conf.endpoint, "/") == len(conf.endpoint)-1 {
conf.endpoint = conf.endpoint[:len(conf.endpoint)-1]
}
if conf.signature == "" {
conf.signature = DEFAULT_SIGNATURE
}
urlHolder := &urlHolder{}
var address string
if strings.HasPrefix(conf.endpoint, "https://") {
urlHolder.scheme = "https"
address = conf.endpoint[len("https://"):]
} else if strings.HasPrefix(conf.endpoint, "http://") {
urlHolder.scheme = "http"
address = conf.endpoint[len("http://"):]
} else {
urlHolder.scheme = "https"
address = conf.endpoint
}
addr := strings.Split(address, ":")
if len(addr) == 2 {
if port, err := strconv.Atoi(addr[1]); err == nil {
urlHolder.port = port
}
}
urlHolder.host = addr[0]
if urlHolder.port == 0 {
if urlHolder.scheme == "https" {
urlHolder.port = 443
} else {
urlHolder.port = 80
}
}
if IsIP(urlHolder.host) {
conf.pathStyle = true
}
conf.urlHolder = urlHolder
conf.region = strings.TrimSpace(conf.region)
if conf.region == "" {
conf.region = DEFAULT_REGION
}
conf.prepareConfig()
conf.proxyURL = strings.TrimSpace(conf.proxyURL)
return nil
}
func (conf *config) getTransport() error {
if conf.transport == nil {
conf.transport = &http.Transport{
Dial: func(network, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(network, addr, time.Second*time.Duration(conf.connectTimeout))
if err != nil {
return nil, err
}
return getConnDelegate(conn, conf.socketTimeout, conf.finalTimeout), nil
},
MaxIdleConns: conf.maxConnsPerHost,
MaxIdleConnsPerHost: conf.maxConnsPerHost,
ResponseHeaderTimeout: time.Second * time.Duration(conf.headerTimeout),
IdleConnTimeout: time.Second * time.Duration(conf.idleConnTimeout),
}
if conf.proxyURL != "" {
proxyURL, err := url.Parse(conf.proxyURL)
if err != nil {
return err
}
conf.transport.Proxy = http.ProxyURL(proxyURL)
}
tlsConfig := &tls.Config{InsecureSkipVerify: !conf.sslVerify}
if conf.sslVerify && conf.pemCerts != nil {
pool := x509.NewCertPool()
pool.AppendCertsFromPEM(conf.pemCerts)
tlsConfig.RootCAs = pool
}
conf.transport.TLSClientConfig = tlsConfig
conf.transport.DisableCompression = !conf.enableCompression
|
{
return func(conf *config) {
conf.enableCompression = enableCompression
}
}
|
identifier_body
|
conf.go
|
]",
conf.endpoint, conf.signature, conf.pathStyle, conf.region,
conf.connectTimeout, conf.socketTimeout, conf.headerTimeout, conf.idleConnTimeout,
conf.maxRetryCount, conf.maxConnsPerHost, conf.sslVerify, conf.maxRedirectCount,
)
}
type configurer func(conf *config)
func WithSecurityProviders(sps ...securityProvider) configurer {
return func(conf *config) {
for _, sp := range sps {
if sp != nil {
conf.securityProviders = append(conf.securityProviders, sp)
}
}
}
}
// WithSslVerify is a wrapper for WithSslVerifyAndPemCerts.
func WithSslVerify(sslVerify bool) configurer {
return WithSslVerifyAndPemCerts(sslVerify, nil)
}
// WithSslVerifyAndPemCerts is a configurer for ObsClient to set conf.sslVerify and conf.pemCerts.
func WithSslVerifyAndPemCerts(sslVerify bool, pemCerts []byte) configurer {
return func(conf *config) {
conf.sslVerify = sslVerify
conf.pemCerts = pemCerts
}
}
// WithHeaderTimeout is a configurer for ObsClient to set the timeout period of obtaining the response headers.
func WithHeaderTimeout(headerTimeout int) configurer {
return func(conf *config) {
conf.headerTimeout = headerTimeout
}
}
// WithProxyUrl is a configurer for ObsClient to set HTTP proxy.
func WithProxyUrl(proxyURL string) configurer {
return func(conf *config) {
conf.proxyURL = proxyURL
}
}
// WithMaxConnections is a configurer for ObsClient to set the maximum number of idle HTTP connections.
func WithMaxConnections(maxConnsPerHost int) configurer {
return func(conf *config) {
conf.maxConnsPerHost = maxConnsPerHost
}
}
// WithPathStyle is a configurer for ObsClient.
func WithPathStyle(pathStyle bool) configurer {
return func(conf *config) {
conf.pathStyle = pathStyle
}
}
// WithSignature is a configurer for ObsClient.
func WithSignature(signature SignatureType) configurer {
return func(conf *config) {
conf.signature = signature
}
}
// WithRegion is a configurer for ObsClient.
func WithRegion(region string) configurer {
return func(conf *config) {
conf.region = region
}
}
// WithConnectTimeout is a configurer for ObsClient to set timeout period for establishing
// an http/https connection, in seconds.
func WithConnectTimeout(connectTimeout int) configurer {
return func(conf *config) {
conf.connectTimeout = connectTimeout
}
}
// WithSocketTimeout is a configurer for ObsClient to set the timeout duration for transmitting data at
// the socket layer, in seconds.
func WithSocketTimeout(socketTimeout int) configurer {
return func(conf *config) {
conf.socketTimeout = socketTimeout
}
}
// WithIdleConnTimeout is a configurer for ObsClient to set the timeout period of an idle HTTP connection
// in the connection pool, in seconds.
func WithIdleConnTimeout(idleConnTimeout int) configurer {
return func(conf *config) {
conf.idleConnTimeout = idleConnTimeout
}
}
// WithMaxRetryCount is a configurer for ObsClient to set the maximum number of retries when an HTTP/HTTPS connection is abnormal.
func WithMaxRetryCount(maxRetryCount int) configurer {
return func(conf *config) {
conf.maxRetryCount = maxRetryCount
}
}
// WithSecurityToken is a configurer for ObsClient to set the security token in the temporary access keys.
func WithSecurityToken(securityToken string) configurer {
return func(conf *config) {
for _, sp := range conf.securityProviders {
if bsp, ok := sp.(*BasicSecurityProvider); ok {
sh := bsp.getSecurity()
bsp.refresh(sh.ak, sh.sk, securityToken)
break
}
}
}
}
// WithHttpTransport is a configurer for ObsClient to set the customized http Transport.
func WithHttpTransport(transport *http.Transport) configurer {
return func(conf *config) {
conf.transport = transport
}
}
func WithHttpClient(httpClient *http.Client) configurer {
return func(conf *config) {
conf.httpClient = httpClient
}
}
// WithRequestContext is a configurer for ObsClient to set the context for each HTTP request.
func WithRequestContext(ctx context.Context) configurer {
return func(conf *config) {
conf.ctx = ctx
}
}
// WithCustomDomainName is a configurer for ObsClient.
func WithCustomDomainName(cname bool) configurer {
return func(conf *config) {
conf.cname = cname
}
}
// WithMaxRedirectCount is a configurer for ObsClient to set the maximum number of times that the request is redirected.
func WithMaxRedirectCount(maxRedirectCount int) configurer {
return func(conf *config) {
conf.maxRedirectCount = maxRedirectCount
}
}
// WithUserAgent is a configurer for ObsClient to set the User-Agent.
func WithUserAgent(userAgent string) configurer {
return func(conf *config) {
conf.userAgent = userAgent
}
}
// WithEnableCompression is a configurer for ObsClient to set the Transport.DisableCompression.
func WithEnableCompression(enableCompression bool) configurer {
return func(conf *config) {
conf.enableCompression = enableCompression
}
}
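// Usage sketch (illustrative; assumes the package-level constructor accepts
// variadic configurers, as in the Huawei OBS Go SDK):
//
//	client, err := obs.New(ak, sk, endpoint,
//		obs.WithConnectTimeout(30),
//		obs.WithMaxRetryCount(3),
//		obs.WithProxyUrl("http://proxy.example.com:3128"),
//	)
//	if err != nil {
//		// handle construction error
//	}
//
// Each configurer mutates the shared config; prepareConfig (below) then fills in defaults.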
func (conf *config) prepareConfig() {
if conf.connectTimeout <= 0 {
conf.connectTimeout = DEFAULT_CONNECT_TIMEOUT
}
if conf.socketTimeout <= 0 {
conf.socketTimeout = DEFAULT_SOCKET_TIMEOUT
}
conf.finalTimeout = conf.socketTimeout * 10
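// finalTimeout is pinned at ten times the socket timeout; it presumably serves as a hard
// per-connection deadline in the connection delegate (see getTransport below).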
if conf.headerTimeout <= 0 {
conf.headerTimeout = DEFAULT_HEADER_TIMEOUT
}
if conf.idleConnTimeout < 0 {
conf.idleConnTimeout = DEFAULT_IDLE_CONN_TIMEOUT
}
if conf.maxRetryCount < 0 {
conf.maxRetryCount = DEFAULT_MAX_RETRY_COUNT
}
if conf.maxConnsPerHost <= 0 {
conf.maxConnsPerHost = DEFAULT_MAX_CONN_PER_HOST
}
if conf.maxRedirectCount < 0 {
conf.maxRedirectCount = DEFAULT_MAX_REDIRECT_COUNT
}
if conf.pathStyle && conf.signature == SignatureObs {
conf.signature = SignatureV2
}
}
func (conf *config) initConfigWithDefault() error {
conf.endpoint = strings.TrimSpace(conf.endpoint)
if conf.endpoint == "" {
return errors.New("endpoint is not set")
}
if index := strings.Index(conf.endpoint, "?"); index > 0 {
conf.endpoint = conf.endpoint[:index]
}
for strings.LastIndex(conf.endpoint, "/") == len(conf.endpoint)-1 {
conf.endpoint = conf.endpoint[:len(conf.endpoint)-1]
}
if conf.signature == "" {
conf.signature = DEFAULT_SIGNATURE
}
urlHolder := &urlHolder{}
var address string
if strings.HasPrefix(conf.endpoint, "https://") {
urlHolder.scheme = "https"
address = conf.endpoint[len("https://"):]
} else if strings.HasPrefix(conf.endpoint, "http://") {
urlHolder.scheme = "http"
address = conf.endpoint[len("http://"):]
} else {
urlHolder.scheme = "https"
address = conf.endpoint
}
addr := strings.Split(address, ":")
if len(addr) == 2 {
if port, err := strconv.Atoi(addr[1]); err == nil {
urlHolder.port = port
}
}
urlHolder.host = addr[0]
if urlHolder.port == 0 {
if urlHolder.scheme == "https" {
urlHolder.port = 443
} else {
urlHolder.port = 80
}
}
if IsIP(urlHolder.host) {
conf.pathStyle = true
}
conf.urlHolder = urlHolder
conf.region = strings.TrimSpace(conf.region)
if conf.region == "" {
conf.region = DEFAULT_REGION
}
conf.prepareConfig()
conf.proxyURL = strings.TrimSpace(conf.proxyURL)
return nil
}
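// Illustrative examples of the normalization above: "https://obs.example.com:9443/?x=1"
// resolves to scheme "https", host "obs.example.com", port 9443; a bare
// "obs.example.com" defaults to scheme "https" and port 443; an IP-address
// endpoint forces path-style addressing.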
func (conf *config) getTransport() error {
if conf.transport == nil {
conf.transport = &http.Transport{
Dial: func(network, addr string) (net.Conn, error) {
conn, err := net.DialTimeout(network, addr, time.Second*time.Duration(conf.connectTimeout))
if err != nil {
return nil, err
}
return getConnDelegate(conn, conf.socketTimeout, conf.finalTimeout), nil
},
MaxIdleConns: conf.maxConnsPerHost,
MaxIdleConnsPerHost: conf.maxConnsPerHost,
ResponseHeaderTimeout: time.Second * time.Duration(conf.headerTimeout),
IdleConnTimeout: time.Second * time.Duration(conf.idleConnTimeout),
}
if conf.proxyURL != "" {
proxyURL, err := url.Parse(conf.proxyURL)
if err != nil
|
conf.transport.Proxy = http.ProxyURL(proxyURL)
}
tlsConfig := &tls.Config{InsecureSkipVerify: !conf.sslVerify}
if conf.sslVerify && conf.pemCerts != nil {
pool := x509.NewCertPool()
pool.AppendCertsFromPEM(conf.pemCerts)
tlsConfig.RootCAs = pool
}
conf.transport.TLSClientConfig = tlsConfig
conf.transport.DisableCompression = !conf.enableCompression
|
{
return err
}
|
conditional_block
|
PROGRAM.py
|
(mode,text):
keyboard=text
if keyboard=='': return
if mode==1:
try:
window_id = xbmcgui.getCurrentWindowDialogId()
window = xbmcgui.Window(window_id)
keyboard = mixARABIC(keyboard)
window.getControl(311).setLabel(keyboard)
except: pass
elif mode==0:
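# Debug mode: display the string's type and a per-character hex dump, before and after mixARABIC reshaping.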
ttype='X'
check=isinstance(keyboard, unicode)
if check==True: ttype='U'
new1=str(type(keyboard))+' '+keyboard+' '+ttype+' '
for i in range(0,len(keyboard),1):
new1 += hex(ord(keyboard[i])).replace('0x','')+' '
keyboard = mixARABIC(keyboard)
ttype='X'
check=isinstance(keyboard, unicode)
if check==True: ttype='U'
new2=str(type(keyboard))+' '+keyboard+' '+ttype+' '
for i in range(0,len(keyboard),1):
new2 += hex(ord(keyboard[i])).replace('0x','')+' '
xbmcgui.Dialog().ok(new1,new2)
return
def SEND_MESSAGE():
xbmcgui.Dialog().ok('المبرمج لا يعلم الغيب','اذا كانت لديك مشكلة فاذن أقرأ قسم المشاكل والحلول واذا لم تجد الحل هناك فاذن اكتب رسالة عن المكان والوقت والحال الذي تحدث فيه المشكلة واكتب جميع التفاصيل لان المبرمج لا يعلم الغيب')
xbmcgui.Dialog().ok('عنوان الايميل','اذا كنت تريد ان تسأل وتحتاج جواب من المبرمج فاذن يجب عليك اضافة عنوان البريد الالكتروني email الخاص بك الى رسالتك لانها الطريقة الوحيدة للوصول اليك')
search = KEYBOARD('Write a message اكتب رسالة')
if search == '': return
message = search
subject = 'Message: From Arabic Videos'
result = SEND_EMAIL(subject,message,'yes','','EMAIL-FROM-USERS')
# url = 'my API and/or SMTP server'
# payload = '{"api_key":"MY API KEY","to":["me@email.com"],"sender":"me@email.com","subject":"From Arabic Videos","text_body":"'+message+'"}'
# #auth=("api", "my personal api key"),
# #response = requests.request('POST',url, data=payload, headers='', auth='')
# response = requests.post(url, data=payload, headers='', auth='')
# if response.status_code == 200:
# xbmcgui.Dialog().ok('تم الارسال بنجاح','')
# else:
# xbmcgui.Dialog().ok('خطأ في الارسال','Error {}: {!r}'.format(response.status_code, response.content))
# FROMemailAddress = 'me@email.com'
# TOemailAddress = 'me@email.com'
# header = ''
# #header += 'From: ' + FROMemailAddress
# #header += '\nTo: ' + emailAddress
# #header += '\nCc: ' + emailAddress
# header += '\nSubject: من كودي الفيديو العربي'
# server = smtplib.SMTP('smtp-server',25)
# #server.starttls()
# server.login('username','password')
# response = server.sendmail(FROMemailAddress,TOemailAddress, header + '\n' + message)
# server.quit()
# xbmcgui.Dialog().ok('Response',str(response))
return
def DMCA():
text = ' نفي: البرنامج لا يوجد له اي سيرفر يستضيف اي محتويات. البرنامج يستخدم روابط وتضمين لمحتويات مرفوعة على سيرفرات خارجية. البرنامج غير مسؤول عن اي محتويات تم تحميلها على سيرفرات ومواقع خارجية "مواقع طرف 3". جميع الاسماء والماركات والصور والمنشورات هي خاصة باصحابها. البرنامج لا ينتهك حقوق الطبع والنشر وقانون الألفية للملكية الرقمية DMCA اذا كان لديك شكوى خاصة بالروابط والتضامين الخارجية فالرجاء التواصل مع ادارة هذه السيرفرات والمواقع الخارجية'
xbmcgui.Dialog().textviewer('حقوق الطبع والنشر وقانون الألفية للملكية الرقمية',text)
text = 'Disclaimer: The program does not host any content on any server. The program just use linking to or embedding content that was uploaded to popular Online Video hosting sites. All trademarks, Videos, trade names, service marks, copyrighted work, logos referenced herein belong to their respective owners/companies. The program is not responsible for what other people upload to 3rd party sites. We urge all copyright owners, to recognize that the links contained within this site are located somewhere else on the web or video embedded are from other various site. If you have any legal issues please contact appropriate media file owners/hosters.'
xbmcgui.Dialog().textviewer('Digital Millennium Copyright Act (DMCA)',text)
return
def HTTPS_TEST():
html = openURL('https://www.google.com','','','','PROGRAM-1st')
#xbmcgui.Dialog().ok('Checking SSL',html)
if 'html' in html:
xbmcgui.Dialog().ok('الاتصال المشفر','جيد جدا ... الاتصال المشفر (الربط المشفر) يعمل على جهازك ... وجهازك قادر على استخدام المواقع المشفرة')
else:
xbmcgui.Dialog().ok('الاتصال المشفر','مشكلة ... الاتصال المشفر (الربط المشفر) لا يعمل على جهازك ... وجهازك غير قادر على استخدام المواقع المشفرة')
from PROBLEMS import MAIN as PROBLEMS_MAIN
PROBLEMS_MAIN(152)
return
def SERVERS_TYPE():
text = 'السيرفرات العامة هي سيرفرات خارجية وغير جيدة لان الكثير منها ممنوع او محذوف او خطأ بسبب حقوق الطبع وحقوق الالفية الرقمية ولا توجد طريقة لفحصها او اصلاحها \n\n السيرفرات الخاصة هي سيرفرات يتحكم فيها الموقع الاصلي وهي جيدة نسبيا ولا توجد طريقة لفحصها او اصلاحها \n\n الرجاء قبل الاعتراض على السيرفرات وقبل مراسلة المبرمج افحص الفيديو والسيرفر على الموقع الاصلي'
xbmcgui.Dialog().textviewer('مواقع تستخدم سيرفرات عامة',text)
return
def GLOBAL_SEARCH(search=''):
if search=='': search = KEYBOARD()
if search == '': return
search = search.lower()
addDir('1. [COLOR FFC89008]YUT [/COLOR]'+search+' - موقع يوتيوب','',149,'','',search)
addDir('2. [COLOR FFC89008]SHF [/COLOR]'+search+' - موقع شوف ماكس','',59,'','',search)
addDir('3. [COLOR FFC89008]EGB [/COLOR]'+search+' - موقع ايجي بيست','',9,'','',search) #129
addDir('4. [COLOR FFC89008]KLA [/COLOR]'+search+' - موقع كل العرب','',19,'','',search)
addDir('5. [COLOR FFC89008]PNT [/COLOR]'+search+' - موقع بانيت','',39,'','',search)
addDir('6. [COLOR FFC89008]IFL [/COLOR]'+search+' - موقع قناة اي فيلم','',29,'','',search)
addDir('7. [COLOR FFC89008]KWT [/COLOR]'+search+' - موقع قناة الكوثر','',139,'','',search)
addDir('8. [COLOR FFC89008]MRF [/COLOR]'+search+' - موقع قناة المعارف','',49,'','',search)
addDir('9. [COLOR FFC89008]FTM [/COLOR]'+search+' - موقع المنبر الفاطمي','',69,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('10. [COLOR FFC89008]MVZ [/COLOR]'+search+' - موقع موفيزلاند اونلاين','',189,'','',search)
addDir('11. [COLOR FFC89008]AKM [/COLOR]'+search+' - موقع اكوام','',79,'','',search)
addDir('12. [COLOR FFC89008]HEL [/COLOR]'+search+' - موقع هلال يوتيوب','',99,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('13. [COLOR FFC89008]SHA [/COLOR]'+search+' - موقع شاهد فوريو','',119,'','',search)
addDir('14. [COLOR FFC89
|
FIX_KEYBOARD
|
identifier_name
|
|
PROGRAM.py
|
اذن أقرأ قسم المشاكل والحلول واذا لم تجد الحل هناك فاذن اكتب رسالة عن المكان والوقت والحال الذي تحدث فيه المشكلة واكتب جميع التفاصيل لان المبرمج لا يعلم الغيب')
xbmcgui.Dialog().ok('عنوان الايميل','اذا كنت تريد ان تسأل وتحتاج جواب من المبرمج فاذن يجب عليك اضافة عنوان البريد الالكتروني email الخاص بك الى رسالتك لانها الطريقة الوحيدة للوصول اليك')
search = KEYBOARD('Write a message اكتب رسالة')
if search == '': return
message = search
subject = 'Message: From Arabic Videos'
result = SEND_EMAIL(subject,message,'yes','','EMAIL-FROM-USERS')
# url = 'my API and/or SMTP server'
# payload = '{"api_key":"MY API KEY","to":["me@email.com"],"sender":"me@email.com","subject":"From Arabic Videos","text_body":"'+mes
|
"}'
# #auth=("api", "my personal api key"),
# #response = requests.request('POST',url, data=payload, headers='', auth='')
# response = requests.post(url, data=payload, headers='', auth='')
# if response.status_code == 200:
# xbmcgui.Dialog().ok('تم الارسال بنجاح','')
# else:
# xbmcgui.Dialog().ok('خطأ في الارسال','Error {}: {!r}'.format(response.status_code, response.content))
# FROMemailAddress = 'me@email.com'
# TOemailAddress = 'me@email.com'
# header = ''
# #header += 'From: ' + FROMemailAddress
# #header += '\nTo: ' + emailAddress
# #header += '\nCc: ' + emailAddress
# header += '\nSubject: من كودي الفيديو العربي'
# server = smtplib.SMTP('smtp-server',25)
# #server.starttls()
# server.login('username','password')
# response = server.sendmail(FROMemailAddress,TOemailAddress, header + '\n' + message)
# server.quit()
# xbmcgui.Dialog().ok('Response',str(response))
return
def DMCA():
text = ' نفي: البرنامج لا يوجد له اي سيرفر يستضيف اي محتويات. البرنامج يستخدم روابط وتضمين لمحتويات مرفوعة على سيرفرات خارجية. البرنامج غير مسؤول عن اي محتويات تم تحميلها على سيرفرات ومواقع خارجية "مواقع طرف 3". جميع الاسماء والماركات والصور والمنشورات هي خاصة باصحابها. البرنامج لا ينتهك حقوق الطبع والنشر وقانون الألفية للملكية الرقمية DMCA اذا كان لديك شكوى خاصة بالروابط والتضامين الخارجية فالرجاء التواصل مع ادارة هذه السيرفرات والمواقع الخارجية'
xbmcgui.Dialog().textviewer('حقوق الطبع والنشر وقانون الألفية للملكية الرقمية',text)
text = 'Disclaimer: The program does not host any content on any server. The program just use linking to or embedding content that was uploaded to popular Online Video hosting sites. All trademarks, Videos, trade names, service marks, copyrighted work, logos referenced herein belong to their respective owners/companies. The program is not responsible for what other people upload to 3rd party sites. We urge all copyright owners, to recognize that the links contained within this site are located somewhere else on the web or video embedded are from other various site. If you have any legal issues please contact appropriate media file owners/hosters.'
xbmcgui.Dialog().textviewer('Digital Millennium Copyright Act (DMCA)',text)
return
def HTTPS_TEST():
html = openURL('https://www.google.com','','','','PROGRAM-1st')
#xbmcgui.Dialog().ok('Checking SSL',html)
if 'html' in html:
xbmcgui.Dialog().ok('الاتصال المشفر','جيد جدا ... الاتصال المشفر (الربط المشفر) يعمل على جهازك ... وجهازك قادر على استخدام المواقع المشفرة')
else:
xbmcgui.Dialog().ok('الاتصال المشفر','مشكلة ... الاتصال المشفر (الربط المشفر) لا يعمل على جهازك ... وجهازك غير قادر على استخدام المواقع المشفرة')
from PROBLEMS import MAIN as PROBLEMS_MAIN
PROBLEMS_MAIN(152)
return
def SERVERS_TYPE():
text = 'السيرفرات العامة هي سيرفرات خارجية وغير جيدة لان الكثير منها ممنوع او محذوف او خطأ بسبب حقوق الطبع وحقوق الالفية الرقمية ولا توجد طريقة لفحصها او اصلاحها \n\n السيرفرات الخاصة هي سيرفرات يتحكم فيها الموقع الاصلي وهي جيدة نسبيا ولا توجد طريقة لفحصها او اصلاحها \n\n الرجاء قبل الاعتراض على السيرفرات وقبل مراسلة المبرمج افحص الفيديو والسيرفر على الموقع الاصلي'
xbmcgui.Dialog().textviewer('مواقع تستخدم سيرفرات عامة',text)
return
def GLOBAL_SEARCH(search=''):
if search=='': search = KEYBOARD()
if search == '': return
search = search.lower()
addDir('1. [COLOR FFC89008]YUT [/COLOR]'+search+' - موقع يوتيوب','',149,'','',search)
addDir('2. [COLOR FFC89008]SHF [/COLOR]'+search+' - موقع شوف ماكس','',59,'','',search)
addDir('3. [COLOR FFC89008]EGB [/COLOR]'+search+' - موقع ايجي بيست','',9,'','',search) #129
addDir('4. [COLOR FFC89008]KLA [/COLOR]'+search+' - موقع كل العرب','',19,'','',search)
addDir('5. [COLOR FFC89008]PNT [/COLOR]'+search+' - موقع بانيت','',39,'','',search)
addDir('6. [COLOR FFC89008]IFL [/COLOR]'+search+' - موقع قناة اي فيلم','',29,'','',search)
addDir('7. [COLOR FFC89008]KWT [/COLOR]'+search+' - موقع قناة الكوثر','',139,'','',search)
addDir('8. [COLOR FFC89008]MRF [/COLOR]'+search+' - موقع قناة المعارف','',49,'','',search)
addDir('9. [COLOR FFC89008]FTM [/COLOR]'+search+' - موقع المنبر الفاطمي','',69,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('10. [COLOR FFC89008]MVZ [/COLOR]'+search+' - موقع موفيزلاند اونلاين','',189,'','',search)
addDir('11. [COLOR FFC89008]AKM [/COLOR]'+search+' - موقع اكوام','',79,'','',search)
addDir('12. [COLOR FFC89008]HEL [/COLOR]'+search+' - موقع هلال يوتيوب','',99,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('13. [COLOR FFC89008]SHA [/COLOR]'+search+' - موقع شاهد فوريو','',119,'','',search)
addDir('14. [COLOR FFC89008]HLA [/COLOR]'+search+' - موقع هلا سيما','',89,'','',search)
xbmcplugin.endOfDirectory(addon_handle)
return
def VERSION():
url = 'https://raw.githubusercontent.com/emadmahdi/KODI/master/addons.xml'
html = openURL(url,'','','','PROGRAM-VERSION-1st')
latestVER = re.findall('plugin.video.arabicvideos" name="Arabic Videos" version="(.*?)"',html,re.DOTALL)[0]
currentVER = xbmc.getInfoLabel('System.AddonVersion(plugin.video.arabicvideos)')
latestVER2 = re.findall('repository.emad" name="EMAD Repository" version="(.*?)"',html,re.DOTALL)[0]
currentVER2 = xbmc.getInfoLabel('System.AddonVersion(repository.emad)')
if latestVER > currentVER:
message1 = 'الرجاء تحديث البرنامج لحل المشاكل'
message3 = '\n\n' + 'جرب اغلاق كودي وتشغيله وانتظر التحديث الاوتوماتيكي'
else:
message1 = 'لا توجد اي تحديثات للبرنامج حاليا'
message3 = '\n\n' + 'الرجاء ابلاغ المبرمج عن اي مشكلة تواجهك'
|
sage+'
|
conditional_block
|
PROGRAM.py
|
اذن أقرأ قسم المشاكل والحلول واذا لم تجد الحل هناك فاذن اكتب رسالة عن المكان والوقت والحال الذي تحدث فيه المشكلة واكتب جميع التفاصيل لان المبرمج لا يعلم الغيب')
xbmcgui.Dialog().ok('عنوان الايميل','اذا كنت تريد ان تسأل وتحتاج جواب من المبرمج فاذن يجب عليك اضافة عنوان البريد الالكتروني email الخاص بك الى رسالتك لانها الطريقة الوحيدة للوصول اليك')
search = KEYBOARD('Write a message اكتب رسالة')
if search == '': return
message = search
subject = 'Message: From Arabic Videos'
result = SEND_EMAIL(subject,message,'yes','','EMAIL-FROM-USERS')
# url = 'my API and/or SMTP server'
# payload = '{"api_key":"MY API KEY","to":["me@email.com"],"sender":"me@email.com","subject":"From Arabic Videos","text_body":"'+message+'"}'
# #auth=("api", "my personal api key"),
# #response = requests.request('POST',url, data=payload, headers='', auth='')
# response = requests.post(url, data=payload, headers='', auth='')
# if response.status_code == 200:
# xbmcgui.Dialog().ok('تم الارسال بنجاح','')
# else:
# xbmcgui.Dialog().ok('خطأ في الارسال','Error {}: {!r}'.format(response.status_code, response.content))
# FROMemailAddress = 'me@email.com'
# TOemailAddress = 'me@email.com'
# header = ''
# #header += 'From: ' + FROMemailAddress
# #header += '\nTo: ' + emailAddress
# #header += '\nCc: ' + emailAddress
# header += '\nSubject: من كودي الفيديو العربي'
# server = smtplib.SMTP('smtp-server',25)
# #server.starttls()
# server.login('username','password')
# response = server.sendmail(FROMemailAddress,TOemailAddress, header + '\n' + message)
# server.quit()
# xbmcgui.Dialog().ok('Response',str(response))
return
def DMCA():
text = ' نفي: البرنامج لا يوجد له اي سيرفر يستضيف اي محتويات. البرنامج يستخدم روابط وتضمين لمحتويات مرفوعة على سيرفرات خارجية. البرنامج غير مسؤول عن اي محتويات تم تحميلها على سيرفرات ومواقع خارجية "مواقع طرف 3". جميع الاسماء والماركات والصور والمنشورات هي خاصة باصحابها. البرنامج لا ينتهك حقوق الطبع والنشر وقانون الألفية للملكية الرقمية DMCA اذا كان لديك شكوى خاصة بالروابط والتضامين الخارجية فالرجاء التواصل مع ادارة هذه السيرفرات والمواقع الخارجية'
xbmcgui.Dialog().textviewer('حقوق الطبع والنشر وقانون الألفية للملكية الرقمية',text)
text = 'Disclaimer: The program does not host any content on any server. The program just use linking to or embedding content that was uploaded to popular Online Video hosting sites. All trademarks, Videos, trade names, service marks, copyrighted work, logos referenced herein belong to their respective owners/companies. The program is not responsible for what other people upload to 3rd party sites. We urge all copyright owners, to recognize that the links contained within this site are located somewhere else on the web or video embedded are from other various site. If you have any legal issues please contact appropriate media file owners/hosters.'
xbmcgui.Dialog().textviewer('Digital Millennium Copyright Act (DMCA)',text)
return
def HTTPS_TEST():
html = openURL('https://www.google.com','','','','PROGRAM-1st')
#xbmcgui.Dialog().ok('Checking SSL',html)
if 'html' in html:
xbmcgui.Dialog().ok('الاتصال المشفر','جيد جدا ... الاتصال المشفر (الربط المشفر) يعمل على جهازك ... وجهازك قادر على استخدام المواقع المشفرة')
else:
xbmcgui.Dialog().ok('الاتصال المشفر','مشكلة ... الاتصال المشفر (الربط المشفر) لا يعمل على جهازك ... وجهازك غير قادر على استخدام المواقع المشفرة')
from PROBLEMS import MAIN as PROBLEMS_MAIN
PROBLEMS_MAIN(152)
return
def SERVERS_TYPE():
text = 'السيرفرات العامة هي سيرفرات خارجية وغير جيدة لان الكثير منها ممنوع او محذوف او خطأ بسبب حقوق الطبع وحقوق الالفية الرقمية ولا توجد طريقة لفحصها او اصلاحها \n\n السيرفرات الخاصة هي سيرفرات يتحكم فيها الموقع الاصلي وهي جيدة نسبيا ولا توجد طريقة لفحصها او اصلاحها \n\n الرجاء قبل الاعتراض على السيرفرات وقبل مراسلة المبرمج افحص الفيديو والسيرفر على الموقع الاصلي'
xbmcgui.Dialog().textviewer('مواقع تستخدم سيرفرات عامة',text)
return
def GLOBAL_SEARCH(search=''):
if search=='': search = KEYBOARD()
if search == '': return
search = search.lower()
addDir('1. [COLOR FFC89008]YUT [/COLOR]'+search+' - موقع يوتيوب','',149,'','',search)
addDir('2. [COLOR FFC89008]SHF [/COLOR]'+search+' - موقع شوف ماكس','',59,'','',search)
addDir('3. [COLOR FFC89008]EGB [/COLOR]'+search+' - موقع ايجي بيست','',9,'','',search) #129
addDir('4. [COLOR FFC89008]KLA [/COLOR]'+search+' - موقع كل العرب','',19,'','',search)
addDir('5. [COLOR FFC89008]PNT [/COLOR]'+search+' - موقع بانيت','',39,'','',search)
addDir('6. [COLOR FFC89008]IFL [/COLOR]'+search+' - موقع قناة اي فيلم','',29,'','',search)
addDir('7. [COLOR FFC89008]KWT [/COLOR]'+search+' - موقع قناة الكوثر','',139,'','',search)
addDir('8. [COLOR FFC89008]MRF [/COLOR]'+search+' - موقع قناة المعارف','',49,'','',search)
addDir('9. [COLOR FFC89008]FTM [/COLOR]'+search+' - موقع المنبر الفاطمي','',69,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('10. [COLOR FFC89008]MVZ [/COLOR]'+search+' - موقع موفيزلاند اونلاين','',189,'','',search)
addDir('11. [COLOR FFC89008]AKM [/COLOR]'+search+' - موقع اكوام','',79,'','',search)
addDir('12. [COLOR FFC89008]HEL [/COLOR]'+search+' - موقع هلال يوتيو
|
ب','',99,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('13. [COLOR FFC89008]SHA [/COLOR]'+search+' - موقع شاهد فوريو','',119,'','',search)
addDir('14. [COLOR FFC89008]HLA [/COLOR]'+search+' - موقع هلا سيما','',89,'','',search)
xbmcplugin.endOfDirectory(addon_handle)
return
def VERSION():
url = 'https://raw.githubusercontent.com/emadmahdi/KODI/master/addons.xml'
html = openURL(url,'','','','PROGRAM-VERSION-1st')
latestVER = re.findall('plugin.video.arabicvideos" name="Arabic Videos" version="(.*?)"',html,re.DOTALL)[0]
currentVER = xbmc.getInfoLabel('System.AddonVersion(plugin.video.arabicvideos)')
latestVER2 = re.findall('repository.emad" name="EMAD Repository" version="(.*?)"',html,re.DOTALL)[0]
currentVER2 = xbmc.getInfoLabel('System.AddonVersion(repository.emad)')
if latestVER > currentVER:
message1 = 'الرجاء تحديث البرنامج لحل المشاكل'
message3 = '\n\n' + 'جرب اغلاق كودي وتشغيله وانتظر التحديث الاوتوماتيكي'
else:
message1 = 'لا توجد اي تحديثات للبرنامج حاليا'
message3 = '\n\n' + 'الرجاء ابلاغ المبرمج عن اي مشكلة تواجهك'
|
identifier_body
|
|
PROGRAM.py
|
فر) يعمل على جهازك ... وجهازك قادر على استخدام المواقع المشفرة')
else:
xbmcgui.Dialog().ok('الاتصال المشفر','مشكلة ... الاتصال المشفر (الربط المشفر) لا يعمل على جهازك ... وجهازك غير قادر على استخدام المواقع المشفرة')
from PROBLEMS import MAIN as PROBLEMS_MAIN
PROBLEMS_MAIN(152)
return
def SERVERS_TYPE():
text = 'السيرفرات العامة هي سيرفرات خارجية وغير جيدة لان الكثير منها ممنوع او محذوف او خطأ بسبب حقوق الطبع وحقوق الالفية الرقمية ولا توجد طريقة لفحصها او اصلاحها \n\n السيرفرات الخاصة هي سيرفرات يتحكم فيها الموقع الاصلي وهي جيدة نسبيا ولا توجد طريقة لفحصها او اصلاحها \n\n الرجاء قبل الاعتراض على السيرفرات وقبل مراسلة المبرمج افحص الفيديو والسيرفر على الموقع الاصلي'
xbmcgui.Dialog().textviewer('مواقع تستخدم سيرفرات عامة',text)
return
def GLOBAL_SEARCH(search=''):
if search=='': search = KEYBOARD()
if search == '': return
search = search.lower()
addDir('1. [COLOR FFC89008]YUT [/COLOR]'+search+' - موقع يوتيوب','',149,'','',search)
addDir('2. [COLOR FFC89008]SHF [/COLOR]'+search+' - موقع شوف ماكس','',59,'','',search)
addDir('3. [COLOR FFC89008]EGB [/COLOR]'+search+' - موقع ايجي بيست','',9,'','',search) #129
addDir('4. [COLOR FFC89008]KLA [/COLOR]'+search+' - موقع كل العرب','',19,'','',search)
addDir('5. [COLOR FFC89008]PNT [/COLOR]'+search+' - موقع بانيت','',39,'','',search)
addDir('6. [COLOR FFC89008]IFL [/COLOR]'+search+' - موقع قناة اي فيلم','',29,'','',search)
addDir('7. [COLOR FFC89008]KWT [/COLOR]'+search+' - موقع قناة الكوثر','',139,'','',search)
addDir('8. [COLOR FFC89008]MRF [/COLOR]'+search+' - موقع قناة المعارف','',49,'','',search)
addDir('9. [COLOR FFC89008]FTM [/COLOR]'+search+' - موقع المنبر الفاطمي','',69,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('10. [COLOR FFC89008]MVZ [/COLOR]'+search+' - موقع موفيزلاند اونلاين','',189,'','',search)
addDir('11. [COLOR FFC89008]AKM [/COLOR]'+search+' - موقع اكوام','',79,'','',search)
addDir('12. [COLOR FFC89008]HEL [/COLOR]'+search+' - موقع هلال يوتيوب','',99,'','',search)
addDir('[COLOR FFC89008]=========================[/COLOR]','',9999)
addDir('13. [COLOR FFC89008]SHA [/COLOR]'+search+' - موقع شاهد فوريو','',119,'','',search)
addDir('14. [COLOR FFC89008]HLA [/COLOR]'+search+' - موقع هلا سيما','',89,'','',search)
xbmcplugin.endOfDirectory(addon_handle)
return
def VERSION():
url = 'https://raw.githubusercontent.com/emadmahdi/KODI/master/addons.xml'
html = openURL(url,'','','','PROGRAM-VERSION-1st')
latestVER = re.findall('plugin.video.arabicvideos" name="Arabic Videos" version="(.*?)"',html,re.DOTALL)[0]
currentVER = xbmc.getInfoLabel('System.AddonVersion(plugin.video.arabicvideos)')
latestVER2 = re.findall('repository.emad" name="EMAD Repository" version="(.*?)"',html,re.DOTALL)[0]
currentVER2 = xbmc.getInfoLabel('System.AddonVersion(repository.emad)')
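# Caveat: '>' compares version strings lexicographically (e.g. '9.9' > '10.0'); a component-wise numeric comparison would be more robust.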
if latestVER > currentVER:
message1 = 'الرجاء تحديث البرنامج لحل المشاكل'
message3 = '\n\n' + 'جرب اغلاق كودي وتشغيله وانتظر التحديث الاوتوماتيكي'
else:
message1 = 'لا توجد اي تحديثات للبرنامج حاليا'
message3 = '\n\n' + 'الرجاء ابلاغ المبرمج عن اي مشكلة تواجهك'
if currentVER2=='': currentVER2='لا يوجد'
else: currentVER2 = ' ' + currentVER2
message2 = 'الاصدار الاخير للبرنامج المتوفر الان هو : ' + latestVER
message2 += '\n' + 'الاصدار الذي انت تستخدمه للبرنامج هو : ' + currentVER
message2 += '\n' + 'الاصدار الاخير لمخزن عماد المتوفر الان هو : ' + latestVER2
message2 += '\n' + 'الاصدار الذي انت تستخدمه لمخزن عماد هو : ' + currentVER2
message3 += '\n\n' + 'علما ان التحديث الاوتوماتيكي لا يعمل اذا لم يكن لديك في كودي مخزن عماد EMAD Repository'
message3 += '\n\n' + 'ملفات التنصيب مع التعليمات متوفرة على هذا الرابط'
message3 += '\n' + 'https://github.com/emadmahdi/KODI'
xbmcgui.Dialog().textviewer(message1,message2+message3)
return ''
def RANDOM():
headers = { 'User-Agent' : '' }
url = 'https://www.bestrandoms.com/random-arabic-words'
payload = { 'quantity' : '4' }
data = urllib.urlencode(payload)
#xbmcgui.Dialog().ok('',str(data))
html = openURL(url,data,headers,'','PROGRAM-RANDOM-1st')
block = re.findall('list-unstyled(.*?)clearfix',html,re.DOTALL)
items = re.findall('<span>(.*?)</span>.*?<span>(.*?)</span>',html,re.DOTALL)
arbLIST = []
engLIST = []
for arbWORD, engWORD in items:
arbLIST.append(arbWORD.lower())
engLIST.append(engWORD.lower())
list = ['كلمات عشوائية عربية','كلمات عشوائية انكليزية']
selection = xbmcgui.Dialog().select('اختر اللغة:', list)
if selection == -1: return
elif selection==0: list = arbLIST
else: list = engLIST
#xbmcgui.Dialog().ok('',str(html))
selection = xbmcgui.Dialog().select('اختر كلمة للبحث عنها:', list)
if selection == -1: return
search = list[selection]
GLOBAL_SEARCH(search)
return
def CLOSED():
xbmcgui.Dialog().ok('الموقع الاصلي للأسف مغلق','')
return
def TESTINGS():
#url = ''
#PLAY_VIDEO(url)
#import xbmcaddon
#settings = xbmcaddon.Addon(id=addon_id)
#settings.setSetting('test1','hello test1')
#var = settings.getSetting('test2')
#xbmc.log('EMAD11 ' + str(var) + ' 11EMAD',level=xbmc.LOGNOTICE)
#import subprocess
#var = subprocess.check_output('wmic csproduct get UUID')
#xbmc.log('EMAD11 ' + str(var) + ' 11EMAD',level=xbmc.LOGNOTICE)
#import os
#var = os.popen("wmic diskdrive get serialnumber").read()
#xbmc.log('EMAD11 ' + str(var) + ' 11EMAD',level=xbmc.LOGNOTICE)
#import requests
#var = dummyClientID(32)
#xbmcgui.Dialog().ok(var,'')
#xbmc.log('EMAD11' + html + '11EMAD',level=xbmc.LOGNOTICE)
url = ''
urllist = [
''
]
#play_item = xbmcgui.ListItem(path=url, thumbnailImage='')
#play_item.setInfo(type="Video", infoLabels={"Title":''})
# Pass the item to the Kodi player.
#xbmcplugin.setResolvedUrl(addon_handle, True, listitem=play_item)
# directly play the item.
#xbmc.Player().play(url, play_item)
#import RESOLVERS
#url = RESOLVERS.PLAY(urllist,script_name,'no')
|
random_line_split
|
||
api_op_ListSMSSandboxPhoneNumbers.go
|
.com/sns/latest/dg/sns-sms-sandbox.html)
// in the Amazon SNS Developer Guide.
func (c *Client) ListSMSSandboxPhoneNumbers(ctx context.Context, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListSMSSandboxPhoneNumbers", params, optFns, c.addOperationListSMSSandboxPhoneNumbersMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListSMSSandboxPhoneNumbersOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListSMSSandboxPhoneNumbersInput struct {
// The maximum number of phone numbers to return.
MaxResults *int32
// Token returned by the previous ListSMSSandboxPhoneNumbers request.
NextToken *string
noSmithyDocumentSerde
}
type ListSMSSandboxPhoneNumbersOutput struct {
// A list of the calling account's pending and verified phone numbers.
//
// This member is required.
PhoneNumbers []types.SMSSandboxPhoneNumber
// A NextToken string is returned when you call the ListSMSSandboxPhoneNumbers
// operation if additional pages of records are available.
NextToken *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
func (c *Client) addOperationListSMSSandboxPhoneNumbersMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsquery_serializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsquery_deserializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
if err = addlegacyEndpointContextSetter(stack, options); err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack, options); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addListSMSSandboxPhoneNumbersResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(options.Region), middleware.Before); err != nil {
return err
}
if err = awsmiddleware.AddRecursionDetection(stack); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
if err = addendpointDisableHTTPSMiddleware(stack, options); err != nil {
return err
}
return nil
}
// ListSMSSandboxPhoneNumbersAPIClient is a client that implements the
// ListSMSSandboxPhoneNumbers operation.
type ListSMSSandboxPhoneNumbersAPIClient interface {
ListSMSSandboxPhoneNumbers(context.Context, *ListSMSSandboxPhoneNumbersInput, ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error)
}
var _ ListSMSSandboxPhoneNumbersAPIClient = (*Client)(nil)
// ListSMSSandboxPhoneNumbersPaginatorOptions is the paginator options for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginatorOptions struct {
// The maximum number of phone numbers to return.
Limit int32
// Set to true if pagination should stop if the service returns a pagination token
// that matches the most recent token provided to the service.
StopOnDuplicateToken bool
}
// ListSMSSandboxPhoneNumbersPaginator is a paginator for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginator struct {
options ListSMSSandboxPhoneNumbersPaginatorOptions
client ListSMSSandboxPhoneNumbersAPIClient
params *ListSMSSandboxPhoneNumbersInput
nextToken *string
firstPage bool
}
// NewListSMSSandboxPhoneNumbersPaginator returns a new
// ListSMSSandboxPhoneNumbersPaginator
func NewListSMSSandboxPhoneNumbersPaginator(client ListSMSSandboxPhoneNumbersAPIClient, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*ListSMSSandboxPhoneNumbersPaginatorOptions)) *ListSMSSandboxPhoneNumbersPaginator {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
options := ListSMSSandboxPhoneNumbersPaginatorOptions{}
if params.MaxResults != nil {
options.Limit = *params.MaxResults
}
for _, fn := range optFns {
fn(&options)
}
return &ListSMSSandboxPhoneNumbersPaginator{
options: options,
client: client,
params: params,
firstPage: true,
nextToken: params.NextToken,
}
}
// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListSMSSandboxPhoneNumbersPaginator) HasMorePages() bool {
return p.firstPage || (p.nextToken != nil && len(*p.nextToken) != 0)
}
// NextPage retrieves the next ListSMSSandboxPhoneNumbers page.
func (p *ListSMSSandboxPhoneNumbersPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if !p.HasMorePages() {
return nil, fmt.Errorf("no more pages available")
}
params := *p.params
params.NextToken = p.nextToken
var limit *int32
if p.options.Limit > 0 {
limit = &p.options.Limit
}
params.MaxResults = limit
result, err := p.client.ListSMSSandboxPhoneNumbers(ctx, &params, optFns...)
if err != nil {
return nil, err
}
p.firstPage = false
prevToken := p.nextToken
p.nextToken = result.NextToken
if p.options.StopOnDuplicateToken &&
prevToken != nil &&
p.nextToken != nil &&
*prevToken == *p.nextToken {
p.nextToken = nil
}
return result, nil
}
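// Usage sketch (illustrative; client, aws and log are assumed from the standard
// aws-sdk-go-v2 wiring, not from this file):
//
//	p := sns.NewListSMSSandboxPhoneNumbersPaginator(client, &sns.ListSMSSandboxPhoneNumbersInput{})
//	for p.HasMorePages() {
//		page, err := p.NextPage(context.TODO())
//		if err != nil {
//			log.Fatal(err)
//		}
//		for _, n := range page.PhoneNumbers {
//			fmt.Println(aws.ToString(n.PhoneNumber), n.Status)
//		}
//	}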
func newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "sns",
OperationName: "ListSMSSandboxPhoneNumbers",
}
}
type opListSMSSandboxPhoneNumbersResolveEndpointMiddleware struct {
EndpointResolver EndpointResolverV2
BuiltInResolver builtInParameterResolver
}
func (*opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) ID() string
|
func (m *opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
if awsmiddleware.GetRequiresLegacyEndpoints(ctx) {
return next.HandleSerialize(ctx, in)
}
req, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, fmt.Errorf("unknown transport type %T", in.Request)
}
if m.EndpointResolver == nil {
return out, metadata, fmt.Errorf("expected endpoint resolver to not be nil")
}
params := EndpointParameters{}
m.BuiltInResolver.ResolveBuiltIns(&params)
var resolvedEndpoint smithyendpoints.Endpoint
resolvedEndpoint, err = m.EndpointResolver.ResolveEndpoint(ctx, params)
if err != nil {
return out, metadata, fmt.Errorf("failed to resolve service endpoint, %w", err)
}
req.URL = &resolvedEndpoint.URI
for k := range resolvedEndpoint.Headers {
req.Header.Set(
k,
resolvedEndpoint.Headers.Get(k),
)
}
authSchemes, err := internalauth.GetAuthenticationSchemes(&resolvedEndpoint.Properties)
if err != nil {
var nfe *internalauth.NoAuthenticationSchemesFoundError
if errors.As(err, &nfe) {
// if no auth scheme is found, default to sigv4
signingName := "sns"
signingRegion := m.BuiltInResolver.(*builtInResolver).Region
ctx = awsmiddleware.SetSigningName
|
{
return "ResolveEndpointV2"
}
|
identifier_body
|
api_op_ListSMSSandboxPhoneNumbers.go
|
.com/sns/latest/dg/sns-sms-sandbox.html)
// in the Amazon SNS Developer Guide.
func (c *Client) ListSMSSandboxPhoneNumbers(ctx context.Context, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListSMSSandboxPhoneNumbers", params, optFns, c.addOperationListSMSSandboxPhoneNumbersMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListSMSSandboxPhoneNumbersOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListSMSSandboxPhoneNumbersInput struct {
// The maximum number of phone numbers to return.
MaxResults *int32
// Token returned by the previous ListSMSSandboxPhoneNumbers request.
NextToken *string
noSmithyDocumentSerde
}
type ListSMSSandboxPhoneNumbersOutput struct {
// A list of the calling account's pending and verified phone numbers.
//
// This member is required.
PhoneNumbers []types.SMSSandboxPhoneNumber
// A NextToken string is returned when you call the ListSMSSandboxPhoneNumbers
// operation if additional pages of records are available.
NextToken *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
func (c *Client) addOperationListSMSSandboxPhoneNumbersMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsquery_serializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsquery_deserializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
if err = addlegacyEndpointContextSetter(stack, options); err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack, options); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addListSMSSandboxPhoneNumbersResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(options.Region), middleware.Before); err != nil {
return err
}
if err = awsmiddleware.AddRecursionDetection(stack); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
if err = addendpointDisableHTTPSMiddleware(stack, options); err != nil
|
return nil
}
// ListSMSSandboxPhoneNumbersAPIClient is a client that implements the
// ListSMSSandboxPhoneNumbers operation.
type ListSMSSandboxPhoneNumbersAPIClient interface {
ListSMSSandboxPhoneNumbers(context.Context, *ListSMSSandboxPhoneNumbersInput, ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error)
}
var _ ListSMSSandboxPhoneNumbersAPIClient = (*Client)(nil)
// ListSMSSandboxPhoneNumbersPaginatorOptions is the paginator options for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginatorOptions struct {
// The maximum number of phone numbers to return.
Limit int32
// Set to true if pagination should stop if the service returns a pagination token
// that matches the most recent token provided to the service.
StopOnDuplicateToken bool
}
// ListSMSSandboxPhoneNumbersPaginator is a paginator for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginator struct {
options ListSMSSandboxPhoneNumbersPaginatorOptions
client ListSMSSandboxPhoneNumbersAPIClient
params *ListSMSSandboxPhoneNumbersInput
nextToken *string
firstPage bool
}
// NewListSMSSandboxPhoneNumbersPaginator returns a new
// ListSMSSandboxPhoneNumbersPaginator
func NewListSMSSandboxPhoneNumbersPaginator(client ListSMSSandboxPhoneNumbersAPIClient, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*ListSMSSandboxPhoneNumbersPaginatorOptions)) *ListSMSSandboxPhoneNumbersPaginator {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
options := ListSMSSandboxPhoneNumbersPaginatorOptions{}
if params.MaxResults != nil {
options.Limit = *params.MaxResults
}
for _, fn := range optFns {
fn(&options)
}
return &ListSMSSandboxPhoneNumbersPaginator{
options: options,
client: client,
params: params,
firstPage: true,
nextToken: params.NextToken,
}
}
// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListSMSSandboxPhoneNumbersPaginator) HasMorePages() bool {
return p.firstPage || (p.nextToken != nil && len(*p.nextToken) != 0)
}
// NextPage retrieves the next ListSMSSandboxPhoneNumbers page.
func (p *ListSMSSandboxPhoneNumbersPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if !p.HasMorePages() {
return nil, fmt.Errorf("no more pages available")
}
params := *p.params
params.NextToken = p.nextToken
var limit *int32
if p.options.Limit > 0 {
limit = &p.options.Limit
}
params.MaxResults = limit
result, err := p.client.ListSMSSandboxPhoneNumbers(ctx, &params, optFns...)
if err != nil {
return nil, err
}
p.firstPage = false
prevToken := p.nextToken
p.nextToken = result.NextToken
if p.options.StopOnDuplicateToken &&
prevToken != nil &&
p.nextToken != nil &&
*prevToken == *p.nextToken {
p.nextToken = nil
}
return result, nil
}
func newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "sns",
OperationName: "ListSMSSandboxPhoneNumbers",
}
}
type opListSMSSandboxPhoneNumbersResolveEndpointMiddleware struct {
EndpointResolver EndpointResolverV2
BuiltInResolver builtInParameterResolver
}
func (*opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) ID() string {
return "ResolveEndpointV2"
}
func (m *opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
if awsmiddleware.GetRequiresLegacyEndpoints(ctx) {
return next.HandleSerialize(ctx, in)
}
req, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, fmt.Errorf("unknown transport type %T", in.Request)
}
if m.EndpointResolver == nil {
return out, metadata, fmt.Errorf("expected endpoint resolver to not be nil")
}
params := EndpointParameters{}
m.BuiltInResolver.ResolveBuiltIns(&params)
var resolvedEndpoint smithyendpoints.Endpoint
resolvedEndpoint, err = m.EndpointResolver.ResolveEndpoint(ctx, params)
if err != nil {
return out, metadata, fmt.Errorf("failed to resolve service endpoint, %w", err)
}
req.URL = &resolvedEndpoint.URI
for k := range resolvedEndpoint.Headers {
req.Header.Set(
k,
resolvedEndpoint.Headers.Get(k),
)
}
authSchemes, err := internalauth.GetAuthenticationSchemes(&resolvedEndpoint.Properties)
if err != nil {
var nfe *internalauth.NoAuthenticationSchemesFoundError
if errors.As(err, &nfe) {
// if no auth scheme is found, default to sigv4
signingName := "sns"
signingRegion := m.BuiltInResolver.(*builtInResolver).Region
ctx = awsmiddleware.SetSigningName
|
{
return err
}
|
conditional_block
|
api_op_ListSMSSandboxPhoneNumbers.go
|
.amazon.com/sns/latest/dg/sns-sms-sandbox.html)
// in the Amazon SNS Developer Guide.
func (c *Client) ListSMSSandboxPhoneNumbers(ctx context.Context, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListSMSSandboxPhoneNumbers", params, optFns, c.addOperationListSMSSandboxPhoneNumbersMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListSMSSandboxPhoneNumbersOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListSMSSandboxPhoneNumbersInput struct {
// The maximum number of phone numbers to return.
MaxResults *int32
// Token returned by the previous ListSMSSandboxPhoneNumbers request.
NextToken *string
|
// A list of the calling account's pending and verified phone numbers.
//
// This member is required.
PhoneNumbers []types.SMSSandboxPhoneNumber
// A NextToken string is returned when you call the ListSMSSandboxPhoneNumbers
// operation if additional pages of records are available.
NextToken *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
func (c *Client) addOperationListSMSSandboxPhoneNumbersMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsquery_serializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsquery_deserializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
if err = addlegacyEndpointContextSetter(stack, options); err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack, options); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addListSMSSandboxPhoneNumbersResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(options.Region), middleware.Before); err != nil {
return err
}
if err = awsmiddleware.AddRecursionDetection(stack); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
if err = addendpointDisableHTTPSMiddleware(stack, options); err != nil {
return err
}
return nil
}
// ListSMSSandboxPhoneNumbersAPIClient is a client that implements the
// ListSMSSandboxPhoneNumbers operation.
type ListSMSSandboxPhoneNumbersAPIClient interface {
ListSMSSandboxPhoneNumbers(context.Context, *ListSMSSandboxPhoneNumbersInput, ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error)
}
var _ ListSMSSandboxPhoneNumbersAPIClient = (*Client)(nil)
// ListSMSSandboxPhoneNumbersPaginatorOptions is the paginator options for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginatorOptions struct {
// The maximum number of phone numbers to return.
Limit int32
// Set to true if pagination should stop if the service returns a pagination token
// that matches the most recent token provided to the service.
StopOnDuplicateToken bool
}
// ListSMSSandboxPhoneNumbersPaginator is a paginator for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginator struct {
options ListSMSSandboxPhoneNumbersPaginatorOptions
client ListSMSSandboxPhoneNumbersAPIClient
params *ListSMSSandboxPhoneNumbersInput
nextToken *string
firstPage bool
}
// NewListSMSSandboxPhoneNumbersPaginator returns a new
// ListSMSSandboxPhoneNumbersPaginator
func NewListSMSSandboxPhoneNumbersPaginator(client ListSMSSandboxPhoneNumbersAPIClient, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*ListSMSSandboxPhoneNumbersPaginatorOptions)) *ListSMSSandboxPhoneNumbersPaginator {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
options := ListSMSSandboxPhoneNumbersPaginatorOptions{}
if params.MaxResults != nil {
options.Limit = *params.MaxResults
}
for _, fn := range optFns {
fn(&options)
}
return &ListSMSSandboxPhoneNumbersPaginator{
options: options,
client: client,
params: params,
firstPage: true,
nextToken: params.NextToken,
}
}
// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListSMSSandboxPhoneNumbersPaginator) HasMorePages() bool {
return p.firstPage || (p.nextToken != nil && len(*p.nextToken) != 0)
}
// NextPage retrieves the next ListSMSSandboxPhoneNumbers page.
func (p *ListSMSSandboxPhoneNumbersPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if !p.HasMorePages() {
return nil, fmt.Errorf("no more pages available")
}
params := *p.params
params.NextToken = p.nextToken
var limit *int32
if p.options.Limit > 0 {
limit = &p.options.Limit
}
params.MaxResults = limit
result, err := p.client.ListSMSSandboxPhoneNumbers(ctx, &params, optFns...)
if err != nil {
return nil, err
}
p.firstPage = false
prevToken := p.nextToken
p.nextToken = result.NextToken
if p.options.StopOnDuplicateToken &&
prevToken != nil &&
p.nextToken != nil &&
*prevToken == *p.nextToken {
p.nextToken = nil
}
return result, nil
}
func newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "sns",
OperationName: "ListSMSSandboxPhoneNumbers",
}
}
type opListSMSSandboxPhoneNumbersResolveEndpointMiddleware struct {
EndpointResolver EndpointResolverV2
BuiltInResolver builtInParameterResolver
}
func (*opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) ID() string {
return "ResolveEndpointV2"
}
func (m *opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
if awsmiddleware.GetRequiresLegacyEndpoints(ctx) {
return next.HandleSerialize(ctx, in)
}
req, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, fmt.Errorf("unknown transport type %T", in.Request)
}
if m.EndpointResolver == nil {
return out, metadata, fmt.Errorf("expected endpoint resolver to not be nil")
}
params := EndpointParameters{}
m.BuiltInResolver.ResolveBuiltIns(&params)
var resolvedEndpoint smithyendpoints.Endpoint
resolvedEndpoint, err = m.EndpointResolver.ResolveEndpoint(ctx, params)
if err != nil {
return out, metadata, fmt.Errorf("failed to resolve service endpoint, %w", err)
}
req.URL = &resolvedEndpoint.URI
for k := range resolvedEndpoint.Headers {
req.Header.Set(
k,
resolvedEndpoint.Headers.Get(k),
)
}
authSchemes, err := internalauth.GetAuthenticationSchemes(&resolvedEndpoint.Properties)
if err != nil {
var nfe *internalauth.NoAuthenticationSchemesFoundError
if errors.As(err, &nfe) {
// if no auth scheme is found, default to sigv4
signingName := "sns"
signingRegion := m.BuiltInResolver.(*builtInResolver).Region
ctx = awsmiddleware.SetSigningName
|
noSmithyDocumentSerde
}
type ListSMSSandboxPhoneNumbersOutput struct {
|
random_line_split
|
api_op_ListSMSSandboxPhoneNumbers.go
|
.amazon.com/sns/latest/dg/sns-sms-sandbox.html)
// in the Amazon SNS Developer Guide.
func (c *Client) ListSMSSandboxPhoneNumbers(ctx context.Context, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
result, metadata, err := c.invokeOperation(ctx, "ListSMSSandboxPhoneNumbers", params, optFns, c.addOperationListSMSSandboxPhoneNumbersMiddlewares)
if err != nil {
return nil, err
}
out := result.(*ListSMSSandboxPhoneNumbersOutput)
out.ResultMetadata = metadata
return out, nil
}
type ListSMSSandboxPhoneNumbersInput struct {
// The maximum number of phone numbers to return.
MaxResults *int32
// Token returned by the previous ListSMSSandboxPhoneNumbers request.
NextToken *string
noSmithyDocumentSerde
}
type ListSMSSandboxPhoneNumbersOutput struct {
// A list of the calling account's pending and verified phone numbers.
//
// This member is required.
PhoneNumbers []types.SMSSandboxPhoneNumber
// A NextToken string is returned when you call the ListSMSSandboxPhoneNumbers
// operation if additional pages of records are available.
NextToken *string
// Metadata pertaining to the operation's result.
ResultMetadata middleware.Metadata
noSmithyDocumentSerde
}
func (c *Client) addOperationListSMSSandboxPhoneNumbersMiddlewares(stack *middleware.Stack, options Options) (err error) {
err = stack.Serialize.Add(&awsAwsquery_serializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
err = stack.Deserialize.Add(&awsAwsquery_deserializeOpListSMSSandboxPhoneNumbers{}, middleware.After)
if err != nil {
return err
}
if err = addlegacyEndpointContextSetter(stack, options); err != nil {
return err
}
if err = addSetLoggerMiddleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddClientRequestIDMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddComputeContentLengthMiddleware(stack); err != nil {
return err
}
if err = addResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = v4.AddComputePayloadSHA256Middleware(stack); err != nil {
return err
}
if err = addRetryMiddlewares(stack, options); err != nil {
return err
}
if err = addHTTPSignerV4Middleware(stack, options); err != nil {
return err
}
if err = awsmiddleware.AddRawResponseToMetadata(stack); err != nil {
return err
}
if err = awsmiddleware.AddRecordResponseTiming(stack); err != nil {
return err
}
if err = addClientUserAgent(stack, options); err != nil {
return err
}
if err = smithyhttp.AddErrorCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = smithyhttp.AddCloseResponseBodyMiddleware(stack); err != nil {
return err
}
if err = addListSMSSandboxPhoneNumbersResolveEndpointMiddleware(stack, options); err != nil {
return err
}
if err = stack.Initialize.Add(newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(options.Region), middleware.Before); err != nil {
return err
}
if err = awsmiddleware.AddRecursionDetection(stack); err != nil {
return err
}
if err = addRequestIDRetrieverMiddleware(stack); err != nil {
return err
}
if err = addResponseErrorMiddleware(stack); err != nil {
return err
}
if err = addRequestResponseLogging(stack, options); err != nil {
return err
}
if err = addendpointDisableHTTPSMiddleware(stack, options); err != nil {
return err
}
return nil
}
// ListSMSSandboxPhoneNumbersAPIClient is a client that implements the
// ListSMSSandboxPhoneNumbers operation.
type ListSMSSandboxPhoneNumbersAPIClient interface {
ListSMSSandboxPhoneNumbers(context.Context, *ListSMSSandboxPhoneNumbersInput, ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error)
}
var _ ListSMSSandboxPhoneNumbersAPIClient = (*Client)(nil)
// ListSMSSandboxPhoneNumbersPaginatorOptions is the paginator options for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginatorOptions struct {
// The maximum number of phone numbers to return.
Limit int32
// Set to true if pagination should stop if the service returns a pagination token
// that matches the most recent token provided to the service.
StopOnDuplicateToken bool
}
// ListSMSSandboxPhoneNumbersPaginator is a paginator for
// ListSMSSandboxPhoneNumbers
type ListSMSSandboxPhoneNumbersPaginator struct {
options ListSMSSandboxPhoneNumbersPaginatorOptions
client ListSMSSandboxPhoneNumbersAPIClient
params *ListSMSSandboxPhoneNumbersInput
nextToken *string
firstPage bool
}
// NewListSMSSandboxPhoneNumbersPaginator returns a new
// ListSMSSandboxPhoneNumbersPaginator
func NewListSMSSandboxPhoneNumbersPaginator(client ListSMSSandboxPhoneNumbersAPIClient, params *ListSMSSandboxPhoneNumbersInput, optFns ...func(*ListSMSSandboxPhoneNumbersPaginatorOptions)) *ListSMSSandboxPhoneNumbersPaginator {
if params == nil {
params = &ListSMSSandboxPhoneNumbersInput{}
}
options := ListSMSSandboxPhoneNumbersPaginatorOptions{}
if params.MaxResults != nil {
options.Limit = *params.MaxResults
}
for _, fn := range optFns {
fn(&options)
}
return &ListSMSSandboxPhoneNumbersPaginator{
options: options,
client: client,
params: params,
firstPage: true,
nextToken: params.NextToken,
}
}
// HasMorePages returns a boolean indicating whether more pages are available
func (p *ListSMSSandboxPhoneNumbersPaginator)
|
() bool {
return p.firstPage || (p.nextToken != nil && len(*p.nextToken) != 0)
}
// NextPage retrieves the next ListSMSSandboxPhoneNumbers page.
func (p *ListSMSSandboxPhoneNumbersPaginator) NextPage(ctx context.Context, optFns ...func(*Options)) (*ListSMSSandboxPhoneNumbersOutput, error) {
if !p.HasMorePages() {
return nil, fmt.Errorf("no more pages available")
}
params := *p.params
params.NextToken = p.nextToken
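// Forward MaxResults only when a positive Limit is configured; a nil value lets the service use its default page size.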
var limit *int32
if p.options.Limit > 0 {
limit = &p.options.Limit
}
params.MaxResults = limit
result, err := p.client.ListSMSSandboxPhoneNumbers(ctx, ¶ms, optFns...)
if err != nil {
return nil, err
}
p.firstPage = false
prevToken := p.nextToken
p.nextToken = result.NextToken
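// If StopOnDuplicateToken is set, treat an unchanged token as the end of results to avoid paginating forever.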
if p.options.StopOnDuplicateToken &&
prevToken != nil &&
p.nextToken != nil &&
*prevToken == *p.nextToken {
p.nextToken = nil
}
return result, nil
}
func newServiceMetadataMiddleware_opListSMSSandboxPhoneNumbers(region string) *awsmiddleware.RegisterServiceMetadata {
return &awsmiddleware.RegisterServiceMetadata{
Region: region,
ServiceID: ServiceID,
SigningName: "sns",
OperationName: "ListSMSSandboxPhoneNumbers",
}
}
type opListSMSSandboxPhoneNumbersResolveEndpointMiddleware struct {
EndpointResolver EndpointResolverV2
BuiltInResolver builtInParameterResolver
}
func (*opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) ID() string {
return "ResolveEndpointV2"
}
func (m *opListSMSSandboxPhoneNumbersResolveEndpointMiddleware) HandleSerialize(ctx context.Context, in middleware.SerializeInput, next middleware.SerializeHandler) (
out middleware.SerializeOutput, metadata middleware.Metadata, err error,
) {
if awsmiddleware.GetRequiresLegacyEndpoints(ctx) {
return next.HandleSerialize(ctx, in)
}
req, ok := in.Request.(*smithyhttp.Request)
if !ok {
return out, metadata, fmt.Errorf("unknown transport type %T", in.Request)
}
if m.EndpointResolver == nil {
return out, metadata, fmt.Errorf("expected endpoint resolver to not be nil")
}
params := EndpointParameters{}
m.BuiltInResolver.ResolveBuiltIns(¶ms)
var resolvedEndpoint smithyendpoints.Endpoint
resolvedEndpoint, err = m.EndpointResolver.ResolveEndpoint(ctx, params)
if err != nil {
return out, metadata, fmt.Errorf("failed to resolve service endpoint, %w", err)
}
req.URL = &resolvedEndpoint.URI
for k := range resolvedEndpoint.Headers {
req.Header.Set(
k,
resolvedEndpoint.Headers.Get(k),
)
}
authSchemes, err := internalauth.GetAuthenticationSchemes(&resolvedEndpoint.Properties)
if err != nil {
var nfe *internalauth.NoAuthenticationSchemesFoundError
if errors.As(err, &nfe) {
// if no auth scheme is found, default to sigv4
signingName := "sns"
signingRegion := m.BuiltInResolver.(*builtInResolver).Region
ctx = awsmiddleware.SetSigningName
|
HasMorePages
|
identifier_name
|
app.js
|
is one line of the file
var fileArr = e.target.result.split('\n');
// Build the div that will display the data
var cnpj = fileArr[2].slice(3, 17);
var div = '<p><strong>CNPJ:</strong> ' + formataCnpj(cnpj) + '</p>';
div += '<p><strong>Transportadora:</strong> ' + fileArr[2].slice(17, 57) + '</p>';
div += '<div class="panel panel-default">';
div += '<div class="panel-heading">Ocorrências</div>';
div += '<table class="table no-"><thead><tr><th>Código</th><th>Descrição</th></tr></thead>';
// Loop over each line of the file
for (var i = 0; i < fileArr.length; i++) {
div += '<tr>';
var fileLine = fileArr[i].split(',');
for (var j = 0; j < fileLine.length; j++) {
//
|
'</tr>';
};
div += '</table></div>'; // close the table and the panel div opened above
// Set the assembled div with the data
areaUpload.classList.add('hidden');
btnNewReader.classList.remove('hidden');
// Display the data
exibeArquivo.innerHTML = div;
};
btnNewReader.addEventListener('click', function() {
areaUpload.classList.remove('hidden');
btnNewReader.classList.add('hidden');
exibeArquivo.innerHTML = '';
});
var formataCnpj = function(cnpj) {
return cnpj.slice(0, 2) + '.' +
cnpj.slice(2, 5) + '.' +
cnpj.slice(5, 8) + '/' +
cnpj.slice(8, 12) + '-' +
cnpj.slice(12, 14);
};
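// Example with made-up digits: formataCnpj('12345678000199') returns '12.345.678/0001-99'.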
function dadosOcorrencia(cod) {
var listaOcorrencias = [
{cod: '00', descricao: "Processo de Transporte já Iniciado"},
{cod: '01', descricao: "Entrega Realizada Normalmente"},
{cod: '02', descricao: "Entrega Fora da Data Programada"},
{cod: '03', descricao: "Recusa por Falta de Pedido de Compra"},
{cod: '04', descricao: "Recusa por Pedido de Compra Cancelado"},
{cod: '05', descricao: "Falta de Espaço Físico no Depósito do Cliente Destino"},
{cod: '06', descricao: "Endereço do Cliente Destino não Localizado"},
{cod: '07', descricao: "Devolução não Autorizada pelo Cliente"},
{cod: '08', descricao: "Preço Mercadoria em Desacordo com o Pedido Compra"},
{cod: '09', descricao: "Mercadoria em Desacordo com o Pedido Compra"},
{cod: '10', descricao: "Cliente Destino somente Recebe Mercadoria com Frete Pago"},
{cod: '11', descricao: "Recusa por Deficiência Embalagem Mercadoria"},
{cod: '12', descricao: "Redespacho não Indicado"},
{cod: '13', descricao: "Transportadora não Atende a Cidade do Cliente Destino"},
{cod: '14', descricao: "Mercadoria Sinistrada"},
{cod: '15', descricao: "Embalagem Sinistrada"},
{cod: '16', descricao: "Pedido de Compras em Duplicidade"},
{cod: '17', descricao: "Mercadoria fora da Embalagem de Atacadista"},
{cod: '18', descricao: "Mercadorias Trocadas"},
{cod: '19', descricao: "Reentrega Solicitada pelo Cliente"},
{cod: '20', descricao: "Entrega Prejudicada por Horário/Falta de Tempo Hábil"},
{cod: '21', descricao: "Estabelecimento Fechado"},
{cod: '22', descricao: "Reentrega sem Cobrança do Cliente"},
{cod: '23', descricao: "Extravio de Mercadoria em Trânsito"},
{cod: '24', descricao: "Mercadoria Reentregue ao Cliente Destino"},
{cod: '25', descricao: "Mercadoria Devolvida ao Cliente de Origem"},
{cod: '26', descricao: "Nota Fiscal Retida pela Fiscalização"},
{cod: '27', descricao: "Roubo de Carga"},
{cod: '28', descricao: "Mercadoria Retida até Segunda Ordem"},
{cod: '29', descricao: "Cliente Retira Mercadoria na Transportadora"},
{cod: '30', descricao: "Problema com a Documentação (Nota Fiscal e/ou CTRC)"},
{cod: '31', descricao: "Entrega com Indenização Efetuada"},
{cod: '32', descricao: "Falta com Solicitação de Reposição"},
{cod: '33', descricao: "Falta com Busca/Reconferência"},
{cod: '34', descricao: "Cliente Fechado para Balanço"},
{cod: '35', descricao: "Quantidade de Produto em Desacordo (Nota Fiscal e/ou Pedido)"},
{cod: '36', descricao: "Extravio de documentos pela Cia. Aérea - Cód. Aéreo"},
{cod: '37', descricao: "Extravio de carga pela Cia. Aérea – Cód. Aéreo"},
{cod: '39', descricao: "Corte de carga na pista–Cód. Aéreo"},
{cod: '40', descricao: "Aeroporto fechado na origem - Cód. Aéreo"},
{cod: '41', descricao: "Pedido de Compra Incompleto"},
{cod: '42', descricao: "Nota Fiscal com Produtos de Setores Diferentes"},
{cod: '43', descricao: "Feriado Local/Nacional"},
{cod: '44', descricao: "Excesso de Veículos"},
{cod: '45', descricao: "Cliente Destino Encerrou Atividades"},
{cod: '46', descricao: "Responsável de Recebimento Ausente"},
{cod: '47', descricao: "Cliente Destino em Greve"},
{cod: '48', descricao: "Aeroporto fechado no destino - Cód. Aéreo"},
{cod: '49', descricao: "Vôo cancelado - Cód. Transp. Aéreo"},
{cod: '50', descricao: "Greve nacional (Greve Geral)"},
{cod: '51', descricao: "Mercadoria Vencida (Data de Validade Expirada)"},
{cod: '52', descricao: "Mercadoria Redespachada (Entregue para Redespacho)"},
{cod: '53', descricao: "Mercadoria não foi Embarcada, Permanecendo na Origem"},
{cod: '54', descricao: "Mercadoria Embarcada sem CTRC ou CTRC não Embarcado"},
{cod: '55', descricao: "Endereço Transp. de Redespacho não localizado/não informado"},
{cod: '56', descricao: "Cliente não Aceita Mercadoria com Pagamento de Reembolso"},
{cod: '57', descricao: "Transp. não atende a cidade da transportadora de redespacho"},
{cod: '58', descricao: "Quebra do Veiculo de Entrega"},
{cod: '59', descricao: "Cliente sem Verba para Pagar o Frete"},
{cod: '60', descricao: "Endereço de Entrega Errado"},
{cod: '61', descricao: "Cliente sem Verba para Reembolso"},
{cod: '62', descricao: "Recusa da Carga por Valor de Frete Errado"},
{cod: '63', descricao: "Identificação do Cliente não Informada/Enviada/Insuficiente"},
{cod: '64', descricao: "Cliente não Identificado/Cadastrado"},
{cod: '65', descricao: "Entrar em Contato com o Comprador"},
{cod: '66', descricao: "Troca não Disponível"},
{cod: '67', descricao: "Fins Estatísticos"},
{cod: '68', descricao: "Data de Entrega Diferente do Pedido"},
{cod: '69', descricao: "Substituição Tributária
|
Check whether the record is an occurrence entry
if (fileLine[j].slice(0, 3) == '342') {
var ocorrencia = dadosOcorrencia(fileLine[j].slice(28, 30));
div += '<td>' + ocorrencia.cod + '</td>';
div += '<td>' + ocorrencia.descricao + '</td>';
}
};
div +=
|
conditional_block
|
app.js
|
// Copy the file contents into an array where each index is one line of the file
var fileArr = e.target.result.split('\n');
// Build the div that will display the data
var cnpj = fileArr[2].slice(3, 17);
var div = '<p><strong>CNPJ:</strong> ' + formataCnpj(cnpj) + '</p>';
div += '<p><strong>Transportadora:</strong> ' + fileArr[2].slice(17, 57) + '</p>';
div += '<div class="panel panel-default">';
div += '<div class="panel-heading">Ocorrências</div>';
div += '<table class="table no-"><thead><tr><th>Código</th><th>Descrição</th></tr></thead>';
// Loop over each line of the file
for (var i = 0; i < fileArr.length; i++) {
div += '<tr>';
var fileLine = fileArr[i].split(',');
for (var j = 0; j < fileLine.length; j++) {
// Check whether the record is an occurrence entry
if (fileLine[j].slice(0, 3) == '342') {
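// Assumed fixed-width layout: characters 28-29 of a '342' record carry the two-digit occurrence code.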
var ocorrencia = dadosOcorrencia(fileLine[j].slice(28, 30));
div += '<td>' + ocorrencia.cod + '</td>';
div += '<td>' + ocorrencia.descricao + '</td>';
}
};
div += '</tr>';
};
div += '</table></div>'; // close the table and the panel div opened above
// Set the assembled div with the data
areaUpload.classList.add('hidden');
btnNewReader.classList.remove('hidden');
// Display the data
exibeArquivo.innerHTML = div;
};
btnNewReader.addEventListener('click', function() {
areaUpload.classList.remove('hidden');
btnNewReader.classList.add('hidden');
exibeArquivo.innerHTML = '';
});
var formataCnpj = function(cnpj) {
return cnpj.slice(0, 2) + '.' +
cnpj.slice(2, 5) + '.' +
cnpj.slice(5, 8) + '/' +
cnpj.slice(8, 12) + '-' +
cnpj.slice(12, 14);
};
function dadosOcorrencia(cod) {
var listaOcorrencias = [
{cod: '00', descricao: "Processo de Transporte já Iniciado"},
{cod: '01', descricao: "Entrega Realizada Normalmente"},
{cod: '02', descricao: "Entrega Fora da Data Programada"},
{cod: '03', descricao: "Recusa por Falta de Pedido de Compra"},
{cod: '04', descricao: "Recusa por Pedido de Compra Cancelado"},
{cod: '05', descricao: "Falta de Espaço Físico no Depósito do Cliente Destino"},
{cod: '06', descricao: "Endereço do Cliente Destino não Localizado"},
{cod: '07', descricao: "Devolução não Autorizada pelo Cliente"},
{cod: '08', descricao: "Preço Mercadoria em Desacordo com o Pedido Compra"},
{cod: '09', descricao: "Mercadoria em Desacordo com o Pedido Compra"},
{cod: '10', descricao: "Cliente Destino somente Recebe Mercadoria com Frete Pago"},
{cod: '11', descricao: "Recusa por Deficiência Embalagem Mercadoria"},
{cod: '12', descricao: "Redespacho não Indicado"},
{cod: '13', descricao: "Transportadora não Atende a Cidade do Cliente Destino"},
{cod: '14', descricao: "Mercadoria Sinistrada"},
{cod: '15', descricao: "Embalagem Sinistrada"},
{cod: '16', descricao: "Pedido de Compras em Duplicidade"},
{cod: '17', descricao: "Mercadoria fora da Embalagem de Atacadista"},
{cod: '18', descricao: "Mercadorias Trocadas"},
{cod: '19', descricao: "Reentrega Solicitada pelo Cliente"},
{cod: '20', descricao: "Entrega Prejudicada por Horário/Falta de Tempo Hábil"},
{cod: '21', descricao: "Estabelecimento Fechado"},
{cod: '22', descricao: "Reentrega sem Cobrança do Cliente"},
{cod: '23', descricao: "Extravio de Mercadoria em Trânsito"},
{cod: '24', descricao: "Mercadoria Reentregue ao Cliente Destino"},
{cod: '25', descricao: "Mercadoria Devolvida ao Cliente de Origem"},
{cod: '26', descricao: "Nota Fiscal Retida pela Fiscalização"},
{cod: '27', descricao: "Roubo de Carga"},
{cod: '28', descricao: "Mercadoria Retida até Segunda Ordem"},
{cod: '29', descricao: "Cliente Retira Mercadoria na Transportadora"},
{cod: '30', descricao: "Problema com a Documentação (Nota Fiscal e/ou CTRC)"},
{cod: '31', descricao: "Entrega com Indenização Efetuada"},
{cod: '32', descricao: "Falta com Solicitação de Reposição"},
{cod: '33', descricao: "Falta com Busca/Reconferência"},
{cod: '34', descricao: "Cliente Fechado para Balanço"},
{cod: '35', descricao: "Quantidade de Produto em Desacordo (Nota Fiscal e/ou Pedido)"},
{cod: '36', descricao: "Extravio de documentos pela Cia. Aérea - Cód. Aéreo"},
{cod: '37', descricao: "Extravio de carga pela Cia. Aérea – Cód. Aéreo"},
{cod: '39', descricao: "Corte de carga na pista–Cód. Aéreo"},
{cod: '40', descricao: "Aeroporto fechado na origem - Cód. Aéreo"},
{cod: '41', descricao: "Pedido de Compra Incompleto"},
{cod: '42', descricao: "Nota Fiscal com Produtos de Setores Diferentes"},
{cod: '43', descricao: "Feriado Local/Nacional"},
{cod: '44', descricao: "Excesso de Veículos"},
{cod: '45', descricao: "Cliente Destino Encerrou Atividades"},
{cod: '46', descricao: "Responsável de Recebimento Ausente"},
{cod: '47', descricao: "Cliente Destino em Greve"},
{cod: '48', descricao: "Aeroporto fechado no destino - Cód. Aéreo"},
{cod: '49', descricao: "Vôo cancelado - Cód. Transp. Aéreo"},
{cod: '50', descricao: "Greve nacional (Greve Geral)"},
{cod: '51', descricao: "Mercadoria Vencida (Data de Validade Expirada)"},
{cod: '52', descricao: "Mercadoria Redespachada (Entregue para Redespacho)"},
{cod: '53', descricao: "Mercadoria não foi Embarcada, Permanecendo na Origem"},
{cod: '54', descricao: "Mercadoria Embarcada sem CTRC ou CTRC não Embarcado"},
{cod: '55', descricao: "Endereço Transp. de Redespacho não localizado/não informado"},
{cod: '56', descricao: "Cliente não Aceita Mercadoria com Pagamento de Reembolso"},
{cod: '57', descricao: "Transp. não atende a cidade da transportadora de redespacho"},
{cod: '58', descricao: "Quebra do Veiculo de Entrega"},
{cod: '59', descricao: "Cliente sem Verba para Pagar o Frete"},
{cod: '60', descricao: "Endereço de Entrega Errado"},
{cod: '61', descricao: "Cliente sem Verba para Reembolso"},
{cod: '62', descricao: "Recusa da Carga por Valor de Frete Errado"},
{cod: '63', descricao: "Identificação do Cliente não Informada/Enviada/Insuficiente"},
{cod: '64', descricao: "Cliente não Identificado/Cadastrado"},
{cod: '65', descricao: "Entrar em Contato com o Comprador"},
{cod: '66', descricao: "Troca não Disponível"},
{cod: '67', descricao: "Fins Estatísticos"},
{cod: '68', descricao: "Data de Entrega Diferente do
|
o(e) {
|
identifier_name
|
|
app.js
|
index is one line of the file
var fileArr = e.target.result.split('\n');
// Build the div that will display the data
var cnpj = fileArr[2].slice(3, 17);
var div = '<p><strong>CNPJ:</strong> ' + formataCnpj(cnpj) + '</p>';
div += '<p><strong>Transportadora:</strong> ' + fileArr[2].slice(17, 57) + '</p>';
div += '<div class="panel panel-default">';
div += '<div class="panel-heading">Ocorrências</div>';
div += '<table class="table no-"><thead><tr><th>Código</th><th>Descrição</th></tr></thead>';
// Loop over each line of the file
for (var i = 0; i < fileArr.length; i++) {
div += '<tr>';
var fileLine = fileArr[i].split(',');
for (var j = 0; j < fileLine.length; j++) {
// Check whether the record is an occurrence entry
if (fileLine[j].slice(0, 3) == '342') {
var ocorrencia = dadosOcorrencia(fileLine[j].slice(28, 30));
div += '<td>' + ocorrencia.cod + '</td>';
div += '<td>' + ocorrencia.descricao + '</td>';
}
};
div += '</tr>';
};
div += '</table></div>'; // close the table and the panel div opened above
// Set the assembled div with the data
areaUpload.classList.add('hidden');
btnNewReader.classList.remove('hidden');
// Display the data
exibeArquivo.innerHTML = div;
|
};
btnNewReader.addEventListener('click', function() {
areaUpload.classList.remove('hidden');
btnNewReader.classList.add('hidden');
exibeArquivo.innerHTML = '';
});
var formataCnpj = function(cnpj) {
return cnpj.slice(0, 2) + '.' +
cnpj.slice(2, 5) + '.' +
cnpj.slice(5, 8) + '/' +
cnpj.slice(8, 12) + '-' +
cnpj.slice(12, 14);
};
function dadosOcorrencia(cod) {
var listaOcorrencias = [
{cod: '00', descricao: "Processo de Transporte já Iniciado"},
{cod: '01', descricao: "Entrega Realizada Normalmente"},
{cod: '02', descricao: "Entrega Fora da Data Programada"},
{cod: '03', descricao: "Recusa por Falta de Pedido de Compra"},
{cod: '04', descricao: "Recusa por Pedido de Compra Cancelado"},
{cod: '05', descricao: "Falta de Espaço Físico no Depósito do Cliente Destino"},
{cod: '06', descricao: "Endereço do Cliente Destino não Localizado"},
{cod: '07', descricao: "Devolução não Autorizada pelo Cliente"},
{cod: '08', descricao: "Preço Mercadoria em Desacordo com o Pedido Compra"},
{cod: '09', descricao: "Mercadoria em Desacordo com o Pedido Compra"},
{cod: '10', descricao: "Cliente Destino somente Recebe Mercadoria com Frete Pago"},
{cod: '11', descricao: "Recusa por Deficiência Embalagem Mercadoria"},
{cod: '12', descricao: "Redespacho não Indicado"},
{cod: '13', descricao: "Transportadora não Atende a Cidade do Cliente Destino"},
{cod: '14', descricao: "Mercadoria Sinistrada"},
{cod: '15', descricao: "Embalagem Sinistrada"},
{cod: '16', descricao: "Pedido de Compras em Duplicidade"},
{cod: '17', descricao: "Mercadoria fora da Embalagem de Atacadista"},
{cod: '18', descricao: "Mercadorias Trocadas"},
{cod: '19', descricao: "Reentrega Solicitada pelo Cliente"},
{cod: '20', descricao: "Entrega Prejudicada por Horário/Falta de Tempo Hábil"},
{cod: '21', descricao: "Estabelecimento Fechado"},
{cod: '22', descricao: "Reentrega sem Cobrança do Cliente"},
{cod: '23', descricao: "Extravio de Mercadoria em Trânsito"},
{cod: '24', descricao: "Mercadoria Reentregue ao Cliente Destino"},
{cod: '25', descricao: "Mercadoria Devolvida ao Cliente de Origem"},
{cod: '26', descricao: "Nota Fiscal Retida pela Fiscalização"},
{cod: '27', descricao: "Roubo de Carga"},
{cod: '28', descricao: "Mercadoria Retida até Segunda Ordem"},
{cod: '29', descricao: "Cliente Retira Mercadoria na Transportadora"},
{cod: '30', descricao: "Problema com a Documentação (Nota Fiscal e/ou CTRC)"},
{cod: '31', descricao: "Entrega com Indenização Efetuada"},
{cod: '32', descricao: "Falta com Solicitação de Reposição"},
{cod: '33', descricao: "Falta com Busca/Reconferência"},
{cod: '34', descricao: "Cliente Fechado para Balanço"},
{cod: '35', descricao: "Quantidade de Produto em Desacordo (Nota Fiscal e/ou Pedido)"},
{cod: '36', descricao: "Extravio de documentos pela Cia. Aérea - Cód. Aéreo"},
{cod: '37', descricao: "Extravio de carga pela Cia. Aérea – Cód. Aéreo"},
{cod: '39', descricao: "Corte de carga na pista–Cód. Aéreo"},
{cod: '40', descricao: "Aeroporto fechado na origem - Cód. Aéreo"},
{cod: '41', descricao: "Pedido de Compra Incompleto"},
{cod: '42', descricao: "Nota Fiscal com Produtos de Setores Diferentes"},
{cod: '43', descricao: "Feriado Local/Nacional"},
{cod: '44', descricao: "Excesso de Veículos"},
{cod: '45', descricao: "Cliente Destino Encerrou Atividades"},
{cod: '46', descricao: "Responsável de Recebimento Ausente"},
{cod: '47', descricao: "Cliente Destino em Greve"},
{cod: '48', descricao: "Aeroporto fechado no destino - Cód. Aéreo"},
{cod: '49', descricao: "Vôo cancelado - Cód. Transp. Aéreo"},
{cod: '50', descricao: "Greve nacional (Greve Geral)"},
{cod: '51', descricao: "Mercadoria Vencida (Data de Validade Expirada)"},
{cod: '52', descricao: "Mercadoria Redespachada (Entregue para Redespacho)"},
{cod: '53', descricao: "Mercadoria não foi Embarcada, Permanecendo na Origem"},
{cod: '54', descricao: "Mercadoria Embarcada sem CTRC ou CTRC não Embarcado"},
{cod: '55', descricao: "Endereço Transp. de Redespacho não localizado/não informado"},
{cod: '56', descricao: "Cliente não Aceita Mercadoria com Pagamento de Reembolso"},
{cod: '57', descricao: "Transp. não atende a cidade da transportadora de redespacho"},
{cod: '58', descricao: "Quebra do Veiculo de Entrega"},
{cod: '59', descricao: "Cliente sem Verba para Pagar o Frete"},
{cod: '60', descricao: "Endereço de Entrega Errado"},
{cod: '61', descricao: "Cliente sem Verba para Reembolso"},
{cod: '62', descricao: "Recusa da Carga por Valor de Frete Errado"},
{cod: '63', descricao: "Identificação do Cliente não Informada/Enviada/Insuficiente"},
{cod: '64', descricao: "Cliente não Identificado/Cadastrado"},
{cod: '65', descricao: "Entrar em Contato com o Comprador"},
{cod: '66', descricao: "Troca não Disponível"},
{cod: '67', descricao: "Fins Estatísticos"},
{cod: '68', descricao: "Data de Entrega Diferente do Pedido"},
{cod: '69', descricao: "Substituição Tributária
|
random_line_split
|
|
app.js
|
is one line of the file
var fileArr = e.target.result.split('\n');
// Build the div that will display the data
var cnpj = fileArr[2].slice(3, 17);
var div = '<p><strong>CNPJ:</strong> ' + formataCnpj(cnpj) + '</p>';
div += '<p><strong>Transportadora:</strong> ' + fileArr[2].slice(17, 57) + '</p>';
div += '<div class="panel panel-default">';
div += '<div class="panel-heading">Ocorrências</div>';
div += '<table class="table no-"><thead><tr><th>Código</th><th>Descrição</th></tr></thead>';
// Loop over each line of the file
for (var i = 0; i < fileArr.length; i++) {
div += '<tr>';
var fileLine = fileArr[i].split(',');
for (var j = 0; j < fileLine.length; j++) {
// Check whether the record is an occurrence entry
if (fileLine[j].slice(0, 3) == '342') {
var ocorrencia = dadosOcorrencia(fileLine[j].slice(28, 30));
div += '<td>' + ocorrencia.cod + '</td>';
div += '<td>' + ocorrencia.descricao + '</td>';
}
};
div += '</tr>';
};
div += '</table></div>'; // close the table and the panel div opened above
// Set the assembled div with the data
areaUpload.classList.add('hidden');
btnNewReader.classList.remove('hidden');
// Display the data
exibeArquivo.innerHTML = div;
};
btnNewReader.addEventListener('click', function() {
areaUpload.classList.remove('hidden');
btnNewReader.classList.add('hidden');
exibeArquivo.innerHTML = '';
});
var formataCnpj = function(cnpj) {
return cnpj.slice(0, 2) + '.' +
cnpj.slice(2, 5) + '.' +
cnpj.slice(5, 8) + '/' +
cnpj.slice(8, 12) + '-' +
cnpj.slice(12, 14);
};
function dadosOcorrencia(cod) {
var listaOco
|
{cod: '19', descricao: "Reentrega Solicitada pelo Cliente"},
{cod: '20', descricao: "Entrega Prejudicada por Horário/Falta de Tempo Hábil"},
{cod: '21', descricao: "Estabelecimento Fechado"},
{cod: '22', descricao: "Reentrega sem Cobrança do Cliente"},
{cod: '23', descricao: "Extravio de Mercadoria em Trânsito"},
{cod: '24', descricao: "Mercadoria Reentregue ao Cliente Destino"},
{cod: '25', descricao: "Mercadoria Devolvida ao Cliente de Origem"},
{cod: '26', descricao: "Nota Fiscal Retida pela Fiscalização"},
{cod: '27', descricao: "Roubo de Carga"},
{cod: '28', descricao: "Mercadoria Retida até Segunda Ordem"},
{cod: '29', descricao: "Cliente Retira Mercadoria na Transportadora"},
{cod: '30', descricao: "Problema com a Documentação (Nota Fiscal e/ou CTRC)"},
{cod: '31', descricao: "Entrega com Indenização Efetuada"},
{cod: '32', descricao: "Falta com Solicitação de Reposição"},
{cod: '33', descricao: "Falta com Busca/Reconferência"},
{cod: '34', descricao: "Cliente Fechado para Balanço"},
{cod: '35', descricao: "Quantidade de Produto em Desacordo (Nota Fiscal e/ou Pedido)"},
{cod: '36', descricao: "Extravio de documentos pela Cia. Aérea - Cód. Aéreo"},
{cod: '37', descricao: "Extravio de carga pela Cia. Aérea – Cód. Aéreo"},
{cod: '39', descricao: "Corte de carga na pista–Cód. Aéreo"},
{cod: '40', descricao: "Aeroporto fechado na origem - Cód. Aéreo"},
{cod: '41', descricao: "Pedido de Compra Incompleto"},
{cod: '42', descricao: "Nota Fiscal com Produtos de Setores Diferentes"},
{cod: '43', descricao: "Feriado Local/Nacional"},
{cod: '44', descricao: "Excesso de Veículos"},
{cod: '45', descricao: "Cliente Destino Encerrou Atividades"},
{cod: '46', descricao: "Responsável de Recebimento Ausente"},
{cod: '47', descricao: "Cliente Destino em Greve"},
{cod: '48', descricao: "Aeroporto fechado no destino - Cód. Aéreo"},
{cod: '49', descricao: "Vôo cancelado - Cód. Transp. Aéreo"},
{cod: '50', descricao: "Greve nacional (Greve Geral)"},
{cod: '51', descricao: "Mercadoria Vencida (Data de Validade Expirada)"},
{cod: '52', descricao: "Mercadoria Redespachada (Entregue para Redespacho)"},
{cod: '53', descricao: "Mercadoria não foi Embarcada, Permanecendo na Origem"},
{cod: '54', descricao: "Mercadoria Embarcada sem CTRC ou CTRC não Embarcado"},
{cod: '55', descricao: "Endereço Transp. de Redespacho não localizado/não informado"},
{cod: '56', descricao: "Cliente não Aceita Mercadoria com Pagamento de Reembolso"},
{cod: '57', descricao: "Transp. não atende a cidade da transportadora de redespacho"},
{cod: '58', descricao: "Quebra do Veiculo de Entrega"},
{cod: '59', descricao: "Cliente sem Verba para Pagar o Frete"},
{cod: '60', descricao: "Endereço de Entrega Errado"},
{cod: '61', descricao: "Cliente sem Verba para Reembolso"},
{cod: '62', descricao: "Recusa da Carga por Valor de Frete Errado"},
{cod: '63', descricao: "Identificação do Cliente não Informada/Enviada/Insuficiente"},
{cod: '64', descricao: "Cliente não Identificado/Cadastrado"},
{cod: '65', descricao: "Entrar em Contato com o Comprador"},
{cod: '66', descricao: "Troca não Disponível"},
{cod: '67', descricao: "Fins Estatísticos"},
{cod: '68', descricao: "Data de Entrega Diferente do Pedido"},
{cod: '69', descricao: "Substituição Tributária
|
rrencias = [
{cod: '00', descricao: "Processo de Transporte já Iniciado"},
{cod: '01', descricao: "Entrega Realizada Normalmente"},
{cod: '02', descricao: "Entrega Fora da Data Programada"},
{cod: '03', descricao: "Recusa por Falta de Pedido de Compra"},
{cod: '04', descricao: "Recusa por Pedido de Compra Cancelado"},
{cod: '05', descricao: "Falta de Espaço Físico no Depósito do Cliente Destino"},
{cod: '06', descricao: "Endereço do Cliente Destino não Localizado"},
{cod: '07', descricao: "Devolução não Autorizada pelo Cliente"},
{cod: '08', descricao: "Preço Mercadoria em Desacordo com o Pedido Compra"},
{cod: '09', descricao: "Mercadoria em Desacordo com o Pedido Compra"},
{cod: '10', descricao: "Cliente Destino somente Recebe Mercadoria com Frete Pago"},
{cod: '11', descricao: "Recusa por Deficiência Embalagem Mercadoria"},
{cod: '12', descricao: "Redespacho não Indicado"},
{cod: '13', descricao: "Transportadora não Atende a Cidade do Cliente Destino"},
{cod: '14', descricao: "Mercadoria Sinistrada"},
{cod: '15', descricao: "Embalagem Sinistrada"},
{cod: '16', descricao: "Pedido de Compras em Duplicidade"},
{cod: '17', descricao: "Mercadoria fora da Embalagem de Atacadista"},
{cod: '18', descricao: "Mercadorias Trocadas"},
|
identifier_body
|
mc.py
|
):
self.pool.add(canonical((i, j)))
for piece in self.my_pieces:
self.pool.remove(canonical(piece))
def feed(self):
# Save
if self.history_pointer == 0:
self.my_init()
# Game simulation
# team = self.position % 2
while self.history_pointer < len(self.history):
# Read and proc next event
event, *args = self.history[self.history_pointer]
self.history_pointer += 1
if event == Event.MOVE:
position, piece, head = args
self.remaining[position] -= 1
if position != self.position:
self.pool.remove(canonical(piece))
elif event == Event.PASS:
position, = args
self.dont_have[position] |= 1 << self.heads[0]
self.dont_have[position] |= 1 << self.heads[1]
elif event == Event.NEW_GAME:
pass
else:
raise ValueError(f"Invalid event: {event}")
def sample(self):
order = list(self.pool)
shuffle(order)
pieces = [[] for _ in range(4)]
for pos in range(4):
if pos == self.position:
pieces[pos] = deepcopy(self.pieces)
else:
r = self.remaining[pos]
pieces[pos] = order[:r]
order = order[r:]
assert len(pieces[pos]) == r
assert len(order) == 0
return pieces
def choice(self):
self.feed()
NUM_SAMPLING = 10
NUM_EXPANDED = 2000
scores = {} # Score of each move (piece, head)
winpredictions = {}
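# Determinization: sample concrete hands for the hidden players and average move statistics across samples.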
for _ in range(NUM_SAMPLING):
distribution = self.sample()
cscores, cwinpredictions = montecarlo(distribution, tuple(self.heads), self.position, NUM_EXPANDED)
for move, scr in cscores.items():
scores[move] = scores.get(move, 0.) + scr
for move, scr in cwinpredictions.items():
winpredictions[move] = winpredictions.get(move, 0.) + scr
assert len(scores) > 0
best_score = -1.
best_move = None
# for move, scr in scores.items():
for move, scr in winpredictions.items():
if scr > best_score:
best_score = scr
best_move = move
logger.info(f"Best move: {best_move}")
logger.info(f"Expected score: {winpredictions[best_move] / NUM_SAMPLING}")
return best_move
# Utils for Montecarlo
class Node:
WIN_POINTS = 2
TIE_POINTS = 1
EXPLORATION = 2.
def __init__(self, state):
self.state = state
self.visit_count = 0
# Wins | Ties | Losses
self.rate = [0, 0, 0]
self.children = None
self.end_node = None
def score(self, parent_visit_count, me):
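# UCB1-style score: mean payoff (win/tie points) plus an exploration bonus EXPLORATION * sqrt(log(parent_visit_count) / visit_count).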
if self.visit_count == 0:
return float('inf')
assert sum(self.rate) == self.visit_count
if me: # Current player is from my team
exploitation = self.rate[0] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
else: # Current player is NOT from my team
exploitation = self.rate[2] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
# Mean of all simulations so far
exploitation /= self.visit_count
exploration = Node.EXPLORATION * (log(parent_visit_count) / self.visit_count) ** .5
score = exploitation + exploration
logger.debug(f"Exploitation: {exploitation}")
logger.debug(f"Exploration: {exploration}")
logger.debug(f"Score: {score}")
return score
def intersect(pieceA, pieceB):
""" Check if two 2-len tuples have at least one element in common
"""
return pieceA[0] in pieceB or pieceA[1] in pieceB
def winner(state, position, distribution):
""" Find winner in current state
"""
WIN, TIE, LOOSE = 0, 1, 2
team = position & 1
mask, heads, pos = state
winner_team = None
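# Blocked game: the player holding the lightest hand (fewest total pips) wins for their team; a cross-team tie is a draw.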
light_hand = float('inf')
|
for i in range(4):
# Player `i` has no remaining pieces
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
winner_team = i & 1
break
hand = 0
for j in range(7):
if (mask >> (i * 7 + j)) & 1:
hand += sum(distribution[i][j])
if hand < light_hand:
light_hand = hand
light_player = set()
if hand == light_hand:
light_player.add(i & 1)
if winner_team is None:
if len(light_player) == 2:
return TIE
winner_team = list(light_player)[0]
return WIN if winner_team == team else LOOSE
def is_over(state, distribution):
""" Check if game is over
"""
mask, heads, pos = state
exist_move = False
for i in range(4):
# Player `i` doesn't have any piece left
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
return True
for j in range(7):
if ((mask >> (i * 7 + j)) & 1) and intersect(distribution[i][j], heads):
exist_move = True
return not exist_move
def neighbors(state, distribution):
mask, heads, pos = state
count = 0
for i in range(7):
# If the player still holds this piece
if ((mask >> (7 * pos + i)) & 1) == 1:
piece = distribution[pos][i]
# If piece can be played through head_0
if heads[0] in piece or heads[0] == -1:
nmask = mask ^ (1 << (7 * pos + i))
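# XOR trick: if heads[0] == piece[0] this yields piece[1] (and vice versa), so the open end becomes the piece's other half.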
nheads = (heads[0] ^ piece[0] ^ piece[1], heads[1])
npos = (pos + 1) & 3 # % 4
count += 1
yield (nmask, nheads, npos)
# If piece can be played through head_1
if heads[1] in distribution[pos][i]:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0], heads[1] ^ piece[0] ^ piece[1])
npos = (pos + 1) & 3
count += 1
yield (nmask, nheads, npos)
# Player can't make any valid move other than pass
if count == 0:
npos = (pos + 1) & 3
yield (mask, heads, npos)
def show(state):
mask, heads, pos = state
print(f"{bin(mask)} | {heads[0]} {heads[1]} | {pos}")
def montecarlo(distribution, heads, position, NUM_EXPANDED):
"""
state: (bitmask, heads, pos)
bitmask: 7 bits each player 2**28 states that denotes which pieces are still holding relative to `distribution`
parent visit count: PC
visit count: VC
win count: WC
exploration control: K
WC / VC + K * sqrt(log(PC) / VC)
"""
team = position & 1
# Compute first state
mask = 0
for dist in reversed(distribution):
assert len(dist) <= 7
mask <<= 7
mask |= (1 << len(dist)) - 1
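# Example: four full 7-piece hands give mask = 0b1111111_1111111_1111111_1111111 (all 28 bits set).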
heads = tuple(heads)
pos = position
start = (mask, heads, pos)
# Initialize states for MonteCarlo Tree Search
state_map = {start: Node(start)}
# Run MonteCarlo
iterations = 0
logger.debug(f"Start montecarlo from: {bin(mask)} | {heads} | {pos}")
while True:
iterations += 1
# Stop condition
if len(state_map) >= NUM_EXPANDED or \
iterations >= 1e4:
logger.debug(f"Iterations: {iterations}")
logger.debug(f"Number of states: {len(state_map)}")
break
cur = start
# path = [state_map[cur]]
path = []
# Traverse the tree search from the root down to one leaf
while True:
# show(cur)
node = state_map[cur]
path.append(node)
if node.visit_count == 0:
break
best_score = float('-inf')
best_child = None
for child in node.children:
scr = child.score(node.visit_count, (cur[2] & 1) == team)
if scr > best_score:
best_score = scr
best_child
|
light_player = set()
|
random_line_split
|
mc.py
|
):
self.pool.add(canonical((i, j)))
for piece in self.my_pieces:
self.pool.remove(canonical(piece))
def feed(self):
# Save
if self.history_pointer == 0:
self.my_init()
# Game simulation
# team = self.position % 2
while self.history_pointer < len(self.history):
# Read and proc next event
event, *args = self.history[self.history_pointer]
self.history_pointer += 1
if event == Event.MOVE:
position, piece, head = args
self.remaining[position] -= 1
if position != self.position:
self.pool.remove(canonical(piece))
elif event == Event.PASS:
position, = args
self.dont_have[position] |= 1 << self.heads[0]
self.dont_have[position] |= 1 << self.heads[1]
elif event == Event.NEW_GAME:
pass
else:
raise ValueError(f"Invalid event: {event}")
def sample(self):
order = list(self.pool)
shuffle(order)
pieces = [[] for _ in range(4)]
for pos in range(4):
|
assert len(order) == 0
return pieces
def choice(self):
self.feed()
NUM_SAMPLING = 10
NUM_EXPANDED = 2000
scores = {} # Score of each move (piece, head)
winpredictions = {}
for _ in range(NUM_SAMPLING):
distribution = self.sample()
cscores, cwinpredictions = montecarlo(distribution, tuple(self.heads), self.position, NUM_EXPANDED)
for move, scr in cscores.items():
scores[move] = scores.get(move, 0.) + scr
for move, scr in cwinpredictions.items():
winpredictions[move] = winpredictions.get(move, 0.) + scr
assert len(scores) > 0
best_score = -1.
best_move = None
# for move, scr in scores.items():
for move, scr in winpredictions.items():
if scr > best_score:
best_score = scr
best_move = move
logger.info(f"Best move: {best_move}")
logger.info(f"Expected score: {winpredictions[best_move] / NUM_SAMPLING}")
return best_move
# Utils for Montecarlo
class Node:
WIN_POINTS = 2
TIE_POINTS = 1
EXPLORATION = 2.
def __init__(self, state):
self.state = state
self.visit_count = 0
# Wins | Ties | Losses
self.rate = [0, 0, 0]
self.children = None
self.end_node = None
def score(self, parent_visit_count, me):
if self.visit_count == 0:
return float('inf')
assert sum(self.rate) == self.visit_count
if me: # Current player is from my team
exploitation = self.rate[0] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
else: # Current player is NOT from my team
exploitation = self.rate[2] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
# Mean of all simulations so far
exploitation /= self.visit_count
exploration = Node.EXPLORATION * (log(parent_visit_count) / self.visit_count) ** .5
score = exploitation + exploration
logger.debug(f"Exploitation: {exploitation}")
logger.debug(f"Exploration: {exploration}")
logger.debug(f"Score: {score}")
return score
def intersect(pieceA, pieceB):
""" Check if two 2-len tuples have at least one element in common
"""
return pieceA[0] in pieceB or pieceA[1] in pieceB
def winner(state, position, distribution):
""" Find winner in current state
"""
WIN, TIE, LOOSE = 0, 1, 2
team = position & 1
mask, heads, pos = state
winner_team = None
light_hand = float('inf')
light_player = set()
for i in range(4):
# Player `i` has no remaining pieces
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
winner_team = i & 1
break
hand = 0
for j in range(7):
if (mask >> (i * 7 + j)) & 1:
hand += sum(distribution[i][j])
if hand < light_hand:
light_hand = hand
light_player = set()
if hand == light_hand:
light_player.add(i & 1)
if winner_team is None:
if len(light_player) == 2:
return TIE
winner_team = list(light_player)[0]
return WIN if winner_team == team else LOOSE
def is_over(state, distribution):
""" Check if game is over
"""
mask, heads, pos = state
exist_move = False
for i in range(4):
# Player `i` doesn't have any piece left
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
return True
for j in range(7):
if ((mask >> (i * 7 + j)) & 1) and intersect(distribution[i][j], heads):
exist_move = True
return not exist_move
def neighbors(state, distribution):
mask, heads, pos = state
count = 0
for i in range(7):
# If the player still holds this piece
if ((mask >> (7 * pos + i)) & 1) == 1:
piece = distribution[pos][i]
# If piece can be played through head_0
if heads[0] in piece or heads[0] == -1:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0] ^ piece[0] ^ piece[1], heads[1])
npos = (pos + 1) & 3 # % 4
count += 1
yield (nmask, nheads, npos)
# If piece can be played through head_1
if heads[1] in distribution[pos][i]:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0], heads[1] ^ piece[0] ^ piece[1])
npos = (pos + 1) & 3
count += 1
yield (nmask, nheads, npos)
# Player can't make any valid move other than pass
if count == 0:
npos = (pos + 1) & 3
yield (mask, heads, npos)
def show(state):
mask, heads, pos = state
print(f"{bin(mask)} | {heads[0]} {heads[1]} | {pos}")
def montecarlo(distribution, heads, position, NUM_EXPANDED):
"""
state: (bitmask, heads, pos)
bitmask: 7 bits each player 2**28 states that denotes which pieces are still holding relative to `distribution`
parent visit count: PC
visit count: VC
win count: WC
exploration control: K
WC / VC + K * sqrt(log(PC) / VC)
"""
team = position & 1
# Compute first state
mask = 0
for dist in reversed(distribution):
assert len(dist) <= 7
mask <<= 7
mask |= (1 << len(dist)) - 1
heads = tuple(heads)
pos = position
start = (mask, heads, pos)
# Initialize states for MonteCarlo Tree Search
state_map = {start: Node(start)}
# Run MonteCarlo
iterations = 0
logger.debug(f"Start montecarlo from: {bin(mask)} | {heads} | {pos}")
while True:
iterations += 1
# Stop condition
if len(state_map) >= NUM_EXPANDED or \
iterations >= 1e4:
logger.debug(f"Iterations: {iterations}")
logger.debug(f"Number of states: {len(state_map)}")
break
cur = start
# path = [state_map[cur]]
path = []
# Traverse the tree search from the root down to one leaf
while True:
# show(cur)
node = state_map[cur]
path.append(node)
if node.visit_count == 0:
break
best_score = float('-inf')
best_child = None
for child in node.children:
scr = child.score(node.visit_count, (cur[2] & 1) == team)
if scr > best_score:
best_score = scr
best_child
|
if pos == self.position:
pieces[pos] = deepcopy(self.pieces)
else:
r = self.remaining[pos]
pieces[pos] = order[:r]
order = order[r:]
assert len(pieces[pos]) == r
|
conditional_block
|
mc.py
|
):
self.pool.add(canonical((i, j)))
for piece in self.my_pieces:
self.pool.remove(canonical(piece))
def feed(self):
# Save
if self.history_pointer == 0:
self.my_init()
# Game simulation
# team = self.position % 2
while self.history_pointer < len(self.history):
# Read and proc next event
event, *args = self.history[self.history_pointer]
self.history_pointer += 1
if event == Event.MOVE:
position, piece, head = args
self.remaining[position] -= 1
if position != self.position:
self.pool.remove(canonical(piece))
elif event == Event.PASS:
position, = args
self.dont_have[position] |= 1 << self.heads[0]
self.dont_have[position] |= 1 << self.heads[1]
elif event == Event.NEW_GAME:
pass
else:
raise ValueError(f"Invalid event: {event}")
def sample(self):
order = list(self.pool)
shuffle(order)
pieces = [[] for _ in range(4)]
for pos in range(4):
if pos == self.position:
pieces[pos] = deepcopy(self.pieces)
else:
r = self.remaining[pos]
pieces[pos] = order[:r]
order = order[r:]
assert len(pieces[pos]) == r
assert len(order) == 0
return pieces
def
|
(self):
self.feed()
NUM_SAMPLING = 10
NUM_EXPANDED = 2000
scores = {} # Score of each move (piece, head)
winpredictions = {}
for _ in range(NUM_SAMPLING):
distribution = self.sample()
cscores, cwinpredictions = montecarlo(distribution, tuple(self.heads), self.position, NUM_EXPANDED)
for move, scr in cscores.items():
scores[move] = scores.get(move, 0.) + scr
for move, scr in cwinpredictions.items():
winpredictions[move] = winpredictions.get(move, 0.) + scr
assert len(scores) > 0
best_score = -1.
best_move = None
# for move, scr in scores.items():
for move, scr in winpredictions.items():
if scr > best_score:
best_score = scr
best_move = move
logger.info(f"Best move: {best_move}")
logger.info(f"Expected score: {winpredictions[best_move] / NUM_SAMPLING}")
return best_move
# Utils for Montecarlo
class Node:
WIN_POINTS = 2
TIE_POINTS = 1
EXPLORATION = 2.
def __init__(self, state):
self.state = state
self.visit_count = 0
# Wins | Ties | Losses
self.rate = [0, 0, 0]
self.children = None
self.end_node = None
def score(self, parent_visit_count, me):
if self.visit_count == 0:
return float('inf')
assert sum(self.rate) == self.visit_count
if me: # Current player is from my team
exploitation = self.rate[0] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
else: # Current player is NOT from my team
exploitation = self.rate[2] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
# Mean of all simulations so far
exploitation /= self.visit_count
exploration = Node.EXPLORATION * (log(parent_visit_count) / self.visit_count) ** .5
score = exploitation + exploration
logger.debug(f"Exploitation: {exploitation}")
logger.debug(f"Exploration: {exploration}")
logger.debug(f"Score: {score}")
return score
def intersect(pieceA, pieceB):
""" Check if two 2-len tuples have at least one element in common
"""
return pieceA[0] in pieceB or pieceA[1] in pieceB
def winner(state, position, distribution):
""" Find winner in current state
"""
WIN, TIE, LOOSE = 0, 1, 2
team = position & 1
mask, heads, pos = state
winner_team = None
light_hand = float('inf')
light_player = set()
for i in range(4):
# Player `i` has no remaining pieces
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
winner_team = i & 1
break
hand = 0
for j in range(7):
if (mask >> (i * 7 + j)) & 1:
hand += sum(distribution[i][j])
if hand < light_hand:
light_hand = hand
light_player = set()
if hand == light_hand:
light_player.add(i & 1)
if winner_team is None:
if len(light_player) == 2:
return TIE
winner_team = list(light_player)[0]
return WIN if winner_team == team else LOOSE
def is_over(state, distribution):
""" Check if game is over
"""
mask, heads, pos = state
exist_move = False
for i in range(4):
# Player `i` doesn't have any piece left
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
return True
for j in range(7):
if ((mask >> (i * 7 + j)) & 1) and intersect(distribution[i][j], heads):
exist_move = True
return not exist_move
def neighbors(state, distribution):
mask, heads, pos = state
count = 0
for i in range(7):
# If the player still holds this piece
if ((mask >> (7 * pos + i)) & 1) == 1:
piece = distribution[pos][i]
# If piece can be played through head_0
if heads[0] in piece or heads[0] == -1:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0] ^ piece[0] ^ piece[1], heads[1])
npos = (pos + 1) & 3 # % 4
count += 1
yield (nmask, nheads, npos)
# If piece can be played through head_1
if heads[1] in distribution[pos][i]:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0], heads[1] ^ piece[0] ^ piece[1])
npos = (pos + 1) & 3
count += 1
yield (nmask, nheads, npos)
# Player can't make any valid move other than pass
if count == 0:
npos = (pos + 1) & 3
yield (mask, heads, npos)
def show(state):
mask, heads, pos = state
print(f"{bin(mask)} | {heads[0]} {heads[1]} | {pos}")
def montecarlo(distribution, heads, position, NUM_EXPANDED):
"""
state: (bitmask, heads, pos)
bitmask: 7 bits each player 2**28 states that denotes which pieces are still holding relative to `distribution`
parent visit count: PC
visit count: VC
win count: WC
exploration control: K
WC / VC + K * sqrt(log(PC) / VC)
"""
team = position & 1
# Compute first state
mask = 0
for dist in reversed(distribution):
assert len(dist) <= 7
mask <<= 7
mask |= (1 << len(dist)) - 1
heads = tuple(heads)
pos = position
start = (mask, heads, pos)
# Initialize states for MonteCarlo Tree Search
state_map = {start: Node(start)}
# Run MonteCarlo
iterations = 0
logger.debug(f"Start montecarlo from: {bin(mask)} | {heads} | {pos}")
while True:
iterations += 1
# Stop condition
if len(state_map) >= NUM_EXPANDED or \
iterations >= 1e4:
logger.debug(f"Iterations: {iterations}")
logger.debug(f"Number of states: {len(state_map)}")
break
cur = start
# path = [state_map[cur]]
path = []
# Traverse the tree search from the root down to one leaf
while True:
# show(cur)
node = state_map[cur]
path.append(node)
if node.visit_count == 0:
break
best_score = float('-inf')
best_child = None
for child in node.children:
scr = child.score(node.visit_count, (cur[2] & 1) == team)
if scr > best_score:
best_score = scr
best
|
choice
|
identifier_name
|
mc.py
|
):
self.pool.add(canonical((i, j)))
for piece in self.my_pieces:
self.pool.remove(canonical(piece))
def feed(self):
# Save
if self.history_pointer == 0:
self.my_init()
# Game simulation
# team = self.position % 2
while self.history_pointer < len(self.history):
# Read and proc next event
event, *args = self.history[self.history_pointer]
self.history_pointer += 1
if event == Event.MOVE:
position, piece, head = args
self.remaining[position] -= 1
if position != self.position:
self.pool.remove(canonical(piece))
elif event == Event.PASS:
position, = args
self.dont_have[position] |= 1 << self.heads[0]
self.dont_have[position] |= 1 << self.heads[1]
elif event == Event.NEW_GAME:
pass
else:
raise ValueError(f"Invalid event: {event}")
def sample(self):
order = list(self.pool)
shuffle(order)
pieces = [[] for _ in range(4)]
for pos in range(4):
if pos == self.position:
pieces[pos] = deepcopy(self.pieces)
else:
r = self.remaining[pos]
pieces[pos] = order[:r]
order = order[r:]
assert len(pieces[pos]) == r
assert len(order) == 0
return pieces
def choice(self):
self.feed()
NUM_SAMPLING = 10
NUM_EXPANDED = 2000
scores = {} # Score of each move (piece, head)
winpredictions = {}
for _ in range(NUM_SAMPLING):
distribution = self.sample()
cscores, cwinpredictions = montecarlo(distribution, tuple(self.heads), self.position, NUM_EXPANDED)
for move, scr in cscores.items():
scores[move] = scores.get(move, 0.) + scr
for move, scr in cwinpredictions.items():
winpredictions[move] = winpredictions.get(move, 0.) + scr
assert len(scores) > 0
best_score = -1.
best_move = None
# for move, scr in scores.items():
for move, scr in winpredictions.items():
if scr > best_score:
best_score = scr
best_move = move
logger.info(f"Best move: {best_move}")
logger.info(f"Expected score: {winpredictions[best_move] / NUM_SAMPLING}")
return best_move
# Utils for Montecarlo
class Node:
WIN_POINTS = 2
TIE_POINTS = 1
EXPLORATION = 2.
def __init__(self, state):
self.state = state
self.visit_count = 0
# Wins | Ties | Losses
self.rate = [0, 0, 0]
self.children = None
self.end_node = None
def score(self, parent_visit_count, me):
if self.visit_count == 0:
return float('inf')
assert sum(self.rate) == self.visit_count
if me: # Current player is from my team
exploitation = self.rate[0] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
else: # Current player is NOT from my team
exploitation = self.rate[2] * Node.WIN_POINTS + self.rate[1] * Node.TIE_POINTS
# Mean of all simulations so far
exploitation /= self.visit_count
exploration = Node.EXPLORATION * (log(parent_visit_count) / self.visit_count) ** .5
score = exploitation + exploration
logger.debug(f"Exploitation: {exploitation}")
logger.debug(f"Exploration: {exploration}")
logger.debug(f"Score: {score}")
return score
def intersect(pieceA, pieceB):
""" Check if two 2-len tuples have at least one element in common
"""
return pieceA[0] in pieceB or pieceA[1] in pieceB
def winner(state, position, distribution):
""" Find winner in current state
"""
WIN, TIE, LOOSE = 0, 1, 2
team = position & 1
mask, heads, pos = state
winner_team = None
light_hand = float('inf')
light_player = set()
for i in range(4):
# Player `i` has no remaining pieces
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
winner_team = i & 1
break
hand = 0
for j in range(7):
if (mask >> (i * 7 + j)) & 1:
hand += sum(distribution[i][j])
if hand < light_hand:
light_hand = hand
light_player = set()
if hand == light_hand:
light_player.add(i & 1)
if winner_team is None:
if len(light_player) == 2:
return TIE
winner_team = list(light_player)[0]
return WIN if winner_team == team else LOOSE
def is_over(state, distribution):
""" Check if game is over
"""
mask, heads, pos = state
exist_move = False
for i in range(4):
# Player `i` doesn't have any piece left
if (mask >> (7 * i)) & ((1 << 7) - 1) == 0:
return True
for j in range(7):
if ((mask >> (i * 7 + j)) & 1) and intersect(distribution[i][j], heads):
exist_move = True
return not exist_move
def neighbors(state, distribution):
mask, heads, pos = state
count = 0
for i in range(7):
# If the player still holds this piece
if ((mask >> (7 * pos + i)) & 1) == 1:
piece = distribution[pos][i]
# If piece can be played through head_0
if heads[0] in piece or heads[0] == -1:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0] ^ piece[0] ^ piece[1], heads[1])
npos = (pos + 1) & 3 # % 4
count += 1
yield (nmask, nheads, npos)
# If piece can be played through head_1
if heads[1] in distribution[pos][i]:
nmask = mask ^ (1 << (7 * pos + i))
nheads = (heads[0], heads[1] ^ piece[0] ^ piece[1])
npos = (pos + 1) & 3
count += 1
yield (nmask, nheads, npos)
# Player can't make any valid move other than pass
if count == 0:
npos = (pos + 1) & 3
yield (mask, heads, npos)
def show(state):
mask, heads, pos = state
print(f"{bin(mask)} | {heads[0]} {heads[1]} | {pos}")
def montecarlo(distribution, heads, position, NUM_EXPANDED):
|
heads = tuple(heads)
pos = position
start = (mask, heads, pos)
# Initialize states for MonteCarlo Tree Search
state_map = {start: Node(start)}
# Run MonteCarlo
iterations = 0
logger.debug(f"Start montecarlo from: {bin(mask)} | {heads} | {pos}")
while True:
iterations += 1
# Stop condition
if len(state_map) >= NUM_EXPANDED or \
iterations >= 1e4:
logger.debug(f"Iterations: {iterations}")
logger.debug(f"Number of states: {len(state_map)}")
break
cur = start
# path = [state_map[cur]]
path = []
# Traverse the tree search from the root down to one leaf
while True:
# show(cur)
node = state_map[cur]
path.append(node)
if node.visit_count == 0:
break
best_score = float('-inf')
best_child = None
for child in node.children:
scr = child.score(node.visit_count, (cur[2] & 1) == team)
if scr > best_score:
best_score = scr
best_child =
|
"""
state: (bitmask, heads, pos)
bitmask: 7 bits each player 2**28 states that denotes which pieces are still holding relative to `distribution`
parent visit count: PC
visit count: VC
win count: WC
exploration control: K
WC / VC + K * sqrt(log(PC) / VC)
"""
team = position & 1
# Compute first state
mask = 0
for dist in reversed(distribution):
assert len(dist) <= 7
mask <<= 7
mask |= (1 << len(dist)) - 1
|
identifier_body
|
csn.py
|
_id_node_counter = 1
_or_nodes = 0
_leaf_nodes = 0
_or_edges = 0
_clt_edges = 0
_cltrees = 0
_depth = 0
_mean_depth = 0
_and_nodes = 0  # referenced by show()
@classmethod
def init_stats(cls):
Csn._id_node_counter = 1
Csn._or_nodes = 0
Csn._leaf_nodes = 0
Csn._or_edges = 0
Csn._clt_edges = 0
Csn._cltrees = 0
Csn._depth = 0
Csn._mean_depth = 0
Csn._and_nodes = 0
def __init__(self, data, clt = None, ll = 0.0, min_instances = 5, min_features = 3,
alpha = 1.0, n_original_samples = None,
leaf_vars = [], depth = 1,
multilabel = False, n_labels=0, ml_tree_structure=0, xcnet=False):
|
if clt is None:
COC = [[] for i in range(data.shape[0])]
for r in range(data.shape[0]):
for f in range(data.shape[1]):
if data[r,f]>0:
COC[r].append(f)
self.node.cltree = Cltree()
self.node.cltree.fit(data, alpha=self.alpha,
multilabel = self.multilabel, n_labels=self.n_labels, ml_tree_structure=self.ml_tree_structure)
self.orig_ll = self.node.cltree.score_samples_log_proba(self.data)
sparsity = 0.0
sparsity = len(self.data.nonzero()[0])
sparsity /= (self.data.shape[1] * self.data.shape[0])
logger.info("Dataset sparsity: %f", sparsity)
else:
self.node.cltree = clt
self.orig_ll = ll
self.scope = self.node.cltree.scope
self.id = Csn._id_node_counter
Csn._id_node_counter = Csn._id_node_counter + 1
print("Block", self.id, "on", len(self.scope), "features and", self.data.shape[0], "instances, local ll:", self.orig_ll)
if self.data.shape[0] > self.min_instances:
if self.data.shape[1] >= self.min_features:
self.or_cut()
else:
print( " > no cutting due to few features")
else:
print(" > no cutting due to few instances")
if is_tree_node(self.node):
if self.depth > Csn._depth:
Csn._depth = self.depth
Csn._mean_depth = Csn._mean_depth + self.depth
Csn._leaf_nodes = Csn._leaf_nodes + 1
Csn._cltrees = Csn._cltrees + 1
Csn._clt_edges = Csn._clt_edges + self.node.cltree.num_edges
def check_correctness(self,k):
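# Brute-force sanity check: summing the model's probability over all 2**k worlds should give ~1.0.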
mean = 0.0
for world in itertools.product([0,1], repeat=k):
prob = np.exp(self._score_sample_log_proba(world))
mean = mean + prob
return mean
def show(self):
""" WRITEME """
print ("Learned Cut Set Network")
# self._showl(0)
print("OR nodes:", Csn._or_nodes)
print("Leaves:", Csn._leaf_nodes)
print("Cltrees:", Csn._cltrees)
print("Edges outgoing OR nodes:", Csn._or_edges)
print("Edges in CLtrees:", Csn._clt_edges)
print("Total edges:", Csn._or_edges + Csn._clt_edges)
print("Total nodes:", Csn._or_nodes + Csn._leaf_nodes + Csn._and_nodes)
print("Depth:", Csn._depth)
print("Mean Depth:", Csn._mean_depth / Csn._leaf_nodes)
def _showl(self,level):
""" WRITEME """
if is_or_node(self.node):
print(self.id,"OR", self.node.left_weight,self.node.left_child.id,self.node.right_child.id,"on",self.scope[self.node.or_feature])
self.node.left_child._showl(level+1)
self.node.right_child._showl(level+1)
elif is_and_node(self.node):
print(self.id, "AND", end="")
for i in range(len(self.tree_forest)):
if self.node.or_features[i] is None:
print("()", end="")
else:
print("(",self.node.children_left[i].id,self.node.children_right[i].id,"on",self.node.cltree.scope[self.tree_forest[i][self.node.or_features[i]]],")", end="")
print("")
for i in range(len(self.tree_forest)):
if self.node.or_features[i] is not None:
self.node.children_left[i]._showl(level+1)
self.node.children_right[i]._showl(level+1)
elif is_sum_node(self.node):
print(self.id,"SUM", self.node.weights)
for c in self.node.children:
c._showl(level+1)
else:
print(self.id, "LEAF", end=" ")
if self.node.cltree.is_forest():
print("Forest")
else:
print("Tree")
print(self.node.cltree.tree)
print(self.node.cltree.scope)
def mpe(self, evidence = {}):
""" WRITEME """
return self.node.mpe(evidence)
def marginal_inference(self, evidence = {}):
""" WRITEME """
return self.node.marginal_inference(evidence)
def naiveMPE(self, evidence = {}):
maxprob = -np.inf
maxstate = []
for w in (itertools.product([0, 1], repeat=self.n)):
ver = True
for var, state in evidence.items():
if w[var] != state:
ver = False
break
if ver:
prob = self.score_sample_log_proba(w)
print(prob)
if prob > maxprob:
maxprob = prob
maxstate = w
return (maxstate, maxprob)
def score_sample_log_proba(self,x):
return self.node.score_sample_log_proba(x)
def score_samples_log_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = self.score_sample_log_proba(X[i])
m = np.sum(Prob) / X.shape[0]
return m
def score_samples_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = np.exp(self.score_sample_log_proba(X[i]))
return Prob
def or_cut(self):
print(" > trying to cut ... ")
sys.stdout.flush()
found = False
bestlik = self.orig_ll
best_clt_l = None
best_clt_r = None
best_feature_cut = None
best_left_weight = 0.0
best_right_weight = 0.0
best_right_data = None
best_left_data = None
best_v_ll = 0.0
best_gain = -np.inf
best_left_sample_weight = None
best_right_sample_weight = None
cutting_features = []
for f in range(self.node.cltree.n_features):
if self.scope[f] not in self.leaf_vars:
cutting_features.append(f)
selected = cutting_features
if self.xcnet:
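# xcnet variant: cut on a single randomly chosen feature instead of scoring every candidate.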
selected = [random.choice(selected)]
bestlik = -np.inf
ll = 0.0
CL_l = None
CL_r = None
feature = None
left_weight = 0.0
right_weight = 0.0
left_data = None
right_data = None
l_ll = 0.0
r_ll = 0.0
for feature in selected:
condition = self.data[:,feature]==0
new_features = np.ones(self.data.shape[1], dtype=bool)
new_features[feature] = False
left_data = self.data[condition,:][:, new_features]
right_data = self.data[~condition,:][:, new_features]
left_weight = (left_data.shape[0] ) / (self.data.shape[0] )
right_weight = (right_data.shape[0] ) / (self.data.shape[0] )
if left_data.shape[0] > 0 and right_data.shape[0] > 0:
left_scope = np.concatenate((self.node.clt
|
self.min_instances = min_instances
self.min_features = min_features
self.alpha = alpha
self.depth = depth
self.data = data
self.node = TreeNode()
self.multilabel = multilabel
self.n_labels = n_labels
self.ml_tree_structure = ml_tree_structure
self.xcnet = xcnet
self.leaf_vars = leaf_vars
self.n = data.shape[1]
if n_original_samples is None:
self.n_original_samples = self.data.shape[0]
else:
self.n_original_samples = n_original_samples
|
identifier_body
|
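Aside: the or_cut method in the row above scores each candidate OR split by fitting a leaf model on each half of the partitioned data and comparing average log-likelihoods against the unsplit model. A minimal sketch of that scoring loop, assuming binary 0/1 data and an independent-Bernoulli leaf with Laplace smoothing as a stand-in for the Chow-Liu tree leaves csn.py actually fits; the function names here are illustrative, not csn.py's API.

import numpy as np

def leaf_avg_ll(data, alpha=1.0):
    # Stand-in leaf: independent Bernoullis with Laplace smoothing.
    # (csn.py fits a Chow-Liu tree here instead.)
    n = data.shape[0]
    p = (data.sum(axis=0) + alpha) / (n + 2.0 * alpha)
    ll = data @ np.log(p) + (1.0 - data) @ np.log(1.0 - p)
    return ll.mean()

def best_or_split(data, alpha=1.0):
    base_ll = leaf_avg_ll(data, alpha)
    best_feature, best_gain = None, -np.inf
    for f in range(data.shape[1]):
        mask = data[:, f] == 0
        left, right = data[mask], data[~mask]
        if left.shape[0] == 0 or right.shape[0] == 0:
            continue  # a valid cut needs instances on both branches
        w_l = left.shape[0] / data.shape[0]
        w_r = right.shape[0] / data.shape[0]
        # Per-instance OR-node log-likelihood: log branch weight plus the
        # branch leaf's log-likelihood, with the cut feature removed.
        split_ll = (w_l * (np.log(w_l) + leaf_avg_ll(np.delete(left, f, axis=1), alpha))
                    + w_r * (np.log(w_r) + leaf_avg_ll(np.delete(right, f, axis=1), alpha)))
        if split_ll - base_ll > best_gain:
            best_feature, best_gain = f, split_ll - base_ll
    return best_feature, best_gain

rng = np.random.default_rng(0)
X = (rng.random((200, 6)) < 0.3).astype(float)
print(best_or_split(X))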
csn.py
|
ll = 0.0, min_instances = 5, min_features = 3,
alpha = 1.0, n_original_samples = None,
leaf_vars = [], depth = 1,
multilabel = False, n_labels=0, ml_tree_structure=0, xcnet=False):
self.min_instances = min_instances
self.min_features = min_features
self.alpha = alpha
self.depth = depth
self.data = data
self.node = TreeNode()
self.multilabel = multilabel
self.n_labels = n_labels
self.ml_tree_structure = ml_tree_structure
self.xcnet = xcnet
self.leaf_vars = leaf_vars
self.n = data.shape[1]
if n_original_samples is None:
self.n_original_samples = self.data.shape[0]
else:
self.n_original_samples = n_original_samples
if clt is None:
COC = [[] for i in range(data.shape[0])]
for r in range(data.shape[0]):
for f in range(data.shape[1]):
if data[r,f]>0:
COC[r].append(f)
self.node.cltree = Cltree()
self.node.cltree.fit(data, alpha=self.alpha,
multilabel = self.multilabel, n_labels=self.n_labels, ml_tree_structure=self.ml_tree_structure)
self.orig_ll = self.node.cltree.score_samples_log_proba(self.data)
sparsity = 0.0
sparsity = len(self.data.nonzero()[0])
sparsity /= (self.data.shape[1] * self.data.shape[0])
logger.info("Dataset sparsity: %f", sparsity)
else:
self.node.cltree = clt
self.orig_ll = ll
self.scope = self.node.cltree.scope
self.id = Csn._id_node_counter
Csn._id_node_counter = Csn._id_node_counter + 1
print("Block", self.id, "on", len(self.scope), "features and", self.data.shape[0], "instances, local ll:", self.orig_ll)
if self.data.shape[0] > self.min_instances:
if self.data.shape[1] >= self.min_features:
self.or_cut()
else:
print( " > no cutting due to few features")
else:
print(" > no cutting due to few instances")
if is_tree_node(self.node):
if self.depth > Csn._depth:
Csn._depth = self.depth
Csn._mean_depth = Csn._mean_depth + self.depth
Csn._leaf_nodes = Csn._leaf_nodes + 1
Csn._cltrees = Csn._cltrees + 1
Csn._clt_edges = Csn._clt_edges + self.node.cltree.num_edges
def check_correctness(self,k):
mean = 0.0
for world in itertools.product([0,1], repeat=k):
            prob = np.exp(self.score_sample_log_proba(world))
mean = mean + prob
return mean
def show(self):
""" WRITEME """
print ("Learned Cut Set Network")
# self._showl(0)
print("OR nodes:", Csn._or_nodes)
print("Leaves:", Csn._leaf_nodes)
print("Cltrees:", Csn._cltrees)
print("Edges outgoing OR nodes:", Csn._or_edges)
print("Edges in CLtrees:", Csn._clt_edges)
print("Total edges:", Csn._or_edges + Csn._clt_edges)
print("Total nodes:", Csn._or_nodes + Csn._leaf_nodes + Csn._and_nodes)
print("Depth:", Csn._depth)
print("Mean Depth:", Csn._mean_depth / Csn._leaf_nodes)
def _showl(self,level):
""" WRITEME """
if is_or_node(self.node):
print(self.id,"OR", self.node.left_weight,self.node.left_child.id,self.node.right_child.id,"on",self.scope[self.node.or_feature])
self.node.left_child._showl(level+1)
self.node.right_child._showl(level+1)
elif is_and_node(self.node):
print(self.id, "AND", end="")
for i in range(len(self.tree_forest)):
                if self.node.or_features[i] is None:
print("()", end="")
else:
print("(",self.node.children_left[i].id,self.node.children_right[i].id,"on",self.node.cltree.scope[self.tree_forest[i][self.node.or_features[i]]],")", end="")
print("")
for i in range(len(self.tree_forest)):
if self.node.or_features[i] is not None:
self.node.children_left[i]._showl(level+1)
self.node.children_right[i]._showl(level+1)
elif is_sum_node(self.node):
print(self.id,"SUM", self.node.weights)
for c in self.node.children:
c._showl(level+1)
else:
print(self.id, "LEAF", end=" ")
if self.node.cltree.is_forest():
print("Forest")
else:
print("Tree")
print(self.node.cltree.tree)
print(self.node.cltree.scope)
def mpe(self, evidence = {}):
""" WRITEME """
return self.node.mpe(evidence)
def marginal_inference(self, evidence = {}):
""" WRITEME """
return self.node.marginal_inference(evidence)
def naiveMPE(self, evidence = {}):
maxprob = -np.inf
maxstate = []
for w in (itertools.product([0, 1], repeat=self.n)):
ver = True
for var, state in evidence.items():
if w[var] != state:
ver = False
break
if ver:
prob = self.score_sample_log_proba(w)
print(prob)
if prob > maxprob:
maxprob = prob
maxstate = w
return (maxstate, maxprob)
def score_sample_log_proba(self,x):
return self.node.score_sample_log_proba(x)
def score_samples_log_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = self.score_sample_log_proba(X[i])
m = np.sum(Prob) / X.shape[0]
return m
def score_samples_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = np.exp(self.score_sample_log_proba(X[i]))
return Prob
def or_cut(self):
print(" > trying to cut ... ")
sys.stdout.flush()
found = False
bestlik = self.orig_ll
best_clt_l = None
best_clt_r = None
best_feature_cut = None
best_left_weight = 0.0
best_right_weight = 0.0
best_right_data = None
best_left_data = None
best_v_ll = 0.0
best_gain = -np.inf
best_left_sample_weight = None
best_right_sample_weight = None
cutting_features = []
for f in range(self.node.cltree.n_features):
if self.scope[f] not in self.leaf_vars:
cutting_features.append(f)
selected = cutting_features
if self.xcnet:
selected = [random.choice(selected)]
bestlik = -np.inf
ll = 0.0
CL_l = None
CL_r = None
feature = None
left_weight = 0.0
right_weight = 0.0
left_data = None
right_data = None
l_ll = 0.0
r_ll = 0.0
for feature in selected:
condition = self.data[:,feature]==0
new_features = np.ones(self.data.shape[1], dtype=bool)
new_features[feature] = False
left_data = self.data[condition,:][:, new_features]
right_data = self.data[~condition,:][:, new_features]
left_weight = (left_data.shape[0] ) / (self.data.shape[0] )
right_weight = (right_data.shape[0] ) / (self.data.shape[0] )
if left_data.shape[0] > 0 and right_data.shape[0] > 0:
left_scope = np.concatenate((self.node.cltree.scope[0:feature],self.node.cltree.scope[feature+1:]))
right_scope = np.concatenate((self.node.cltree.scope[0:feature],self.node.cltree.scope[feature+1:]))
CL_l = Cltree()
CL_r = Cltree()
CL_l.fit(left_data,scope=left_scope,alpha=self.alpha,
multilabel = self.multilabel, n_labels=self.n_labels, ml_tree_structure=self.ml_tree_structure)
CL_r.fit(right_data,scope=right_scope,alpha=self.alpha,
multilabel = self.multilabel, n_labels=self.n_labels, ml_tree_structure=self.ml_tree_structure)
l_ll = CL_l.score_samples_log_proba(left_data)
r_ll = CL_r.score_samples_log_proba(right_data)
|
random_line_split
|
||
csn.py
|
_id_node_counter = 1
_or_nodes = 0
_leaf_nodes = 0
_or_edges = 0
_clt_edges = 0
_cltrees = 0
_depth = 0
_mean_depth = 0
@classmethod
def init_stats(cls):
Csn._id_node_counter = 1
Csn._or_nodes = 0
Csn._leaf_nodes = 0
Csn._or_edges = 0
Csn._clt_edges = 0
Csn._cltrees = 0
Csn._depth = 0
Csn._mean_depth = 0
def __init__(self, data, clt = None, ll = 0.0, min_instances = 5, min_features = 3,
alpha = 1.0, n_original_samples = None,
leaf_vars = [], depth = 1,
multilabel = False, n_labels=0, ml_tree_structure=0, xcnet=False):
self.min_instances = min_instances
self.min_features = min_features
self.alpha = alpha
self.depth = depth
self.data = data
self.node = TreeNode()
self.multilabel = multilabel
self.n_labels = n_labels
self.ml_tree_structure = ml_tree_structure
self.xcnet = xcnet
self.leaf_vars = leaf_vars
self.n = data.shape[1]
if n_original_samples is None:
self.n_original_samples = self.data.shape[0]
else:
self.n_original_samples = n_original_samples
if clt is None:
COC = [[] for i in range(data.shape[0])]
for r in range(data.shape[0]):
for f in range(data.shape[1]):
if data[r,f]>0:
COC[r].append(f)
self.node.cltree = Cltree()
self.node.cltree.fit(data, alpha=self.alpha,
multilabel = self.multilabel, n_labels=self.n_labels, ml_tree_structure=self.ml_tree_structure)
self.orig_ll = self.node.cltree.score_samples_log_proba(self.data)
sparsity = 0.0
sparsity = len(self.data.nonzero()[0])
sparsity /= (self.data.shape[1] * self.data.shape[0])
logger.info("Dataset sparsity: %f", sparsity)
else:
self.node.cltree = clt
self.orig_ll = ll
self.scope = self.node.cltree.scope
self.id = Csn._id_node_counter
Csn._id_node_counter = Csn._id_node_counter + 1
print("Block", self.id, "on", len(self.scope), "features and", self.data.shape[0], "instances, local ll:", self.orig_ll)
if self.data.shape[0] > self.min_instances:
if self.data.shape[1] >= self.min_features:
self.or_cut()
else:
print( " > no cutting due to few features")
else:
print(" > no cutting due to few instances")
if is_tree_node(self.node):
if self.depth > Csn._depth:
Csn._depth = self.depth
Csn._mean_depth = Csn._mean_depth + self.depth
Csn._leaf_nodes = Csn._leaf_nodes + 1
Csn._cltrees = Csn._cltrees + 1
Csn._clt_edges = Csn._clt_edges + self.node.cltree.num_edges
def check_correctness(self,k):
mean = 0.0
for world in itertools.product([0,1], repeat=k):
            prob = np.exp(self.score_sample_log_proba(world))
mean = mean + prob
return mean
def show(self):
""" WRITEME """
print ("Learned Cut Set Network")
# self._showl(0)
print("OR nodes:", Csn._or_nodes)
print("Leaves:", Csn._leaf_nodes)
print("Cltrees:", Csn._cltrees)
print("Edges outgoing OR nodes:", Csn._or_edges)
print("Edges in CLtrees:", Csn._clt_edges)
print("Total edges:", Csn._or_edges + Csn._clt_edges)
print("Total nodes:", Csn._or_nodes + Csn._leaf_nodes + Csn._and_nodes)
print("Depth:", Csn._depth)
print("Mean Depth:", Csn._mean_depth / Csn._leaf_nodes)
def _showl(self,level):
""" WRITEME """
if is_or_node(self.node):
print(self.id,"OR", self.node.left_weight,self.node.left_child.id,self.node.right_child.id,"on",self.scope[self.node.or_feature])
self.node.left_child._showl(level+1)
self.node.right_child._showl(level+1)
elif is_and_node(self.node):
print(self.id, "AND", end="")
for i in range(len(self.tree_forest)):
                if self.node.or_features[i] is None:
print("()", end="")
else:
print("(",self.node.children_left[i].id,self.node.children_right[i].id,"on",self.node.cltree.scope[self.tree_forest[i][self.node.or_features[i]]],")", end="")
print("")
for i in range(len(self.tree_forest)):
if self.node.or_features[i] is not None:
self.node.children_left[i]._showl(level+1)
self.node.children_right[i]._showl(level+1)
elif is_sum_node(self.node):
print(self.id,"SUM", self.node.weights)
for c in self.node.children:
c._showl(level+1)
else:
print(self.id, "LEAF", end=" ")
if self.node.cltree.is_forest():
print("Forest")
else:
print("Tree")
print(self.node.cltree.tree)
print(self.node.cltree.scope)
def mpe(self, evidence = {}):
""" WRITEME """
return self.node.mpe(evidence)
def marginal_inference(self, evidence = {}):
""" WRITEME """
return self.node.marginal_inference(evidence)
def naiveMPE(self, evidence = {}):
maxprob = -np.inf
maxstate = []
for w in (itertools.product([0, 1], repeat=self.n)):
ver = True
for var, state in evidence.items():
if w[var] != state:
ver = False
break
if ver:
prob = self.score_sample_log_proba(w)
print(prob)
if prob > maxprob:
maxprob = prob
maxstate = w
return (maxstate, maxprob)
def score_sample_log_proba(self,x):
return self.node.score_sample_log_proba(x)
def score_samples_log_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = self.score_sample_log_proba(X[i])
m = np.sum(Prob) / X.shape[0]
return m
def
|
(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = np.exp(self.score_sample_log_proba(X[i]))
return Prob
def or_cut(self):
print(" > trying to cut ... ")
sys.stdout.flush()
found = False
bestlik = self.orig_ll
best_clt_l = None
best_clt_r = None
best_feature_cut = None
best_left_weight = 0.0
best_right_weight = 0.0
best_right_data = None
best_left_data = None
best_v_ll = 0.0
best_gain = -np.inf
best_left_sample_weight = None
best_right_sample_weight = None
cutting_features = []
for f in range(self.node.cltree.n_features):
if self.scope[f] not in self.leaf_vars:
cutting_features.append(f)
selected = cutting_features
if self.xcnet:
selected = [random.choice(selected)]
bestlik = -np.inf
ll = 0.0
CL_l = None
CL_r = None
feature = None
left_weight = 0.0
right_weight = 0.0
left_data = None
right_data = None
l_ll = 0.0
r_ll = 0.0
for feature in selected:
condition = self.data[:,feature]==0
new_features = np.ones(self.data.shape[1], dtype=bool)
new_features[feature] = False
left_data = self.data[condition,:][:, new_features]
right_data = self.data[~condition,:][:, new_features]
left_weight = (left_data.shape[0] ) / (self.data.shape[0] )
right_weight = (right_data.shape[0] ) / (self.data.shape[0] )
if left_data.shape[0] > 0 and right_data.shape[0] > 0:
left_scope = np.concatenate((self.node.c
|
score_samples_proba
|
identifier_name
|
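Aside: naiveMPE in these rows enumerates all 2**n joint states, skips any state inconsistent with the evidence, and keeps the one with the highest log-probability. The same pattern in isolation, against a toy fully factorized model standing in for the network's score_sample_log_proba:

import itertools
import numpy as np

def naive_mpe(log_prob, n, evidence=None):
    # Exhaustive MPE: only feasible for small n, since it scans 2**n states.
    evidence = evidence or {}
    best_state, best_lp = None, -np.inf
    for w in itertools.product([0, 1], repeat=n):
        if any(w[var] != state for var, state in evidence.items()):
            continue  # skip states inconsistent with the evidence
        lp = log_prob(w)
        if lp > best_lp:
            best_state, best_lp = w, lp
    return best_state, best_lp

# Toy stand-in for score_sample_log_proba: independent Bernoullis.
p = np.array([0.9, 0.2, 0.6])
log_prob = lambda w: float(np.where(np.array(w) == 1, np.log(p), np.log(1 - p)).sum())
print(naive_mpe(log_prob, 3, evidence={1: 1}))  # ((1, 1, 1), ...)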
csn.py
|
_id_node_counter = 1
_or_nodes = 0
_leaf_nodes = 0
_or_edges = 0
_clt_edges = 0
_cltrees = 0
_depth = 0
_mean_depth = 0
@classmethod
def init_stats(cls):
Csn._id_node_counter = 1
Csn._or_nodes = 0
Csn._leaf_nodes = 0
Csn._or_edges = 0
Csn._clt_edges = 0
Csn._cltrees = 0
Csn._depth = 0
Csn._mean_depth = 0
def __init__(self, data, clt = None, ll = 0.0, min_instances = 5, min_features = 3,
alpha = 1.0, n_original_samples = None,
leaf_vars = [], depth = 1,
multilabel = False, n_labels=0, ml_tree_structure=0, xcnet=False):
self.min_instances = min_instances
self.min_features = min_features
self.alpha = alpha
self.depth = depth
self.data = data
self.node = TreeNode()
self.multilabel = multilabel
self.n_labels = n_labels
self.ml_tree_structure = ml_tree_structure
self.xcnet = xcnet
self.leaf_vars = leaf_vars
self.n = data.shape[1]
if n_original_samples is None:
self.n_original_samples = self.data.shape[0]
else:
self.n_original_samples = n_original_samples
if clt is None:
COC = [[] for i in range(data.shape[0])]
for r in range(data.shape[0]):
|
self.node.cltree = Cltree()
self.node.cltree.fit(data, alpha=self.alpha,
multilabel = self.multilabel, n_labels=self.n_labels, ml_tree_structure=self.ml_tree_structure)
self.orig_ll = self.node.cltree.score_samples_log_proba(self.data)
sparsity = 0.0
sparsity = len(self.data.nonzero()[0])
sparsity /= (self.data.shape[1] * self.data.shape[0])
logger.info("Dataset sparsity: %f", sparsity)
else:
self.node.cltree = clt
self.orig_ll = ll
self.scope = self.node.cltree.scope
self.id = Csn._id_node_counter
Csn._id_node_counter = Csn._id_node_counter + 1
print("Block", self.id, "on", len(self.scope), "features and", self.data.shape[0], "instances, local ll:", self.orig_ll)
if self.data.shape[0] > self.min_instances:
if self.data.shape[1] >= self.min_features:
self.or_cut()
else:
print( " > no cutting due to few features")
else:
print(" > no cutting due to few instances")
if is_tree_node(self.node):
if self.depth > Csn._depth:
Csn._depth = self.depth
Csn._mean_depth = Csn._mean_depth + self.depth
Csn._leaf_nodes = Csn._leaf_nodes + 1
Csn._cltrees = Csn._cltrees + 1
Csn._clt_edges = Csn._clt_edges + self.node.cltree.num_edges
def check_correctness(self,k):
mean = 0.0
for world in itertools.product([0,1], repeat=k):
            prob = np.exp(self.score_sample_log_proba(world))
mean = mean + prob
return mean
def show(self):
""" WRITEME """
print ("Learned Cut Set Network")
# self._showl(0)
print("OR nodes:", Csn._or_nodes)
print("Leaves:", Csn._leaf_nodes)
print("Cltrees:", Csn._cltrees)
print("Edges outgoing OR nodes:", Csn._or_edges)
print("Edges in CLtrees:", Csn._clt_edges)
print("Total edges:", Csn._or_edges + Csn._clt_edges)
print("Total nodes:", Csn._or_nodes + Csn._leaf_nodes + Csn._and_nodes)
print("Depth:", Csn._depth)
print("Mean Depth:", Csn._mean_depth / Csn._leaf_nodes)
def _showl(self,level):
""" WRITEME """
if is_or_node(self.node):
print(self.id,"OR", self.node.left_weight,self.node.left_child.id,self.node.right_child.id,"on",self.scope[self.node.or_feature])
self.node.left_child._showl(level+1)
self.node.right_child._showl(level+1)
elif is_and_node(self.node):
print(self.id, "AND", end="")
for i in range(len(self.tree_forest)):
                if self.node.or_features[i] is None:
print("()", end="")
else:
print("(",self.node.children_left[i].id,self.node.children_right[i].id,"on",self.node.cltree.scope[self.tree_forest[i][self.node.or_features[i]]],")", end="")
print("")
for i in range(len(self.tree_forest)):
if self.node.or_features[i] is not None:
self.node.children_left[i]._showl(level+1)
self.node.children_right[i]._showl(level+1)
elif is_sum_node(self.node):
print(self.id,"SUM", self.node.weights)
for c in self.node.children:
c._showl(level+1)
else:
print(self.id, "LEAF", end=" ")
if self.node.cltree.is_forest():
print("Forest")
else:
print("Tree")
print(self.node.cltree.tree)
print(self.node.cltree.scope)
def mpe(self, evidence = {}):
""" WRITEME """
return self.node.mpe(evidence)
def marginal_inference(self, evidence = {}):
""" WRITEME """
return self.node.marginal_inference(evidence)
def naiveMPE(self, evidence = {}):
maxprob = -np.inf
maxstate = []
for w in (itertools.product([0, 1], repeat=self.n)):
ver = True
for var, state in evidence.items():
if w[var] != state:
ver = False
break
if ver:
prob = self.score_sample_log_proba(w)
print(prob)
if prob > maxprob:
maxprob = prob
maxstate = w
return (maxstate, maxprob)
def score_sample_log_proba(self,x):
return self.node.score_sample_log_proba(x)
def score_samples_log_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = self.score_sample_log_proba(X[i])
m = np.sum(Prob) / X.shape[0]
return m
def score_samples_proba(self, X):
Prob = X[:,0]*0.0
for i in range(X.shape[0]):
Prob[i] = np.exp(self.score_sample_log_proba(X[i]))
return Prob
def or_cut(self):
print(" > trying to cut ... ")
sys.stdout.flush()
found = False
bestlik = self.orig_ll
best_clt_l = None
best_clt_r = None
best_feature_cut = None
best_left_weight = 0.0
best_right_weight = 0.0
best_right_data = None
best_left_data = None
best_v_ll = 0.0
best_gain = -np.inf
best_left_sample_weight = None
best_right_sample_weight = None
cutting_features = []
for f in range(self.node.cltree.n_features):
if self.scope[f] not in self.leaf_vars:
cutting_features.append(f)
selected = cutting_features
if self.xcnet:
selected = [random.choice(selected)]
bestlik = -np.inf
ll = 0.0
CL_l = None
CL_r = None
feature = None
left_weight = 0.0
right_weight = 0.0
left_data = None
right_data = None
l_ll = 0.0
r_ll = 0.0
for feature in selected:
condition = self.data[:,feature]==0
new_features = np.ones(self.data.shape[1], dtype=bool)
new_features[feature] = False
left_data = self.data[condition,:][:, new_features]
right_data = self.data[~condition,:][:, new_features]
left_weight = (left_data.shape[0] ) / (self.data.shape[0] )
right_weight = (right_data.shape[0] ) / (self.data.shape[0] )
if left_data.shape[0] > 0 and right_data.shape[0] > 0:
left_scope = np.concatenate((self.node.c
|
for f in range(data.shape[1]):
if data[r,f]>0:
COC[r].append(f)
|
conditional_block
|
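Aside: Cltree.fit itself is not shown in these rows. The classic Chow-Liu recipe it implements is: estimate smoothed pairwise mutual information over the binary variables, then keep a maximum spanning tree. A compact sketch under those assumptions; the eps offset and the scipy MST call are my choices, not csn.py's internals.

import numpy as np
from scipy.sparse.csgraph import minimum_spanning_tree

def chow_liu_edges(data, alpha=1.0):
    """Chow-Liu skeleton for binary data: pairwise MI + maximum spanning tree."""
    d = data.shape[1]
    eps = 1e-9  # keep entries nonzero; scipy treats exact zeros as missing edges
    mi = np.zeros((d, d))
    for i in range(d):
        for j in range(i + 1, d):
            # Laplace-smoothed joint distribution of the pair (i, j).
            joint = np.empty((2, 2))
            for a in (0, 1):
                for b in (0, 1):
                    joint[a, b] = ((data[:, i] == a) & (data[:, j] == b)).sum() + alpha
            joint /= joint.sum()
            pi, pj = joint.sum(axis=1), joint.sum(axis=0)
            mi[i, j] = eps + (joint * np.log(joint / np.outer(pi, pj))).sum()
    # Maximum spanning tree of MI == minimum spanning tree of -MI.
    mst = minimum_spanning_tree(-mi)
    rows, cols = mst.nonzero()
    return sorted(zip(rows.tolist(), cols.tolist()))

rng = np.random.default_rng(1)
X = (rng.random((500, 5)) < 0.4).astype(int)
print(chow_liu_edges(X))  # d - 1 = 4 undirected edges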
network.rs
|
Mojang, MojangHasJoinedResponse};
use crate::packets::*;
use crate::player::Player;
use openssl::pkey::Private;
use openssl::rsa::{Padding, Rsa};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration};
struct Connection {
packet_receiver: mpsc::Receiver<PacketBuffer>,
stream: TcpStream,
alive: bool,
}
impl Connection {
fn new(stream: TcpStream) -> Connection {
println!("New connection!");
let reader = stream.try_clone().unwrap();
let (tx, rx) = mpsc::channel();
let connection = Connection {
packet_receiver: rx,
stream,
alive: true,
};
thread::spawn(|| {
Connection::handle_connection(reader, tx);
});
connection
}
fn handle_connection(mut stream: TcpStream, packet_sender: mpsc::Sender<PacketBuffer>) {
loop {
let mut data = vec![0u8; 512];
let length = stream.read(&mut data).unwrap();
            if length == 0 {
                // A zero-length read means the peer closed the connection.
                break;
            }
data.drain(length..);
data.shrink_to_fit();
packet_sender.send(data).unwrap();
}
}
fn receive_packets(&mut self) -> Vec<PacketBuffer> {
let mut packets = Vec::new();
loop {
match self.packet_receiver.try_recv() {
Ok(packet) => packets.push(packet),
Err(mpsc::TryRecvError::Empty) => return packets,
Err(mpsc::TryRecvError::Disconnected) => {
self.alive = false;
return packets;
}
}
}
}
}
pub struct Client {
connection: Connection,
state: NetworkState,
pub shared_secret: Option<Vec<u8>>,
pub compressed: bool,
verify_token: Option<Vec<u8>>,
player: Option<Player>,
username: Option<String>,
id: u32,
}
impl Client {
fn new(stream: TcpStream, id: u32) -> Client {
let connection = Connection::new(stream);
Client {
connection,
state: NetworkState::HANDSHAKING,
shared_secret: None,
compressed: false,
verify_token: None,
player: None,
username: None,
id,
}
}
fn send_packet(&mut self, encoder: &PacketEncoder) {
let buffer = encoder.finalize(self.compressed, &self.shared_secret);
        self.connection.stream.write_all(buffer.as_slice()).unwrap(); // write_all avoids dropping partial writes
}
}
#[derive(Serialize, Deserialize)]
pub struct ServerConfig {
max_players: i32,
motd: String,
}
pub struct Server {
clients: Vec<Client>,
client_receiver: mpsc::Receiver<Client>,
key_pair: Rsa<Private>,
mojang: Mojang,
}
impl Server {
fn new() -> Server {
let rsa = Rsa::generate(1024).unwrap();
let (tx, rx) = mpsc::channel();
let server = Server {
clients: Vec::new(),
key_pair: rsa,
mojang: Mojang::new(),
client_receiver: rx
};
server.listen_for_connections(tx);
server
}
    fn get_client(&self, client_id: u32) -> &Client {
        self.clients.iter().find(|client| client.id == client_id).unwrap()
    }
fn listen_for_connections(&self, sender: mpsc::Sender<Client>) {
let mut next_id = 0;
thread::spawn(move || {
let listener = TcpListener::bind("0.0.0.0:25566").unwrap();
for stream in listener.incoming() {
let stream = stream.unwrap();
let client = Client::new(stream, next_id);
sender.send(client).unwrap();
next_id += 1;
}
});
}
fn unknown_packet(id: i32) {
eprintln!("Unknown packet with id: {}", id);
}
fn handle_packet(&mut self, client: usize, packet: PacketBuffer) {
let client = self.clients.get_mut(client).unwrap();
let decoder = PacketDecoder::new(packet, client);
println!(
"Packet received: {}, with the length of: {}",
decoder.packet_id, decoder.length
);
let state = client.state;
match state {
NetworkState::HANDSHAKING => match decoder.packet_id {
0x00 => {
let packet = S00Handshake::decode(decoder);
println!("New state: {:#?}", packet.next_state);
client.state = packet.next_state;
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::STATUS => match decoder.packet_id {
0x00 => {
let json_response = json!({
"version": {
"name": "RustMC 1.15.1",
"protocol": 575
},
"players": {
"max": 100,
"online": 1,
"sample": [],
},
"description": {
"text": "Hello World!",
"color": "gold"
}
})
.to_string();
let response_encoder = C00Response { json_response }.encode();
client.send_packet(&response_encoder);
}
0x01 => {
let packet = S01Ping::decode(decoder);
let pong_encoder = C01Pong {
payload: packet.payload,
}
.encode();
client.send_packet(&pong_encoder);
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::LOGIN => match decoder.packet_id {
0x00 => {
let packet = S00LoginStart::decode(decoder);
let public_key = self.key_pair.public_key_to_der().unwrap();
let verify_token = rand::thread_rng().gen::<[u8; 4]>().to_vec();
let request_encoder = C01EcryptionRequest {
server_id: "".to_string(),
public_key_length: public_key.len() as i32,
public_key,
verify_token_length: 4,
verify_token: verify_token.clone(),
}
.encode();
client.verify_token = Some(verify_token);
client.username = Some(packet.name);
client.send_packet(&request_encoder);
}
0x01 => {
let packet = S01EncryptionResponse::decode(decoder);
let mut received_verify_token = vec![0u8; packet.verify_token_length as usize];
let length_decrypted = self
.key_pair
.private_decrypt(
packet.verify_token.as_slice(),
received_verify_token.as_mut(),
Padding::PKCS1,
)
.unwrap();
received_verify_token.drain(length_decrypted..received_verify_token.len());
if &received_verify_token == client.verify_token.as_ref().unwrap() {
// Start login process
println!("Starting login process");
/*self.mojang.send_has_joined(
&clients[client].username.unwrap(),
clients[client].id
);*/
} else {
println!("Verify token incorrent!!");
}
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::PLAY => match decoder.packet_id {
_ => Server::unknown_packet(decoder.packet_id),
},
}
}
fn on_mojang_has_joined_response(&mut self, client_id: u32, result: MojangHasJoinedResponse) {
let client = self.get_client(client_id);
}
fn receive_packets(&mut self) {
let num_clients = self.clients.len();
for client in 0..num_clients {
let mut packets = self.clients[client]
.connection
.receive_packets();
for packet_batch in packets.drain(..) {
for packet in PacketDecoder::new_batch(packet_batch, &self.clients[client]) {
println!("{}", utils::to_hex_string(&packet.buffer));
self.handle_packet(client, packet.buffer);
}
}
}
}
fn receive_clients(&mut self) {
let result = self.client_receiver.try_recv();
if let Ok(client) = result {
self.clients.push(client);
}
}
fn poll_mojang(&mut self) { // TODO: Clean up maybe
let mut finished_indicies = Vec::new();
for (i, pending) in self.mojang.has_joined_pending.iter().enumerate() {
if pending.result.is_some() {
finished_indicies.push(i);
}
}
for index in finished_indicies {
let response = self.mojang.has_joined_pending.remove(index);
self.on_mojang_has_joined_response(response.client_id, response.result.unwrap());
}
self.mojang.clean();
}
fn start(mut self) {
println!("Listening for connections...");
//let mut last_tick_time = SystemTime::now();
|
loop {
/*let now = SystemTime::now();
let time_since = now.duration_since(last_tick_time).unwrap().as_millis();
if time_since > 50 {
last_tick_time = now;
}
*/
self
|
random_line_split
|
|
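Aside: Connection::receive_packets in the row above drains the mpsc channel without blocking, returning whatever has accumulated since the last poll and marking the connection dead on disconnect. The same non-blocking drain pattern in Python, using queue.Queue as a rough stand-in for the mpsc channel; note that queue.Queue has no disconnect signal, so the alive = false branch has no direct analogue here.

import queue

def drain(q):
    """Non-blocking drain: return everything queued right now."""
    packets = []
    while True:
        try:
            packets.append(q.get_nowait())
        except queue.Empty:
            return packets

q = queue.Queue()
for i in range(3):
    q.put("packet-%d" % i)
print(drain(q))  # ['packet-0', 'packet-1', 'packet-2']
print(drain(q))  # [] (nothing blocks when the queue is empty)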
network.rs
|
Mojang, MojangHasJoinedResponse};
use crate::packets::*;
use crate::player::Player;
use openssl::pkey::Private;
use openssl::rsa::{Padding, Rsa};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration};
struct Connection {
packet_receiver: mpsc::Receiver<PacketBuffer>,
stream: TcpStream,
alive: bool,
}
impl Connection {
fn new(stream: TcpStream) -> Connection {
println!("New connection!");
let reader = stream.try_clone().unwrap();
let (tx, rx) = mpsc::channel();
let connection = Connection {
packet_receiver: rx,
stream,
alive: true,
};
thread::spawn(|| {
Connection::handle_connection(reader, tx);
});
connection
}
fn handle_connection(mut stream: TcpStream, packet_sender: mpsc::Sender<PacketBuffer>) {
loop {
let mut data = vec![0u8; 512];
let length = stream.read(&mut data).unwrap();
            if length == 0 {
                // A zero-length read means the peer closed the connection.
                break;
            }
data.drain(length..);
data.shrink_to_fit();
packet_sender.send(data).unwrap();
}
}
fn receive_packets(&mut self) -> Vec<PacketBuffer> {
let mut packets = Vec::new();
loop {
match self.packet_receiver.try_recv() {
Ok(packet) => packets.push(packet),
Err(mpsc::TryRecvError::Empty) => return packets,
Err(mpsc::TryRecvError::Disconnected) => {
self.alive = false;
return packets;
}
}
}
}
}
pub struct Client {
connection: Connection,
state: NetworkState,
pub shared_secret: Option<Vec<u8>>,
pub compressed: bool,
verify_token: Option<Vec<u8>>,
player: Option<Player>,
username: Option<String>,
id: u32,
}
impl Client {
fn new(stream: TcpStream, id: u32) -> Client {
let connection = Connection::new(stream);
Client {
connection,
state: NetworkState::HANDSHAKING,
shared_secret: None,
compressed: false,
verify_token: None,
player: None,
username: None,
id,
}
}
fn send_packet(&mut self, encoder: &PacketEncoder) {
let buffer = encoder.finalize(self.compressed, &self.shared_secret);
        self.connection.stream.write_all(buffer.as_slice()).unwrap(); // write_all avoids dropping partial writes
}
}
#[derive(Serialize, Deserialize)]
pub struct ServerConfig {
max_players: i32,
motd: String,
}
pub struct Server {
clients: Vec<Client>,
client_receiver: mpsc::Receiver<Client>,
key_pair: Rsa<Private>,
mojang: Mojang,
}
impl Server {
fn new() -> Server {
let rsa = Rsa::generate(1024).unwrap();
let (tx, rx) = mpsc::channel();
let server = Server {
clients: Vec::new(),
key_pair: rsa,
mojang: Mojang::new(),
client_receiver: rx
};
server.listen_for_connections(tx);
server
}
    fn get_client(&self, client_id: u32) -> &Client {
        self.clients.iter().find(|client| client.id == client_id).unwrap()
    }
fn listen_for_connections(&self, sender: mpsc::Sender<Client>) {
let mut next_id = 0;
thread::spawn(move || {
let listener = TcpListener::bind("0.0.0.0:25566").unwrap();
for stream in listener.incoming() {
let stream = stream.unwrap();
let client = Client::new(stream, next_id);
sender.send(client).unwrap();
next_id += 1;
}
});
}
fn unknown_packet(id: i32) {
eprintln!("Unknown packet with id: {}", id);
}
fn handle_packet(&mut self, client: usize, packet: PacketBuffer) {
let client = self.clients.get_mut(client).unwrap();
let decoder = PacketDecoder::new(packet, client);
println!(
"Packet received: {}, with the length of: {}",
decoder.packet_id, decoder.length
);
let state = client.state;
match state {
NetworkState::HANDSHAKING => match decoder.packet_id {
0x00 => {
let packet = S00Handshake::decode(decoder);
println!("New state: {:#?}", packet.next_state);
client.state = packet.next_state;
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::STATUS => match decoder.packet_id {
0x00 => {
let json_response = json!({
"version": {
"name": "RustMC 1.15.1",
"protocol": 575
},
"players": {
"max": 100,
"online": 1,
"sample": [],
},
"description": {
"text": "Hello World!",
"color": "gold"
}
})
.to_string();
let response_encoder = C00Response { json_response }.encode();
client.send_packet(&response_encoder);
}
0x01 => {
let packet = S01Ping::decode(decoder);
let pong_encoder = C01Pong {
payload: packet.payload,
}
.encode();
client.send_packet(&pong_encoder);
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::LOGIN => match decoder.packet_id {
0x00 => {
let packet = S00LoginStart::decode(decoder);
let public_key = self.key_pair.public_key_to_der().unwrap();
let verify_token = rand::thread_rng().gen::<[u8; 4]>().to_vec();
let request_encoder = C01EcryptionRequest {
server_id: "".to_string(),
public_key_length: public_key.len() as i32,
public_key,
verify_token_length: 4,
verify_token: verify_token.clone(),
}
.encode();
client.verify_token = Some(verify_token);
client.username = Some(packet.name);
client.send_packet(&request_encoder);
}
0x01 => {
let packet = S01EncryptionResponse::decode(decoder);
let mut received_verify_token = vec![0u8; packet.verify_token_length as usize];
let length_decrypted = self
.key_pair
.private_decrypt(
packet.verify_token.as_slice(),
received_verify_token.as_mut(),
Padding::PKCS1,
)
.unwrap();
received_verify_token.drain(length_decrypted..received_verify_token.len());
if &received_verify_token == client.verify_token.as_ref().unwrap() {
// Start login process
println!("Starting login process");
/*self.mojang.send_has_joined(
&clients[client].username.unwrap(),
clients[client].id
);*/
} else {
println!("Verify token incorrent!!");
}
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::PLAY => match decoder.packet_id {
_ => Server::unknown_packet(decoder.packet_id),
},
}
}
fn on_mojang_has_joined_response(&mut self, client_id: u32, result: MojangHasJoinedResponse) {
let client = self.get_client(client_id);
}
fn receive_packets(&mut self) {
let num_clients = self.clients.len();
for client in 0..num_clients {
let mut packets = self.clients[client]
.connection
.receive_packets();
for packet_batch in packets.drain(..) {
for packet in PacketDecoder::new_batch(packet_batch, &self.clients[client]) {
println!("{}", utils::to_hex_string(&packet.buffer));
self.handle_packet(client, packet.buffer);
}
}
}
}
fn receive_clients(&mut self) {
let result = self.client_receiver.try_recv();
if let Ok(client) = result {
self.clients.push(client);
}
}
fn poll_mojang(&mut self)
|
fn start(mut self) {
println!("Listening for connections...");
//let mut last_tick_time = SystemTime::now();
loop {
/*let now = SystemTime::now();
let time_since = now.duration_since(last_tick_time).unwrap().as_millis();
if time_since > 50 {
last_tick_time = now;
}
*/
|
{ // TODO: Clean up maybe
        let mut finished_indices = Vec::new();
        for (i, pending) in self.mojang.has_joined_pending.iter().enumerate() {
            if pending.result.is_some() {
                finished_indices.push(i);
            }
        }
        // Remove in reverse order so earlier removals don't shift later indices.
        for index in finished_indices.into_iter().rev() {
            let response = self.mojang.has_joined_pending.remove(index);
            self.on_mojang_has_joined_response(response.client_id, response.result.unwrap());
        }
        self.mojang.clean();
    }
|
identifier_body
|
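Aside: poll_mojang collects the indices of completed has_joined requests in one pass and removes them from the pending list in a second pass. The removal must run in reverse index order, otherwise each removal shifts the indices collected after it (the Rust above is written that way). The same two-pass pattern in Python, with a hypothetical shape for the pending entries:

def drain_finished(pending, on_result):
    # Pass 1: collect indices of completed entries.
    finished = [i for i, entry in enumerate(pending) if entry["result"] is not None]
    # Pass 2: remove in reverse so earlier pops don't shift later indices.
    for i in reversed(finished):
        entry = pending.pop(i)
        on_result(entry["client_id"], entry["result"])

pending = [
    {"client_id": 1, "result": "profile-1"},
    {"client_id": 2, "result": None},
    {"client_id": 3, "result": "profile-3"},
]
drain_finished(pending, lambda cid, res: print(cid, res))
print(pending)  # only client 2 is still pending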
network.rs
|
Mojang, MojangHasJoinedResponse};
use crate::packets::*;
use crate::player::Player;
use openssl::pkey::Private;
use openssl::rsa::{Padding, Rsa};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration};
struct Connection {
packet_receiver: mpsc::Receiver<PacketBuffer>,
stream: TcpStream,
alive: bool,
}
impl Connection {
fn new(stream: TcpStream) -> Connection {
println!("New connection!");
let reader = stream.try_clone().unwrap();
let (tx, rx) = mpsc::channel();
let connection = Connection {
packet_receiver: rx,
stream,
alive: true,
};
thread::spawn(|| {
Connection::handle_connection(reader, tx);
});
connection
}
fn handle_connection(mut stream: TcpStream, packet_sender: mpsc::Sender<PacketBuffer>) {
loop {
let mut data = vec![0u8; 512];
let length = stream.read(&mut data).unwrap();
            if length == 0 {
                // A zero-length read means the peer closed the connection.
                break;
            }
data.drain(length..);
data.shrink_to_fit();
packet_sender.send(data).unwrap();
}
}
fn receive_packets(&mut self) -> Vec<PacketBuffer> {
let mut packets = Vec::new();
loop {
match self.packet_receiver.try_recv() {
Ok(packet) => packets.push(packet),
Err(mpsc::TryRecvError::Empty) => return packets,
Err(mpsc::TryRecvError::Disconnected) => {
self.alive = false;
return packets;
}
}
}
}
}
pub struct Client {
connection: Connection,
state: NetworkState,
pub shared_secret: Option<Vec<u8>>,
pub compressed: bool,
verify_token: Option<Vec<u8>>,
player: Option<Player>,
username: Option<String>,
id: u32,
}
impl Client {
fn new(stream: TcpStream, id: u32) -> Client {
let connection = Connection::new(stream);
Client {
connection,
state: NetworkState::HANDSHAKING,
shared_secret: None,
compressed: false,
verify_token: None,
player: None,
username: None,
id,
}
}
fn send_packet(&mut self, encoder: &PacketEncoder) {
let buffer = encoder.finalize(self.compressed, &self.shared_secret);
        self.connection.stream.write_all(buffer.as_slice()).unwrap(); // write_all avoids dropping partial writes
}
}
#[derive(Serialize, Deserialize)]
pub struct ServerConfig {
max_players: i32,
motd: String,
}
pub struct Server {
clients: Vec<Client>,
client_receiver: mpsc::Receiver<Client>,
key_pair: Rsa<Private>,
mojang: Mojang,
}
impl Server {
fn new() -> Server {
let rsa = Rsa::generate(1024).unwrap();
let (tx, rx) = mpsc::channel();
let server = Server {
clients: Vec::new(),
key_pair: rsa,
mojang: Mojang::new(),
client_receiver: rx
};
server.listen_for_connections(tx);
server
}
    fn get_client(&self, client_id: u32) -> &Client {
        self.clients.iter().find(|client| client.id == client_id).unwrap()
    }
fn listen_for_connections(&self, sender: mpsc::Sender<Client>) {
let mut next_id = 0;
thread::spawn(move || {
let listener = TcpListener::bind("0.0.0.0:25566").unwrap();
for stream in listener.incoming() {
let stream = stream.unwrap();
let client = Client::new(stream, next_id);
sender.send(client).unwrap();
next_id += 1;
}
});
}
fn unknown_packet(id: i32) {
eprintln!("Unknown packet with id: {}", id);
}
fn handle_packet(&mut self, client: usize, packet: PacketBuffer) {
let client = self.clients.get_mut(client).unwrap();
let decoder = PacketDecoder::new(packet, client);
println!(
"Packet received: {}, with the length of: {}",
decoder.packet_id, decoder.length
);
let state = client.state;
match state {
NetworkState::HANDSHAKING => match decoder.packet_id {
0x00 => {
let packet = S00Handshake::decode(decoder);
println!("New state: {:#?}", packet.next_state);
client.state = packet.next_state;
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::STATUS => match decoder.packet_id {
0x00 => {
let json_response = json!({
"version": {
"name": "RustMC 1.15.1",
"protocol": 575
},
"players": {
"max": 100,
"online": 1,
"sample": [],
},
"description": {
"text": "Hello World!",
"color": "gold"
}
})
.to_string();
let response_encoder = C00Response { json_response }.encode();
client.send_packet(&response_encoder);
}
0x01 => {
let packet = S01Ping::decode(decoder);
let pong_encoder = C01Pong {
payload: packet.payload,
}
.encode();
client.send_packet(&pong_encoder);
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::LOGIN => match decoder.packet_id {
0x00 => {
let packet = S00LoginStart::decode(decoder);
let public_key = self.key_pair.public_key_to_der().unwrap();
let verify_token = rand::thread_rng().gen::<[u8; 4]>().to_vec();
let request_encoder = C01EcryptionRequest {
server_id: "".to_string(),
public_key_length: public_key.len() as i32,
public_key,
verify_token_length: 4,
verify_token: verify_token.clone(),
}
.encode();
client.verify_token = Some(verify_token);
client.username = Some(packet.name);
client.send_packet(&request_encoder);
}
0x01 => {
let packet = S01EncryptionResponse::decode(decoder);
let mut received_verify_token = vec![0u8; packet.verify_token_length as usize];
let length_decrypted = self
.key_pair
.private_decrypt(
packet.verify_token.as_slice(),
received_verify_token.as_mut(),
Padding::PKCS1,
)
.unwrap();
received_verify_token.drain(length_decrypted..received_verify_token.len());
if &received_verify_token == client.verify_token.as_ref().unwrap() {
// Start login process
println!("Starting login process");
/*self.mojang.send_has_joined(
&clients[client].username.unwrap(),
clients[client].id
);*/
} else {
println!("Verify token incorrent!!");
}
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::PLAY => match decoder.packet_id {
_ => Server::unknown_packet(decoder.packet_id),
},
}
}
fn on_mojang_has_joined_response(&mut self, client_id: u32, result: MojangHasJoinedResponse) {
let client = self.get_client(client_id);
}
fn
|
(&mut self) {
let num_clients = self.clients.len();
for client in 0..num_clients {
let mut packets = self.clients[client]
.connection
.receive_packets();
for packet_batch in packets.drain(..) {
for packet in PacketDecoder::new_batch(packet_batch, &self.clients[client]) {
println!("{}", utils::to_hex_string(&packet.buffer));
self.handle_packet(client, packet.buffer);
}
}
}
}
fn receive_clients(&mut self) {
let result = self.client_receiver.try_recv();
if let Ok(client) = result {
self.clients.push(client);
}
}
fn poll_mojang(&mut self) { // TODO: Clean up maybe
let mut finished_indicies = Vec::new();
for (i, pending) in self.mojang.has_joined_pending.iter().enumerate() {
if pending.result.is_some() {
finished_indicies.push(i);
}
}
for index in finished_indicies {
let response = self.mojang.has_joined_pending.remove(index);
self.on_mojang_has_joined_response(response.client_id, response.result.unwrap());
}
self.mojang.clean();
}
fn start(mut self) {
println!("Listening for connections...");
//let mut last_tick_time = SystemTime::now();
loop {
/*let now = SystemTime::now();
let time_since = now.duration_since(last_tick_time).unwrap().as_millis();
if time_since > 50 {
last_tick_time = now;
}
*/
|
receive_packets
|
identifier_name
|
network.rs
|
Mojang, MojangHasJoinedResponse};
use crate::packets::*;
use crate::player::Player;
use openssl::pkey::Private;
use openssl::rsa::{Padding, Rsa};
use rand::Rng;
use serde::{Deserialize, Serialize};
use serde_json::json;
use std::io::prelude::*;
use std::net::{TcpListener, TcpStream};
use std::sync::mpsc;
use std::thread;
use std::time::{Duration};
struct Connection {
packet_receiver: mpsc::Receiver<PacketBuffer>,
stream: TcpStream,
alive: bool,
}
impl Connection {
fn new(stream: TcpStream) -> Connection {
println!("New connection!");
let reader = stream.try_clone().unwrap();
let (tx, rx) = mpsc::channel();
let connection = Connection {
packet_receiver: rx,
stream,
alive: true,
};
thread::spawn(|| {
Connection::handle_connection(reader, tx);
});
connection
}
fn handle_connection(mut stream: TcpStream, packet_sender: mpsc::Sender<PacketBuffer>) {
loop {
let mut data = vec![0u8; 512];
let length = stream.read(&mut data).unwrap();
if length == 0 {
thread::sleep(Duration::from_millis(2));
continue;
}
data.drain(length..);
data.shrink_to_fit();
packet_sender.send(data).unwrap();
}
}
fn receive_packets(&mut self) -> Vec<PacketBuffer> {
let mut packets = Vec::new();
loop {
match self.packet_receiver.try_recv() {
Ok(packet) => packets.push(packet),
Err(mpsc::TryRecvError::Empty) => return packets,
Err(mpsc::TryRecvError::Disconnected) => {
self.alive = false;
return packets;
}
}
}
}
}
pub struct Client {
connection: Connection,
state: NetworkState,
pub shared_secret: Option<Vec<u8>>,
pub compressed: bool,
verify_token: Option<Vec<u8>>,
player: Option<Player>,
username: Option<String>,
id: u32,
}
impl Client {
fn new(stream: TcpStream, id: u32) -> Client {
let connection = Connection::new(stream);
Client {
connection,
state: NetworkState::HANDSHAKING,
shared_secret: None,
compressed: false,
verify_token: None,
player: None,
username: None,
id,
}
}
fn send_packet(&mut self, encoder: &PacketEncoder) {
let buffer = encoder.finalize(self.compressed, &self.shared_secret);
self.connection.stream.write(buffer.as_slice()).unwrap();
}
}
#[derive(Serialize, Deserialize)]
pub struct ServerConfig {
max_players: i32,
motd: String,
}
pub struct Server {
clients: Vec<Client>,
client_receiver: mpsc::Receiver<Client>,
key_pair: Rsa<Private>,
mojang: Mojang,
}
impl Server {
fn new() -> Server {
let rsa = Rsa::generate(1024).unwrap();
let (tx, rx) = mpsc::channel();
let server = Server {
clients: Vec::new(),
key_pair: rsa,
mojang: Mojang::new(),
client_receiver: rx
};
server.listen_for_connections(tx);
server
}
    fn get_client(&self, client_id: u32) -> &Client {
        self.clients.iter().find(|client| client.id == client_id).unwrap()
    }
fn listen_for_connections(&self, sender: mpsc::Sender<Client>) {
let mut next_id = 0;
thread::spawn(move || {
let listener = TcpListener::bind("0.0.0.0:25566").unwrap();
for stream in listener.incoming() {
let stream = stream.unwrap();
let client = Client::new(stream, next_id);
sender.send(client).unwrap();
next_id += 1;
}
});
}
fn unknown_packet(id: i32) {
eprintln!("Unknown packet with id: {}", id);
}
fn handle_packet(&mut self, client: usize, packet: PacketBuffer) {
let client = self.clients.get_mut(client).unwrap();
let decoder = PacketDecoder::new(packet, client);
println!(
"Packet received: {}, with the length of: {}",
decoder.packet_id, decoder.length
);
let state = client.state;
match state {
NetworkState::HANDSHAKING => match decoder.packet_id {
0x00 => {
let packet = S00Handshake::decode(decoder);
println!("New state: {:#?}", packet.next_state);
client.state = packet.next_state;
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::STATUS => match decoder.packet_id {
0x00 => {
let json_response = json!({
"version": {
"name": "RustMC 1.15.1",
"protocol": 575
},
"players": {
"max": 100,
"online": 1,
"sample": [],
},
"description": {
"text": "Hello World!",
"color": "gold"
}
})
.to_string();
let response_encoder = C00Response { json_response }.encode();
client.send_packet(&response_encoder);
}
0x01 => {
let packet = S01Ping::decode(decoder);
let pong_encoder = C01Pong {
payload: packet.payload,
}
.encode();
client.send_packet(&pong_encoder);
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::LOGIN => match decoder.packet_id {
0x00 => {
let packet = S00LoginStart::decode(decoder);
let public_key = self.key_pair.public_key_to_der().unwrap();
let verify_token = rand::thread_rng().gen::<[u8; 4]>().to_vec();
let request_encoder = C01EcryptionRequest {
server_id: "".to_string(),
public_key_length: public_key.len() as i32,
public_key,
verify_token_length: 4,
verify_token: verify_token.clone(),
}
.encode();
client.verify_token = Some(verify_token);
client.username = Some(packet.name);
client.send_packet(&request_encoder);
}
0x01 => {
let packet = S01EncryptionResponse::decode(decoder);
let mut received_verify_token = vec![0u8; packet.verify_token_length as usize];
let length_decrypted = self
.key_pair
.private_decrypt(
packet.verify_token.as_slice(),
received_verify_token.as_mut(),
Padding::PKCS1,
)
.unwrap();
received_verify_token.drain(length_decrypted..received_verify_token.len());
if &received_verify_token == client.verify_token.as_ref().unwrap() {
// Start login process
println!("Starting login process");
/*self.mojang.send_has_joined(
&clients[client].username.unwrap(),
clients[client].id
);*/
} else {
println!("Verify token incorrent!!");
}
}
_ => Server::unknown_packet(decoder.packet_id),
},
NetworkState::PLAY => match decoder.packet_id {
_ => Server::unknown_packet(decoder.packet_id),
},
}
}
fn on_mojang_has_joined_response(&mut self, client_id: u32, result: MojangHasJoinedResponse) {
let client = self.get_client(client_id);
}
fn receive_packets(&mut self) {
let num_clients = self.clients.len();
for client in 0..num_clients {
let mut packets = self.clients[client]
.connection
.receive_packets();
for packet_batch in packets.drain(..) {
for packet in PacketDecoder::new_batch(packet_batch, &self.clients[client]) {
println!("{}", utils::to_hex_string(&packet.buffer));
self.handle_packet(client, packet.buffer);
}
}
}
}
fn receive_clients(&mut self) {
let result = self.client_receiver.try_recv();
if let Ok(client) = result
|
}
fn poll_mojang(&mut self) { // TODO: Clean up maybe
let mut finished_indicies = Vec::new();
for (i, pending) in self.mojang.has_joined_pending.iter().enumerate() {
if pending.result.is_some() {
finished_indicies.push(i);
}
}
for index in finished_indicies {
let response = self.mojang.has_joined_pending.remove(index);
self.on_mojang_has_joined_response(response.client_id, response.result.unwrap());
}
self.mojang.clean();
}
fn start(mut self) {
println!("Listening for connections...");
//let mut last_tick_time = SystemTime::now();
loop {
/*let now = SystemTime::now();
let time_since = now.duration_since(last_tick_time).unwrap().as_millis();
if time_since > 50 {
last_tick_time = now;
}
*/
|
{
self.clients.push(client);
}
|
conditional_block
|
tautils.py
|
, ta_file):
""" Write data to a file. """
file_handle = file(ta_file, "w")
file_handle.write(data_to_string(data))
file_handle.close()
def data_to_string(data):
""" JSON dump a string. """
return json_dump(data).replace(']], ', ']],\n')
def do_dialog(dialog, suffix, load_save_folder):
""" Open a file dialog. """
_result = None
file_filter = gtk.FileFilter()
file_filter.add_pattern('*' + suffix)
file_filter.set_name("Turtle Art")
dialog.add_filter(file_filter)
dialog.set_current_folder(load_save_folder)
_response = dialog.run()
if _response == gtk.RESPONSE_OK:
_result = dialog.get_filename()
load_save_folder = dialog.get_current_folder()
dialog.destroy()
return _result, load_save_folder
def save_picture(canvas, file_name=''):
""" Save the canvas to a file. """
_pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, False, 8, canvas.width,
canvas.height)
_pixbuf.get_from_drawable(canvas.canvas.images[0],
canvas.canvas.images[0].get_colormap(),
0, 0, 0, 0, canvas.width, canvas.height)
if file_name != '':
_pixbuf.save(file_name, 'png')
return _pixbuf
def save_svg(string, file_name):
""" Write a string to a file. """
file_handle = file(file_name, "w")
file_handle.write(string)
file_handle.close()
def get_pixbuf_from_journal(dsobject, w, h):
""" Load a pixbuf from a Journal object. """
try:
_pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(dsobject.file_path,
int(w), int(h))
except:
try:
_pixbufloader = \
gtk.gdk.pixbuf_loader_new_with_mime_type('image/png')
_pixbufloader.set_size(min(300, int(w)), min(225, int(h)))
_pixbufloader.write(dsobject.metadata['preview'])
_pixbufloader.close()
_pixbuf = _pixbufloader.get_pixbuf()
except:
_pixbuf = None
return _pixbuf
def get_path(activity, subpath):
""" Find a Rainbow-approved place for temporary files. """
try:
return(os.path.join(activity.get_activity_root(), subpath))
except:
# Early versions of Sugar didn't support get_activity_root()
return(os.path.join(os.environ['HOME'], ".sugar/default",
"org.laptop.TurtleArtActivity", subpath))
def image_to_base64(pixbuf, activity):
""" Convert an image to base64 """
_file_name = os.path.join(get_path(activity, 'instance'), 'imagetmp.png')
    if pixbuf is not None:
pixbuf.save(_file_name, "png")
_base64 = os.path.join(get_path(activity, 'instance'), 'base64tmp')
_cmd = "base64 <" + _file_name + " >" + _base64
subprocess.check_call(_cmd, shell=True)
_file_handle = open(_base64, 'r')
_data = _file_handle.read()
_file_handle.close()
return _data
def movie_media_type(name):
""" Is it movie media? """
return name.endswith(('.ogv', '.vob', '.mp4', '.wmv', '.mov', '.mpeg'))
def audio_media_type(name):
""" Is it audio media? """
return name.endswith(('.ogg', '.oga', '.m4a'))
def image_media_type(name):
""" Is it image media? """
return name.endswith(('.png', '.jpg', '.jpeg', '.gif', '.tiff', '.tif',
'.svg'))
def text_media_type(name):
""" Is it text media? """
return name.endswith(('.txt', '.py', '.lg', '.doc', '.rtf'))
def round_int(num):
""" Remove trailing decimal places if number is an int """
try:
float(num)
    except (TypeError, ValueError):
_logger.debug("error trying to convert %s to number" % (str(num)))
raise pythonerror("#syntaxerror")
if int(float(num)) == num:
return int(num)
else:
if float(num) < 0:
_nn = int((float(num) - 0.005) * 100) / 100.
else:
_nn = int((float(num) + 0.005) * 100) / 100.
if int(float(_nn)) == _nn:
return int(_nn)
return _nn
def calc_image_size(spr):
""" Calculate the maximum size for placing an image onto a sprite. """
return int(max(spr.label_safe_width(), 1)), \
int(max(spr.label_safe_height(), 1))
# Collapsible stacks live between 'sandwichtop' and 'sandwichbottom' blocks
def
|
(top):
""" When we undock, retract the 'arm' that extends from 'sandwichtop'. """
if top is not None and top.name in ['sandwichtop', 'sandwichtop_no_label']:
if top.ey > 0:
top.reset_y()
def grow_stack_arm(top):
""" When we dock, grow an 'arm' from 'sandwichtop'. """
if top is not None and top.name in ['sandwichtop', 'sandwichtop_no_label']:
_bot = find_sandwich_bottom(top)
if _bot is None:
return
if top.ey > 0:
top.reset_y()
_ty = top.spr.get_xy()[1]
_th = top.spr.get_dimensions()[1]
_by = _bot.spr.get_xy()[1]
_dy = _by - (_ty + _th)
if _dy > 0:
top.expand_in_y(_dy / top.scale)
top.refresh()
def find_sandwich_top(blk):
""" Find the sandwich top above this block. """
# Always follow the main branch of a flow: the first connection.
_blk = blk.connections[0]
while _blk is not None:
if _blk.name in COLLAPSIBLE:
return None
if _blk.name in ['repeat', 'if', 'ifelse', 'forever', 'while']:
if blk != _blk.connections[len(_blk.connections) - 1]:
return None
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return _blk
blk = _blk
_blk = _blk.connections[0]
return None
def find_sandwich_bottom(blk):
""" Find the sandwich bottom below this block. """
# Always follow the main branch of a flow: the last connection.
_blk = blk.connections[len(blk.connections) - 1]
while _blk is not None:
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return None
if _blk.name in COLLAPSIBLE:
return _blk
_blk = _blk.connections[len(_blk.connections) - 1]
return None
def find_sandwich_top_below(blk):
""" Find the sandwich top below this block. """
if blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return blk
# Always follow the main branch of a flow: the last connection.
_blk = blk.connections[len(blk.connections) - 1]
while _blk is not None:
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return _blk
_blk = _blk.connections[len(_blk.connections) - 1]
return None
def restore_stack(top):
""" Restore the blocks between the sandwich top and sandwich bottom. """
_group = find_group(top.connections[len(top.connections) - 1])
_hit_bottom = False
_bot = find_sandwich_bottom(top)
for _blk in _group:
if not _hit_bottom and _blk == _bot:
_hit_bottom = True
if len(_blk.values) == 0:
_blk.values.append(0)
else:
_blk.values[0] = 0
_olddx = _blk.docks[1][2]
_olddy = _blk.docks[1][3]
# Replace 'sandwichcollapsed' shape with 'sandwichbottom' shape
_blk.name = 'sandwichbottom'
_blk.spr.set_label(' ', 1)
_blk.svg.set_show(False)
_blk.svg.set_hide(True)
_blk.refresh()
# Redock to previous block in group
_you = _blk.connections[0]
(_yx, _yy) = _you.spr.get_xy()
_yd
|
reset_stack_arm
|
identifier_name
|
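Aside: round_int in the row above returns an int when the value is whole and otherwise rounds toward two decimal places by nudging with plus or minus 0.005 before truncating. A Python 3 rendition of the same rule; pythonerror is the module's own exception class, so a plain ValueError stands in here.

def round_int(num):
    """Ints pass through; other numbers round to two decimal places."""
    try:
        float(num)
    except (TypeError, ValueError):
        raise ValueError("cannot convert %r to a number" % (num,))
    if int(float(num)) == num:
        return int(num)
    nudge = -0.005 if float(num) < 0 else 0.005
    nn = int((float(num) + nudge) * 100) / 100.0
    return int(nn) if int(nn) == nn else nn

print(round_int(4.0))      # 4
print(round_int(3.14159))  # 3.14
print(round_int(2.499))    # 2.5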
tautils.py
|
def data_from_string(text):
""" JSON load data from a string. """
return json_load(text.replace(']],\n', ']], '))
def data_to_file(data, ta_file):
""" Write data to a file. """
file_handle = file(ta_file, "w")
file_handle.write(data_to_string(data))
file_handle.close()
def data_to_string(data):
""" JSON dump a string. """
return json_dump(data).replace(']], ', ']],\n')
def do_dialog(dialog, suffix, load_save_folder):
""" Open a file dialog. """
_result = None
file_filter = gtk.FileFilter()
file_filter.add_pattern('*' + suffix)
file_filter.set_name("Turtle Art")
dialog.add_filter(file_filter)
dialog.set_current_folder(load_save_folder)
_response = dialog.run()
if _response == gtk.RESPONSE_OK:
_result = dialog.get_filename()
load_save_folder = dialog.get_current_folder()
dialog.destroy()
return _result, load_save_folder
def save_picture(canvas, file_name=''):
""" Save the canvas to a file. """
_pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, False, 8, canvas.width,
canvas.height)
_pixbuf.get_from_drawable(canvas.canvas.images[0],
canvas.canvas.images[0].get_colormap(),
0, 0, 0, 0, canvas.width, canvas.height)
if file_name != '':
_pixbuf.save(file_name, 'png')
return _pixbuf
def save_svg(string, file_name):
""" Write a string to a file. """
file_handle = file(file_name, "w")
file_handle.write(string)
file_handle.close()
def get_pixbuf_from_journal(dsobject, w, h):
""" Load a pixbuf from a Journal object. """
try:
_pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(dsobject.file_path,
int(w), int(h))
except:
try:
_pixbufloader = \
gtk.gdk.pixbuf_loader_new_with_mime_type('image/png')
_pixbufloader.set_size(min(300, int(w)), min(225, int(h)))
_pixbufloader.write(dsobject.metadata['preview'])
_pixbufloader.close()
_pixbuf = _pixbufloader.get_pixbuf()
except:
_pixbuf = None
return _pixbuf
def get_path(activity, subpath):
""" Find a Rainbow-approved place for temporary files. """
try:
return(os.path.join(activity.get_activity_root(), subpath))
except:
# Early versions of Sugar didn't support get_activity_root()
return(os.path.join(os.environ['HOME'], ".sugar/default",
"org.laptop.TurtleArtActivity", subpath))
def image_to_base64(pixbuf, activity):
""" Convert an image to base64 """
_file_name = os.path.join(get_path(activity, 'instance'), 'imagetmp.png')
    if pixbuf is not None:
pixbuf.save(_file_name, "png")
_base64 = os.path.join(get_path(activity, 'instance'), 'base64tmp')
_cmd = "base64 <" + _file_name + " >" + _base64
subprocess.check_call(_cmd, shell=True)
_file_handle = open(_base64, 'r')
_data = _file_handle.read()
_file_handle.close()
return _data
def movie_media_type(name):
""" Is it movie media? """
return name.endswith(('.ogv', '.vob', '.mp4', '.wmv', '.mov', '.mpeg'))
def audio_media_type(name):
""" Is it audio media? """
return name.endswith(('.ogg', '.oga', '.m4a'))
def image_media_type(name):
""" Is it image media? """
return name.endswith(('.png', '.jpg', '.jpeg', '.gif', '.tiff', '.tif',
'.svg'))
def text_media_type(name):
""" Is it text media? """
return name.endswith(('.txt', '.py', '.lg', '.doc', '.rtf'))
def round_int(num):
""" Remove trailing decimal places if number is an int """
try:
float(num)
    except (TypeError, ValueError):
_logger.debug("error trying to convert %s to number" % (str(num)))
raise pythonerror("#syntaxerror")
if int(float(num)) == num:
return int(num)
else:
if float(num) < 0:
_nn = int((float(num) - 0.005) * 100) / 100.
else:
_nn = int((float(num) + 0.005) * 100) / 100.
if int(float(_nn)) == _nn:
return int(_nn)
return _nn
def calc_image_size(spr):
""" Calculate the maximum size for placing an image onto a sprite. """
return int(max(spr.label_safe_width(), 1)), \
int(max(spr.label_safe_height(), 1))
# Collapsible stacks live between 'sandwichtop' and 'sandwichbottom' blocks
def reset_stack_arm(top):
""" When we undock, retract the 'arm' that extends from 'sandwichtop'. """
if top is not None and top.name in ['sandwichtop', 'sandwichtop_no_label']:
if top.ey > 0:
top.reset_y()
def grow_stack_arm(top):
""" When we dock, grow an 'arm' from 'sandwichtop'. """
if top is not None and top.name in ['sandwichtop', 'sandwichtop_no_label']:
_bot = find_sandwich_bottom(top)
if _bot is None:
return
if top.ey > 0:
top.reset_y()
_ty = top.spr.get_xy()[1]
_th = top.spr.get_dimensions()[1]
_by = _bot.spr.get_xy()[1]
_dy = _by - (_ty + _th)
if _dy > 0:
top.expand_in_y(_dy / top.scale)
top.refresh()
def find_sandwich_top(blk):
""" Find the sandwich top above this block. """
# Always follow the main branch of a flow: the first connection.
_blk = blk.connections[0]
while _blk is not None:
if _blk.name in COLLAPSIBLE:
return None
if _blk.name in ['repeat', 'if', 'ifelse', 'forever', 'while']:
if blk != _blk.connections[len(_blk.connections) - 1]:
return None
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return _blk
blk = _blk
_blk = _blk.connections[0]
return None
def find_sandwich_bottom(blk):
""" Find the sandwich bottom below this block. """
# Always follow the main branch of a flow: the last connection.
_blk = blk.connections[len(blk.connections) - 1]
while _blk is not None:
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return None
if _blk.name in COLLAPSIBLE:
return _blk
_blk = _blk.connections[len(_blk.connections) - 1]
return None
def find_sandwich_top_below(blk):
""" Find the sandwich top below this block. """
if blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return blk
# Always follow the main branch of a flow: the last connection.
_blk = blk.connections[len(blk.connections) - 1]
while _blk is not None:
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return _blk
_blk = _blk.connections[len(_blk.connections) - 1]
return None
def restore_stack(top):
""" Restore the blocks between the sandwich top and sandwich bottom. """
_group = find_group(top.connections[len(top.connections) - 1])
_hit_bottom = False
_bot = find_sandwich_bottom(top)
for _blk in _group:
if not _hit_bottom and _blk == _bot:
_hit_bottom = True
|
""" Open the .ta file, ignoring any .png file that might be present. """
file_handle = open(ta_file, "r")
#
# We try to maintain read-compatibility with all versions of Turtle Art.
# Try pickle first; then different versions of json.
#
try:
_data = pickle.load(file_handle)
except:
# Rewind necessary because of failed pickle.load attempt
file_handle.seek(0)
_text = file_handle.read()
_data = data_from_string(_text)
file_handle.close()
return _data
|
identifier_body
|
|
tautils.py
|
(data, ta_file):
""" Write data to a file. """
file_handle = open(ta_file, "w")
file_handle.write(data_to_string(data))
file_handle.close()
def data_to_string(data):
""" JSON dump a string. """
return json_dump(data).replace(']], ', ']],\n')
def do_dialog(dialog, suffix, load_save_folder):
""" Open a file dialog. """
_result = None
file_filter = gtk.FileFilter()
file_filter.add_pattern('*' + suffix)
file_filter.set_name("Turtle Art")
dialog.add_filter(file_filter)
dialog.set_current_folder(load_save_folder)
_response = dialog.run()
if _response == gtk.RESPONSE_OK:
_result = dialog.get_filename()
load_save_folder = dialog.get_current_folder()
dialog.destroy()
return _result, load_save_folder
def save_picture(canvas, file_name=''):
""" Save the canvas to a file. """
_pixbuf = gtk.gdk.Pixbuf(gtk.gdk.COLORSPACE_RGB, False, 8, canvas.width,
canvas.height)
_pixbuf.get_from_drawable(canvas.canvas.images[0],
canvas.canvas.images[0].get_colormap(),
0, 0, 0, 0, canvas.width, canvas.height)
if file_name != '':
_pixbuf.save(file_name, 'png')
return _pixbuf
def save_svg(string, file_name):
""" Write a string to a file. """
file_handle = open(file_name, "w")
file_handle.write(string)
file_handle.close()
def get_pixbuf_from_journal(dsobject, w, h):
""" Load a pixbuf from a Journal object. """
try:
_pixbuf = gtk.gdk.pixbuf_new_from_file_at_size(dsobject.file_path,
int(w), int(h))
except:
try:
_pixbufloader = \
gtk.gdk.pixbuf_loader_new_with_mime_type('image/png')
_pixbufloader.set_size(min(300, int(w)), min(225, int(h)))
_pixbufloader.write(dsobject.metadata['preview'])
_pixbufloader.close()
_pixbuf = _pixbufloader.get_pixbuf()
except:
_pixbuf = None
return _pixbuf
def get_path(activity, subpath):
""" Find a Rainbow-approved place for temporary files. """
try:
return os.path.join(activity.get_activity_root(), subpath)
except:
# Early versions of Sugar didn't support get_activity_root()
return os.path.join(os.environ['HOME'], ".sugar/default",
"org.laptop.TurtleArtActivity", subpath)
def image_to_base64(pixbuf, activity):
""" Convert an image to base64 """
_file_name = os.path.join(get_path(activity, 'instance'), 'imagetmp.png')
if pixbuf is not None:
pixbuf.save(_file_name, "png")
_base64 = os.path.join(get_path(activity, 'instance'), 'base64tmp')
_cmd = "base64 <" + _file_name + " >" + _base64
subprocess.check_call(_cmd, shell=True)
_file_handle = open(_base64, 'r')
_data = _file_handle.read()
_file_handle.close()
return _data
def movie_media_type(name):
""" Is it movie media? """
return name.endswith(('.ogv', '.vob', '.mp4', '.wmv', '.mov', '.mpeg'))
def audio_media_type(name):
""" Is it audio media? """
return name.endswith(('.ogg', '.oga', '.m4a'))
def image_media_type(name):
""" Is it image media? """
return name.endswith(('.png', '.jpg', '.jpeg', '.gif', '.tiff', '.tif',
'.svg'))
def text_media_type(name):
""" Is it text media? """
return name.endswith(('.txt', '.py', '.lg', '.doc', '.rtf'))
def round_int(num):
""" Remove trailing decimal places if number is an int """
try:
float(num)
except TypeError:
_logger.debug("error trying to convert %s to number" % (str(num)))
raise pythonerror("#syntaxerror")
if int(float(num)) == num:
return int(num)
else:
if float(num) < 0:
_nn = int((float(num) - 0.005) * 100) / 100.
else:
_nn = int((float(num) + 0.005) * 100) / 100.
if int(float(_nn)) == _nn:
return int(_nn)
return _nn
def calc_image_size(spr):
""" Calculate the maximum size for placing an image onto a sprite. """
return int(max(spr.label_safe_width(), 1)), \
int(max(spr.label_safe_height(), 1))
# Collapsible stacks live between 'sandwichtop' and 'sandwichbottom' blocks
def reset_stack_arm(top):
""" When we undock, retract the 'arm' that extends from 'sandwichtop'. """
if top is not None and top.name in ['sandwichtop', 'sandwichtop_no_label']:
if top.ey > 0:
top.reset_y()
def grow_stack_arm(top):
""" When we dock, grow an 'arm' from 'sandwichtop'. """
if top is not None and top.name in ['sandwichtop', 'sandwichtop_no_label']:
_bot = find_sandwich_bottom(top)
if _bot is None:
return
if top.ey > 0:
top.reset_y()
_ty = top.spr.get_xy()[1]
_th = top.spr.get_dimensions()[1]
_by = _bot.spr.get_xy()[1]
_dy = _by - (_ty + _th)
if _dy > 0:
top.expand_in_y(_dy / top.scale)
top.refresh()
|
while _blk is not None:
if _blk.name in COLLAPSIBLE:
return None
if _blk.name in ['repeat', 'if', 'ifelse', 'forever', 'while']:
if blk != _blk.connections[len(_blk.connections) - 1]:
return None
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return _blk
blk = _blk
_blk = _blk.connections[0]
return None
def find_sandwich_bottom(blk):
""" Find the sandwich bottom below this block. """
# Always follow the main branch of a flow: the last connection.
_blk = blk.connections[len(blk.connections) - 1]
while _blk is not None:
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return None
if _blk.name in COLLAPSIBLE:
return _blk
_blk = _blk.connections[len(_blk.connections) - 1]
return None
def find_sandwich_top_below(blk):
""" Find the sandwich top below this block. """
if blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return blk
# Always follow the main branch of a flow: the last connection.
_blk = blk.connections[len(blk.connections) - 1]
while _blk is not None:
if _blk.name in ['sandwichtop', 'sandwichtop_no_label',
'sandwichtop_no_arm', 'sandwichtop_no_arm_no_label']:
return _blk
_blk = _blk.connections[len(_blk.connections) - 1]
return None
def restore_stack(top):
""" Restore the blocks between the sandwich top and sandwich bottom. """
_group = find_group(top.connections[len(top.connections) - 1])
_hit_bottom = False
_bot = find_sandwich_bottom(top)
for _blk in _group:
if not _hit_bottom and _blk == _bot:
_hit_bottom = True
if len(_blk.values) == 0:
_blk.values.append(0)
else:
_blk.values[0] = 0
_olddx = _blk.docks[1][2]
_olddy = _blk.docks[1][3]
# Replace 'sandwichcollapsed' shape with 'sandwichbottom' shape
_blk.name = 'sandwichbottom'
_blk.spr.set_label(' ', 1)
_blk.svg.set_show(False)
_blk.svg.set_hide(True)
_blk.refresh()
# Redock to previous block in group
_you = _blk.connections[0]
(_yx, _yy) = _you.spr.get_xy()
_yd
|
def find_sandwich_top(blk):
""" Find the sandwich top above this block. """
# Always follow the main branch of a flow: the first connection.
_blk = blk.connections[0]
|
random_line_split
|
tautils.py
|
.spr.get_xy()
_yd = _you.docks[len(_you.docks) - 1]
(_bx, _by) = _blk.spr.get_xy()
_dx = _yx + _yd[2] - _blk.docks[0][2] - _bx
_dy = _yy + _yd[3] - _blk.docks[0][3] - _by
_blk.spr.move_relative((_dx, _dy))
# Since the shapes have changed, the dock positions have too.
_newdx = _blk.docks[1][2]
_newdy = _blk.docks[1][3]
_dx += _newdx - _olddx
_dy += _newdy - _olddy
else:
if not _hit_bottom:
_blk.spr.set_layer(HIDE_LAYER)
_blk.status = 'collapsed'
else:
_blk.spr.move_relative((_dx, _dy))
# Remove 'sandwichtop' arm
if top.name == 'sandwichtop' or top.name == 'sandwichtop_no_arm':
top.name = 'sandwichtop_no_arm'
else:
top.name = 'sandwichtop_no_arm_no_label'
top.refresh()
def collapsed(blk):
""" Is this stack collapsed? """
if blk is not None and blk.name in COLLAPSIBLE and\
len(blk.values) == 1 and blk.values[0] != 0:
return True
return False
def collapsible(blk):
""" Can this stack be collapsed? """
if blk is None or blk.name not in COLLAPSIBLE:
return False
if find_sandwich_top(blk) is None:
return False
return True
def hide_button_hit(spr, x, y):
""" Did the sprite's hide (contract) button get hit? """
_red, _green, _blue, _alpha = spr.get_pixel((x, y))
if _red == HIT_HIDE:
return True
else:
return False
def show_button_hit(spr, x, y):
""" Did the sprite's show (expand) button get hit? """
_red, _green, _blue, _alpha = spr.get_pixel((x, y))
if _green == HIT_SHOW:
return True
else:
return False
def numeric_arg(value):
""" Dock test: looking for a numeric value """
if type(convert(value, float)) is float:
return True
return False
def zero_arg(value):
""" Dock test: looking for a zero argument """
if numeric_arg(value):
if convert(value, float) == 0:
return True
return False
def neg_arg(value):
""" Dock test: looking for a negative argument """
if numeric_arg(value):
if convert(value, float) < 0:
return True
return False
def dock_dx_dy(block1, dock1n, block2, dock2n):
""" Find the distance between the dock points of two blocks. """
_dock1 = block1.docks[dock1n]
_dock2 = block2.docks[dock2n]
_d1type, _d1dir, _d1x, _d1y = _dock1[0:4]
_d2type, _d2dir, _d2x, _d2y = _dock2[0:4]
if block1 == block2:
return (100, 100)
if _d1dir == _d2dir:
return (100, 100)
if (_d2type != 'number') or (dock2n != 0):
if block1.connections is not None and \
dock1n < len(block1.connections) and \
block1.connections[dock1n] is not None:
return (100, 100)
if block2.connections is not None and \
dock2n < len(block2.connections) and \
block2.connections[dock2n] is not None:
return (100, 100)
if _d1type != _d2type:
if block1.name in STRING_OR_NUMBER_ARGS:
if _d2type == 'number' or _d2type == 'string':
pass
elif block1.name in CONTENT_ARGS:
if _d2type in CONTENT_BLOCKS:
pass
else:
return (100, 100)
(_b1x, _b1y) = block1.spr.get_xy()
(_b2x, _b2y) = block2.spr.get_xy()
return ((_b1x + _d1x) - (_b2x + _d2x), (_b1y + _d1y) - (_b2y + _d2y))
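# Note: (100, 100) acts as a sentinel offset meaning "these two docks cannot
# be joined" (same block, same direction, occupied connection, or type clash).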
def journal_check(blk1, blk2, dock1, dock2):
""" Dock blocks only if arg is Journal block """
if blk1 is None or blk2 is None:
return True
if (blk1.name == 'skin' and dock1 == 1) and blk2.name != 'journal':
return False
if (blk2.name == 'skin' and dock2 == 1) and blk1.name != 'journal':
return False
return True
def arithmetic_check(blk1, blk2, dock1, dock2):
""" Dock strings only if they convert to numbers. Avoid /0 and root(-1)"""
if blk1 is None or blk2 is None:
return True
if blk1.name in ['sqrt', 'number', 'string'] and\
blk2.name in ['sqrt', 'number', 'string']:
if blk1.name == 'number' or blk1.name == 'string':
if not numeric_arg(blk1.values[0]) or neg_arg(blk1.values[0]):
return False
elif blk2.name == 'number' or blk2.name == 'string':
if not numeric_arg(blk2.values[0]) or neg_arg(blk2.values[0]):
return False
elif blk1.name in ['division2', 'number', 'string'] and\
blk2.name in ['division2', 'number', 'string']:
if blk1.name == 'number' or blk1.name == 'string':
if not numeric_arg(blk1.values[0]):
return False
if dock2 == 2 and zero_arg(blk1.values[0]):
return False
elif blk2.name == 'number' or blk2.name == 'string':
if not numeric_arg(blk2.values[0]):
return False
if dock1 == 2 and zero_arg(blk2.values[0]):
return False
elif blk1.name in ['product2', 'minus2', 'random', 'remainder2',
'string'] and\
blk2.name in ['product2', 'minus2', 'random', 'remainder2',
'string']:
if blk1.name == 'string':
if not numeric_arg(blk1.values[0]):
return False
elif blk2.name == 'string':
if not numeric_arg(blk2.values[0]):
return False
elif blk1.name in ['greater2', 'less2'] and blk2.name == 'string':
# Non-numeric strings are OK only if both args are strings;
# Lots of test conditions...
if dock1 == 1 and blk1.connections[2] is not None:
if blk1.connections[2].name == 'number':
if not numeric_arg(blk2.values[0]):
return False
elif dock1 == 2 and blk1.connections[1] is not None:
if blk1.connections[1].name == 'number':
if not numeric_arg(blk2.values[0]):
return False
elif blk2.name in ['greater2', 'less2'] and blk1.name == 'string':
if dock2 == 1 and blk2.connections[2] is not None:
if blk2.connections[2].name == 'number':
if not numeric_arg(blk1.values[0]):
return False
elif dock2 == 2 and blk2.connections[1] is not None:
if blk2.connections[1].name == 'number':
if not numeric_arg(blk1.values[0]):
return False
elif blk1.name in ['greater2', 'less2'] and blk2.name == 'number':
if dock1 == 1 and blk1.connections[2] is not None:
if blk1.connections[2].name == 'string':
if not numeric_arg(blk1.connections[2].values[0]):
return False
elif dock1 == 2 and blk1.connections[1] is not None:
if blk1.connections[1].name == 'string':
if not numeric_arg(blk1.connections[1].values[0]):
return False
elif blk2.name in ['greater2', 'less2'] and blk1.name == 'number':
if dock2 == 1 and blk2.connections[2] is not None:
if blk2.connections[2].name == 'string':
|
if not numeric_arg(blk2.connections[2].values[0]):
return False
|
conditional_block
|
|
color.rs
|
/// Return the green value.
pub fn green(&self) -> f32 {
let Rgba(_, g, _, _) = self.to_rgb();
g
}
/// Return the blue value.
pub fn blue(&self) -> f32 {
let Rgba(_, _, b, _) = self.to_rgb();
b
}
/// Set the red value.
pub fn set_red(&mut self, r: f32) {
let Rgba(_, g, b, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
/// Set the green value.
pub fn set_green(&mut self, g: f32) {
let Rgba(r, _, b, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
/// Set the blue value.
pub fn set_blue(&mut self, b: f32) {
let Rgba(r, g, _, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
}
/// The parts of HSL along with an alpha for transparency.
#[derive(Copy, Clone, Debug)]
pub struct Hsla(pub f32, pub f32, pub f32, pub f32);
/// The parts of RGB along with an alpha for transparency.
#[derive(Copy, Clone, Debug)]
pub struct Rgba(pub f32, pub f32, pub f32, pub f32);
/// Convert an f32 color to a byte.
#[inline]
pub fn f32_to_byte(c: f32) -> u8 { (c * 255.0) as u8 }
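// Note: in modern Rust, float-to-int `as` casts saturate, so inputs outside
// [0.0, 1.0] clamp to 0 or 255 rather than wrapping.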
/// Pure function for converting rgb to hsl.
pub fn rgb_to_hsl(r: f32, g: f32, b: f32) -> (f32, f32, f32) {
let c_max = r.max(g).max(b);
let c_min = r.min(g).min(b);
let c = c_max - c_min;
let hue = if c == 0.0 {
// If there's no difference in the channels we have grayscale, so the hue is undefined.
0.0
} else {
degrees(60.0) * if c_max == r { fmod(((g - b) / c), 6) }
else if c_max == g { ((b - r) / c) + 2.0 }
else { ((r - g) / c) + 4.0 }
};
let lightness = (c_max + c_min) / 2.0;
let saturation = if lightness == 0.0 { 0.0 }
else { c / (1.0 - (2.0 * lightness - 1.0).abs()) };
(hue, saturation, lightness)
}
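// Illustrative sanity checks (not from the original source):
// rgb_to_hsl(1.0, 0.0, 0.0) == (0.0, 1.0, 0.5) -- pure red: full saturation, mid lightness
// rgb_to_hsl(0.5, 0.5, 0.5) == (0.0, 0.0, 0.5) -- gray: hue and saturation collapse to 0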
/// Pure function for converting hsl to rgb.
pub fn hsl_to_rgb(hue: f32, saturation: f32, lightness: f32) -> (f32, f32, f32) {
let chroma = (1.0 - (2.0 * lightness - 1.0).abs()) * saturation;
let hue = hue / degrees(60.0);
let x = chroma * (1.0 - (fmod(hue, 2) - 1.0).abs());
let (r, g, b) = match hue {
hue if hue < 0.0 => (0.0, 0.0, 0.0),
hue if hue < 1.0 => (chroma, x, 0.0),
hue if hue < 2.0 => (x, chroma, 0.0),
hue if hue < 3.0 => (0.0, chroma, x),
hue if hue < 4.0 => (0.0, x, chroma),
hue if hue < 5.0 => (x, 0.0, chroma),
hue if hue < 6.0 => (chroma, 0.0, x),
_ => (0.0, 0.0, 0.0),
};
let m = lightness - chroma / 2.0;
(r + m, g + m, b + m)
}
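// Up to floating-point error this inverts rgb_to_hsl: feeding the
// (h, s, l) produced by rgb_to_hsl(r, g, b) back in recovers (r, g, b).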
/// Linear or Radial Gradient.
#[derive(Clone, Debug)]
pub enum Gradient {
/// Takes a start and end point and then a series of color stops that indicate how to
/// interpolate between the start and end points.
Linear((f64, f64), (f64, f64), Vec<(f64, Color)>),
/// First takes a start point and inner radius. Then takes an end point and outer radius.
/// It then takes a series of color stops that indicate how to interpolate between the
/// inner and outer circles.
Radial((f64, f64), f64, (f64, f64), f64, Vec<(f64, Color)>),
}
/// Create a linear gradient.
pub fn linear(start: (f64, f64), end: (f64, f64), colors: Vec<(f64, Color)>) -> Gradient {
Gradient::Linear(start, end, colors)
}
/// Create a radial gradient.
pub fn radial(start: (f64, f64), start_r: f64,
end: (f64, f64), end_r: f64,
colors: Vec<(f64, Color)>) -> Gradient {
Gradient::Radial(start, start_r, end, end_r, colors)
}
/// Built-in colors.
///
/// These colors come from the
/// [Tango palette](http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines) which provides
/// aesthetically reasonable defaults for colors. Each color also comes with a light and dark
/// version.
/// Scarlet Red - Light - #EF2929
pub fn light_red() -> Color { rgb_bytes(239 , 41 , 41 ) }
/// Scarlet Red - Regular - #CC0000
pub fn red() -> Color { rgb_bytes(204 , 0 , 0 ) }
/// Scarlet Red - Dark - #A40000
pub fn dark_red() -> Color { rgb_bytes(164 , 0 , 0 ) }
/// Orange - Light - #FCAF3E
pub fn light_orange() -> Color { rgb_bytes(252 , 175 , 62 ) }
/// Orange - Regular - #F57900
pub fn orange() -> Color { rgb_bytes(245 , 121 , 0 ) }
/// Orange - Dark - #CE5C00
pub fn dark_orange() -> Color { rgb_bytes(206 , 92 , 0 ) }
/// Butter - Light - #FCE94F
pub fn light_yellow() -> Color { rgb_bytes(252 , 233 , 79 ) }
/// Butter - Regular - #EDD400
pub fn yellow() -> Color { rgb_bytes(237 , 212 , 0 ) }
/// Butter - Dark - #C4A000
pub fn dark_yellow() -> Color { rgb_bytes(196 , 160 , 0 ) }
/// Chameleon - Light - #8AE234
pub fn light_green() -> Color { rgb_bytes(138 , 226 , 52 ) }
/// Chameleon - Regular - #73D216
pub fn green() -> Color { rgb_bytes(115 , 210 , 22 ) }
/// Chameleon - Dark - #4E9A06
pub fn dark_green() -> Color { rgb_bytes(78 , 154 , 6 ) }
/// Sky Blue - Light - #729FCF
pub fn light_blue() -> Color { rgb_bytes(114 , 159 , 207) }
/// Sky Blue - Regular - #3465A4
pub fn blue() -> Color { rgb_bytes(52 , 101 , 164) }
/// Sky Blue - Dark - #204A87
pub fn dark_blue() -> Color { rgb_bytes(32 , 74 , 135) }
/// Plum - Light - #AD7FA8
pub fn light_purple() -> Color { rgb_bytes(173 , 127 , 168) }
/// Plum - Regular - #75507B
pub fn purple() -> Color { rgb_bytes(117 , 80 , 123) }
/// Plum - Dark - #5C3566
pub fn dark_purple() -> Color { rgb_bytes(92 , 53 , 102) }
|
/// Chocolate - Light - #E9B96E
|
random_line_split
|
|
color.rs
|
_rgb(hue: f32, saturation: f32, lightness: f32) -> (f32, f32, f32) {
let chroma = (1.0 - (2.0 * lightness - 1.0).abs()) * saturation;
let hue = hue / degrees(60.0);
let x = chroma * (1.0 - (fmod(hue, 2) - 1.0).abs());
let (r, g, b) = match hue {
hue if hue < 0.0 => (0.0, 0.0, 0.0),
hue if hue < 1.0 => (chroma, x, 0.0),
hue if hue < 2.0 => (x, chroma, 0.0),
hue if hue < 3.0 => (0.0, chroma, x),
hue if hue < 4.0 => (0.0, x, chroma),
hue if hue < 5.0 => (x, 0.0, chroma),
hue if hue < 6.0 => (chroma, 0.0, x),
_ => (0.0, 0.0, 0.0),
};
let m = lightness - chroma / 2.0;
(r + m, g + m, b + m)
}
/// Linear or Radial Gradient.
#[derive(Clone, Debug)]
pub enum Gradient {
/// Takes a start and end point and then a series of color stops that indicate how to
/// interpolate between the start and end points.
Linear((f64, f64), (f64, f64), Vec<(f64, Color)>),
/// First takes a start point and inner radius. Then takes an end point and outer radius.
/// It then takes a series of color stops that indicate how to interpolate between the
/// inner and outer circles.
Radial((f64, f64), f64, (f64, f64), f64, Vec<(f64, Color)>),
}
/// Create a linear gradient.
pub fn linear(start: (f64, f64), end: (f64, f64), colors: Vec<(f64, Color)>) -> Gradient {
Gradient::Linear(start, end, colors)
}
/// Create a radial gradient.
pub fn radial(start: (f64, f64), start_r: f64,
end: (f64, f64), end_r: f64,
colors: Vec<(f64, Color)>) -> Gradient {
Gradient::Radial(start, start_r, end, end_r, colors)
}
/// Built-in colors.
///
/// These colors come from the
/// [Tango palette](http://tango.freedesktop.org/Tango_Icon_Theme_Guidelines) which provides
/// aesthetically reasonable defaults for colors. Each color also comes with a light and dark
/// version.
/// Scarlet Red - Light - #EF2929
pub fn light_red() -> Color { rgb_bytes(239 , 41 , 41 ) }
/// Scarlet Red - Regular - #CC0000
pub fn red() -> Color { rgb_bytes(204 , 0 , 0 ) }
/// Scarlet Red - Dark - #A40000
pub fn dark_red() -> Color { rgb_bytes(164 , 0 , 0 ) }
/// Orange - Light - #FCAF3E
pub fn light_orange() -> Color { rgb_bytes(252 , 175 , 62 ) }
/// Orange - Regular - #F57900
pub fn orange() -> Color { rgb_bytes(245 , 121 , 0 ) }
/// Orange - Dark - #CE5C00
pub fn dark_orange() -> Color { rgb_bytes(206 , 92 , 0 ) }
/// Butter - Light - #FCE94F
pub fn light_yellow() -> Color { rgb_bytes(252 , 233 , 79 ) }
/// Butter - Regular - #EDD400
pub fn yellow() -> Color { rgb_bytes(237 , 212 , 0 ) }
/// Butter - Dark - #C4A000
pub fn dark_yellow() -> Color { rgb_bytes(196 , 160 , 0 ) }
/// Chameleon - Light - #8AE234
pub fn light_green() -> Color { rgb_bytes(138 , 226 , 52 ) }
/// Chameleon - Regular - #73D216
pub fn green() -> Color { rgb_bytes(115 , 210 , 22 ) }
/// Chameleon - Dark - #4E9A06
pub fn dark_green() -> Color { rgb_bytes(78 , 154 , 6 ) }
/// Sky Blue - Light - #729FCF
pub fn light_blue() -> Color { rgb_bytes(114 , 159 , 207) }
/// Sky Blue - Regular - #3465A4
pub fn blue() -> Color { rgb_bytes(52 , 101 , 164) }
/// Sky Blue - Dark - #204A87
pub fn dark_blue() -> Color { rgb_bytes(32 , 74 , 135) }
/// Plum - Light - #AD7FA8
pub fn light_purple() -> Color { rgb_bytes(173 , 127 , 168) }
/// Plum - Regular - #75507B
pub fn purple() -> Color { rgb_bytes(117 , 80 , 123) }
/// Plum - Dark - #5C3566
pub fn dark_purple() -> Color { rgb_bytes(92 , 53 , 102) }
/// Chocolate - Light - #E9B96E
pub fn light_brown() -> Color { rgb_bytes(233 , 185 , 110) }
/// Chocolate - Regular - #C17D11
pub fn brown() -> Color { rgb_bytes(193 , 125 , 17 ) }
/// Chocolate - Dark - #8F5902
pub fn dark_brown() -> Color { rgb_bytes(143 , 89 , 2 ) }
/// Straight Black.
pub fn black() -> Color { rgb_bytes(0 , 0 , 0 ) }
/// Straight White.
pub fn white() -> Color { rgb_bytes(255 , 255 , 255) }
/// Aluminium - Light - #EEEEEC
pub fn light_gray() -> Color { rgb_bytes(238 , 238 , 236) }
/// Aluminium - Regular - #D3D7CF
pub fn gray() -> Color { rgb_bytes(211 , 215 , 207) }
/// Aluminium - Dark - #BABDB6
pub fn dark_gray() -> Color { rgb_bytes(186 , 189 , 182) }
/// Aluminium - Light - #EEEEEC
pub fn light_grey() -> Color { rgb_bytes(238 , 238 , 236) }
/// Aluminium - Regular - #D3D7CF
pub fn grey() -> Color { rgb_bytes(211 , 215 , 207) }
/// Aluminium - Dark - #BABDB6
pub fn dark_grey() -> Color { rgb_bytes(186 , 189 , 182) }
/// Charcoal - Light - #888A85
pub fn light_charcoal() -> Color { rgb_bytes(136 , 138 , 133) }
/// Charcoal - Regular - #555753
pub fn charcoal() -> Color { rgb_bytes(85 , 87 , 83 ) }
/// Charcoal - Dark - #2E3436
pub fn dark_charcoal() -> Color { rgb_bytes(46 , 52 , 54 ) }
/// Types that can be colored.
pub trait Colorable: Sized {
/// Set the color of the widget.
fn color(self, color: Color) -> Self;
/// Set the color of the widget from rgba values.
fn rgba(self, r: f32, g: f32, b: f32, a: f32) -> Self {
self.color(rgba(r, g, b, a))
}
/// Set the color of the widget from rgb values.
fn rgb(self, r: f32, g: f32, b: f32) -> Self {
self.color(rgb(r, g, b))
}
/// Set the color of the widget from hsla values.
fn
|
hsla
|
identifier_name
|
|
color.rs
|
, 1.0-p, 1.0)
}
/// Construct a random color.
pub fn random() -> Color {
rgb(::rand::random(), ::rand::random(), ::rand::random())
}
/// Clamp a f32 between 0f32 and 1f32.
fn clampf32(f: f32) -> f32 {
if f < 0.0 { 0.0 } else if f > 1.0 { 1.0 } else { f }
}
impl Color {
/// Produce a complementary color. The two colors will accent each other. This is the same as
/// rotating the hue by 180 degrees.
pub fn complement(self) -> Color {
match self {
Color::Hsla(h, s, l, a) => hsla(h + degrees(180.0), s, l, a),
Color::Rgba(r, g, b, a) => {
let (h, s, l) = rgb_to_hsl(r, g, b);
hsla(h + degrees(180.0), s, l, a)
},
}
}
/// Calculate and return the luminance of the Color.
pub fn luminance(&self) -> f32 {
match *self {
Color::Rgba(r, g, b, _) => (r + g + b) / 3.0,
Color::Hsla(_, _, l, _) => l,
}
}
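// Note: this is the arithmetic mean of the RGB channels (or the HSL
// lightness), not a perceptually weighted luma such as Rec. 601/709.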
/// Return either black or white, depending which contrasts the Color the most. This will be
/// useful for determining a readable color for text on any given background Color.
pub fn plain_contrast(self) -> Color {
if self.luminance() > 0.5 { black() } else { white() }
}
/// Extract the components of a color in the HSL format.
pub fn to_hsl(self) -> Hsla {
match self {
Color::Hsla(h, s, l, a) => Hsla(h, s, l, a),
Color::Rgba(r, g, b, a) => {
let (h, s, l) = rgb_to_hsl(r, g, b);
Hsla(h, s, l, a)
},
}
}
/// Extract the components of a color in the RGB format.
pub fn to_rgb(self) -> Rgba {
match self {
Color::Rgba(r, g, b, a) => Rgba(r, g, b, a),
Color::Hsla(h, s, l, a) => {
let (r, g, b) = hsl_to_rgb(h, s, l);
Rgba(r, g, b, a)
},
}
}
/// Extract the components of a color in the RGB format within a fixed-size array.
pub fn to_fsa(self) -> [f32; 4] {
let Rgba(r, g, b, a) = self.to_rgb();
[r, g, b, a]
}
/// Same as `to_fsa`, except r, g, b and a are represented in byte form.
pub fn to_byte_fsa(self) -> [u8; 4] {
let Rgba(r, g, b, a) = self.to_rgb();
[f32_to_byte(r), f32_to_byte(g), f32_to_byte(b), f32_to_byte(a)]
}
// /// Return the hex representation of this color in the format #RRGGBBAA
// /// e.g. `Color::Rgba(1.0, 0.0, 0.5, 1.0).to_hex() == "#FF007FFF"`
// pub fn to_hex(self) -> String {
// let vals = self.to_byte_fsa();
// let hex = vals.to_hex().to_ascii_uppercase();
// format!("#{}", &hex)
// }
/// Return the same color but with the given luminance.
pub fn with_luminance(self, l: f32) -> Color {
let Hsla(h, s, _, a) = self.to_hsl();
Color::Hsla(h, s, l, a)
}
/// Return the same color but with the alpha multiplied by the given alpha.
pub fn alpha(self, alpha: f32) -> Color {
match self {
Color::Rgba(r, g, b, a) => Color::Rgba(r, g, b, a * alpha),
Color::Hsla(h, s, l, a) => Color::Hsla(h, s, l, a * alpha),
}
}
/// Return the same color but with the given alpha.
pub fn with_alpha(self, a: f32) -> Color {
match self {
Color::Rgba(r, g, b, _) => Color::Rgba(r, g, b, a),
Color::Hsla(h, s, l, _) => Color::Hsla(h, s, l, a),
}
}
/// Return a highlighted version of the current Color.
pub fn highlighted(self) -> Color {
let luminance = self.luminance();
let Rgba(r, g, b, a) = self.to_rgb();
let (r, g, b) = {
if luminance > 0.8 { (r - 0.2, g - 0.2, b - 0.2) }
else if luminance < 0.2 { (r + 0.2, g + 0.2, b + 0.2) }
else {
(clampf32((1.0 - r) * 0.5 * r + r),
clampf32((1.0 - g) * 0.1 * g + g),
clampf32((1.0 - b) * 0.1 * b + b))
}
};
let a = clampf32((1.0 - a) * 0.5 + a);
rgba(r, g, b, a)
}
/// Return a clicked version of the current Color.
pub fn clicked(&self) -> Color {
let luminance = self.luminance();
let Rgba(r, g, b, a) = self.to_rgb();
let (r, g, b) = {
if luminance > 0.8 { (r , g - 0.2, b - 0.2) }
else if luminance < 0.2
|
else {
(clampf32((1.0 - r) * 0.75 + r),
clampf32((1.0 - g) * 0.25 + g),
clampf32((1.0 - b) * 0.25 + b))
}
};
let a = clampf32((1.0 - a) * 0.75 + a);
rgba(r, g, b, a)
}
/// Return the Color's invert.
pub fn invert(self) -> Color {
let Rgba(r, g, b, a) = self.to_rgb();
rgba((r - 1.0).abs(), (g - 1.0).abs(), (b - 1.0).abs(), a)
}
/// Return the red value.
pub fn red(&self) -> f32 {
let Rgba(r, _, _, _) = self.to_rgb();
r
}
/// Return the green value.
pub fn green(&self) -> f32 {
let Rgba(_, g, _, _) = self.to_rgb();
g
}
/// Return the blue value.
pub fn blue(&self) -> f32 {
let Rgba(_, _, b, _) = self.to_rgb();
b
}
/// Set the red value.
pub fn set_red(&mut self, r: f32) {
let Rgba(_, g, b, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
/// Set the green value.
pub fn set_green(&mut self, g: f32) {
let Rgba(r, _, b, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
/// Set the blue value.
pub fn set_blue(&mut self, b: f32) {
let Rgba(r, g, _, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
}
/// The parts of HSL along with an alpha for transparency.
#[derive(Copy, Clone, Debug)]
pub struct Hsla(pub f32, pub f32, pub f32, pub f32);
/// The parts of RGB along with an alpha for transparency.
#[derive(Copy, Clone, Debug)]
pub struct Rgba(pub f32, pub f32, pub f32, pub f32);
/// Convert an f32 color to a byte.
#[inline]
pub fn f32_to_byte(c: f32) -> u8
|
{ (r + 0.4, g + 0.2, b + 0.2) }
|
conditional_block
|
color.rs
|
, 1.0-p, 1.0)
}
/// Construct a random color.
pub fn random() -> Color {
rgb(::rand::random(), ::rand::random(), ::rand::random())
}
/// Clamp a f32 between 0f32 and 1f32.
fn clampf32(f: f32) -> f32 {
if f < 0.0 { 0.0 } else if f > 1.0 { 1.0 } else { f }
}
impl Color {
/// Produce a complementary color. The two colors will accent each other. This is the same as
/// rotating the hue by 180 degrees.
pub fn complement(self) -> Color {
match self {
Color::Hsla(h, s, l, a) => hsla(h + degrees(180.0), s, l, a),
Color::Rgba(r, g, b, a) => {
let (h, s, l) = rgb_to_hsl(r, g, b);
hsla(h + degrees(180.0), s, l, a)
},
}
}
/// Calculate and return the luminance of the Color.
pub fn luminance(&self) -> f32
|
/// Return either black or white, depending which contrasts the Color the most. This will be
/// useful for determining a readable color for text on any given background Color.
pub fn plain_contrast(self) -> Color {
if self.luminance() > 0.5 { black() } else { white() }
}
/// Extract the components of a color in the HSL format.
pub fn to_hsl(self) -> Hsla {
match self {
Color::Hsla(h, s, l, a) => Hsla(h, s, l, a),
Color::Rgba(r, g, b, a) => {
let (h, s, l) = rgb_to_hsl(r, g, b);
Hsla(h, s, l, a)
},
}
}
/// Extract the components of a color in the RGB format.
pub fn to_rgb(self) -> Rgba {
match self {
Color::Rgba(r, g, b, a) => Rgba(r, g, b, a),
Color::Hsla(h, s, l, a) => {
let (r, g, b) = hsl_to_rgb(h, s, l);
Rgba(r, g, b, a)
},
}
}
/// Extract the components of a color in the RGB format within a fixed-size array.
pub fn to_fsa(self) -> [f32; 4] {
let Rgba(r, g, b, a) = self.to_rgb();
[r, g, b, a]
}
/// Same as `to_fsa`, except r, g, b and a are represented in byte form.
pub fn to_byte_fsa(self) -> [u8; 4] {
let Rgba(r, g, b, a) = self.to_rgb();
[f32_to_byte(r), f32_to_byte(g), f32_to_byte(b), f32_to_byte(a)]
}
// /// Return the hex representation of this color in the format #RRGGBBAA
// /// e.g. `Color(1.0, 0.0, 5.0, 1.0) == "#FF0080FF"`
// pub fn to_hex(self) -> String {
// let vals = self.to_byte_fsa();
// let hex = vals.to_hex().to_ascii_uppercase();
// format!("#{}", &hex)
// }
/// Return the same color but with the given luminance.
pub fn with_luminance(self, l: f32) -> Color {
let Hsla(h, s, _, a) = self.to_hsl();
Color::Hsla(h, s, l, a)
}
/// Return the same color but with the alpha multiplied by the given alpha.
pub fn alpha(self, alpha: f32) -> Color {
match self {
Color::Rgba(r, g, b, a) => Color::Rgba(r, g, b, a * alpha),
Color::Hsla(h, s, l, a) => Color::Hsla(h, s, l, a * alpha),
}
}
/// Return the same color but with the given alpha.
pub fn with_alpha(self, a: f32) -> Color {
match self {
Color::Rgba(r, g, b, _) => Color::Rgba(r, g, b, a),
Color::Hsla(h, s, l, _) => Color::Hsla(h, s, l, a),
}
}
/// Return a highlighted version of the current Color.
pub fn highlighted(self) -> Color {
let luminance = self.luminance();
let Rgba(r, g, b, a) = self.to_rgb();
let (r, g, b) = {
if luminance > 0.8 { (r - 0.2, g - 0.2, b - 0.2) }
else if luminance < 0.2 { (r + 0.2, g + 0.2, b + 0.2) }
else {
(clampf32((1.0 - r) * 0.5 * r + r),
clampf32((1.0 - g) * 0.1 * g + g),
clampf32((1.0 - b) * 0.1 * b + b))
}
};
let a = clampf32((1.0 - a) * 0.5 + a);
rgba(r, g, b, a)
}
/// Return a clicked version of the current Color.
pub fn clicked(&self) -> Color {
let luminance = self.luminance();
let Rgba(r, g, b, a) = self.to_rgb();
let (r, g, b) = {
if luminance > 0.8 { (r , g - 0.2, b - 0.2) }
else if luminance < 0.2 { (r + 0.4, g + 0.2, b + 0.2) }
else {
(clampf32((1.0 - r) * 0.75 + r),
clampf32((1.0 - g) * 0.25 + g),
clampf32((1.0 - b) * 0.25 + b))
}
};
let a = clampf32((1.0 - a) * 0.75 + a);
rgba(r, g, b, a)
}
/// Return the Color's invert.
pub fn invert(self) -> Color {
let Rgba(r, g, b, a) = self.to_rgb();
rgba((r - 1.0).abs(), (g - 1.0).abs(), (b - 1.0).abs(), a)
}
/// Return the red value.
pub fn red(&self) -> f32 {
let Rgba(r, _, _, _) = self.to_rgb();
r
}
/// Return the green value.
pub fn green(&self) -> f32 {
let Rgba(_, g, _, _) = self.to_rgb();
g
}
/// Return the blue value.
pub fn blue(&self) -> f32 {
let Rgba(_, _, b, _) = self.to_rgb();
b
}
/// Set the red value.
pub fn set_red(&mut self, r: f32) {
let Rgba(_, g, b, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
/// Set the green value.
pub fn set_green(&mut self, g: f32) {
let Rgba(r, _, b, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
/// Set the blue value.
pub fn set_blue(&mut self, b: f32) {
let Rgba(r, g, _, a) = self.to_rgb();
*self = rgba(r, g, b, a);
}
}
/// The parts of HSL along with an alpha for transparency.
#[derive(Copy, Clone, Debug)]
pub struct Hsla(pub f32, pub f32, pub f32, pub f32);
/// The parts of RGB along with an alpha for transparency.
#[derive(Copy, Clone, Debug)]
pub struct Rgba(pub f32, pub f32, pub f32, pub f32);
/// Convert an f32 color to a byte.
#[inline]
pub fn f32_to_byte(c: f32) -> u8
|
{
match *self {
Color::Rgba(r, g, b, _) => (r + g + b) / 3.0,
Color::Hsla(_, _, l, _) => l,
}
}
|
identifier_body
|
lib.rs
|
//! either specific types or type constraints.
//! - Functions are first-class types. Functions can have type and/or const params.
//! Const params always specify tuple length.
//! - Type params can be constrained. Constraints are expressed via [`Constraint`]s.
//! As an example, [`Num`] has a few known constraints, such as type [`Linearity`].
//!
//! [`Constraint`]: crate::arith::Constraint
//! [`Num`]: crate::arith::Num
//! [`Linearity`]: crate::arith::Linearity
//!
//! # Inference rules
//!
//! Inference mostly corresponds to [Hindley–Milner typing rules]. It does not require
//! type annotations, but utilizes them if present. Type unification (encapsulated in
//! [`Substitutions`]) is performed at each variable use or assignment. Variable uses include
//! function calls and unary and binary ops; the op behavior is customizable
//! via [`TypeArithmetic`].
//!
//! Whenever possible, the most generic type satisfying the constraints is used. In particular,
//! this means that all type / length variables not resolved at the function definition site become
//! parameters of the function. Likewise, each function call instantiates a separate instance
//! of a generic function; type / length params for each call are assigned independently.
//! See the example below for more details.
//!
//! [Hindley–Milner typing rules]: https://en.wikipedia.org/wiki/Hindley%E2%80%93Milner_type_system#Typing_rules
//! [`Substitutions`]: crate::arith::Substitutions
//! [`TypeArithmetic`]: crate::arith::TypeArithmetic
//!
//! # Operations
//!
//! ## Field access
//!
//! See [`Tuple` docs](Tuple#indexing) for discussion of indexing expressions, such as `xs.0`,
//! and [`Object` docs](Object) for discussion of field access, such as `point.x`.
//!
//! ## Type casts
//!
//! [A type cast](arithmetic_parser::Expr::TypeCast) is equivalent to introducing a new var
//! with the specified annotation, assigning to it and returning the new var. That is,
//! `x as Bool` is equivalent to `{ _x: Bool = x; _x }`. As such, casts are safe (cannot be used
//! to transmute the type arbitrarily), unless `any` type is involved.
//!
//! # Examples
//!
//! ```
//! use arithmetic_parser::grammars::{F32Grammar, Parse};
//! use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//!
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum = |xs| xs.fold(0, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! // Evaluate `code` to get the inferred `sum` function signature.
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(env["sum"].to_string(), "([Num; N]) -> Num");
//! # Ok(())
//! # }
//! ```
//!
//! Defining and using generic functions:
//!
//! ```
//! # use arithmetic_parser::grammars::{F32Grammar, Parse};
//! # use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum_with = |xs, init| xs.fold(init, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(
//! env["sum_with"].to_string(),
//! "for<'T: Ops> (['T; N], 'T) -> 'T"
//! );
//! // Note that `sum_with` is parametric by the element of the slice
//! // (for which the linearity constraint is applied based on the arg usage)
//! // *and* by its length.
//!
//! let usage_code = r#"
//! num_sum: Num = (1, 2, 3).sum_with(0);
//! tuple_sum: (Num, Num) = ((1, 2), (3, 4)).sum_with((0, 0));
//! "#;
//! let ast = Annotated::<F32Grammar>::parse_statements(usage_code)?;
//! // Both lengths and element types differ in these invocations,
//! // but it works fine since they are treated independently.
//! env.process_statements(&ast)?;
//! # Ok(())
//! # }
//! ```
//!
//! [`arithmetic-parser`]: https://crates.io/crates/arithmetic-parser
//! [`Grammar`]: arithmetic_parser::grammars::Grammar
//! [`arithmetic-eval`]: https://crates.io/crates/arithmetic-eval
#![doc(html_root_url = "https://docs.rs/arithmetic-typing/0.3.0")]
#![warn(missing_docs, missing_debug_implementations)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
clippy::missing_errors_doc,
clippy::must_use_candidate,
clippy::module_name_repetitions,
clippy::similar_names, // too many false positives because of lhs / rhs
clippy::option_if_let_else // too many false positives
)]
use std::{fmt, marker::PhantomData, str::FromStr};
use arithmetic_parser::{
grammars::{Features, Grammar, Parse, ParseLiteral},
InputSpan, NomResult,
};
pub mod arith;
pub mod ast;
pub mod defs;
mod env;
pub mod error;
mod types;
pub mod visit;
pub use self::{
env::TypeEnvironment,
types::{
DynConstraints, FnWithConstraints, Function, FunctionBuilder, LengthVar, Object, Slice,
Tuple, TupleIndex, TupleLen, Type, TypeVar, UnknownLen,
},
};
use self::{arith::ConstraintSet, ast::TypeAst};
/// Primitive types in a certain type system.
///
/// More complex types, like [`Type`] and [`Function`], are defined with a type param
/// which determines the primitive type(s). This type param must implement [`PrimitiveType`].
///
/// [`TypeArithmetic`] has a `PrimitiveType` impl as an associated type, and one of the required
/// operations of this trait is to be able to infer type for literal values from a [`Grammar`].
///
/// # Implementation Requirements
///
/// - [`Display`](fmt::Display) and [`FromStr`] implementations must be consistent; i.e.,
/// `Display` should produce output parseable by `FromStr`. `Display` will be used in
/// `Display` impls for `Type` etc. `FromStr` will be used to read type annotations.
/// - `Display` presentations must be identifiers, such as `Num`.
/// - While not required, a `PrimitiveType` should usually contain a Boolean type and
/// implement [`WithBoolean`]. This allows reusing [`BoolArithmetic`] and/or [`NumArithmetic`]
/// as building blocks for your [`TypeArithmetic`].
///
/// [`Grammar`]: arithmetic_parser::grammars::Grammar
/// [`TypeArithmetic`]: crate::arith::TypeArithmetic
/// [`WithBoolean`]: crate::arith::WithBoolean
/// [`BoolArithmetic`]: crate::arith::BoolArithmetic
/// [`NumArithmetic`]: crate::arith::NumArithmetic
///
/// # Examples
///
/// ```
/// # use std::{fmt, str::FromStr};
/// use arithmetic_typing::PrimitiveType;
///
/// #[derive(Debug, Clone, Copy, PartialEq)]
/// enum NumOrBytes {
/// /// Numeric value, such as 1.
/// Num,
/// /// Bytes value, such as 0x1234 or "hello".
/// Bytes,
/// }
///
/// // `NumOrBytes` should correspond to a "value" type in the `Grammar`,
/// // for example:
/// enum NumOrBytesValue {
/// Num(f64),
/// Bytes(Vec<u8>),
/// }
///
/// impl fmt::Display for NumOrBytes {
/// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// match self {
/// Self::Num => formatter.write_str("Num"),
/// Self::Bytes => formatter.write_str("Bytes"),
/// }
/// }
/// }
///
/// impl FromStr for NumOrBytes {
/// type Err = anyhow::Error;
///
/// fn from_str(s: &str) -> Result<Self, Self::Err> {
/// match s {
/// "Num" => Ok(Self::Num),
/// "Bytes" => Ok(Self::Bytes),
/// _ => Err(anyhow::anyhow!("expected `Num` or `Bytes`")),
/// }
/// }
/// }
///
/// impl PrimitiveType for NumOrBytes {}
/// ```
pub trait PrimitiveType:
Clone + PartialEq + fmt::Debug + fmt::Display + FromStr + Send + Sync + 'static
{
/// Returns well-known constraints for this type. These constraints are used
/// in standalone parsing of type signatures.
///
/// The default implementation returns an empty set.
fn well_known_constraints() -> ConstraintSet<Self> {
|
ConstraintSet::default()
}
}
//
|
identifier_body
|
|
lib.rs
|
that all type / length variables not resolved at the function definition site become
//! parameters of the function. Likewise, each function call instantiates a separate instance
//! of a generic function; type / length params for each call are assigned independently.
//! See the example below for more details.
//!
//! [Hindley–Milner typing rules]: https://en.wikipedia.org/wiki/Hindley%E2%80%93Milner_type_system#Typing_rules
//! [`Substitutions`]: crate::arith::Substitutions
//! [`TypeArithmetic`]: crate::arith::TypeArithmetic
//!
//! # Operations
//!
//! ## Field access
//!
//! See [`Tuple` docs](Tuple#indexing) for discussion of indexing expressions, such as `xs.0`,
//! and [`Object` docs](Object) for discussion of field access, such as `point.x`.
//!
//! ## Type casts
//!
//! [A type cast](arithmetic_parser::Expr::TypeCast) is equivalent to introducing a new var
//! with the specified annotation, assigning to it and returning the new var. That is,
//! `x as Bool` is equivalent to `{ _x: Bool = x; _x }`. As such, casts are safe (cannot be used
//! to transmute the type arbitrarily), unless `any` type is involved.
//!
//! # Examples
//!
//! ```
//! use arithmetic_parser::grammars::{F32Grammar, Parse};
//! use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//!
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum = |xs| xs.fold(0, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! // Evaluate `code` to get the inferred `sum` function signature.
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(env["sum"].to_string(), "([Num; N]) -> Num");
//! # Ok(())
//! # }
//! ```
//!
//! Defining and using generic functions:
//!
//! ```
//! # use arithmetic_parser::grammars::{F32Grammar, Parse};
//! # use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum_with = |xs, init| xs.fold(init, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(
//! env["sum_with"].to_string(),
//! "for<'T: Ops> (['T; N], 'T) -> 'T"
//! );
//! // Note that `sum_with` is parametric by the element of the slice
//! // (for which the linearity constraint is applied based on the arg usage)
//! // *and* by its length.
//!
//! let usage_code = r#"
//! num_sum: Num = (1, 2, 3).sum_with(0);
//! tuple_sum: (Num, Num) = ((1, 2), (3, 4)).sum_with((0, 0));
//! "#;
//! let ast = Annotated::<F32Grammar>::parse_statements(usage_code)?;
//! // Both lengths and element types differ in these invocations,
//! // but it works fine since they are treated independently.
//! env.process_statements(&ast)?;
//! # Ok(())
//! # }
//! ```
//!
//! [`arithmetic-parser`]: https://crates.io/crates/arithmetic-parser
//! [`Grammar`]: arithmetic_parser::grammars::Grammar
//! [`arithmetic-eval`]: https://crates.io/crates/arithmetic-eval
#![doc(html_root_url = "https://docs.rs/arithmetic-typing/0.3.0")]
#![warn(missing_docs, missing_debug_implementations)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
clippy::missing_errors_doc,
clippy::must_use_candidate,
clippy::module_name_repetitions,
clippy::similar_names, // too many false positives because of lhs / rhs
clippy::option_if_let_else // too many false positives
)]
use std::{fmt, marker::PhantomData, str::FromStr};
use arithmetic_parser::{
grammars::{Features, Grammar, Parse, ParseLiteral},
InputSpan, NomResult,
};
pub mod arith;
pub mod ast;
pub mod defs;
mod env;
pub mod error;
mod types;
pub mod visit;
pub use self::{
env::TypeEnvironment,
types::{
DynConstraints, FnWithConstraints, Function, FunctionBuilder, LengthVar, Object, Slice,
Tuple, TupleIndex, TupleLen, Type, TypeVar, UnknownLen,
},
};
use self::{arith::ConstraintSet, ast::TypeAst};
/// Primitive types in a certain type system.
///
/// More complex types, like [`Type`] and [`Function`], are defined with a type param
/// which determines the primitive type(s). This type param must implement [`PrimitiveType`].
///
/// [`TypeArithmetic`] has a `PrimitiveType` impl as an associated type, and one of the required
/// operations of this trait is to be able to infer type for literal values from a [`Grammar`].
///
/// # Implementation Requirements
///
/// - [`Display`](fmt::Display) and [`FromStr`] implementations must be consistent; i.e.,
/// `Display` should produce output parseable by `FromStr`. `Display` will be used in
/// `Display` impls for `Type` etc. `FromStr` will be used to read type annotations.
/// - `Display` presentations must be identifiers, such as `Num`.
/// - While not required, a `PrimitiveType` should usually contain a Boolean type and
/// implement [`WithBoolean`]. This allows reusing [`BoolArithmetic`] and/or [`NumArithmetic`]
/// as building blocks for your [`TypeArithmetic`].
///
/// [`Grammar`]: arithmetic_parser::grammars::Grammar
/// [`TypeArithmetic`]: crate::arith::TypeArithmetic
/// [`WithBoolean`]: crate::arith::WithBoolean
/// [`BoolArithmetic`]: crate::arith::BoolArithmetic
/// [`NumArithmetic`]: crate::arith::NumArithmetic
///
/// # Examples
///
/// ```
/// # use std::{fmt, str::FromStr};
/// use arithmetic_typing::PrimitiveType;
///
/// #[derive(Debug, Clone, Copy, PartialEq)]
/// enum NumOrBytes {
/// /// Numeric value, such as 1.
/// Num,
/// /// Bytes value, such as 0x1234 or "hello".
/// Bytes,
/// }
///
/// // `NumOrBytes` should correspond to a "value" type in the `Grammar`,
/// // for example:
/// enum NumOrBytesValue {
/// Num(f64),
/// Bytes(Vec<u8>),
/// }
///
/// impl fmt::Display for NumOrBytes {
/// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// match self {
/// Self::Num => formatter.write_str("Num"),
/// Self::Bytes => formatter.write_str("Bytes"),
/// }
/// }
/// }
///
/// impl FromStr for NumOrBytes {
/// type Err = anyhow::Error;
///
/// fn from_str(s: &str) -> Result<Self, Self::Err> {
/// match s {
/// "Num" => Ok(Self::Num),
/// "Bytes" => Ok(Self::Bytes),
/// _ => Err(anyhow::anyhow!("expected `Num` or `Bytes`")),
/// }
/// }
/// }
///
/// impl PrimitiveType for NumOrBytes {}
/// ```
pub trait PrimitiveType:
Clone + PartialEq + fmt::Debug + fmt::Display + FromStr + Send + Sync + 'static
{
/// Returns well-known constraints for this type. These constraints are used
/// in standalone parsing of type signatures.
///
/// The default implementation returns an empty set.
fn well_known_constraints() -> ConstraintSet<Self> {
ConstraintSet::default()
}
}
/// Grammar with support for type annotations. Works as a decorator.
///
/// # Examples
///
/// ```
/// use arithmetic_parser::grammars::{F32Grammar, Parse};
/// use arithmetic_typing::Annotated;
///
/// # fn main() -> anyhow::Result<()> {
/// let code = "x: [Num] = (1, 2, 3);";
/// let ast = Annotated::<F32Grammar>::parse_statements(code)?;
/// # assert_eq!(ast.statements.len(), 1);
/// # Ok(())
/// # }
/// ```
#[derive(Debug)]
pub struct Annotated<T>(PhantomData<T>);
impl<T: ParseLiteral> ParseLiteral for Annotated<T> {
type Lit = T::Lit;
fn parse_literal(input: InputSpan<'_>) -> NomResult<'_, Self::Lit> {
<T as ParseLiteral>::parse_literal(input)
}
}
impl<'a, T: ParseLiteral> Grammar<'a> for Annotated<T> {
type Type = TypeAst<'a>;
fn parse_
|
type(input
|
identifier_name
|
|
lib.rs
|
arith::Num
//! [`Linearity`]: crate::arith::Linearity
//!
//! # Inference rules
//!
//! Inference mostly corresponds to [Hindley–Milner typing rules]. It does not require
//! type annotations, but utilizes them if present. Type unification (encapsulated in
//! [`Substitutions`]) is performed at each variable use or assignment. Variable uses include
//! function calls and unary and binary ops; the op behavior is customizable
//! via [`TypeArithmetic`].
//!
//! Whenever possible, the most generic type satisfying the constraints is used. In particular,
//! this means that all type / length variables not resolved at the function definition site become
//! parameters of the function. Likewise, each function call instantiates a separate instance
//! of a generic function; type / length params for each call are assigned independently.
//! See the example below for more details.
//!
//! [Hindley–Milner typing rules]: https://en.wikipedia.org/wiki/Hindley%E2%80%93Milner_type_system#Typing_rules
//! [`Substitutions`]: crate::arith::Substitutions
//! [`TypeArithmetic`]: crate::arith::TypeArithmetic
//!
//! # Operations
//!
//! ## Field access
//!
//! See [`Tuple` docs](Tuple#indexing) for discussion of indexing expressions, such as `xs.0`,
//! and [`Object` docs](Object) for discussion of field access, such as `point.x`.
//!
//! ## Type casts
//!
//! [A type cast](arithmetic_parser::Expr::TypeCast) is equivalent to introducing a new var
//! with the specified annotation, assigning to it and returning the new var. That is,
//! `x as Bool` is equivalent to `{ _x: Bool = x; _x }`. As such, casts are safe (cannot be used
//! to transmute the type arbitrarily), unless `any` type is involved.
//!
//! # Examples
//!
//! ```
//! use arithmetic_parser::grammars::{F32Grammar, Parse};
//! use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//!
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum = |xs| xs.fold(0, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! // Evaluate `code` to get the inferred `sum` function signature.
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(env["sum"].to_string(), "([Num; N]) -> Num");
//! # Ok(())
//! # }
//! ```
//!
//! Defining and using generic functions:
//!
//! ```
//! # use arithmetic_parser::grammars::{F32Grammar, Parse};
//! # use arithmetic_typing::{defs::Prelude, Annotated, TypeEnvironment, Type};
//! # fn main() -> anyhow::Result<()> {
//! let code = "sum_with = |xs, init| xs.fold(init, |acc, x| acc + x);";
//! let ast = Annotated::<F32Grammar>::parse_statements(code)?;
//!
//! let mut env = TypeEnvironment::new();
//! env.insert("fold", Prelude::Fold);
//!
//! let output_type = env.process_statements(&ast)?;
//! assert!(output_type.is_void());
//! assert_eq!(
//! env["sum_with"].to_string(),
//! "for<'T: Ops> (['T; N], 'T) -> 'T"
//! );
//! // Note that `sum_with` is parametric by the element of the slice
//! // (for which the linearity constraint is applied based on the arg usage)
//! // *and* by its length.
//!
//! let usage_code = r#"
//! num_sum: Num = (1, 2, 3).sum_with(0);
//! tuple_sum: (Num, Num) = ((1, 2), (3, 4)).sum_with((0, 0));
//! "#;
//! let ast = Annotated::<F32Grammar>::parse_statements(usage_code)?;
//! // Both lengths and element types differ in these invocations,
//! // but it works fine since they are treated independently.
//! env.process_statements(&ast)?;
//! # Ok(())
//! # }
//! ```
//!
//! [`arithmetic-parser`]: https://crates.io/crates/arithmetic-parser
//! [`Grammar`]: arithmetic_parser::grammars::Grammar
//! [`arithmetic-eval`]: https://crates.io/crates/arithmetic-eval
#![doc(html_root_url = "https://docs.rs/arithmetic-typing/0.3.0")]
#![warn(missing_docs, missing_debug_implementations)]
#![warn(clippy::all, clippy::pedantic)]
#![allow(
clippy::missing_errors_doc,
clippy::must_use_candidate,
clippy::module_name_repetitions,
clippy::similar_names, // too many false positives because of lhs / rhs
clippy::option_if_let_else // too many false positives
)]
use std::{fmt, marker::PhantomData, str::FromStr};
use arithmetic_parser::{
grammars::{Features, Grammar, Parse, ParseLiteral},
InputSpan, NomResult,
};
pub mod arith;
pub mod ast;
pub mod defs;
mod env;
pub mod error;
mod types;
pub mod visit;
pub use self::{
env::TypeEnvironment,
types::{
DynConstraints, FnWithConstraints, Function, FunctionBuilder, LengthVar, Object, Slice,
Tuple, TupleIndex, TupleLen, Type, TypeVar, UnknownLen,
},
};
use self::{arith::ConstraintSet, ast::TypeAst};
/// Primitive types in a certain type system.
///
/// More complex types, like [`Type`] and [`Function`], are defined with a type param
/// which determines the primitive type(s). This type param must implement [`PrimitiveType`].
///
/// [`TypeArithmetic`] has a `PrimitiveType` impl as an associated type, and one of the required
/// operations of this trait is to be able to infer type for literal values from a [`Grammar`].
///
/// # Implementation Requirements
///
/// - [`Display`](fmt::Display) and [`FromStr`] implementations must be consistent; i.e.,
/// `Display` should produce output parseable by `FromStr`. `Display` will be used in
/// `Display` impls for `Type` etc. `FromStr` will be used to read type annotations.
/// - `Display` presentations must be identifiers, such as `Num`.
/// - While not required, a `PrimitiveType` should usually contain a Boolean type and
/// implement [`WithBoolean`]. This allows reusing [`BoolArithmetic`] and/or [`NumArithmetic`]
/// as building blocks for your [`TypeArithmetic`].
///
/// [`Grammar`]: arithmetic_parser::grammars::Grammar
/// [`TypeArithmetic`]: crate::arith::TypeArithmetic
/// [`WithBoolean`]: crate::arith::WithBoolean
/// [`BoolArithmetic`]: crate::arith::BoolArithmetic
/// [`NumArithmetic`]: crate::arith::NumArithmetic
///
/// # Examples
///
/// ```
/// # use std::{fmt, str::FromStr};
/// use arithmetic_typing::PrimitiveType;
///
/// #[derive(Debug, Clone, Copy, PartialEq)]
/// enum NumOrBytes {
/// /// Numeric value, such as 1.
/// Num,
/// /// Bytes value, such as 0x1234 or "hello".
/// Bytes,
/// }
///
/// // `NumOrBytes` should correspond to a "value" type in the `Grammar`,
/// // for example:
/// enum NumOrBytesValue {
/// Num(f64),
/// Bytes(Vec<u8>),
/// }
///
/// impl fmt::Display for NumOrBytes {
/// fn fmt(&self, formatter: &mut fmt::Formatter<'_>) -> fmt::Result {
/// match self {
/// Self::Num => formatter.write_str("Num"),
/// Self::Bytes => formatter.write_str("Bytes"),
/// }
/// }
/// }
///
/// impl FromStr for NumOrBytes {
/// type Err = anyhow::Error;
///
/// fn from_str(s: &str) -> Result<Self, Self::Err> {
/// match s {
/// "Num" => Ok(Self::Num),
/// "Bytes" => Ok(Self::Bytes),
/// _ => Err(anyhow::anyhow!("expected `Num` or `Bytes`")),
/// }
/// }
/// }
///
/// impl PrimitiveType for NumOrBytes {}
/// ```
pub trait PrimitiveType:
Clone + PartialEq + fmt::Debug + fmt::Display + FromStr + Send + Sync + 'static
{
/// Returns well-known constraints for this type. These constraints are used
/// in standalone parsing of type signatures.
///
/// The default implementation returns an empty set.
fn well_known_constraints() -> ConstraintSet<Self> {
ConstraintSet::default()
}
}
/// Grammar with support for type annotations. Works as a decorator.
///
/// # Examples
///
/// ```
/// use arithmetic_parser::grammars::{F32Grammar, Parse};
/// use arithmetic_typing::Annotated;
///
/// # fn main() -> anyhow::Result<()> {
|
/// let code = "x: [Num] = (1, 2, 3);";
/// let ast = Annotated::<F32Grammar>::parse_statements(code)?;
|
random_line_split
|
|
NJ.py
|
else:
my_species = self.mrca
self.graph.add_node(newNode, species=my_species)
# replace the first merged leaf by newNode, then shift everything after the 2nd merged leaf
self.graph.add_edge(unadded_nodes[minp[0]], newNode, homology_dist=mp0_mp_dist, synteny_dist=syn0_mp_dist)
self.graph.add_edge(unadded_nodes[minp[1]], newNode, homology_dist=mp1_mp_dist, synteny_dist=syn1_mp_dist)
unadded_nodes[minp[0]] = newNode
for i in xrange(minp[1], unadded_count - 1):
unadded_nodes[i] = unadded_nodes[i + 1]
unadded_count -= 1 # replaced 2 nodes with 1
# replace the first row/column of the merged pair with the merged distances, then shift the values after the second row/column
k = 0
l = 1
offset = 0
dfg_hom = hom_matrix[(minp[1] * (minp[1] - 1) / 2) + minp[0]]
dfg_syn = syn_matrix[(minp[1] * (minp[1] - 1) / 2) + minp[0]]
for pos in xrange(matrix_size):
if k == minp[1] or l == minp[1]:
offset += 1
elif l == minp[0]:
dfk_hom = hom_matrix[pos]
dgk_hom = hom_matrix[(minp[1] * (minp[1] - 1) / 2) + k]
dfk_syn = syn_matrix[pos]
dgk_syn = syn_matrix[(minp[1] * (minp[1] - 1) / 2) + k]
hom_matrix[pos] = 0.5 * (dfk_hom + dgk_hom - dfg_hom)
syn_matrix[pos] = 0.5 * (dfk_syn + dgk_syn - dfg_syn)
elif k == minp[0]:
dfk_hom = hom_matrix[pos]
dgk_hom = hom_matrix[pos + minp[1] - minp[0]]
dfk_syn = syn_matrix[pos]
dgk_syn = syn_matrix[pos + minp[1] - minp[0]]
hom_matrix[pos - offset] = 0.5 * (dfk_hom + dgk_hom - dfg_hom)
syn_matrix[pos - offset] = 0.5 * (dfk_syn + dgk_syn - dfg_syn)
else:
hom_matrix[pos - offset] = hom_matrix[pos]
syn_matrix[pos - offset] = syn_matrix[pos]
k += 1
if not k < l:
k = 0
l += 1
if unadded_count == 2:
self.graph.add_edge(unadded_nodes[0], unadded_nodes[1], homology_dist=hom_matrix[0], synteny_dist=syn_matrix[0]) # check this
unadded_nodes = [";".join(unadded_nodes[:2])]
bigNode = unadded_nodes.pop()
self.bigNode = bigNode
return bigNode
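# Illustrative sketch (not part of the class): the pairwise distances above live
# in a condensed lower-triangular array, where the distance between nodes k and
# l (k < l) sits at index l*(l-1)/2 + k, and each merge of f and g replaces
# their rows with a single row via d(fg, k) = 0.5 * (d(f,k) + d(g,k) - d(f,g)).
def _condensed_index(k, l):
    # assumes k < l; mirrors the indexing used in the merge loop above
    return (l * (l - 1) // 2) + k

def _merged_distance(dist, f, g, k):
    # distance from the node obtained by merging f and g to any other node k
    dfk = dist[_condensed_index(min(f, k), max(f, k))]
    dgk = dist[_condensed_index(min(g, k), max(g, k))]
    dfg = dist[_condensed_index(min(f, g), max(f, g))]
    return 0.5 * (dfk + dgk - dfg)

# Example with 3 leaves (condensed order d01, d02, d12):
# _merged_distance([3.0, 4.0, 5.0], 0, 1, 2) == 0.5 * (4.0 + 5.0 - 3.0) == 3.0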
def getNewick(self):
if self.rootedTree:
processed = ['root']
current_leaves = list(self.rootedTree['root'])
# nwk = "(" + ",".join(current_leaves) + ");"
# nwk = ",".join(current_leaves)
nwk = "(" + current_leaves[0] + ":" + str(self.rootedTree['root'][current_leaves[0]]['homology_dist']) + ',' + current_leaves[1] + ":" + str(self.rootedTree['root'][current_leaves[1]]['homology_dist']) + ")"
if self.synteny:
nwk2 = "(" + current_leaves[0] + ":" + str(self.rootedTree['root'][current_leaves[0]]['synteny_dist']) + ',' + current_leaves[1] + ":" + str(self.rootedTree['root'][current_leaves[1]]['synteny_dist']) + ")"
while current_leaves:
n = current_leaves.pop()
neighbors = list(self.rootedTree[n])
if len(neighbors) > 1: # if not a leaf
for neighbor in neighbors:
if neighbor in processed:
neighbors.remove(neighbor)
break
processed.append(n)
# new_nwk = ",".join(neighbors)
new_nwk = neighbors[0] + ":" + str(self.rootedTree[n][neighbors[0]]['homology_dist']) + ',' + neighbors[1] + ":" + str(self.rootedTree[n][neighbors[1]]['homology_dist'])
nwk = nwk.replace(n, "(" + new_nwk + ")")
if self.synteny:
new_nwk2 = neighbors[0] + ":" + str(self.rootedTree[n][neighbors[0]]['synteny_dist']) + ',' + neighbors[1] + ":" + str(self.rootedTree[n][neighbors[1]]['synteny_dist'])
nwk2 = nwk2.replace(n, "(" + new_nwk2 + ")")
current_leaves.extend(neighbors)
if self.synteny:
return [nwk, nwk2]
else:
return [nwk, ""]
else:
NJTree.logger.critical("Tried to get Newick from a tree that has no rootedTree: %s" % (self.bigNode))
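# Illustrative note: the Newick string above is grown by repeatedly replacing an
# internal node's name with the parenthesized pair of its children, e.g.
#   "(n1:0.5,C:0.3)".replace("n1", "(A:0.1,B:0.2)")
#   == "((A:0.1,B:0.2):0.5,C:0.3)"
# Since str.replace substitutes every occurrence, node names must not be
# substrings of one another for this construction to stay correct.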
@staticmethod
def toNewick(graph):
up = [] # unprocessed
leaf = []
for n in graph.nodes():
if len(graph[n]) > 1:
up.append(n)
else:
leaf.append((n, graph.node[n]['species']))
curNode = None
last_string = ""
if len(graph.nodes()) == 2:
ew = str(graph[leaf[0][0]][leaf[1][0]]['homology_dist'])
last_string = "(" + leaf[0][0] + ":" + ew + "," + leaf[1][0] + ":" + ew + ")"
while len(up) > 0:
(curNode, e_count) = NJTree.calcMostEdgesToLeaves(up, leaf, graph)
leaves = []
for e in graph[curNode]:
for l in leaf:
if l[0] == e:
e_i = leaf.index(l)
e_text = e
if 'child_newick' in graph.node[e]:
if e_count > 2 and len(up) > 1:
continue
e_text = graph.node[e]['child_newick']
leaf.pop(e_i)
ew = graph[curNode][e]['homology_dist']
text = e_text + ":" + str(ew)
leaves.append(text)
# add newick text to curNode
node_text = "(" + ",".join(leaves) + ")"
last_string = node_text
graph.node[curNode]['child_newick'] = node_text
# change curNode to leaf
cn_i = up.index(curNode)
up.pop(cn_i)
leaf.append((curNode, graph.node[curNode]['species']))
if len(leaf) == 2 and len(up) == 0 and len(graph.nodes()) > 2:
ew = str(graph[leaf[0][0]][leaf[1][0]]['homology_dist'])
last_string = "(" + graph.node[leaf[0][0]]['child_newick'] + ":" + ew + "," + graph.node[leaf[1][0]]['child_newick'] + ":" + ew + ")"
last_string = last_string.replace("(", "(\n")
last_string = last_string.replace(",", ",\n")
last_string = last_string.replace(")", ")\n")
last_string = last_string.rstrip()
return last_string + ";"
def rootTree(self):
"""Return Score, root edge, number of losses
"""
# for each edge in 'tree' graph, score the tree
roots = []
min_gl = len(self.graph.nodes()) * 2
if self.rootEdge is not None: # check speed of this part in case of big cluster that's already been split once and is still big
# self.hom_shortest_paths = nxe.all_pairs_path_length(self.graph, 'homology_dist') # should already be present from splitNewTree if rooted because it was split from a larger tree
self.paths = nx.shortest_path(self.graph, None, None)
# self.syn_shortest_paths = nxe.all_pairs_path_length(self.graph, 'synteny_dist')
(score, tree, gl_sum, loss) = self.scoreEdge(self.rootEdge, min_gl)
self.rootedTree = tree
return (score, self.rootEdge, loss)
if self.synteny:
([self.hom_shortest_paths, self.syn_shortest_paths], self.paths) = nxe.all_pairs_path_length(self.graph, ['homology_dist', 'synteny_dist'])
else:
([self.hom_shortest_paths], self.paths
|
my_species = self.graph.node[unadded_nodes[minp[0]]]['species']
|
conditional_block
|
|
NJ.py
|
] * (minp[1] - 1) / 2) + k]
hom_matrix[pos] = 0.5 * (dfk_hom + dgk_hom - dfg_hom)
syn_matrix[pos] = 0.5 * (dfk_syn + dgk_syn - dfg_syn)
elif k == minp[0]:
dfk_hom = hom_matrix[pos]
dgk_hom = hom_matrix[pos + minp[1] - minp[0]]
dfk_syn = syn_matrix[pos]
dgk_syn = syn_matrix[pos + minp[1] - minp[0]]
hom_matrix[pos - offset] = 0.5 * (dfk_hom + dgk_hom - dfg_hom)
syn_matrix[pos - offset] = 0.5 * (dfk_syn + dgk_syn - dfg_syn)
else:
hom_matrix[pos - offset] = hom_matrix[pos]
syn_matrix[pos - offset] = syn_matrix[pos]
k += 1
if not k < l:
k = 0
l += 1
if unadded_count == 2:
self.graph.add_edge(unadded_nodes[0], unadded_nodes[1], homology_dist=hom_matrix[0], synteny_dist=syn_matrix[0]) # check this
unadded_nodes = [";".join(unadded_nodes[:2])]
bigNode = unadded_nodes.pop()
self.bigNode = bigNode
return bigNode
def
|
(self):
if self.rootedTree:
processed = ['root']
current_leaves = list(self.rootedTree['root'])
# nwk = "(" + ",".join(current_leaves) + ");"
# nwk = ",".join(current_leaves)
nwk = "(" + current_leaves[0] + ":" + str(self.rootedTree['root'][current_leaves[0]]['homology_dist']) + ',' + current_leaves[1] + ":" + str(self.rootedTree['root'][current_leaves[1]]['homology_dist']) + ")"
if self.synteny:
nwk2 = "(" + current_leaves[0] + ":" + str(self.rootedTree['root'][current_leaves[0]]['synteny_dist']) + ',' + current_leaves[1] + ":" + str(self.rootedTree['root'][current_leaves[1]]['synteny_dist']) + ")"
while current_leaves:
n = current_leaves.pop()
neighbors = list(self.rootedTree[n])
if len(neighbors) > 1: # if not a leaf
for neighbor in neighbors:
if neighbor in processed:
neighbors.remove(neighbor)
break
processed.append(n)
# new_nwk = ",".join(neighbors)
new_nwk = neighbors[0] + ":" + str(self.rootedTree[n][neighbors[0]]['homology_dist']) + ',' + neighbors[1] + ":" + str(self.rootedTree[n][neighbors[1]]['homology_dist'])
nwk = nwk.replace(n, "(" + new_nwk + ")")
if self.synteny:
new_nwk2 = neighbors[0] + ":" + str(self.rootedTree[n][neighbors[0]]['synteny_dist']) + ',' + neighbors[1] + ":" + str(self.rootedTree[n][neighbors[1]]['synteny_dist'])
nwk2 = nwk2.replace(n, "(" + new_nwk2 + ")")
current_leaves.extend(neighbors)
if self.synteny:
return [nwk, nwk2]
else:
return [nwk, ""]
else:
NJTree.logger.critical("Tried to get Newick from a tree that has no rootedTree: %s" % (self.bigNode))
@staticmethod
def toNewick(graph):
up = [] # unprocessed
leaf = []
for n in graph.nodes():
if len(graph[n]) > 1:
up.append(n)
else:
leaf.append((n, graph.node[n]['species']))
curNode = None
last_string = ""
if len(graph.nodes()) == 2:
ew = str(graph[leaf[0][0]][leaf[1][0]]['homology_dist'])
last_string = "(" + leaf[0][0] + ":" + ew + "," + leaf[1][0] + ":" + ew + ")"
while len(up) > 0:
(curNode, e_count) = NJTree.calcMostEdgesToLeaves(up, leaf, graph)
leaves = []
for e in graph[curNode]:
for l in leaf:
if l[0] == e:
e_i = leaf.index(l)
e_text = e
if 'child_newick' in graph.node[e]:
if e_count > 2 and len(up) > 1:
continue
e_text = graph.node[e]['child_newick']
leaf.pop(e_i)
ew = graph[curNode][e]['homology_dist']
text = e_text + ":" + str(ew)
leaves.append(text)
# add newick text to curNode
node_text = "(" + ",".join(leaves) + ")"
last_string = node_text
graph.node[curNode]['child_newick'] = node_text
# change curNode to leaf
cn_i = up.index(curNode)
up.pop(cn_i)
leaf.append((curNode, graph.node[curNode]['species']))
if len(leaf) == 2 and len(up) == 0 and len(graph.nodes()) > 2:
ew = str(graph[leaf[0][0]][leaf[1][0]]['homology_dist'])
last_string = "(" + graph.node[leaf[0][0]]['child_newick'] + ":" + ew + "," + graph.node[leaf[1][0]]['child_newick'] + ":" + ew + ")"
last_string = last_string.replace("(", "(\n")
last_string = last_string.replace(",", ",\n")
last_string = last_string.replace(")", ")\n")
last_string = last_string.rstrip()
return last_string + ";"
def rootTree(self):
"""Return Score, root edge, number of losses
"""
# for each edge in 'tree' graph, score the tree
roots = []
min_gl = len(self.graph.nodes()) * 2
if self.rootEdge is not None: # check speed of this part in case of big cluster that's already been split once and is still big
# self.hom_shortest_paths = nxe.all_pairs_path_length(self.graph, 'homology_dist') # should already be present from splitNewTree if rooted because it was split from a larger tree
self.paths = nx.shortest_path(self.graph, None, None)
# self.syn_shortest_paths = nxe.all_pairs_path_length(self.graph, 'synteny_dist')
(score, tree, gl_sum, loss) = self.scoreEdge(self.rootEdge, min_gl)
self.rootedTree = tree
return (score, self.rootEdge, loss)
if self.synteny:
([self.hom_shortest_paths, self.syn_shortest_paths], self.paths) = nxe.all_pairs_path_length(self.graph, ['homology_dist', 'synteny_dist'])
else:
([self.hom_shortest_paths], self.paths) = nxe.all_pairs_path_length(self.graph, ['homology_dist'])
# self.syn_shortest_paths = nxe.all_pairs_path_length(self.graph, 'synteny_dist')[0]
# store shortest path matrix - it is the same for everyone
if len(self.graph.nodes()) > 100:
limit = len(self.graph.nodes()) / 2
degrees = {}
right_stack = []
left_stack = []
to_degree_stack = []
for n in self.graph.nodes():
if len(self.graph[n]) == 1:
degrees[n] = 1
right_stack.append(self.graph[n].keys()[0])
break
while True:
if right_stack:
current_node = right_stack.pop()
else:
current_node = left_stack.pop()
right = False
neighbors = self.graph[current_node].keys()
if len(neighbors) == 1:
degrees[current_node] = 1
else:
for neighbor in neighbors:
if neighbor in to_degree_stack or neighbor in degrees: # neighbor == to_degree_stack[-1]?
continue
if not right:
right_stack.append(neighbor)
right = True
else:
left_stack.append(neighbor)
to_degree_stack.append(current_node)
if not right:
while True:
if not to_degree_stack:
break
to_degree = to_degree_stack[len(to_degree_stack) - 1]
neighbors = self.graph[to_degree].keys()
for neighbor in self.graph[to_degree].keys():
if neighbor not in degrees:
neighbors.remove(neighbor)
if len(neighbors) == 2:
degrees[to_degree] = degrees[neighbors[0]] + degrees[neighbors[1]]
if degrees[to_degree] >= limit:
pair = neighbors[0] if
|
getNewick
|
identifier_name
|
NJ.py
|
- 1):
# raw_len += self.graph[raw_path[i]][raw_path[i + 1]]['homology_dist']
# # subtract half of root edge length from raw_dist to get the distance from the root (edge mid-point) to this node, n
# edge_length = self.graph[e[0]][e[1]][attr]
# mid_edge = edge_length / 2.0
# dist = raw_len + mid_edge
# return dist
# # this calculates the distance from leaf to root-edge midpoint
# def getSynInterNodeDist(self, n, e, attr):
# # find distance to farthest node on potential root edge
# raw_dist = min(self.syn_shortest_paths[n][e[0]], self.syn_shortest_paths[n][e[1]])
# near_node = e[0]
# if self.syn_shortest_paths[n][e[1]] == raw_dist:
# near_node = e[1]
# raw_len = 0.0
# raw_path = self.paths[n][near_node]
# for i in range(len(raw_path) - 1):
# raw_len += self.graph[raw_path[i]][raw_path[i + 1]]['synteny_dist']
# # subtract half of root edge length from raw_dist to get the distance from the root (edge mid-point) to this node, n
# edge_length = self.graph[e[0]][e[1]][attr]
# mid_edge = edge_length / 2.0
# dist = raw_len + mid_edge
# return dist
def getGainLossCount(self, e, min_gl):
# TODO verify how the species of a node is set to MRCA
# returns 0 gains when a duplication event is required for the tree to exist (the subsequent loss is still found)
gain = 0
loss = 0
gl_total = 0
tGraph = self.graph.copy()
newWeight = tGraph[e[0]][e[1]]['homology_dist'] / 2.0
if self.synteny:
newWeight2 = tGraph[e[0]][e[1]]['synteny_dist'] / 2.0
# newSpecies = ""
newID = "root"
tGraph.remove_edge(e[0], e[1])
tGraph.add_node(newID, species=self.mrca)
if self.synteny:
tGraph.add_edge(e[0], newID, homology_dist=newWeight, synteny_dist=newWeight2)
tGraph.add_edge(e[1], newID, homology_dist=newWeight, synteny_dist=newWeight2)
else:
tGraph.add_edge(e[0], newID, homology_dist=newWeight)
tGraph.add_edge(e[1], newID, homology_dist=newWeight)
# TODO no modification to synteny weights??
up = [] # up = unprocessed, length is number of edges
leaf = [] # leaf nodes
for n in tGraph.nodes():
if len(tGraph[n]) > 1:
up.append(n)
else:
leaf.append((n, tGraph.node[n]['species']))
# get node with most edges to leaves to process
# note: the species of a node can change during this process based on rooting; the e_leaf handling accounts for that
while len(up) > 0:
curNode = (NJTree.calcMostEdgesToLeaves(up, leaf, tGraph))[0] # curNode = AA node instead of root?
curNodeSpecies = ""
# if curNode in self.gl_map:
# gain += self.gl_map[curNode]['gain']
# loss += self.gl_map[curNode]['loss']
# curNodeSpecies = self.gl_map[curNode]['species']
# gl_total = gain + loss
# else:
childSpecies = set([])
child = []
for e in tGraph[curNode]:
e_leaf = None
for l in leaf:
if l[0] == e:
e_leaf = l
if e_leaf:
e_i = leaf.index(e_leaf)
leaf.pop(e_i)
childSpecies.add(e_leaf[1])
child.append(e_leaf)
if len(childSpecies) == 1:
if self.mrca in childSpecies:
curNodeSpecies = self.mrca
pass # useless?
else:
curNodeSpecies = child[0][1]
gain += 1
gl_total += 1
else:
curNodeSpecies = self.mrca
# 2 child species
if self.mrca in childSpecies:
# shouldn't there be a gain somewhere too in this case?
loss += 1
gl_total += 1
# ~ self.gl_map[curNode] = {'gain':gain,'loss':loss, 'species':curNodeSpecies}
cn_i = up.index(curNode)
up.pop(cn_i)
leaf.append((curNode, curNodeSpecies))
tGraph.node[curNode]['species'] = curNodeSpecies
return (gain, loss, tGraph)
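# Illustrative sketch of the per-node labeling rule above (standalone helper,
# not a method of the class). child_species is the set of species among a
# node's leaf children:
def _label_node(child_species, mrca):
    """Return (species, gain_increment, loss_increment) for one internal node."""
    if len(child_species) == 1:
        only = list(child_species)[0]
        if only == mrca:
            return (mrca, 0, 0)
        return (only, 1, 0)  # children share one non-mrca species: a gain
    # children disagree: label the node with the mrca
    if mrca in child_species:
        return (mrca, 0, 1)  # mrca among the children: a loss
    return (mrca, 0, 0)

# _label_node(set(['A']), 'mrca')         == ('A', 1, 0)
# _label_node(set(['A', 'mrca']), 'mrca') == ('mrca', 0, 1)
# _label_node(set(['A', 'B']), 'mrca')    == ('mrca', 0, 0)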
@staticmethod
def calcMostEdgesToLeaves(unprocN, leaf, TG):
"""
unprocN = list of nodes that are not leaves?
leaf = list of (node, species_name)
TG = graph
"""
mostLeaves = -1
retNode = None
l_zero = []
for l in leaf:
l_zero.append(l[0])
for n in unprocN:
e_count = 0
for e in TG[n]:
if e in l_zero:
e_count += 1
if e_count > mostLeaves:
mostLeaves = e_count
retNode = n
if e_count == 2:
return (retNode, mostLeaves)
return (retNode, mostLeaves)
# checks if tree needs to be split
def checkTree(self, root):
""" Returns "true" if they are 2 species in the tree and the mrca is not present.
Returns "false" if they are 2 species in the tree but one of them is the mrca, or if more than 100 nodes in the graph.
Returns "orphan" if they is only 1 species in the tree
mrca = current node
"""
if len(self.graph.nodes()) > 100: # TODO why this arbitrary limit on the size of the tree?
NJTree.logger.debug("Too big graph detected and arbitrarily split")
self.OK = "false"
return self.OK
# check species of root node neighbors
edge = root[1]
# loss = root[2]
set_species = set([])
species = []
# edge_weight = self.graph[edge[0]][edge[1]]['homology_weight']
# TODO what is the point of having twice the results?
set_species.add(self.rootedTree.node[edge[0]]['species'])
set_species.add(self.rootedTree.node[edge[1]]['species'])
species.append(self.rootedTree.node[edge[0]]['species'])
species.append(self.rootedTree.node[edge[1]]['species'])
if len(species) == 1 or len(set_species) == 1 and (self.mrca not in set_species):
# all leaf nodes get added to orphan pile
# NJTree.logger.debug("Orphan in checkTree still happens")
self.OK = "orphan"
return self.OK
if len(species) == 2:
if self.mrca in species:
self.OK = "false"
return self.OK
else:
self.OK = "true"
return self.OK
# should add a verification that there are no more than 2 species
def splitNewTree(self, root):
|
new_trees = []
new_root_edges = []
futur_roots = [root[1][0], root[1][1]]
self.graph.remove_edge(root[1][0], root[1][1])
new_graphs = nx.connected_component_subgraphs(self.graph)
for n in new_graphs:
for futur_root in futur_roots: # loop here, but next if selects only 1 iteration
if futur_root in n.nodes():
new_tree = NJTree(self.mrca, self.alpha, self.beta, self.gamma, self.gain, self.loss, self.synteny)
if len(n.nodes()) == 1: # only 1 leaf on this half
new_tree.bigNode = futur_root
new_tree.rootEdge = (futur_root, futur_root)
new_root_edges.append(new_tree.rootEdge)
new_trees.append(new_tree)
break
new_hom_weight = 0.0
new_syn_weight = 0.0
children = []
to_remove = []
hom_attributes = nx.get_edge_attributes(n, 'homology_dist')
|
identifier_body
|
|
NJ.py
|
_gain_poisson = 0.0
my_gain_poisson = poisson.pmf((gain), self.gain)
if my_gain_poisson > 0:
my_poisson += math.log10(my_gain_poisson)
# my_loss_poisson = 0.0
my_loss_poisson = poisson.pmf((loss), self.loss)
if my_loss_poisson > 0:
my_poisson += math.log10(my_loss_poisson)
gl_factor = self.gamma * my_poisson
# dist_factor = (-self.beta) * h_var
if h_var > 0:
dist_factor = (-self.alpha) * math.log10(h_var)
else: # if var = 0, it means we're right in the middle
dist_factor = (-self.alpha) * -2.0
# syn_factor = (-self.alpha) * s_var
if s_var > 0:
syn_factor = (-self.beta) * math.log10(s_var)
else:
syn_factor = (-self.beta) * -2.0
score = math.exp(gl_factor) * math.exp(dist_factor) * math.exp(syn_factor)
score2 = gl_factor + dist_factor + syn_factor
# score = math.exp(gl_factor + dist_factor + syn_factor) should be the same
# and since exp is a strictly increasing function, the ranking would remain the same without applying it
return (score, tree, my_gl, loss)
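# Illustrative sketch of the edge score above (standalone, reusing the file's
# math/poisson imports; rates and variances are caller-supplied): the gain/loss
# likelihood enters through log10 Poisson pmfs, the distance and synteny
# variances through log10 terms (with -2.0 standing in when a variance is
# exactly zero), and exp() of the sum preserves the ranking of the sum itself.
def _edge_score(gain, loss, h_var, s_var, alpha, beta, gamma, gain_rate, loss_rate):
    log_p = 0.0
    for count, rate in ((gain, gain_rate), (loss, loss_rate)):
        p = poisson.pmf(count, rate)
        if p > 0:
            log_p += math.log10(p)
    gl_factor = gamma * log_p
    dist_factor = (-alpha) * (math.log10(h_var) if h_var > 0 else -2.0)
    syn_factor = (-beta) * (math.log10(s_var) if s_var > 0 else -2.0)
    return math.exp(gl_factor + dist_factor + syn_factor)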
# returns a list of distances from the root edge midpoint to all leaf nodes
def getDistances(self, e, attr):
# bigNode is a concatenation of all leaf nodes in this tree, separated by a ;
nodes = self.bigNode.split(";")
dists = []
for n in nodes:
dists.append(self.getInterNodeDist(n, e, attr))
return dists
# def getHomologyDistances(self, e):
# # bigNode is a concatenation of all leaf nodes in this tree, separated by a ;
# nodes = self.bigNode.split(";")
# dists = []
# for n in nodes:
# dists.append(self.getHomInterNodeDist(n, e, 'homology_dist'))
# return dists
# def getSyntenyDistances(self, e):
# nodes = self.bigNode.split(";")
# dists = []
# for n in nodes:
# dists.append(self.getSynInterNodeDist(n, e, 'synteny_dist'))
# return dists
# this calculates the distance from leaf to root-edge midpoint
def getInterNodeDist(self, n, e, attr):
dist = None
if len(self.paths[e[0]][e[1]]) < len(self.paths[e[1]][e[0]]):
dist = self.hom_shortest_paths[n][e[0]]
else:
dist = self.hom_shortest_paths[n][e[1]]
if self.graph[e[0]][e[1]][attr] >= 0.0:
return dist + (self.graph[e[0]][e[1]][attr] / 2)
else:
return dist
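# Worked example (illustration only): the root is placed at the midpoint of the
# candidate edge, so a leaf at distance 0.8 from the nearer endpoint of an edge
# of length 0.4 ends up 0.8 + 0.4 / 2 == 1.0 from the root; negative edge
# lengths are not halved, and the endpoint distance is returned unchanged.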
# def getHomInterNodeDist(self, n, e, attr):
# # find distance to farthest node on potential root edge
# raw_dist = min(self.hom_shortest_paths[n][e[0]], self.hom_shortest_paths[n][e[1]])
# near_node = e[0]
# if self.hom_shortest_paths[n][e[1]] == raw_dist:
# near_node = e[1]
# raw_len = 0.0
# raw_path = self.paths[n][near_node]
# for i in range(len(raw_path) - 1):
# raw_len += self.graph[raw_path[i]][raw_path[i + 1]]['homology_dist']
# # subtract half of root edge length from raw_dist to get the distance from the root (edge mid-point) to this node, n
# edge_length = self.graph[e[0]][e[1]][attr]
# mid_edge = edge_length / 2.0
# dist = raw_len + mid_edge
# return dist
# # this calculates the distance from leaf to root-edge midpoint
# def getSynInterNodeDist(self, n, e, attr):
# # find distance to farthest node on potential root edge
# raw_dist = min(self.syn_shortest_paths[n][e[0]], self.syn_shortest_paths[n][e[1]])
# near_node = e[0]
# if self.syn_shortest_paths[n][e[1]] == raw_dist:
# near_node = e[1]
# raw_len = 0.0
# raw_path = self.paths[n][near_node]
# for i in range(len(raw_path) - 1):
# raw_len += self.graph[raw_path[i]][raw_path[i + 1]]['synteny_dist']
# # subtract half of root edge length from raw_dist to get the distance from the root (edge mid-point) to this node, n
# edge_length = self.graph[e[0]][e[1]][attr]
# mid_edge = edge_length / 2.0
# dist = raw_len + mid_edge
# return dist
def getGainLossCount(self, e, min_gl):
# TODO verify how the species of a node is set to MRCA
# returns 0 gains when a duplication event is required for the tree to exist (the subsequent loss is still found)
gain = 0
loss = 0
gl_total = 0
tGraph = self.graph.copy()
newWeight = tGraph[e[0]][e[1]]['homology_dist'] / 2.0
if self.synteny:
newWeight2 = tGraph[e[0]][e[1]]['synteny_dist'] / 2.0
# newSpecies = ""
newID = "root"
tGraph.remove_edge(e[0], e[1])
tGraph.add_node(newID, species=self.mrca)
if self.synteny:
tGraph.add_edge(e[0], newID, homology_dist=newWeight, synteny_dist=newWeight2)
tGraph.add_edge(e[1], newID, homology_dist=newWeight, synteny_dist=newWeight2)
else:
tGraph.add_edge(e[0], newID, homology_dist=newWeight)
tGraph.add_edge(e[1], newID, homology_dist=newWeight)
# TODO no modification to synteny weights??
up = [] # up = unprocessed, length is number of edges
leaf = [] # leaf nodes
for n in tGraph.nodes():
if len(tGraph[n]) > 1:
up.append(n)
else:
leaf.append((n, tGraph.node[n]['species']))
# get node with most edges to leaves to process
# note: the species of a node can change during this process based on rooting; the e_leaf handling accounts for that
while len(up) > 0:
curNode = (NJTree.calcMostEdgesToLeaves(up, leaf, tGraph))[0] # curNode = AA node instead of root?
curNodeSpecies = ""
# if curNode in self.gl_map:
# gain += self.gl_map[curNode]['gain']
# loss += self.gl_map[curNode]['loss']
# curNodeSpecies = self.gl_map[curNode]['species']
# gl_total = gain + loss
# else:
childSpecies = set([])
child = []
for e in tGraph[curNode]:
e_leaf = None
for l in leaf:
if l[0] == e:
e_leaf = l
if e_leaf:
e_i = leaf.index(e_leaf)
leaf.pop(e_i)
childSpecies.add(e_leaf[1])
child.append(e_leaf)
if len(childSpecies) == 1:
if self.mrca in childSpecies:
curNodeSpecies = self.mrca
pass # useless?
else:
curNodeSpecies = child[0][1]
gain += 1
gl_total += 1
else:
curNodeSpecies = self.mrca
# 2 child species
if self.mrca in childSpecies:
# shouldn't there be a gain somewhere too in this case?
loss += 1
gl_total += 1
# ~ self.gl_map[curNode] = {'gain':gain,'loss':loss, 'species':curNodeSpecies}
cn_i = up.index(curNode)
up.pop(cn_i)
leaf.append((curNode, curNodeSpecies))
tGraph.node[curNode]['species'] = curNodeSpecies
return (gain, loss, tGraph)
@staticmethod
|
random_line_split
|
||
main.rs
|
::activation::softmax_with_loss::SoftmaxWithLoss;
use selecting_flow::compute_graph::fully_connected_layer::{ApplyFullyConnectedLayer, FullyConnectedLayer};
use selecting_flow::compute_graph::input_box::InputBox;
use selecting_flow::compute_graph::{ExactDimensionComputeGraphNode, GraphNode};
use selecting_flow::data_types::{Sparse, TensorEitherOwned};
use selecting_flow::hasher::sim_hash::SimHash;
use selecting_flow::hasher::FullyConnectedHasher;
use selecting_flow::optimizer::adam::Adam;
fn main() {
let arg = App::new("shield_example")
.arg(Arg::with_name("label").help("path to trn_lbl_mat.txt").long("label").short("l").takes_value(true).required(true))
.arg(Arg::with_name("feature").help("path to trn_ft_mat.txt").long("feature").short("f").takes_value(true).required(true))
.get_matches();
let labels = arg.value_of("label").unwrap();
let features = arg.value_of("feature").unwrap();
eprintln!("boot");
let train_data = read_train_data(labels, features);
eprintln!("load train_data");
train(train_data);
}
fn train(train_data: TrainData) {
let TrainData {
input_size,
output_size,
mut data_pair,
} = train_data;
const NUM_ITERATION: usize = 5;
const MINI_BATCH_SIZE: usize = 256;
const REBUILD_DELTA_INC: f64 = 1.05;
const TRAIN_DATA_RATIO: f64 = 0.95;
let (data_pair_train, data_pair_test) = {
let mid = (data_pair.len() as f64 * TRAIN_DATA_RATIO) as usize;
data_pair.shuffle(&mut thread_rng());
data_pair.split_at_mut(mid)
};
let time = Instant::now();
let mut layer1 = FullyConnectedLayer::new_random_param(input_size, 128, SimHash::new(50, 6, 128, 1, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer1 in {}ms", time.elapsed().as_millis());
let time = Instant::now();
let mut layer2 = FullyConnectedLayer::new_random_param(128, output_size, SimHash::new(50, 8, 4096, 3, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer2 in {}ms", time.elapsed().as_millis());
let mut next_rebuild = 49;
let mut rebuild_delta = 50;
let parallel_num = num_cpus::get();
dbg!(parallel_num);
eprintln!("start training");
let time = Instant::now();
let mini_batch_count = (data_pair_train.len() + MINI_BATCH_SIZE - 1) / MINI_BATCH_SIZE;
dbg!(data_pair_train.len());
dbg!(mini_batch_count);
println!("log_type,iteration,time_ms,accuracy,loss");
for e in 0..NUM_ITERATION {
data_pair_train.shuffle(&mut thread_rng());
for i in 0..mini_batch_count {
dbg!(e);
dbg!(i);
let batch_range = i * MINI_BATCH_SIZE..((i + 1) * MINI_BATCH_SIZE).min(data_pair_train.len());
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(&data_pair_train[batch_range.clone()], parallel_num, true, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"train_log,{},{},{},{}",
e * mini_batch_count + i,
time.elapsed().as_millis(),
sum_of_accuracy / batch_range.len() as f64,
sum_of_loss / batch_range.len() as f64,
);
layer1.update_parameter();
layer2.update_parameter();
if e * mini_batch_count + i >= next_rebuild {
layer1.rebuild_hash();
layer2.rebuild_hash();
rebuild_delta = (rebuild_delta as f64 * REBUILD_DELTA_INC) as usize;
next_rebuild += rebuild_delta;
}
}
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(data_pair_test, parallel_num, false, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_unhash_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"test_log,{},{},{},{}",
(e + 1) * mini_batch_count,
time.elapsed().as_millis(),
sum_of_accuracy / data_pair_test.len() as f64,
sum_of_loss / data_pair_test.len() as f64,
);
}
}
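// Illustrative sketch (not part of the original program): the mini-batch count
// above is a ceiling division, and hash rebuilds are scheduled at geometrically
// growing intervals (the delta grows by REBUILD_DELTA_INC per rebuild).
fn mini_batch_count(n_samples: usize, batch_size: usize) -> usize {
    (n_samples + batch_size - 1) / batch_size
}

fn rebuild_steps(total_steps: usize) -> Vec<usize> {
    let (mut next, mut delta) = (49usize, 50usize);
    let mut steps = Vec::new();
    while next < total_steps {
        steps.push(next);
        delta = (delta as f64 * 1.05) as usize;
        next += delta;
    }
    steps
}
// mini_batch_count(1000, 256) == 4
// rebuild_steps(300) == vec![49, 101, 155, 211, 269]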
fn process_mini_batch<I: 'static + ExactDimensionComputeGraphNode<1, Item = f32>, H: FullyConnectedHasher<f32, f32>>(
data_pair: &[TrainDataPair],
parallel_num: usize,
back_propagate: bool,
construct_layers: impl Sync + Fn() -> (GraphNode<InputBox<f32, 1>, 1>, GraphNode<ApplyFullyConnectedLayer<I, f32, H, SoftmaxWithLoss<f32>, 0>, 0>),
) -> (f64, f64) {
crossbeam::scope(|scope| {
let mut threads = Vec::with_capacity(parallel_num);
for t in 0..parallel_num {
let range = t * data_pair.len() / parallel_num..(t + 1) * data_pair.len() / parallel_num;
threads.push(scope.spawn(|_| {
let (mut input, mut output) = construct_layers();
let mut sum_of_loss = 0f64;
let mut accuracy = 0f64;
for data in &data_pair[range] {
let TrainDataPair {
input: input_value,
output: output_value,
} = &data;
assert_eq!(output_value.value_count(), 1);
input.set_value(input_value.clone().into());
output.set_expect_output(output_value.clone());
let output_loss = output.get_output_value();
let output_without_loss = output.get_output_without_loss();
accuracy += match &output_without_loss {
TensorEitherOwned::Dense(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.as_all_slice().iter().enumerate().all(|(i, v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
TensorEitherOwned::Sparse(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.iter().all(|([i], v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
};
sum_of_loss += *output_loss.get([]).unwrap() as f64;
if back_propagate {
output.clear_gradient_all();
output.back_propagate_all();
}
}
(sum_of_loss, accuracy)
}));
}
threads.into_iter().fold((0f64, 0f64), |(sum_loss, sum_accuracy), handle| {
let (loss, accuracy) = handle.join().unwrap();
(sum_loss + loss, sum_accuracy + accuracy)
})
})
.expect("failed to use thread")
}
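// Illustrative sketch: thread t of p handles the half-open range
// [t * n / p, (t + 1) * n / p), which covers all n items exactly once even
// when n is not divisible by p; the per-thread (loss, accuracy) pairs are
// then folded into grand totals, as in `process_mini_batch` above.
fn partition(n: usize, p: usize) -> Vec<std::ops::Range<usize>> {
    (0..p).map(|t| t * n / p..(t + 1) * n / p).collect()
}
// partition(10, 3) == vec![0..3, 3..6, 6..10]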
struct TrainData {
input_size: usize,
output_size: usize,
data_pair: Vec<TrainDataPair>,
}
struct TrainDataPair {
input: Sparse<f32, 1>,
output: Sparse<f32, 1>,
}
impl TrainDataPair {
fn new(input: Sparse<f32, 1>, output: Sparse<f32, 1>) -> Self
|
}
fn read_train_data(labels: impl AsRef<Path>, features: impl AsRef<Path>) -> TrainData {
let (output_size, labels) = read_file_as_tensors(labels);
let (input_size, features) = read_file_as_tensors(features);
let data_pair = labels
.into_par_iter()
.zip_eq(features)
.filter(|(output, _)| output.value_count() == 1)
.map(|(output, input)| TrainDataPair::new(input, output))
.collect();
TrainData { input_size, output_size, data_pair }
}
fn read_file_as_tensors(path: impl AsRef<Path>) -> (usize, Vec<Sparse<f32, 1>>) {
let path = path.as_ref();
let failed_read = &format!("failed to read file {}", path.display());
let invalid_format = &format!("invalid file format {}", path.display());
let file = File::open(path).expect
|
{
TrainDataPair { input, output }
}
|
identifier_body
|
main.rs
|
_graph::activation::softmax_with_loss::SoftmaxWithLoss;
use selecting_flow::compute_graph::fully_connected_layer::{ApplyFullyConnectedLayer, FullyConnectedLayer};
use selecting_flow::compute_graph::input_box::InputBox;
use selecting_flow::compute_graph::{ExactDimensionComputeGraphNode, GraphNode};
use selecting_flow::data_types::{Sparse, TensorEitherOwned};
use selecting_flow::hasher::sim_hash::SimHash;
use selecting_flow::hasher::FullyConnectedHasher;
use selecting_flow::optimizer::adam::Adam;
fn main() {
let arg = App::new("shield_example")
.arg(Arg::with_name("label").help("path to trn_lbl_mat.txt").long("label").short("l").takes_value(true).required(true))
.arg(Arg::with_name("feature").help("path to trn_ft_mat.txt").long("feature").short("f").takes_value(true).required(true))
.get_matches();
let labels = arg.value_of("label").unwrap();
let features = arg.value_of("feature").unwrap();
eprintln!("boot");
let train_data = read_train_data(labels, features);
eprintln!("load train_data");
train(train_data);
}
fn train(train_data: TrainData) {
let TrainData {
input_size,
output_size,
mut data_pair,
} = train_data;
const NUM_ITERATION: usize = 5;
const MINI_BATCH_SIZE: usize = 256;
const REBUILD_DELTA_INC: f64 = 1.05;
const TRAIN_DATA_RATIO: f64 = 0.95;
let (data_pair_train, data_pair_test) = {
let mid = (data_pair.len() as f64 * TRAIN_DATA_RATIO) as usize;
data_pair.shuffle(&mut thread_rng());
data_pair.split_at_mut(mid)
};
let time = Instant::now();
let mut layer1 = FullyConnectedLayer::new_random_param(input_size, 128, SimHash::new(50, 6, 128, 1, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer1 in {}ms", time.elapsed().as_millis());
let time = Instant::now();
let mut layer2 = FullyConnectedLayer::new_random_param(128, output_size, SimHash::new(50, 8, 4096, 3, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer2 in {}ms", time.elapsed().as_millis());
let mut next_rebuild = 49;
let mut rebuild_delta = 50;
let parallel_num = num_cpus::get();
dbg!(parallel_num);
eprintln!("start training");
let time = Instant::now();
let mini_batch_count = (data_pair_train.len() + MINI_BATCH_SIZE - 1) / MINI_BATCH_SIZE;
dbg!(data_pair_train.len());
dbg!(mini_batch_count);
println!("log_type,iteration,time_ms,accuracy,loss");
for e in 0..NUM_ITERATION {
data_pair_train.shuffle(&mut thread_rng());
for i in 0..mini_batch_count {
dbg!(e);
dbg!(i);
let batch_range = i * MINI_BATCH_SIZE..((i + 1) * MINI_BATCH_SIZE).min(data_pair_train.len());
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(&data_pair_train[batch_range.clone()], parallel_num, true, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"train_log,{},{},{},{}",
e * mini_batch_count + i,
time.elapsed().as_millis(),
sum_of_accuracy / batch_range.len() as f64,
sum_of_loss / batch_range.len() as f64,
);
layer1.update_parameter();
layer2.update_parameter();
if e * mini_batch_count + i >= next_rebuild {
layer1.rebuild_hash();
layer2.rebuild_hash();
rebuild_delta = (rebuild_delta as f64 * REBUILD_DELTA_INC) as usize;
next_rebuild += rebuild_delta;
}
}
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(data_pair_test, parallel_num, false, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_unhash_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"test_log,{},{},{},{}",
(e + 1) * mini_batch_count,
time.elapsed().as_millis(),
sum_of_accuracy / data_pair_test.len() as f64,
sum_of_loss / data_pair_test.len() as f64,
);
}
}
fn process_mini_batch<I: 'static + ExactDimensionComputeGraphNode<1, Item = f32>, H: FullyConnectedHasher<f32, f32>>(
data_pair: &[TrainDataPair],
parallel_num: usize,
back_propagate: bool,
construct_layers: impl Sync + Fn() -> (GraphNode<InputBox<f32, 1>, 1>, GraphNode<ApplyFullyConnectedLayer<I, f32, H, SoftmaxWithLoss<f32>, 0>, 0>),
) -> (f64, f64) {
crossbeam::scope(|scope| {
let mut threads = Vec::with_capacity(parallel_num);
for t in 0..parallel_num {
let range = t * data_pair.len() / parallel_num..(t + 1) * data_pair.len() / parallel_num;
threads.push(scope.spawn(|_| {
let (mut input, mut output) = construct_layers();
let mut sum_of_loss = 0f64;
let mut accuracy = 0f64;
for data in &data_pair[range] {
let TrainDataPair {
input: input_value,
output: output_value,
} = &data;
assert_eq!(output_value.value_count(), 1);
input.set_value(input_value.clone().into());
output.set_expect_output(output_value.clone());
let output_loss = output.get_output_value();
let output_without_loss = output.get_output_without_loss();
accuracy += match &output_without_loss {
TensorEitherOwned::Dense(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.as_all_slice().iter().enumerate().all(|(i, v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
TensorEitherOwned::Sparse(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.iter().all(|([i], v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
};
sum_of_loss += *output_loss.get([]).unwrap() as f64;
if back_propagate {
output.clear_gradient_all();
output.back_propagate_all();
}
}
(sum_of_loss, accuracy)
}));
}
threads.into_iter().fold((0f64, 0f64), |(sum_loss, sum_accuracy), handle| {
let (loss, accuracy) = handle.join().unwrap();
(sum_loss + loss, sum_accuracy + accuracy)
})
})
.expect("failed to use thread")
}
struct TrainData {
input_size: usize,
output_size: usize,
data_pair: Vec<TrainDataPair>,
}
struct TrainDataPair {
input: Sparse<f32, 1>,
output: Sparse<f32, 1>,
}
impl TrainDataPair {
fn new(input: Sparse<f32, 1>, output: Sparse<f32, 1>) -> Self {
TrainDataPair { input, output }
}
}
fn
|
(labels: impl AsRef<Path>, features: impl AsRef<Path>) -> TrainData {
let (output_size, labels) = read_file_as_tensors(labels);
let (input_size, features) = read_file_as_tensors(features);
let data_pair = labels
.into_par_iter()
.zip_eq(features)
.filter(|(output, _)| output.value_count() == 1)
.map(|(output, input)| TrainDataPair::new(input, output))
.collect();
TrainData { input_size, output_size, data_pair }
}
fn read_file_as_tensors(path: impl AsRef<Path>) -> (usize, Vec<Sparse<f32, 1>>) {
let path = path.as_ref();
let failed_read = &format!("failed to read file {}", path.display());
let invalid_format = &format!("invalid file format {}", path.display());
let file = File::open(path).expect
|
read_train_data
|
identifier_name
|
main.rs
|
_graph::activation::softmax_with_loss::SoftmaxWithLoss;
use selecting_flow::compute_graph::fully_connected_layer::{ApplyFullyConnectedLayer, FullyConnectedLayer};
use selecting_flow::compute_graph::input_box::InputBox;
use selecting_flow::compute_graph::{ExactDimensionComputeGraphNode, GraphNode};
use selecting_flow::data_types::{Sparse, TensorEitherOwned};
use selecting_flow::hasher::sim_hash::SimHash;
use selecting_flow::hasher::FullyConnectedHasher;
use selecting_flow::optimizer::adam::Adam;
fn main() {
let arg = App::new("shield_example")
.arg(Arg::with_name("label").help("path to trn_lbl_mat.txt").long("label").short("l").takes_value(true).required(true))
.arg(Arg::with_name("feature").help("path to trn_ft_mat.txt").long("feature").short("f").takes_value(true).required(true))
.get_matches();
let labels = arg.value_of("label").unwrap();
let features = arg.value_of("feature").unwrap();
eprintln!("boot");
let train_data = read_train_data(labels, features);
eprintln!("load train_data");
train(train_data);
}
fn train(train_data: TrainData) {
let TrainData {
input_size,
output_size,
mut data_pair,
} = train_data;
const NUM_ITERATION: usize = 5;
const MINI_BATCH_SIZE: usize = 256;
const REBUILD_DELTA_INC: f64 = 1.05;
const TRAIN_DATA_RATIO: f64 = 0.95;
let (data_pair_train, data_pair_test) = {
let mid = (data_pair.len() as f64 * TRAIN_DATA_RATIO) as usize;
data_pair.shuffle(&mut thread_rng());
data_pair.split_at_mut(mid)
};
let time = Instant::now();
let mut layer1 = FullyConnectedLayer::new_random_param(input_size, 128, SimHash::new(50, 6, 128, 1, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer1 in {}ms", time.elapsed().as_millis());
let time = Instant::now();
let mut layer2 = FullyConnectedLayer::new_random_param(128, output_size, SimHash::new(50, 8, 4096, 3, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer2 in {}ms", time.elapsed().as_millis());
let mut next_rebuild = 49;
let mut rebuild_delta = 50;
let parallel_num = num_cpus::get();
dbg!(parallel_num);
eprintln!("start training");
let time = Instant::now();
let mini_batch_count = (data_pair_train.len() + MINI_BATCH_SIZE - 1) / MINI_BATCH_SIZE;
dbg!(data_pair_train.len());
dbg!(mini_batch_count);
println!("log_type,iteration,time_ms,accuracy,loss");
for e in 0..NUM_ITERATION {
data_pair_train.shuffle(&mut thread_rng());
for i in 0..mini_batch_count {
dbg!(e);
dbg!(i);
let batch_range = i * MINI_BATCH_SIZE..((i + 1) * MINI_BATCH_SIZE).min(data_pair_train.len());
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(&data_pair_train[batch_range.clone()], parallel_num, true, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"train_log,{},{},{},{}",
e * mini_batch_count + i,
time.elapsed().as_millis(),
sum_of_accuracy / batch_range.len() as f64,
sum_of_loss / batch_range.len() as f64,
);
layer1.update_parameter();
layer2.update_parameter();
if e * mini_batch_count + i >= next_rebuild {
layer1.rebuild_hash();
layer2.rebuild_hash();
rebuild_delta = (rebuild_delta as f64 * REBUILD_DELTA_INC) as usize;
next_rebuild += rebuild_delta;
}
}
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(data_pair_test, parallel_num, false, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_unhash_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"test_log,{},{},{},{}",
(e + 1) * mini_batch_count,
time.elapsed().as_millis(),
sum_of_accuracy / data_pair_test.len() as f64,
sum_of_loss / data_pair_test.len() as f64,
);
}
}
fn process_mini_batch<I: 'static + ExactDimensionComputeGraphNode<1, Item = f32>, H: FullyConnectedHasher<f32, f32>>(
data_pair: &[TrainDataPair],
parallel_num: usize,
back_propagate: bool,
construct_layers: impl Sync + Fn() -> (GraphNode<InputBox<f32, 1>, 1>, GraphNode<ApplyFullyConnectedLayer<I, f32, H, SoftmaxWithLoss<f32>, 0>, 0>),
) -> (f64, f64) {
crossbeam::scope(|scope| {
let mut threads = Vec::with_capacity(parallel_num);
for t in 0..parallel_num {
let range = t * data_pair.len() / parallel_num..(t + 1) * data_pair.len() / parallel_num;
threads.push(scope.spawn(|_| {
let (mut input, mut output) = construct_layers();
let mut sum_of_loss = 0f64;
|
input: input_value,
output: output_value,
} = &data;
assert_eq!(output_value.value_count(), 1);
input.set_value(input_value.clone().into());
output.set_expect_output(output_value.clone());
let output_loss = output.get_output_value();
let output_without_loss = output.get_output_without_loss();
accuracy += match &output_without_loss {
TensorEitherOwned::Dense(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.as_all_slice().iter().enumerate().all(|(i, v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
TensorEitherOwned::Sparse(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.iter().all(|([i], v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
};
sum_of_loss += *output_loss.get([]).unwrap() as f64;
if back_propagate {
output.clear_gradient_all();
output.back_propagate_all();
}
}
(sum_of_loss, accuracy)
}));
}
threads.into_iter().fold((0f64, 0f64), |(sum_loss, sum_accuracy), handle| {
let (loss, accuracy) = handle.join().unwrap();
(sum_loss + loss, sum_accuracy + accuracy)
})
})
.expect("failed to use thread")
}
struct TrainData {
input_size: usize,
output_size: usize,
data_pair: Vec<TrainDataPair>,
}
struct TrainDataPair {
input: Sparse<f32, 1>,
output: Sparse<f32, 1>,
}
impl TrainDataPair {
fn new(input: Sparse<f32, 1>, output: Sparse<f32, 1>) -> Self {
TrainDataPair { input, output }
}
}
fn read_train_data(labels: impl AsRef<Path>, features: impl AsRef<Path>) -> TrainData {
let (output_size, labels) = read_file_as_tensors(labels);
let (input_size, features) = read_file_as_tensors(features);
let data_pair = labels
.into_par_iter()
.zip_eq(features)
.filter(|(output, _)| output.value_count() == 1)
.map(|(output, input)| TrainDataPair::new(input, output))
.collect();
TrainData { input_size, output_size, data_pair }
}
fn read_file_as_tensors(path: impl AsRef<Path>) -> (usize, Vec<Sparse<f32, 1>>) {
let path = path.as_ref();
let failed_read = &format!("failed to read file {}", path.display());
let invalid_format = &format!("invalid file format {}", path.display());
let file = File::open(path).expect("
|
let mut accuracy = 0f64;
for data in &data_pair[range] {
let TrainDataPair {
|
random_line_split
|
main.rs
|
_graph::activation::softmax_with_loss::SoftmaxWithLoss;
use selecting_flow::compute_graph::fully_connected_layer::{ApplyFullyConnectedLayer, FullyConnectedLayer};
use selecting_flow::compute_graph::input_box::InputBox;
use selecting_flow::compute_graph::{ExactDimensionComputeGraphNode, GraphNode};
use selecting_flow::data_types::{Sparse, TensorEitherOwned};
use selecting_flow::hasher::sim_hash::SimHash;
use selecting_flow::hasher::FullyConnectedHasher;
use selecting_flow::optimizer::adam::Adam;
fn main() {
let arg = App::new("shield_example")
.arg(Arg::with_name("label").help("path to trn_lbl_mat.txt").long("label").short("l").takes_value(true).required(true))
.arg(Arg::with_name("feature").help("path to trn_ft_mat.txt").long("feature").short("f").takes_value(true).required(true))
.get_matches();
let labels = arg.value_of("label").unwrap();
let features = arg.value_of("feature").unwrap();
eprintln!("boot");
let train_data = read_train_data(labels, features);
eprintln!("load train_data");
train(train_data);
}
fn train(train_data: TrainData) {
let TrainData {
input_size,
output_size,
mut data_pair,
} = train_data;
const NUM_ITERATION: usize = 5;
const MINI_BATCH_SIZE: usize = 256;
const REBUILD_DELTA_INC: f64 = 1.05;
const TRAIN_DATA_RATIO: f64 = 0.95;
let (data_pair_train, data_pair_test) = {
let mid = (data_pair.len() as f64 * TRAIN_DATA_RATIO) as usize;
data_pair.shuffle(&mut thread_rng());
data_pair.split_at_mut(mid)
};
let time = Instant::now();
let mut layer1 = FullyConnectedLayer::new_random_param(input_size, 128, SimHash::new(50, 6, 128, 1, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer1 in {}ms", time.elapsed().as_millis());
let time = Instant::now();
let mut layer2 = FullyConnectedLayer::new_random_param(128, output_size, SimHash::new(50, 8, 4096, 3, 0.333), Adam::new(0.9, 0.999, 0.001));
eprintln!("construct layer2 in {}ms", time.elapsed().as_millis());
let mut next_rebuild = 49;
let mut rebuild_delta = 50;
let parallel_num = num_cpus::get();
dbg!(parallel_num);
eprintln!("start training");
let time = Instant::now();
let mini_batch_count = (data_pair_train.len() + MINI_BATCH_SIZE - 1) / MINI_BATCH_SIZE;
dbg!(data_pair_train.len());
dbg!(mini_batch_count);
println!("log_type,iteration,time_ms,accuracy,loss");
for e in 0..NUM_ITERATION {
data_pair_train.shuffle(&mut thread_rng());
for i in 0..mini_batch_count {
dbg!(e);
dbg!(i);
let batch_range = i * MINI_BATCH_SIZE..((i + 1) * MINI_BATCH_SIZE).min(data_pair_train.len());
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(&data_pair_train[batch_range.clone()], parallel_num, true, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"train_log,{},{},{},{}",
e * mini_batch_count + i,
time.elapsed().as_millis(),
sum_of_accuracy / batch_range.len() as f64,
sum_of_loss / batch_range.len() as f64,
);
layer1.update_parameter();
layer2.update_parameter();
if e * mini_batch_count + i >= next_rebuild {
layer1.rebuild_hash();
layer2.rebuild_hash();
rebuild_delta = (rebuild_delta as f64 * REBUILD_DELTA_INC) as usize;
next_rebuild += rebuild_delta;
}
}
let (sum_of_loss, sum_of_accuracy) = process_mini_batch(data_pair_test, parallel_num, false, || {
let input = InputBox::new([input_size]);
let mid = layer1.apply_to(input.clone(), ReLU::new());
let output = layer2.apply_unhash_to(mid, SoftmaxWithLoss::new());
(input, output)
});
println!(
"test_log,{},{},{},{}",
(e + 1) * mini_batch_count,
time.elapsed().as_millis(),
sum_of_accuracy / data_pair_test.len() as f64,
sum_of_loss / data_pair_test.len() as f64,
);
}
}
fn process_mini_batch<I: 'static + ExactDimensionComputeGraphNode<1, Item = f32>, H: FullyConnectedHasher<f32, f32>>(
data_pair: &[TrainDataPair],
parallel_num: usize,
back_propagate: bool,
construct_layers: impl Sync + Fn() -> (GraphNode<InputBox<f32, 1>, 1>, GraphNode<ApplyFullyConnectedLayer<I, f32, H, SoftmaxWithLoss<f32>, 0>, 0>),
) -> (f64, f64) {
crossbeam::scope(|scope| {
let mut threads = Vec::with_capacity(parallel_num);
for t in 0..parallel_num {
let range = t * data_pair.len() / parallel_num..(t + 1) * data_pair.len() / parallel_num;
threads.push(scope.spawn(|_| {
let (mut input, mut output) = construct_layers();
let mut sum_of_loss = 0f64;
let mut accuracy = 0f64;
for data in &data_pair[range] {
let TrainDataPair {
input: input_value,
output: output_value,
} = &data;
assert_eq!(output_value.value_count(), 1);
input.set_value(input_value.clone().into());
output.set_expect_output(output_value.clone());
let output_loss = output.get_output_value();
let output_without_loss = output.get_output_without_loss();
accuracy += match &output_without_loss {
TensorEitherOwned::Dense(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.as_all_slice().iter().enumerate().all(|(i, v)| i == correct_index || *v < correct) {
1.
} else {
0.
}
}
TensorEitherOwned::Sparse(tensor) => {
let ([correct_index], _) = output_value.iter().next().unwrap();
let correct = *tensor.get([correct_index]).unwrap();
if tensor.iter().all(|([i], v)| i == correct_index || *v < correct)
|
else {
0.
}
}
};
sum_of_loss += *output_loss.get([]).unwrap() as f64;
if back_propagate {
output.clear_gradient_all();
output.back_propagate_all();
}
}
(sum_of_loss, accuracy)
}));
}
threads.into_iter().fold((0f64, 0f64), |(sum_loss, sum_accuracy), handle| {
let (loss, accuracy) = handle.join().unwrap();
(sum_loss + loss, sum_accuracy + accuracy)
})
})
.expect("failed to use thread")
}
struct TrainData {
input_size: usize,
output_size: usize,
data_pair: Vec<TrainDataPair>,
}
struct TrainDataPair {
input: Sparse<f32, 1>,
output: Sparse<f32, 1>,
}
impl TrainDataPair {
fn new(input: Sparse<f32, 1>, output: Sparse<f32, 1>) -> Self {
TrainDataPair { input, output }
}
}
fn read_train_data(labels: impl AsRef<Path>, features: impl AsRef<Path>) -> TrainData {
let (output_size, labels) = read_file_as_tensors(labels);
let (input_size, features) = read_file_as_tensors(features);
let data_pair = labels
.into_par_iter()
.zip_eq(features)
.filter(|(output, _)| output.value_count() == 1)
.map(|(output, input)| TrainDataPair::new(input, output))
.collect();
TrainData { input_size, output_size, data_pair }
}
fn read_file_as_tensors(path: impl AsRef<Path>) -> (usize, Vec<Sparse<f32, 1>>) {
let path = path.as_ref();
let failed_read = &format!("failed to read file {}", path.display());
let invalid_format = &format!("invalid file format {}", path.display());
let file = File::open(path).expect
|
{
1.
}
|
conditional_block
|
structs.go
|
// This is the point of sale of segments in PNRs: - 9 char Amadeus Office ID. - OR 2 char GDS code for OA PNRs. Matches PNRs containing a segment sold in any Amadeus Office ID matching pattern NCE6X*** or ***BA0***, or sold in Sabre (1S) or Galileo (1G).
Pos *PointOfSaleInformationType `xml:"pos,omitempty"`
// The repetition is 10 because we can transport: - up to 5 tierLevel - up to 5 customerValue, possibly including a range of customerValue. If we have tierLevel in the FTI, the customerValue must not be present. If we have customerValue in the FTI, the tierLevel must not be present.
TierLevelAndCustomerValue *FrequentTravellerIdentificationCodeType `xml:"tierLevelAndCustomerValue,omitempty"`
SortCriteria *SortCriteria `xml:"sortCriteria,omitempty"`
}
type SearchCriteria struct {
// used to specify whether ticketing, departure or creation options apply
SearchOption *SelectionDetailsTypeI `xml:"searchOption,omitempty"`
// used to specify the dates to be searched on
Dates *StructuredPeriodInformationType `xml:"dates,omitempty"`
}
type FlightInformation struct {
// It transports the type of flight information that will follow.
FlightInformationType *StatusTypeI `xml:"flightInformationType,omitempty"`
// Board point or Off Point.
BoardPointOrOffPoint *OriginAndDestinationDetailsTypeI `xml:"boardPointOrOffPoint,omitempty"`
// Airline code or Flight Number (in fact, airline + flight number)
AirlineCodeOrFlightNumber *TransportIdentifierType `xml:"airlineCodeOrFlightNumber,omitempty"`
// Booking class.
ClassOfService *ProductInformationTypeI `xml:"classOfService,omitempty"`
// Segment status code.
SegmentStatus *RelatedProductInformationTypeI `xml:"segmentStatus,omitempty"`
}
type SortCriteria struct {
// dummy for SDT clash
Dumbo *DummySegmentTypeI `xml:"dumbo,omitempty"`
// Determine the order of the display.
SortOption *SelectionDetailsTypeI `xml:"sortOption,omitempty"`
}
type AccountingElementType struct {
// Account number
Number formats.AlphaNumericString_Length1To10 `xml:"number,omitempty"`
}
type AccountingInformationElementType struct {
// One of these 4 data elements is mandatory, but none in particular
Account *AccountingElementType `xml:"account,omitempty"`
}
type ActionDetailsTypeI struct {
// used for scrolling purposes
NumberOfItemsDetails *ProcessingInformationTypeI `xml:"numberOfItemsDetails,omitempty"`
}
type AdditionalBusinessSourceInformationTypeI struct {
// the office we are targeting
SourceType *SourceTypeDetailsTypeI `xml:"sourceType,omitempty"`
// contains the office ID
OriginatorDetails *OriginatorIdentificationDetailsTypeI `xml:"originatorDetails,omitempty"`
}
type CompanyIdentificationTypeI struct {
// Marketing company.
MarketingCompany formats.AlphaNumericString_Length1To3 `xml:"marketingCompany,omitempty"`
}
type DummySegmentTypeI struct {
XMLName xml.Name `xml:"http://xml.amadeus.com/QDQLRQ_11_1_1A DummySegmentTypeI"`
}
type FrequentTravellerIdentificationCodeType struct {
// Frequent Traveller Info. Repetition 2 is used only in the case we provide a customer value range (only one is accepted).
FrequentTravellerDetails *FrequentTravellerIdentificationType `xml:"frequentTravellerDetails,omitempty"`
DummyNET struct {
} `xml:"Dummy.NET,omitempty"`
}
type FrequentTravellerIdentificationType struct {
// This field specifies the Tier Level. This is a 4 letter string indicating the airline's ranking of frequent flyers. It is not to be confused with Alliance tier. If tierLevel is filled in a given FTI segment, customerValue must not be filled.
TierLevel formats.AlphaNumericString_Length1To4 `xml:"tierLevel,omitempty"`
// This field specifies the Customer value. This is a 4 letter string indicating the airline's ranking of frequent flyers. It is not to be confused with Alliance tier. If customerValue is filled in a given FTI segment, tierLevel field must not be filled.
CustomerValue formats.NumericInteger_Length1To4 `xml:"customerValue,omitempty"`
}
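// Illustrative note (hypothetical values): exactly one of TierLevel or
// CustomerValue may be set on a given FTI segment, e.g.
//   FrequentTravellerIdentificationType{TierLevel: "GOLD"}   // valid
//   FrequentTravellerIdentificationType{CustomerValue: 1234} // valid
//   // populating both fields would violate the mutual-exclusion rule above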
type LocationTypeU struct {
// Office identification. It can contain wildcards.
Name formats.AlphaNumericString_Length1To9 `xml:"name,omitempty"`
}
type OriginAndDestinationDetailsTypeI struct {
// Board point
Origin formats.AlphaNumericString_Length3To3 `xml:"origin,omitempty"`
// Off point
Destination formats.AlphaNumericString_Length3To3 `xml:"destination,omitempty"`
}
type OriginatorIdentificationDetailsTypeI struct {
// the office that is being targeted
InHouseIdentification1 formats.AlphaNumericString_Length1To9 `xml:"inHouseIdentification1,omitempty"`
}
type PartyIdentifierTypeU struct {
// GDS identifier: 1A, 1S, 1G.
PartyIdentifier formats.AlphaNumericString_Length1To3 `xml:"partyIdentifier,omitempty"`
}
type PointOfSaleInformationType struct {
// Party identification.
PointOfSale *PartyIdentifierTypeU `xml:"pointOfSale,omitempty"`
// Office id in case the party identifier is 1A.
LocationDetails *LocationTypeU `xml:"locationDetails,omitempty"`
}
type ProcessingInformationTypeI struct {
// determines whether a move up or move down is required
ActionQualifier formats.AlphaNumericString_Length1To3 `xml:"actionQualifier,omitempty"`
}
type ProductDetailsTypeI struct {
// Class designator.
Designator formats.AlphaNumericString_Length1To1 `xml:"designator,omitempty"`
}
type ProductIdentificationDetailsTypeI struct {
// Flight number.
FlightNumber formats.AlphaNumericString_Length1To4 `xml:"flightNumber,omitempty"`
}
type ProductInformationTypeI struct {
// Booking class details.
BookingClassDetails *ProductDetailsTypeI `xml:"bookingClassDetails,omitempty"`
}
type QueueInformationDetailsTypeI struct {
// queue number
Number formats.NumericInteger_Length1To2 `xml:"number,omitempty"`
}
type QueueInformationTypeI struct {
// queue identification
QueueDetails *QueueInformationDetailsTypeI `xml:"queueDetails,omitempty"`
}
type RangeDetailsTypeI struct {
// defines whether this is a range or not
RangeQualifier formats.AlphaNumericString_Length1To3 `xml:"rangeQualifier,omitempty"`
// defines the start and possible end point of the scan
RangeDetails *RangeTypeI `xml:"rangeDetails,omitempty"`
}
type RangeTypeI struct {
// starting point of the scan
Min formats.NumericInteger_Length1To18 `xml:"min,omitempty"`
// ending point of the scan
Max formats.NumericInteger_Length1To18 `xml:"max,omitempty"`
}
type RelatedProductInformationTypeI struct {
// Status code
StatusCode formats.AlphaNumericString_Length2To2 `xml:"statusCode,omitempty"`
}
type SelectionDetailsInformationTypeI struct {
// used to determine if this is a new start or a continuation. Also used for search and sort criteria on the ticketing, departure and creation dates
Option formats.AlphaNumericString_Length1To3 `xml:"option,omitempty"`
}
type SelectionDetailsTypeI struct {
// used for search and sort criteria
SelectionDetails *SelectionDetailsInformationTypeI `xml:"selectionDetails,omitempty"`
}
type SourceTypeDetailsTypeI struct {
// not needed, but a mandatory field, so just stick a 4 in it!
SourceQualifier1 formats.AlphaNumericString_Length1To3 `xml:"sourceQualifier1,omitempty"`
}
type StatusDetailsTypeI struct {
// Indicator showing what flight information will be transported.
Indicator formats.AlphaNumericString_Length1To3 `xml:"indicator,omitempty"`
}
type StatusTypeI struct {
// Flight status details.
StatusDetails *StatusDetailsTypeI `xml:"statusDetails,omitempty"`
}
type StructuredDateTimeInformationType struct {
// used for date range only. The date ranges are defined on the central system as 1, 2, 3, 4. The actual values of the ranges are set in the office profile
TimeMode formats.NumericInteger_Length1To3 `xml:"timeMode,omitempty"`
}
type StructuredDateTimeType struct {
// Year number.
Year formats.Year_YYYY `xml:"year,omitempty"`
// Month number in the year (begins at 1)
Month formats.Month_mM `xml:"month,omitempty"`
// Day number in the month (begins at 1)
Day formats.Day_nN `xml:"day,omitempty"`
}
type StructuredPeriodInformationType struct {
// Conveys the begin date/time of a period.
BeginDateTime *StructuredDateTimeType `xml:"beginDateTime,omitempty"`
// Conveys the end date/time of a period.
EndDateTime *StructuredDateTimeType `xml:"endDateTime,omitempty"`
}
type SubQueueInformationDetailsTypeI struct {
// E for every category, A for categories with items to be worked, C for category number, N for nickname, CN for both category number and nickname, numeric for date range
IdentificationType formats.AlphaNumericString_Length1To3 `xml:"identificationType,omitempty"`
// category number
ItemNumber formats.AlphaNumericString_Length1To3 `xml:"itemNumber,omitempty"`
// used for nickname on inbound; used for category name on outbound
ItemDescription formats.AlphaNumericString_Length1To35 `xml:"itemDescription,omitempty"`
}
type SubQueueInformationTypeI struct {
|
// identifies the category or categories.
SubQueueInfoDetails *SubQueueInformationDetailsTypeI `xml:"subQueueInfoDetails,omitempty"`
}
|
random_line_split
|
|
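The structs above map Amadeus QDQLRQ XML elements onto Go fields through `encoding/xml` struct tags, with `omitempty` suppressing empty optional elements on marshal. A minimal, self-contained sketch of that pattern under stated assumptions — the sample XML payload here is invented for illustration and is not part of the Amadeus schema:

package main

import (
	"encoding/xml"
	"fmt"
)

// Simplified stand-ins shaped like the generated types above.
type QueueInformationDetailsTypeI struct {
	Number int `xml:"number,omitempty"`
}

type QueueInformationTypeI struct {
	QueueDetails *QueueInformationDetailsTypeI `xml:"queueDetails,omitempty"`
}

func main() {
	// Hypothetical payload matching the tag names on the structs.
	data := []byte(`<queue><queueDetails><number>7</number></queueDetails></queue>`)
	var q QueueInformationTypeI
	if err := xml.Unmarshal(data, &q); err != nil {
		panic(err)
	}
	fmt.Println(q.QueueDetails.Number) // 7

	// Marshalling back omits the nil optional field thanks to omitempty.
	out, _ := xml.Marshal(QueueInformationTypeI{})
	fmt.Println(string(out)) // <QueueInformationTypeI></QueueInformationTypeI>
}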
tkteach.py
|
=tk.LEFT)
self.frameLEFT = tk.Frame(master,bd=2,relief=tk.SUNKEN)
self.frameLEFT.pack(side=tk.LEFT)
self.datasetTitleLabel = tk.Label(self.frameLEFT, text="Data Set Selection:")
self.datasetTitleLabel.pack()
self.dataSetsListbox = tk.Listbox(self.frameLEFT,relief=tk.FLAT)
for item in self.dataSetsListStr:
self.dataSetsListbox.insert(tk.END, item)
self.dataSetsListbox.pack()
self.loadDataSetButton = tk.Button(self.frameLEFT, text="Load Data Set", command=self.loadDataSet)
self.loadDataSetButton.pack()
self.dataSetStatusLabel = tk.Label(self.frameLEFT, text="No Data Set Loaded!")
self.dataSetStatusLabel.pack()
|
# MIDDLE FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameMIDDLE = tk.Frame(master, bd=2)
self.frameMIDDLE.pack(side=tk.LEFT)
self.imgStage = tk.Label(self.frameMIDDLE, text="", height=self.default_size[1], width=self.default_size[0])
self.imgStage.pack()
self.imgFileName = tk.Label(self.frameMIDDLE, text="")
self.imgFileName.pack()
self.frameMIDDLEBUTTONS = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEBUTTONS.pack()
self.prevImageButton = tk.Button(self.frameMIDDLEBUTTONS, text="<- Save & Previous", command=self.prevImage, state=tk.DISABLED)
self.prevImageButton.pack(side=tk.LEFT)
self.nextImageButton = tk.Button(self.frameMIDDLEBUTTONS, text="Save & Next ->", command=self.nextImage, state=tk.DISABLED)
self.nextImageButton.pack(side=tk.LEFT)
self.frameMIDDLEIMGLABEL = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGLABEL.pack()
self.imageNumberLabel = tk.Label(self.frameMIDDLEIMGLABEL, text="Image Number:")
self.imageNumberLabel.pack(side=tk.LEFT)
self.imageNumberInput = tk.Entry(self.frameMIDDLEIMGLABEL, width=10)
self.imageNumberInput.pack(side=tk.LEFT)
self.skipToImageButton = tk.Button(self.frameMIDDLEIMGLABEL, text="Go", command=self.skipToImage, state=tk.DISABLED)
self.skipToImageButton.pack(side=tk.LEFT)
self.frameMIDDLEIMGZOOM = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGZOOM.pack()
self.imageZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text="Zoom:")
self.imageZoomLabel.pack(side=tk.LEFT)
self.zoomOutButton = tk.Button(self.frameMIDDLEIMGZOOM, text="-", command=self.zoomOut, state=tk.DISABLED)
self.zoomOutButton.pack(side=tk.LEFT)
self.zoomInButton = tk.Button(self.frameMIDDLEIMGZOOM, text="+", command=self.zoomIn, state=tk.DISABLED)
self.zoomInButton.pack(side=tk.LEFT)
self.currentZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text=' '+str(self.imgScaleFactor)+'X ')
self.currentZoomLabel.pack(side=tk.LEFT)
self.frameSeperator02 = tk.Frame(master,width=20,height=1)
self.frameSeperator02.pack(side=tk.LEFT)
# RIGHT FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameRIGHT = tk.Frame(master, bd=2, relief=tk.SUNKEN)
self.frameRIGHT.pack(side=tk.LEFT)
self.categoriesLabel = tk.Label(self.frameRIGHT, text="Categories:")
self.categoriesLabel.pack()
self.categoriesListbox = tk.Listbox(self.frameRIGHT, selectmode=tk.MULTIPLE, selectbackground='#119911',relief=tk.FLAT, bd=2)
for item in self.categories:
self.categoriesListbox.insert(tk.END, item)
self.categoriesListbox.pack()
self.categoriesListbox.config(state=tk.DISABLED)
self.frameSeperator03 = tk.Frame(master,width=6,height=1)
self.frameSeperator03.pack(side=tk.LEFT)
self.select_defaults()
def select_defaults(self):
print("-->select_defaults")
if len(self.dataSetsListStr) == 1:
self.dataSetsListbox.selection_set(0)
self.loadDataSet()
def initialize(self):
print("-->initialize")
#Set parameters:
self.imgScaleFactor = 1
#Sub-initializations
self.initializeDatabase()
self.initializeDatasets()
self.initializeCategories()
def initializeDatabase(self):
print("-->initializeDatabase")
#Load/create database:
self.db = sq.connect('storage.db')
self.cursor = self.db.cursor()
self.cursor.execute('CREATE TABLE IF NOT EXISTS dataSets(id INTEGER NOT NULL PRIMARY KEY, dataSetName TEXT, dataSetPath TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS images(id INTEGER NOT NULL PRIMARY KEY, dataSet_id INTEGER, imageName TEXT, imagePath TEXT UNIQUE, FOREIGN KEY(dataSet_id) REFERENCES dataSets(id))')
self.cursor.execute('CREATE TABLE IF NOT EXISTS categories(id INTEGER NOT NULL PRIMARY KEY, categoryName TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS labels(category_id INTEGER, image_id INTEGER, FOREIGN KEY(category_id) REFERENCES categories(id), FOREIGN KEY(image_id) REFERENCES images(id))')
self.db.commit()
def initializeDatasets(self):
print("-->initializeDatasets")
#Get Datasets:
d = './ds'
self.dataSetsListDir = [os.path.join(d, o) for o in os.listdir(d) if os.path.isdir(os.path.join(d,o))]
self.dataSetsListStr = [x[2:] for x in self.dataSetsListDir]
if len(self.dataSetsListDir)==0:
self.statusBar.config(text="ERROR! No datasets found.")
print("ERROR! No datasets found.")
def initializeCategories(self):
print("-->initializeCategories")
#Get Categories from categories.txt file:
try:
catFile = open('categories.txt','r')
self.categories = [o.strip() for o in catFile.readlines()]
catFile.close()
except IOError:
self.categories = []
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
if len(self.categories)==0:
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
#Populate db table for categories:
for category in self.categories:
self.cursor.execute('INSERT OR IGNORE INTO categories(categoryName) VALUES(?)',(category,))
self.db.commit()
#Parse Categories, set ad-hoc category key bindings:
self.keyBindings=[]
for keyi in self.categories:
if keyi[0]=="(" and keyi[2]==")":
if keyi[1] in self.keyBindings:
self.statusBar.config(text="ERROR! Multiple categories with the same key binding!")
print("ERROR! Multiple categories with the same key binding!")
exit()
else:
self.keyBindings.append(keyi[1].lower())
def keyPressed(self, key):
print("-->keyPressed: "+str(key.char))
if key.keysym == 'Left':
self.prevImageButton.config(relief=tk.SUNKEN)
self.prevImageButton.update_idletasks()
self.prevImage()
time.sleep(0.05)
self.prevImageButton.config(relief=tk.RAISED)
elif key.keysym == 'Right':
self.nextImageButton.config(relief=tk.SUNKEN)
self.nextImageButton.update_idletasks()
self.nextImage()
time.sleep(0.05)
self.nextImageButton.config(relief=tk.RAISED)
elif key.char == '+' or key.char == '=':
self.zoomInButton.config(relief=tk.SUNKEN)
self.zoomInButton.update_idletasks()
self.zoomIn()
time.sleep(0.05)
self.zoomInButton.config(relief=tk.RAISED)
elif key.char == '-':
self.zoomOutButton.config(relief=tk.SUNKEN)
self.zoomOutButton.update_idletasks()
self.zoomOut()
time.sleep(0.05)
self.zoomOutButton.config(relief=tk.RAISED)
else:
#Check if this is an ad-hoc keybind for a category selection...
try:
if self.categoriesListbox.selection_includes(self.keyBindings.index(key.char.lower())):
self.categoriesListbox.selection_clear(self.keyBindings.index(key.char.lower()))
else:
self.categoriesListbox.selection_set(self.keyBindings.index(key.char.lower()))
except ValueError:
pass
def prevImage(self):
#Go to previous image
print("-->prevImage")
self.saveImageCategorization()
if self.imageSelection>0:
self.imageSelection-=1
self.loadImage()
else:
self.statusBar.config(text="ERROR! Already at first image.")
print("ERROR! Already at first image.")
def nextImage(self):
#Go to next image
print("-->nextImage")
self.save
|
self.frameSeperator01 = tk.Frame(master,width=20,height=1)
self.frameSeperator01.pack(side=tk.LEFT)
|
random_line_split
|
tkteach.py
|
.frameMIDDLEIMGZOOM = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGZOOM.pack()
self.imageZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text="Zoom:")
self.imageZoomLabel.pack(side=tk.LEFT)
self.zoomOutButton = tk.Button(self.frameMIDDLEIMGZOOM, text="-", command=self.zoomOut, state=tk.DISABLED)
self.zoomOutButton.pack(side=tk.LEFT)
self.zoomInButton = tk.Button(self.frameMIDDLEIMGZOOM, text="+", command=self.zoomIn, state=tk.DISABLED)
self.zoomInButton.pack(side=tk.LEFT)
self.currentZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text=' '+str(self.imgScaleFactor)+'X ')
self.currentZoomLabel.pack(side=tk.LEFT)
self.frameSeperator02 = tk.Frame(master,width=20,height=1)
self.frameSeperator02.pack(side=tk.LEFT)
# RIGHT FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameRIGHT = tk.Frame(master, bd=2, relief=tk.SUNKEN)
self.frameRIGHT.pack(side=tk.LEFT)
self.categoriesLabel = tk.Label(self.frameRIGHT, text="Categories:")
self.categoriesLabel.pack()
self.categoriesListbox = tk.Listbox(self.frameRIGHT, selectmode=tk.MULTIPLE, selectbackground='#119911',relief=tk.FLAT, bd=2)
for item in self.categories:
self.categoriesListbox.insert(tk.END, item)
self.categoriesListbox.pack()
self.categoriesListbox.config(state=tk.DISABLED)
self.frameSeperator03 = tk.Frame(master,width=6,height=1)
self.frameSeperator03.pack(side=tk.LEFT)
self.select_defaults()
def select_defaults(self):
print("-->select_defaults")
if len(self.dataSetsListStr) == 1:
self.dataSetsListbox.selection_set(0)
self.loadDataSet()
def initialize(self):
print("-->initialize")
#Set parameters:
self.imgScaleFactor = 1
#Sub-initializations
self.initializeDatabase()
self.initializeDatasets()
self.initializeCategories()
def initializeDatabase(self):
print("-->initializeDatabase")
#Load/create database:
self.db = sq.connect('storage.db')
self.cursor = self.db.cursor()
self.cursor.execute('CREATE TABLE IF NOT EXISTS dataSets(id INTEGER NOT NULL PRIMARY KEY, dataSetName TEXT, dataSetPath TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS images(id INTEGER NOT NULL PRIMARY KEY, dataSet_id INTEGER, imageName TEXT, imagePath TEXT UNIQUE, FOREIGN KEY(dataSet_id) REFERENCES dataSets(id))')
self.cursor.execute('CREATE TABLE IF NOT EXISTS categories(id INTEGER NOT NULL PRIMARY KEY, categoryName TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS labels(category_id INTEGER, image_id INTEGER, FOREIGN KEY(category_id) REFERENCES categories(id), FOREIGN KEY(image_id) REFERENCES images(id))')
self.db.commit()
def initializeDatasets(self):
print("-->initializeDatasets")
#Get Datasets:
d = './ds'
self.dataSetsListDir = [os.path.join(d, o) for o in os.listdir(d) if os.path.isdir(os.path.join(d,o))]
self.dataSetsListStr = [x[2:] for x in self.dataSetsListDir]
if len(self.dataSetsListDir)==0:
self.statusBar.config(text="ERROR! No datasets found.")
print("ERROR! No datasets found.")
def initializeCategories(self):
print("-->initializeCategories")
#Get Categories from categories.txt file:
try:
catFile = open('categories.txt','r')
self.categories = [o.strip() for o in catFile.readlines()]
catFile.close()
except IOError:
self.categories = []
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
if len(self.categories)==0:
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
#Populate db table for categories:
for category in self.categories:
self.cursor.execute('INSERT OR IGNORE INTO categories(categoryName) VALUES(?)',(category,))
self.db.commit()
#Parse Categories, set ad-hoc category key bindings:
self.keyBindings=[]
for keyi in self.categories:
if keyi[0]=="(" and keyi[2]==")":
if keyi[1] in self.keyBindings:
self.statusBar.config(text="ERROR! Multiple categories with the same key binding!")
print("ERROR! Multiple categories with the same key binding!")
exit()
else:
self.keyBindings.append(keyi[1].lower())
def keyPressed(self, key):
print("-->keyPressed: "+str(key.char))
if key.keysym == 'Left':
self.prevImageButton.config(relief=tk.SUNKEN)
self.prevImageButton.update_idletasks()
self.prevImage()
time.sleep(0.05)
self.prevImageButton.config(relief=tk.RAISED)
elif key.keysym == 'Right':
self.nextImageButton.config(relief=tk.SUNKEN)
self.nextImageButton.update_idletasks()
self.nextImage()
time.sleep(0.05)
self.nextImageButton.config(relief=tk.RAISED)
elif key.char == '+' or key.char == '=':
self.zoomInButton.config(relief=tk.SUNKEN)
self.zoomInButton.update_idletasks()
self.zoomIn()
time.sleep(0.05)
self.zoomInButton.config(relief=tk.RAISED)
elif key.char == '-':
self.zoomOutButton.config(relief=tk.SUNKEN)
self.zoomOutButton.update_idletasks()
self.zoomOut()
time.sleep(0.05)
self.zoomOutButton.config(relief=tk.RAISED)
else:
#Check if this is an ad-hoc keybind for a category selection...
try:
if self.categoriesListbox.selection_includes(self.keyBindings.index(key.char.lower())):
self.categoriesListbox.selection_clear(self.keyBindings.index(key.char.lower()))
else:
self.categoriesListbox.selection_set(self.keyBindings.index(key.char.lower()))
except ValueError:
pass
def prevImage(self):
#Go to previous image
print("-->prevImage")
self.saveImageCategorization()
if self.imageSelection>0:
self.imageSelection-=1
self.loadImage()
else:
self.statusBar.config(text="ERROR! Already at first image.")
print("ERROR! Already at first image.")
def nextImage(self):
#Go to next image
print("-->nextImage")
self.saveImageCategorization()
if self.imageSelection<(len(self.imageListDir)-1):
self.imageSelection += 1
self.imgScaleFactor = 1
self.loadImage()
else:
self.statusBar.config(text="ERROR! Already at last image.")
print("ERROR! Already at last image.")
def loadImage(self):
print("-->loadImage")
#Draw image to screen:
imageFile = Image.open(self.imageListDir[self.imageSelection])
imageFile.thumbnail(self.default_size, Image.ANTIALIAS)
canvasImage = ImageTk.PhotoImage(imageFile.resize(
(int(imageFile.size[0] * self.imgScaleFactor),
int(imageFile.size[1] * self.imgScaleFactor)),
Image.NEAREST))
self.imgStage.config(image=canvasImage)
self.imgStage.image = canvasImage
self.imgFileName.config(text=str(self.imageListStr[self.imageSelection]))
self.imageNumberInput.delete(0, tk.END)
self.imageNumberInput.insert(0, str(self.imageSelection))
self.statusBar.config(text="")
#Read from db and update starting categories in listbox if data exists:
self.categoriesListbox.selection_clear(0,len(self.categories))
self.cursor.execute('SELECT category_id FROM labels WHERE image_id = ?',(self.db_getImageID(self.imageListDir[self.imageSelection]),))
for category_id in self.cursor.fetchall():
self.cursor.execute('SELECT categoryName FROM categories WHERE id = ?',(category_id[0],))
categoryName = self.cursor.fetchone()[0]
try:
self.categoriesListbox.selection_set(self.categories.index(categoryName))
except ValueError:
self.statusBar.config(text="FATAL ERROR! Image is saved with invalid category.")
print('FATAL ERROR! Image is saved with invalid category: '+str(categoryName))
print('image Name: '+self.imageListStr[self.imageSelection])
print('This category must be listed in the categories.txt file.')
exit()
def saveImageCategorization(self):
print("-->saveImageCategorization")
#Clear out existing category labels for the image...
self.cursor.execute('DELETE FROM labels WHERE image_id = ?', (self.db_getImageID(self.imageListDir[self.imageSelection]),))
#Insert new category labels for the image...
for cati in self.categoriesListbox.curselection():
self.cursor.execute('INSERT INTO labels(image_id, category_id) VALUES(?, ?)',(self.db_getImageID(self.imageListDir[self.imageSelection]), self.db_getCategoryID(self.categories[cati])))
self.db.commit()
def
|
skipToImage
|
identifier_name
|
|
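tkteach derives ad-hoc key bindings from category names of the form `(x) Name`, taking the character between the parentheses as the shortcut and rejecting duplicates, as in the initializeCategories loop above. A rough Go translation of just that parsing step — the category strings in main are made up for the example:

package main

import (
	"fmt"
	"strings"
)

// parseKeyBindings mirrors tkteach's initializeCategories loop:
// a category written as "(x) Name" binds lowercase 'x' to that category.
func parseKeyBindings(categories []string) (map[rune]string, error) {
	bindings := make(map[rune]string)
	for _, cat := range categories {
		if len(cat) >= 3 && cat[0] == '(' && cat[2] == ')' {
			key := rune(strings.ToLower(cat)[1])
			if _, dup := bindings[key]; dup {
				return nil, fmt.Errorf("multiple categories share key %q", key)
			}
			bindings[key] = cat
		}
	}
	return bindings, nil
}

func main() {
	cats := []string{"(a) Animal", "(b) Building", "Unbound category"}
	b, err := parseKeyBindings(cats)
	if err != nil {
		panic(err)
	}
	fmt.Println(b['a']) // (a) Animal
}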
tkteach.py
|
=tk.LEFT)
self.frameLEFT = tk.Frame(master,bd=2,relief=tk.SUNKEN)
self.frameLEFT.pack(side=tk.LEFT)
self.datasetTitleLabel = tk.Label(self.frameLEFT, text="Data Set Selection:")
self.datasetTitleLabel.pack()
self.dataSetsListbox = tk.Listbox(self.frameLEFT,relief=tk.FLAT)
for item in self.dataSetsListStr:
self.dataSetsListbox.insert(tk.END, item)
self.dataSetsListbox.pack()
self.loadDataSetButton = tk.Button(self.frameLEFT, text="Load Data Set", command=self.loadDataSet)
self.loadDataSetButton.pack()
self.dataSetStatusLabel = tk.Label(self.frameLEFT, text="No Data Set Loaded!")
self.dataSetStatusLabel.pack()
self.frameSeperator01 = tk.Frame(master,width=20,height=1)
self.frameSeperator01.pack(side=tk.LEFT)
# MIDDLE FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameMIDDLE = tk.Frame(master, bd=2)
self.frameMIDDLE.pack(side=tk.LEFT)
self.imgStage = tk.Label(self.frameMIDDLE, text="", height=self.default_size[1], width=self.default_size[0])
self.imgStage.pack()
self.imgFileName = tk.Label(self.frameMIDDLE, text="")
self.imgFileName.pack()
self.frameMIDDLEBUTTONS = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEBUTTONS.pack()
self.prevImageButton = tk.Button(self.frameMIDDLEBUTTONS, text="<- Save & Previous", command=self.prevImage, state=tk.DISABLED)
self.prevImageButton.pack(side=tk.LEFT)
self.nextImageButton = tk.Button(self.frameMIDDLEBUTTONS, text="Save & Next ->", command=self.nextImage, state=tk.DISABLED)
self.nextImageButton.pack(side=tk.LEFT)
self.frameMIDDLEIMGLABEL = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGLABEL.pack()
self.imageNumberLabel = tk.Label(self.frameMIDDLEIMGLABEL, text="Image Number:")
self.imageNumberLabel.pack(side=tk.LEFT)
self.imageNumberInput = tk.Entry(self.frameMIDDLEIMGLABEL, width=10)
self.imageNumberInput.pack(side=tk.LEFT)
self.skipToImageButton = tk.Button(self.frameMIDDLEIMGLABEL, text="Go", command=self.skipToImage, state=tk.DISABLED)
self.skipToImageButton.pack(side=tk.LEFT)
self.frameMIDDLEIMGZOOM = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGZOOM.pack()
self.imageZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text="Zoom:")
self.imageZoomLabel.pack(side=tk.LEFT)
self.zoomOutButton = tk.Button(self.frameMIDDLEIMGZOOM, text="-", command=self.zoomOut, state=tk.DISABLED)
self.zoomOutButton.pack(side=tk.LEFT)
self.zoomInButton = tk.Button(self.frameMIDDLEIMGZOOM, text="+", command=self.zoomIn, state=tk.DISABLED)
self.zoomInButton.pack(side=tk.LEFT)
self.currentZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text=' '+str(self.imgScaleFactor)+'X ')
self.currentZoomLabel.pack(side=tk.LEFT)
self.frameSeperator02 = tk.Frame(master,width=20,height=1)
self.frameSeperator02.pack(side=tk.LEFT)
# RIGHT FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameRIGHT = tk.Frame(master, bd=2, relief=tk.SUNKEN)
self.frameRIGHT.pack(side=tk.LEFT)
self.categoriesLabel = tk.Label(self.frameRIGHT, text="Categories:")
self.categoriesLabel.pack()
self.categoriesListbox = tk.Listbox(self.frameRIGHT, selectmode=tk.MULTIPLE, selectbackground='#119911',relief=tk.FLAT, bd=2)
for item in self.categories:
self.categoriesListbox.insert(tk.END, item)
self.categoriesListbox.pack()
self.categoriesListbox.config(state=tk.DISABLED)
self.frameSeperator03 = tk.Frame(master,width=6,height=1)
self.frameSeperator03.pack(side=tk.LEFT)
self.select_defaults()
def select_defaults(self):
print("-->select_defaults")
if len(self.dataSetsListStr) == 1:
self.dataSetsListbox.selection_set(0)
self.loadDataSet()
def initialize(self):
print("-->initialize")
#Set parameters:
self.imgScaleFactor = 1
#Sub-initializations
self.initializeDatabase()
self.initializeDatasets()
self.initializeCategories()
def initializeDatabase(self):
print("-->initializeDatabase")
#Load/create database:
self.db = sq.connect('storage.db')
self.cursor = self.db.cursor()
self.cursor.execute('CREATE TABLE IF NOT EXISTS dataSets(id INTEGER NOT NULL PRIMARY KEY, dataSetName TEXT, dataSetPath TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS images(id INTEGER NOT NULL PRIMARY KEY, dataSet_id INTEGER, imageName TEXT, imagePath TEXT UNIQUE, FOREIGN KEY(dataSet_id) REFERENCES dataSets(id))')
self.cursor.execute('CREATE TABLE IF NOT EXISTS categories(id INTEGER NOT NULL PRIMARY KEY, categoryName TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS labels(category_id INTEGER, image_id INTEGER, FOREIGN KEY(category_id) REFERENCES categories(id), FOREIGN KEY(image_id) REFERENCES images(id))')
self.db.commit()
def initializeDatasets(self):
print("-->initializeDatasets")
#Get Datasets:
d = './ds'
self.dataSetsListDir = [os.path.join(d, o) for o in os.listdir(d) if os.path.isdir(os.path.join(d,o))]
self.dataSetsListStr = [x[2:] for x in self.dataSetsListDir]
if len(self.dataSetsListDir)==0:
self.statusBar.config(text="ERROR! No datasets found.")
print("ERROR! No datasets found.")
def initializeCategories(self):
print("-->initializeCategories")
#Get Categories from categories.txt file:
try:
catFile = open('categories.txt','r')
self.categories = [o.strip() for o in catFile.readlines()]
catFile.close()
except IOError:
self.categories = []
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
if len(self.categories)==0:
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
#Populate db table for categories:
for category in self.categories:
self.cursor.execute('INSERT OR IGNORE INTO categories(categoryName) VALUES(?)',(category,))
self.db.commit()
#Parse Categories, set ad-hoc category key bindings:
self.keyBindings=[]
for keyi in self.categories:
if keyi[0]=="(" and keyi[2]==")":
if keyi[1] in self.keyBindings:
self.statusBar.config(text="ERROR! Multiple categories with the same key binding!")
print("ERROR! Multiple categories with the same key binding!")
exit()
else:
self.keyBindings.append(keyi[1].lower())
def keyPressed(self, key):
print("-->keyPressed: "+str(key.char))
if key.keysym == 'Left':
self.prevImageButton.config(relief=tk.SUNKEN)
self.prevImageButton.update_idletasks()
self.prevImage()
time.sleep(0.05)
self.prevImageButton.config(relief=tk.RAISED)
elif key.keysym == 'Right':
self.nextImageButton.config(relief=tk.SUNKEN)
self.nextImageButton.update_idletasks()
self.nextImage()
time.sleep(0.05)
self.nextImageButton.config(relief=tk.RAISED)
elif key.char == '+' or key.char == '=':
self.zoomInButton.config(relief=tk.SUNKEN)
self.zoomInButton.update_idletasks()
self.zoomIn()
time.sleep(0.05)
self.zoomInButton.config(relief=tk.RAISED)
elif key.char == '-':
|
else:
#Check if this is an ad-hoc keybind for a category selection...
try:
if self.categoriesListbox.selection_includes(self.keyBindings.index(key.char.lower())):
self.categoriesListbox.selection_clear(self.keyBindings.index(key.char.lower()))
else:
self.categoriesListbox.selection_set(self.keyBindings.index(key.char.lower()))
except ValueError:
pass
def prevImage(self):
#Go to previous image
print("-->prevImage")
self.saveImageCategorization()
if self.imageSelection>0:
self.imageSelection-=1
self.loadImage()
else:
self.statusBar.config(text="ERROR! Already at first image.")
print("ERROR! Already at first image.")
def nextImage(self):
#Go to next image
print("-->nextImage")
self.saveImage
|
self.zoomOutButton.config(relief=tk.SUNKEN)
self.zoomOutButton.update_idletasks()
self.zoomOut()
time.sleep(0.05)
self.zoomOutButton.config(relief=tk.RAISED)
|
conditional_block
|
tkteach.py
|
# LEFT FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameSeperator00 = tk.Frame(master,width=6,height=1)
self.frameSeperator00.pack(side=tk.LEFT)
self.frameLEFT = tk.Frame(master,bd=2,relief=tk.SUNKEN)
self.frameLEFT.pack(side=tk.LEFT)
self.datasetTitleLabel = tk.Label(self.frameLEFT, text="Data Set Selection:")
self.datasetTitleLabel.pack()
self.dataSetsListbox = tk.Listbox(self.frameLEFT,relief=tk.FLAT)
for item in self.dataSetsListStr:
self.dataSetsListbox.insert(tk.END, item)
self.dataSetsListbox.pack()
self.loadDataSetButton = tk.Button(self.frameLEFT, text="Load Data Set", command=self.loadDataSet)
self.loadDataSetButton.pack()
self.dataSetStatusLabel = tk.Label(self.frameLEFT, text="No Data Set Loaded!")
self.dataSetStatusLabel.pack()
self.frameSeperator01 = tk.Frame(master,width=20,height=1)
self.frameSeperator01.pack(side=tk.LEFT)
# MIDDLE FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameMIDDLE = tk.Frame(master, bd=2)
self.frameMIDDLE.pack(side=tk.LEFT)
self.imgStage = tk.Label(self.frameMIDDLE, text="", height=self.default_size[1], width=self.default_size[0])
self.imgStage.pack()
self.imgFileName = tk.Label(self.frameMIDDLE, text="")
self.imgFileName.pack()
self.frameMIDDLEBUTTONS = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEBUTTONS.pack()
self.prevImageButton = tk.Button(self.frameMIDDLEBUTTONS, text="<- Save & Previous", command=self.prevImage, state=tk.DISABLED)
self.prevImageButton.pack(side=tk.LEFT)
self.nextImageButton = tk.Button(self.frameMIDDLEBUTTONS, text="Save & Next ->", command=self.nextImage, state=tk.DISABLED)
self.nextImageButton.pack(side=tk.LEFT)
self.frameMIDDLEIMGLABEL = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGLABEL.pack()
self.imageNumberLabel = tk.Label(self.frameMIDDLEIMGLABEL, text="Image Number:")
self.imageNumberLabel.pack(side=tk.LEFT)
self.imageNumberInput = tk.Entry(self.frameMIDDLEIMGLABEL, width=10)
self.imageNumberInput.pack(side=tk.LEFT)
self.skipToImageButton = tk.Button(self.frameMIDDLEIMGLABEL, text="Go", command=self.skipToImage, state=tk.DISABLED)
self.skipToImageButton.pack(side=tk.LEFT)
self.frameMIDDLEIMGZOOM = tk.Frame(self.frameMIDDLE, bd=2)
self.frameMIDDLEIMGZOOM.pack()
self.imageZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text="Zoom:")
self.imageZoomLabel.pack(side=tk.LEFT)
self.zoomOutButton = tk.Button(self.frameMIDDLEIMGZOOM, text="-", command=self.zoomOut, state=tk.DISABLED)
self.zoomOutButton.pack(side=tk.LEFT)
self.zoomInButton = tk.Button(self.frameMIDDLEIMGZOOM, text="+", command=self.zoomIn, state=tk.DISABLED)
self.zoomInButton.pack(side=tk.LEFT)
self.currentZoomLabel = tk.Label(self.frameMIDDLEIMGZOOM, text=' '+str(self.imgScaleFactor)+'X ')
self.currentZoomLabel.pack(side=tk.LEFT)
self.frameSeperator02 = tk.Frame(master,width=20,height=1)
self.frameSeperator02.pack(side=tk.LEFT)
# RIGHT FRAME VVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVVV
self.frameRIGHT = tk.Frame(master, bd=2, relief=tk.SUNKEN)
self.frameRIGHT.pack(side=tk.LEFT)
self.categoriesLabel = tk.Label(self.frameRIGHT, text="Categories:")
self.categoriesLabel.pack()
self.categoriesListbox = tk.Listbox(self.frameRIGHT, selectmode=tk.MULTIPLE, selectbackground='#119911',relief=tk.FLAT, bd=2)
for item in self.categories:
self.categoriesListbox.insert(tk.END, item)
self.categoriesListbox.pack()
self.categoriesListbox.config(state=tk.DISABLED)
self.frameSeperator03 = tk.Frame(master,width=6,height=1)
self.frameSeperator03.pack(side=tk.LEFT)
self.select_defaults()
def select_defaults(self):
print("-->select_defaults")
if len(self.dataSetsListStr) == 1:
self.dataSetsListbox.selection_set(0)
self.loadDataSet()
def initialize(self):
print("-->initialize")
#Set parameters:
self.imgScaleFactor = 1
#Sub-initializations
self.initializeDatabase()
self.initializeDatasets()
self.initializeCategories()
def initializeDatabase(self):
print("-->initializeDatabase")
#Load/create database:
self.db = sq.connect('storage.db')
self.cursor = self.db.cursor()
self.cursor.execute('CREATE TABLE IF NOT EXISTS dataSets(id INTEGER NOT NULL PRIMARY KEY, dataSetName TEXT, dataSetPath TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS images(id INTEGER NOT NULL PRIMARY KEY, dataSet_id INTEGER, imageName TEXT, imagePath TEXT UNIQUE, FOREIGN KEY(dataSet_id) REFERENCES dataSets(id))')
self.cursor.execute('CREATE TABLE IF NOT EXISTS categories(id INTEGER NOT NULL PRIMARY KEY, categoryName TEXT UNIQUE)')
self.cursor.execute('CREATE TABLE IF NOT EXISTS labels(category_id INTEGER, image_id INTEGER, FOREIGN KEY(category_id) REFERENCES categories(id), FOREIGN KEY(image_id) REFERENCES images(id))')
self.db.commit()
def initializeDatasets(self):
print("-->initializeDatasets")
#Get Datasets:
d = './ds'
self.dataSetsListDir = [os.path.join(d, o) for o in os.listdir(d) if os.path.isdir(os.path.join(d,o))]
self.dataSetsListStr = [x[2:] for x in self.dataSetsListDir]
if len(self.dataSetsListDir)==0:
self.statusBar.config(text="ERROR! No datasets found.")
print("ERROR! No datasets found.")
def initializeCategories(self):
print("-->initializeCategories")
#Get Categories from categories.txt file:
try:
catFile = open('categories.txt','r')
self.categories = [o.strip() for o in catFile.readlines()]
catFile.close()
except IOError:
self.categories = []
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
if len(self.categories)==0:
self.statusBar.config(text="ERROR! No categories found.")
print("ERROR! No categories found.")
#Populate db table for categories:
for category in self.categories:
self.cursor.execute('INSERT OR IGNORE INTO categories(categoryName) VALUES(?)',(category,))
self.db.commit()
#Parse Categories, set ad-hoc category key bindings:
self.keyBindings=[]
for keyi in self.categories:
if keyi[0]=="(" and keyi[2]==")":
if keyi[1] in self.keyBindings:
self.statusBar.config(text="ERROR! Multiple categories with the same key binding!")
print("ERROR! Multiple categories with the same key binding!")
exit()
else:
self.keyBindings.append(keyi[1].lower())
def keyPressed(self, key):
print("-->keyPressed: "+str(key.char))
if key.keysym == 'Left':
self.prevImageButton.config(relief=tk.SUNKEN)
self.prevImageButton.update_idletasks()
self.prevImage()
time.sleep(0.05)
self.prevImageButton.config(relief=tk.RAISED)
elif key.keysym == 'Right':
self.nextImageButton.config(relief=tk.SUNKEN)
self.nextImageButton.update_idletasks()
self.nextImage()
time.sleep(0.05)
self.nextImageButton.config(relief=tk.RAISED)
elif key.char == '+' or key.char == '=':
self.zoomInButton.config(relief=tk.SUNKEN)
self.zoomInButton.update_idletasks()
self.zoomIn()
time.sleep(0.05)
self.zoomInButton.config(relief=tk.RAISED)
elif key.char == '-':
self.zoomOutButton.config(relief=tk.SUNKEN)
self.zoomOutButton.update_idletasks()
self.zoomOut()
time.sleep(0.0
|
print("-->__init__")
self.master = master
self.default_size = (800, 400)
master.title("tkteach version 002")
master.bind("<Key>", self.keyPressed)
# Create GUI elements:
self.titleLabel = tk.Label(master, text="tkteach version 002")
self.titleLabel.pack()
# BOTTOM "STATUS BAR" VVVVVVVVVVVVVVVVVVVVVVVVV
self.statusBar = tk.Label(master, text="", relief=tk.RIDGE)
self.statusBar.pack(side=tk.BOTTOM, fill=tk.X)
self.initialize()
|
identifier_body
|
|
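The tkteach schema above stores labels as a join table between images and categories, so reading an image's labels back is a two-join query. A hedged Go sketch of that read path — it assumes the github.com/mattn/go-sqlite3 driver, and the image path passed to the query is a hypothetical example; table and column names follow the CREATE TABLE statements in the code:

package main

import (
	"database/sql"
	"fmt"
	"log"

	_ "github.com/mattn/go-sqlite3" // registers the "sqlite3" driver
)

func main() {
	db, err := sql.Open("sqlite3", "storage.db")
	if err != nil {
		log.Fatal(err)
	}
	defer db.Close()

	// All category names attached to one image path, via the labels join table.
	rows, err := db.Query(`
		SELECT c.categoryName
		FROM labels l
		JOIN categories c ON c.id = l.category_id
		JOIN images i ON i.id = l.image_id
		WHERE i.imagePath = ?`, "./ds/example/img_0001.png")
	if err != nil {
		log.Fatal(err)
	}
	defer rows.Close()

	for rows.Next() {
		var name string
		if err := rows.Scan(&name); err != nil {
			log.Fatal(err)
		}
		fmt.Println(name)
	}
	if err := rows.Err(); err != nil {
		log.Fatal(err)
	}
}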
v3_alarm_test.go
|
during apply
func TestV3StorageQuotaApply(t *testing.T) {
integration.BeforeTest(t)
quotasize := int64(16 * os.Getpagesize())
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 2})
defer clus.Terminate(t)
kvc1 := integration.ToGRPC(clus.Client(1)).KV
// Set a quota on one node
clus.Members[0].QuotaBackendBytes = quotasize
clus.Members[0].Stop(t)
clus.Members[0].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
kvc0 := integration.ToGRPC(clus.Client(0)).KV
waitForRestart(t, kvc0)
key := []byte("abc")
// test small put still works
smallbuf := make([]byte, 1024)
_, serr := kvc0.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
if serr != nil {
t.Fatal(serr)
}
// test big put
bigbuf := make([]byte, quotasize)
_, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: bigbuf})
if err != nil {
t.Fatal(err)
}
// quorum get should work regardless of whether alarm is raised
_, err = kvc0.Range(context.TODO(), &pb.RangeRequest{Key: []byte("foo")})
if err != nil {
t.Fatal(err)
}
// wait until alarm is raised for sure-- poll the alarms
stopc := time.After(5 * time.Second)
for {
req := &pb.AlarmRequest{Action: pb.AlarmRequest_GET}
resp, aerr := clus.Members[0].Server.Alarm(context.TODO(), req)
if aerr != nil {
t.Fatal(aerr)
}
if len(resp.Alarms) != 0 {
break
}
select {
case <-stopc:
t.Fatalf("timed out waiting for alarm")
case <-time.After(10 * time.Millisecond):
}
}
// txn with non-mutating Ops should go through when NOSPACE alarm is raised
_, err = kvc0.Txn(context.TODO(), &pb.TxnRequest{
Compare: []*pb.Compare{
{
Key: key,
Result: pb.Compare_EQUAL,
Target: pb.Compare_CREATE,
TargetUnion: &pb.Compare_CreateRevision{CreateRevision: 0},
},
},
Success: []*pb.RequestOp{
{
Request: &pb.RequestOp_RequestDeleteRange{
RequestDeleteRange: &pb.DeleteRangeRequest{
Key: key,
},
},
},
},
})
if err != nil {
t.Fatal(err)
}
ctx, cancel := context.WithTimeout(context.TODO(), integration.RequestWaitTimeout)
defer cancel()
// small quota machine should reject put
if _, err := kvc0.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// large quota machine should reject put
if _, err := kvc1.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// reset large quota node to ensure alarm persisted
clus.Members[1].Stop(t)
clus.Members[1].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
if _, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("alarmed instance should reject put after reset")
}
}
// TestV3AlarmDeactivate ensures that space alarms can be deactivated so puts go through.
func
|
(t *testing.T) {
integration.BeforeTest(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3})
defer clus.Terminate(t)
kvc := integration.ToGRPC(clus.RandClient()).KV
mt := integration.ToGRPC(clus.RandClient()).Maintenance
alarmReq := &pb.AlarmRequest{
MemberID: 123,
Action: pb.AlarmRequest_ACTIVATE,
Alarm: pb.AlarmType_NOSPACE,
}
if _, err := mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
key := []byte("abc")
smallbuf := make([]byte, 512)
_, err := kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
	if err == nil || !eqErrGRPC(err, rpctypes.ErrGRPCNoSpace) {
t.Fatalf("put got %v, expected %v", err, rpctypes.ErrGRPCNoSpace)
}
alarmReq.Action = pb.AlarmRequest_DEACTIVATE
if _, err = mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
if _, err = kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err != nil {
t.Fatal(err)
}
}
func TestV3CorruptAlarm(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3, UseBridge: true})
defer clus.Terminate(t)
var wg sync.WaitGroup
wg.Add(10)
for i := 0; i < 10; i++ {
go func() {
defer wg.Done()
if _, err := clus.Client(0).Put(context.TODO(), "k", "v"); err != nil {
t.Error(err)
}
}()
}
wg.Wait()
// Corrupt member 0 by modifying backend offline.
clus.Members[0].Stop(t)
fp := filepath.Join(clus.Members[0].DataDir, "member", "snap", "db")
be := backend.NewDefaultBackend(lg, fp)
s := mvcc.NewStore(lg, be, nil, mvcc.StoreConfig{})
// NOTE: cluster_proxy mode with namespacing won't set 'k', but namespace/'k'.
s.Put([]byte("abc"), []byte("def"), 0)
s.Put([]byte("xyz"), []byte("123"), 0)
s.Compact(traceutil.TODO(), 5)
s.Commit()
s.Close()
be.Close()
clus.Members[1].WaitOK(t)
clus.Members[2].WaitOK(t)
time.Sleep(time.Second * 2)
// Wait for cluster so Puts succeed in case member 0 was the leader.
if _, err := clus.Client(1).Get(context.TODO(), "k"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "xyz", "321"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "abc", "fed"); err != nil {
t.Fatal(err)
}
// Restart with corruption checking enabled.
clus.Members[1].Stop(t)
clus.Members[2].Stop(t)
for _, m := range clus.Members {
m.CorruptCheckTime = time.Second
m.Restart(t)
}
clus.WaitLeader(t)
time.Sleep(time.Second * 2)
clus.Members[0].WaitStarted(t)
resp0, err0 := clus.Client(0).Get(context.TODO(), "abc")
if err0 != nil {
t.Fatal(err0)
}
clus.Members[1].WaitStarted(t)
resp1, err1 := clus.Client(1).Get(context.TODO(), "abc")
if err1 != nil {
t.Fatal(err1)
}
if resp0.Kvs[0].ModRevision == resp1.Kvs[0].ModRevision {
t.Fatalf("matching ModRevision values")
}
for i := 0; i < 5; i++ {
presp, perr := clus.Client(0).Put(context.TODO(), "abc", "aaa")
if perr != nil {
if !eqErrGRPC(perr, rpctypes.ErrCorrupt) {
t.Fatalf("expected %v, got %+v (%v)", rpctypes.ErrCorrupt, presp, perr)
} else {
return
}
}
time.Sleep(time.Second)
}
t.Fatalf("expected error %v after %s", rpctypes.ErrCorrupt, 5*time.Second)
}
func TestV3CorruptAlarmWithLeaseCorrupted(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{
CorruptCheckTime: time.Second,
Size: 3,
SnapshotCount: 10,
SnapshotCatchUpEntries: 5,
DisableStrictReconfigCheck: true,
})
defer clus.Terminate(t)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
lresp, err := integration.ToGRPC(clus.RandClient()).Lease.LeaseGrant(ctx, &pb.LeaseGrantRequest{ID
|
TestV3AlarmDeactivate
|
identifier_name
|
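The quota test above polls the Alarm endpoint until a NOSPACE alarm appears, bounding the wait with a time.After deadline inside a select. The same poll-until-condition-or-timeout shape, extracted as a standalone sketch — the check closure here is a stand-in for the Alarm RPC, not etcd code:

package main

import (
	"errors"
	"fmt"
	"time"
)

// pollUntil retries check every interval until it reports success
// or the deadline elapses, mirroring the alarm-polling loop above.
func pollUntil(check func() (bool, error), interval, deadline time.Duration) error {
	stopc := time.After(deadline)
	for {
		ok, err := check()
		if err != nil {
			return err
		}
		if ok {
			return nil
		}
		select {
		case <-stopc:
			return errors.New("timed out waiting for condition")
		case <-time.After(interval):
		}
	}
}

func main() {
	start := time.Now()
	err := pollUntil(func() (bool, error) {
		// Stand-in for "are any alarms raised yet?".
		return time.Since(start) > 50*time.Millisecond, nil
	}, 10*time.Millisecond, 5*time.Second)
	fmt.Println(err) // <nil>
}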
v3_alarm_test.go
|
during apply
func TestV3StorageQuotaApply(t *testing.T) {
integration.BeforeTest(t)
quotasize := int64(16 * os.Getpagesize())
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 2})
defer clus.Terminate(t)
kvc1 := integration.ToGRPC(clus.Client(1)).KV
// Set a quota on one node
clus.Members[0].QuotaBackendBytes = quotasize
clus.Members[0].Stop(t)
clus.Members[0].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
kvc0 := integration.ToGRPC(clus.Client(0)).KV
waitForRestart(t, kvc0)
key := []byte("abc")
// test small put still works
smallbuf := make([]byte, 1024)
_, serr := kvc0.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
if serr != nil {
t.Fatal(serr)
}
// test big put
bigbuf := make([]byte, quotasize)
_, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: bigbuf})
if err != nil {
t.Fatal(err)
}
// quorum get should work regardless of whether alarm is raised
_, err = kvc0.Range(context.TODO(), &pb.RangeRequest{Key: []byte("foo")})
if err != nil {
t.Fatal(err)
}
// wait until alarm is raised for sure-- poll the alarms
stopc := time.After(5 * time.Second)
for {
req := &pb.AlarmRequest{Action: pb.AlarmRequest_GET}
resp, aerr := clus.Members[0].Server.Alarm(context.TODO(), req)
if aerr != nil {
t.Fatal(aerr)
}
if len(resp.Alarms) != 0 {
break
}
|
select {
case <-stopc:
t.Fatalf("timed out waiting for alarm")
case <-time.After(10 * time.Millisecond):
}
}
// txn with non-mutating Ops should go through when NOSPACE alarm is raised
_, err = kvc0.Txn(context.TODO(), &pb.TxnRequest{
Compare: []*pb.Compare{
{
Key: key,
Result: pb.Compare_EQUAL,
Target: pb.Compare_CREATE,
TargetUnion: &pb.Compare_CreateRevision{CreateRevision: 0},
},
},
Success: []*pb.RequestOp{
{
Request: &pb.RequestOp_RequestDeleteRange{
RequestDeleteRange: &pb.DeleteRangeRequest{
Key: key,
},
},
},
},
})
if err != nil {
t.Fatal(err)
}
ctx, cancel := context.WithTimeout(context.TODO(), integration.RequestWaitTimeout)
defer cancel()
// small quota machine should reject put
if _, err := kvc0.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// large quota machine should reject put
if _, err := kvc1.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// reset large quota node to ensure alarm persisted
clus.Members[1].Stop(t)
clus.Members[1].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
if _, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("alarmed instance should reject put after reset")
}
}
// TestV3AlarmDeactivate ensures that space alarms can be deactivated so puts go through.
func TestV3AlarmDeactivate(t *testing.T) {
integration.BeforeTest(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3})
defer clus.Terminate(t)
kvc := integration.ToGRPC(clus.RandClient()).KV
mt := integration.ToGRPC(clus.RandClient()).Maintenance
alarmReq := &pb.AlarmRequest{
MemberID: 123,
Action: pb.AlarmRequest_ACTIVATE,
Alarm: pb.AlarmType_NOSPACE,
}
if _, err := mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
key := []byte("abc")
smallbuf := make([]byte, 512)
_, err := kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
	if err == nil || !eqErrGRPC(err, rpctypes.ErrGRPCNoSpace) {
t.Fatalf("put got %v, expected %v", err, rpctypes.ErrGRPCNoSpace)
}
alarmReq.Action = pb.AlarmRequest_DEACTIVATE
if _, err = mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
if _, err = kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err != nil {
t.Fatal(err)
}
}
func TestV3CorruptAlarm(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3, UseBridge: true})
defer clus.Terminate(t)
var wg sync.WaitGroup
wg.Add(10)
for i := 0; i < 10; i++ {
go func() {
defer wg.Done()
if _, err := clus.Client(0).Put(context.TODO(), "k", "v"); err != nil {
t.Error(err)
}
}()
}
wg.Wait()
// Corrupt member 0 by modifying backend offline.
clus.Members[0].Stop(t)
fp := filepath.Join(clus.Members[0].DataDir, "member", "snap", "db")
be := backend.NewDefaultBackend(lg, fp)
s := mvcc.NewStore(lg, be, nil, mvcc.StoreConfig{})
// NOTE: cluster_proxy mode with namespacing won't set 'k', but namespace/'k'.
s.Put([]byte("abc"), []byte("def"), 0)
s.Put([]byte("xyz"), []byte("123"), 0)
s.Compact(traceutil.TODO(), 5)
s.Commit()
s.Close()
be.Close()
clus.Members[1].WaitOK(t)
clus.Members[2].WaitOK(t)
time.Sleep(time.Second * 2)
// Wait for cluster so Puts succeed in case member 0 was the leader.
if _, err := clus.Client(1).Get(context.TODO(), "k"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "xyz", "321"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "abc", "fed"); err != nil {
t.Fatal(err)
}
// Restart with corruption checking enabled.
clus.Members[1].Stop(t)
clus.Members[2].Stop(t)
for _, m := range clus.Members {
m.CorruptCheckTime = time.Second
m.Restart(t)
}
clus.WaitLeader(t)
time.Sleep(time.Second * 2)
clus.Members[0].WaitStarted(t)
resp0, err0 := clus.Client(0).Get(context.TODO(), "abc")
if err0 != nil {
t.Fatal(err0)
}
clus.Members[1].WaitStarted(t)
resp1, err1 := clus.Client(1).Get(context.TODO(), "abc")
if err1 != nil {
t.Fatal(err1)
}
if resp0.Kvs[0].ModRevision == resp1.Kvs[0].ModRevision {
t.Fatalf("matching ModRevision values")
}
for i := 0; i < 5; i++ {
presp, perr := clus.Client(0).Put(context.TODO(), "abc", "aaa")
if perr != nil {
if !eqErrGRPC(perr, rpctypes.ErrCorrupt) {
t.Fatalf("expected %v, got %+v (%v)", rpctypes.ErrCorrupt, presp, perr)
} else {
return
}
}
time.Sleep(time.Second)
}
t.Fatalf("expected error %v after %s", rpctypes.ErrCorrupt, 5*time.Second)
}
func TestV3CorruptAlarmWithLeaseCorrupted(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{
CorruptCheckTime: time.Second,
Size: 3,
SnapshotCount: 10,
SnapshotCatchUpEntries: 5,
DisableStrictReconfigCheck: true,
})
defer clus.Terminate(t)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
lresp, err := integration.ToGRPC(clus.RandClient()).Lease.LeaseGrant(ctx, &pb.LeaseGrantRequest{ID:
|
random_line_split
|
|
v3_alarm_test.go
|
during apply
func TestV3StorageQuotaApply(t *testing.T) {
integration.BeforeTest(t)
quotasize := int64(16 * os.Getpagesize())
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 2})
defer clus.Terminate(t)
kvc1 := integration.ToGRPC(clus.Client(1)).KV
// Set a quota on one node
clus.Members[0].QuotaBackendBytes = quotasize
clus.Members[0].Stop(t)
clus.Members[0].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
kvc0 := integration.ToGRPC(clus.Client(0)).KV
waitForRestart(t, kvc0)
key := []byte("abc")
// test small put still works
smallbuf := make([]byte, 1024)
_, serr := kvc0.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
if serr != nil {
t.Fatal(serr)
}
// test big put
bigbuf := make([]byte, quotasize)
_, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: bigbuf})
if err != nil {
t.Fatal(err)
}
// quorum get should work regardless of whether alarm is raised
_, err = kvc0.Range(context.TODO(), &pb.RangeRequest{Key: []byte("foo")})
if err != nil {
t.Fatal(err)
}
// wait until alarm is raised for sure-- poll the alarms
stopc := time.After(5 * time.Second)
for {
req := &pb.AlarmRequest{Action: pb.AlarmRequest_GET}
resp, aerr := clus.Members[0].Server.Alarm(context.TODO(), req)
if aerr != nil {
t.Fatal(aerr)
}
if len(resp.Alarms) != 0 {
break
}
select {
case <-stopc:
t.Fatalf("timed out waiting for alarm")
case <-time.After(10 * time.Millisecond):
}
}
// txn with non-mutating Ops should go through when NOSPACE alarm is raised
_, err = kvc0.Txn(context.TODO(), &pb.TxnRequest{
Compare: []*pb.Compare{
{
Key: key,
Result: pb.Compare_EQUAL,
Target: pb.Compare_CREATE,
TargetUnion: &pb.Compare_CreateRevision{CreateRevision: 0},
},
},
Success: []*pb.RequestOp{
{
Request: &pb.RequestOp_RequestDeleteRange{
RequestDeleteRange: &pb.DeleteRangeRequest{
Key: key,
},
},
},
},
})
if err != nil
|
ctx, cancel := context.WithTimeout(context.TODO(), integration.RequestWaitTimeout)
defer cancel()
// small quota machine should reject put
if _, err := kvc0.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// large quota machine should reject put
if _, err := kvc1.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// reset large quota node to ensure alarm persisted
clus.Members[1].Stop(t)
clus.Members[1].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
if _, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("alarmed instance should reject put after reset")
}
}
// TestV3AlarmDeactivate ensures that space alarms can be deactivated so puts go through.
func TestV3AlarmDeactivate(t *testing.T) {
integration.BeforeTest(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3})
defer clus.Terminate(t)
kvc := integration.ToGRPC(clus.RandClient()).KV
mt := integration.ToGRPC(clus.RandClient()).Maintenance
alarmReq := &pb.AlarmRequest{
MemberID: 123,
Action: pb.AlarmRequest_ACTIVATE,
Alarm: pb.AlarmType_NOSPACE,
}
if _, err := mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
key := []byte("abc")
smallbuf := make([]byte, 512)
_, err := kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
	if err == nil || !eqErrGRPC(err, rpctypes.ErrGRPCNoSpace) {
t.Fatalf("put got %v, expected %v", err, rpctypes.ErrGRPCNoSpace)
}
alarmReq.Action = pb.AlarmRequest_DEACTIVATE
if _, err = mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
if _, err = kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err != nil {
t.Fatal(err)
}
}
func TestV3CorruptAlarm(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3, UseBridge: true})
defer clus.Terminate(t)
var wg sync.WaitGroup
wg.Add(10)
for i := 0; i < 10; i++ {
go func() {
defer wg.Done()
if _, err := clus.Client(0).Put(context.TODO(), "k", "v"); err != nil {
t.Error(err)
}
}()
}
wg.Wait()
// Corrupt member 0 by modifying backend offline.
clus.Members[0].Stop(t)
fp := filepath.Join(clus.Members[0].DataDir, "member", "snap", "db")
be := backend.NewDefaultBackend(lg, fp)
s := mvcc.NewStore(lg, be, nil, mvcc.StoreConfig{})
// NOTE: cluster_proxy mode with namespacing won't set 'k', but namespace/'k'.
s.Put([]byte("abc"), []byte("def"), 0)
s.Put([]byte("xyz"), []byte("123"), 0)
s.Compact(traceutil.TODO(), 5)
s.Commit()
s.Close()
be.Close()
clus.Members[1].WaitOK(t)
clus.Members[2].WaitOK(t)
time.Sleep(time.Second * 2)
// Wait for cluster so Puts succeed in case member 0 was the leader.
if _, err := clus.Client(1).Get(context.TODO(), "k"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "xyz", "321"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "abc", "fed"); err != nil {
t.Fatal(err)
}
// Restart with corruption checking enabled.
clus.Members[1].Stop(t)
clus.Members[2].Stop(t)
for _, m := range clus.Members {
m.CorruptCheckTime = time.Second
m.Restart(t)
}
clus.WaitLeader(t)
time.Sleep(time.Second * 2)
clus.Members[0].WaitStarted(t)
resp0, err0 := clus.Client(0).Get(context.TODO(), "abc")
if err0 != nil {
t.Fatal(err0)
}
clus.Members[1].WaitStarted(t)
resp1, err1 := clus.Client(1).Get(context.TODO(), "abc")
if err1 != nil {
t.Fatal(err1)
}
if resp0.Kvs[0].ModRevision == resp1.Kvs[0].ModRevision {
t.Fatalf("matching ModRevision values")
}
for i := 0; i < 5; i++ {
presp, perr := clus.Client(0).Put(context.TODO(), "abc", "aaa")
if perr != nil {
if !eqErrGRPC(perr, rpctypes.ErrCorrupt) {
t.Fatalf("expected %v, got %+v (%v)", rpctypes.ErrCorrupt, presp, perr)
} else {
return
}
}
time.Sleep(time.Second)
}
t.Fatalf("expected error %v after %s", rpctypes.ErrCorrupt, 5*time.Second)
}
func TestV3CorruptAlarmWithLeaseCorrupted(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{
CorruptCheckTime: time.Second,
Size: 3,
SnapshotCount: 10,
SnapshotCatchUpEntries: 5,
DisableStrictReconfigCheck: true,
})
defer clus.Terminate(t)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
lresp, err := integration.ToGRPC(clus.RandClient()).Lease.LeaseGrant(ctx, &pb.LeaseGrantRequest{
|
{
t.Fatal(err)
}
|
conditional_block
|
v3_alarm_test.go
|
during apply
func TestV3StorageQuotaApply(t *testing.T) {
integration.BeforeTest(t)
quotasize := int64(16 * os.Getpagesize())
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 2})
defer clus.Terminate(t)
kvc1 := integration.ToGRPC(clus.Client(1)).KV
// Set a quota on one node
clus.Members[0].QuotaBackendBytes = quotasize
clus.Members[0].Stop(t)
clus.Members[0].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
kvc0 := integration.ToGRPC(clus.Client(0)).KV
waitForRestart(t, kvc0)
key := []byte("abc")
// test small put still works
smallbuf := make([]byte, 1024)
_, serr := kvc0.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
if serr != nil {
t.Fatal(serr)
}
// test big put
bigbuf := make([]byte, quotasize)
_, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: bigbuf})
if err != nil {
t.Fatal(err)
}
// quorum get should work regardless of whether alarm is raised
_, err = kvc0.Range(context.TODO(), &pb.RangeRequest{Key: []byte("foo")})
if err != nil {
t.Fatal(err)
}
// wait until alarm is raised for sure-- poll the alarms
stopc := time.After(5 * time.Second)
for {
req := &pb.AlarmRequest{Action: pb.AlarmRequest_GET}
resp, aerr := clus.Members[0].Server.Alarm(context.TODO(), req)
if aerr != nil {
t.Fatal(aerr)
}
if len(resp.Alarms) != 0 {
break
}
select {
case <-stopc:
t.Fatalf("timed out waiting for alarm")
case <-time.After(10 * time.Millisecond):
}
}
// txn with non-mutating Ops should go through when NOSPACE alarm is raised
_, err = kvc0.Txn(context.TODO(), &pb.TxnRequest{
Compare: []*pb.Compare{
{
Key: key,
Result: pb.Compare_EQUAL,
Target: pb.Compare_CREATE,
TargetUnion: &pb.Compare_CreateRevision{CreateRevision: 0},
},
},
Success: []*pb.RequestOp{
{
Request: &pb.RequestOp_RequestDeleteRange{
RequestDeleteRange: &pb.DeleteRangeRequest{
Key: key,
},
},
},
},
})
if err != nil {
t.Fatal(err)
}
ctx, cancel := context.WithTimeout(context.TODO(), integration.RequestWaitTimeout)
defer cancel()
// small quota machine should reject put
if _, err := kvc0.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// large quota machine should reject put
if _, err := kvc1.Put(ctx, &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("past-quota instance should reject put")
}
// reset large quota node to ensure alarm persisted
clus.Members[1].Stop(t)
clus.Members[1].Restart(t)
clus.WaitMembersForLeader(t, clus.Members)
if _, err := kvc1.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err == nil {
t.Fatalf("alarmed instance should reject put after reset")
}
}
// TestV3AlarmDeactivate ensures that space alarms can be deactivated so puts go through.
func TestV3AlarmDeactivate(t *testing.T) {
integration.BeforeTest(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3})
defer clus.Terminate(t)
kvc := integration.ToGRPC(clus.RandClient()).KV
mt := integration.ToGRPC(clus.RandClient()).Maintenance
alarmReq := &pb.AlarmRequest{
MemberID: 123,
Action: pb.AlarmRequest_ACTIVATE,
Alarm: pb.AlarmType_NOSPACE,
}
if _, err := mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
key := []byte("abc")
smallbuf := make([]byte, 512)
_, err := kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf})
	if err == nil || !eqErrGRPC(err, rpctypes.ErrGRPCNoSpace) {
t.Fatalf("put got %v, expected %v", err, rpctypes.ErrGRPCNoSpace)
}
alarmReq.Action = pb.AlarmRequest_DEACTIVATE
if _, err = mt.Alarm(context.TODO(), alarmReq); err != nil {
t.Fatal(err)
}
if _, err = kvc.Put(context.TODO(), &pb.PutRequest{Key: key, Value: smallbuf}); err != nil {
t.Fatal(err)
}
}
func TestV3CorruptAlarm(t *testing.T)
|
fp := filepath.Join(clus.Members[0].DataDir, "member", "snap", "db")
be := backend.NewDefaultBackend(lg, fp)
s := mvcc.NewStore(lg, be, nil, mvcc.StoreConfig{})
// NOTE: cluster_proxy mode with namespacing won't set 'k', but namespace/'k'.
s.Put([]byte("abc"), []byte("def"), 0)
s.Put([]byte("xyz"), []byte("123"), 0)
s.Compact(traceutil.TODO(), 5)
s.Commit()
s.Close()
be.Close()
clus.Members[1].WaitOK(t)
clus.Members[2].WaitOK(t)
time.Sleep(time.Second * 2)
// Wait for cluster so Puts succeed in case member 0 was the leader.
if _, err := clus.Client(1).Get(context.TODO(), "k"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "xyz", "321"); err != nil {
t.Fatal(err)
}
if _, err := clus.Client(1).Put(context.TODO(), "abc", "fed"); err != nil {
t.Fatal(err)
}
// Restart with corruption checking enabled.
clus.Members[1].Stop(t)
clus.Members[2].Stop(t)
for _, m := range clus.Members {
m.CorruptCheckTime = time.Second
m.Restart(t)
}
clus.WaitLeader(t)
time.Sleep(time.Second * 2)
clus.Members[0].WaitStarted(t)
resp0, err0 := clus.Client(0).Get(context.TODO(), "abc")
if err0 != nil {
t.Fatal(err0)
}
clus.Members[1].WaitStarted(t)
resp1, err1 := clus.Client(1).Get(context.TODO(), "abc")
if err1 != nil {
t.Fatal(err1)
}
if resp0.Kvs[0].ModRevision == resp1.Kvs[0].ModRevision {
t.Fatalf("matching ModRevision values")
}
for i := 0; i < 5; i++ {
presp, perr := clus.Client(0).Put(context.TODO(), "abc", "aaa")
if perr != nil {
if !eqErrGRPC(perr, rpctypes.ErrCorrupt) {
t.Fatalf("expected %v, got %+v (%v)", rpctypes.ErrCorrupt, presp, perr)
} else {
return
}
}
time.Sleep(time.Second)
}
t.Fatalf("expected error %v after %s", rpctypes.ErrCorrupt, 5*time.Second)
}
func TestV3CorruptAlarmWithLeaseCorrupted(t *testing.T) {
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{
CorruptCheckTime: time.Second,
Size: 3,
SnapshotCount: 10,
SnapshotCatchUpEntries: 5,
DisableStrictReconfigCheck: true,
})
defer clus.Terminate(t)
ctx, cancel := context.WithCancel(context.Background())
defer cancel()
lresp, err := integration.ToGRPC(clus.RandClient()).Lease.LeaseGrant(ctx, &pb.LeaseGrantRequest{ID
|
{
integration.BeforeTest(t)
lg := zaptest.NewLogger(t)
clus := integration.NewCluster(t, &integration.ClusterConfig{Size: 3, UseBridge: true})
defer clus.Terminate(t)
var wg sync.WaitGroup
wg.Add(10)
for i := 0; i < 10; i++ {
go func() {
defer wg.Done()
if _, err := clus.Client(0).Put(context.TODO(), "k", "v"); err != nil {
t.Error(err)
}
}()
}
wg.Wait()
// Corrupt member 0 by modifying backend offline.
clus.Members[0].Stop(t)
|
identifier_body
|
agent.go
|
network logical label
// Asynchronous operations
resumeReconciliation <-chan string // nil if no async ops
cancelAsyncOps context.CancelFunc // nil if no async ops
waitForAsyncOps func() // NOOP if no async ops
}
func (a *agent) init() error {
a.ctx = context.Background()
linkChan := a.linkSubscribe(a.ctx.Done())
a.macLookup = &maclookup.MacLookup{}
a.macLookup.RefreshCache()
registry := &reconciler.DefaultRegistry{}
if err := configitems.RegisterItems(registry, a.macLookup); err != nil {
return err
}
a.registry = registry
a.newNetModel = make(chan parsedNetModel, 10)
a.failingItems = make(map[dg.ItemRef]error)
// Initially start with an empty network model.
// Ever-present config items will get created.
// (e.g. DHCP client for the interface connecting SDN with the host)
a.netModel = parsedNetModel{}
a.updateCurrentState()
a.updateIntendedState()
a.reconcile()
go a.run(linkChan)
return nil
}
func (a *agent) run(linkChan chan netlink.LinkUpdate) {
for {
select {
case netModel := <-a.newNetModel:
// Network model is already validated, applying...
a.Lock()
a.netModel = netModel
a.updateCurrentState()
a.updateIntendedState()
a.reconcile()
a.Unlock()
case <-a.resumeReconciliation:
a.Lock()
a.reconcile()
a.Unlock()
case linkUpdate, ok := <-linkChan:
if !ok {
log.Warn("Link subscription was closed")
linkChan = a.linkSubscribe(a.ctx.Done())
continue
}
// If an interface appeared or disappeared, refresh the current
// state graph and potentially reconcile.
_, found := a.macLookup.GetInterfaceByIndex(int(linkUpdate.Index))
added := !found
deleted := linkUpdate.Header.Type == syscall.RTM_DELLINK
if added || deleted {
log.Debugf("Important link change: %+v", linkUpdate)
a.Lock()
a.macLookup.RefreshCache()
changed := a.updateCurrentState()
mac := linkUpdate.Attrs().HardwareAddr
if bytes.HasPrefix(mac, hostPortMACPrefix) {
// Intended state for SDN<->Host connectivity changes
// when the "host port" (dis)appears.
a.updateIntendedState()
changed = true
}
if changed {
a.reconcile()
}
a.Unlock()
}
case <-a.ctx.Done():
a.Lock()
if a.cancelAsyncOps != nil {
a.cancelAsyncOps()
a.waitForAsyncOps()
log.Warn("Some asynchronous operations were canceled!")
}
a.Unlock()
return
}
}
}
// Called with agent in locked state.
func (a *agent) reconcile() {
reconcileStartTime := time.Now()
r := reconciler.New(a.registry)
status := r.Reconcile(a.ctx, a.currentState, a.intendedState)
a.currentState = status.NewCurrentState
// Update variables needed to resume reconciliation
// after async operation(s).
if status.AsyncOpsInProgress {
log.Debug("Some config operations continue in the background")
}
a.cancelAsyncOps = status.CancelAsyncOps
a.resumeReconciliation = status.ReadyToResume
a.waitForAsyncOps = status.WaitForAsyncOps
// Log every executed operation.
for _, opLog := range status.OperationLog {
var withErr string
if opLog.Err != nil {
withErr = fmt.Sprintf(" with error: %v", opLog.Err)
}
var verb string
if opLog.InProgress {
verb = "started async execution of"
} else {
if opLog.StartTime.Before(reconcileStartTime) {
verb = "finalized async execution of"
} else {
// synchronous operation
verb = "executed"
}
}
log.Infof("State Reconciler %s %v for %v%s, content: %s",
verb, opLog.Operation, dg.Reference(opLog.Item),
withErr, opLog.Item.String())
}
// Log transitions from no-error to error and vice-versa.
var failed, fixed []string
for _, opLog := range status.OperationLog {
itemRef := dg.Reference(opLog.Item)
if opLog.Err != nil {
a.failingItems[itemRef] = opLog.Err
} else {
delete(a.failingItems, itemRef)
}
if opLog.PrevErr == nil && opLog.Err != nil {
failed = append(failed, fmt.Sprintf("%v (err: %v)", itemRef, opLog.Err))
}
if opLog.PrevErr != nil && opLog.Err == nil {
fixed = append(fixed, itemRef.String())
}
}
if len(failed) > 0 {
log.Errorf("Newly failed config items: %s",
strings.Join(failed, ", "))
}
if len(fixed) > 0
|
}
func (a *agent) linkSubscribe(doneChan <-chan struct{}) chan netlink.LinkUpdate {
linkChan := make(chan netlink.LinkUpdate, 64)
linkErrFunc := func(err error) {
log.Errorf("LinkSubscribe failed %s\n", err)
}
linkOpts := netlink.LinkSubscribeOptions{
ErrorCallback: linkErrFunc,
}
if err := netlink.LinkSubscribeWithOptions(
linkChan, doneChan, linkOpts); err != nil {
log.Fatal(err)
}
return linkChan
}
func (a *agent) allocNetworkIndexes() {
if a.networkIndex == nil {
a.networkIndex = make(map[string]int)
}
// Allocate new indexes where needed.
for _, network := range a.netModel.Networks {
index, hasIndex := a.networkIndex[network.LogicalLabel]
if hasIndex {
// Keep already allocated index.
continue
}
index = 0
for a.isNetworkIndexUsed(index) {
index++
}
a.networkIndex[network.LogicalLabel] = index
}
}
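// Worked example (hypothetical state): if networkIndex currently holds
// {"net-a": 0, "net-c": 2}, a newly added network "net-b" is assigned the
// lowest free index, 1, while already-labeled networks keep their indexes
// across model updates.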
func (a *agent) isNetworkIndexUsed(index int) bool {
for _, val := range a.networkIndex {
if val == index {
return true
}
}
return false
}
func (a *agent) getNetModel(w http.ResponseWriter, r *http.Request) {
a.Lock()
resp, err := json.Marshal(a.netModel.NetworkModel)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("failed to marshal network model to JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
if _, err = w.Write(resp); err != nil {
log.Errorf("Failed to write network model to HTTP response: %v", err)
}
}
func (a *agent) applyNetModel(w http.ResponseWriter, r *http.Request) {
var netModel api.NetworkModel
body, err := ioutil.ReadAll(r.Body)
if err != nil {
errMsg := fmt.Sprintf("Failed to read network model from HTTP request: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
err = json.Unmarshal(body, &netModel)
if err != nil {
errMsg := fmt.Sprintf("Failed to unmarshal network model from JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
parsedNetModel, err := a.parseNetModel(netModel)
if err != nil {
errMsg := fmt.Sprintf("Network model is invalid: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
log.Debugf("Parsed network model: %+v", parsedNetModel)
a.newNetModel <- parsedNetModel
w.WriteHeader(http.StatusOK)
}
func (a *agent) getNetConfig(w http.ResponseWriter, r *http.Request) {
dotExporter := &dg.DotExporter{CheckDeps: true}
a.Lock()
dot, err := dotExporter.ExportTransition(a.currentState, a.intendedState)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("Failed to export network config to DOT: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "text/vnd.graphviz")
w.WriteHeader(http.StatusOK)
if _, err = w.Write([]byte(dot)); err != nil {
log.Errorf("Failed to write network config to HTTP response: %v", err)
}
}
func (a *agent) getSDNStatus(w http.ResponseWriter, r *http.Request) {
status := api.SDNStatus{
MgmtIPs: a.getMgmtIPs(),
}
a.Lock()
for itemRef, err := range a.failingItems {
status.ConfigErrors = append(status.ConfigErrors, api.ConfigError{
ItemRef: itemRef,
ErrMsg: err.Error(),
})
}
a.Unlock()
resp, err := json.Marshal(status)
if err
|
{
log.Infof("Fixed config items: %s",
strings.Join(fixed, ", "))
}
|
conditional_block
|
agent.go
|
: network logical label
// Asynchronous operations
resumeReconciliation <-chan string // nil if no async ops
cancelAsyncOps context.CancelFunc // nil if no async ops
waitForAsyncOps func() // NOOP if no async ops
}
func (a *agent) init() error
|
return nil
}
func (a *agent) run(linkChan chan netlink.LinkUpdate) {
for {
select {
case netModel := <-a.newNetModel:
// Network model is already validated, applying...
a.Lock()
a.netModel = netModel
a.updateCurrentState()
a.updateIntendedState()
a.reconcile()
a.Unlock()
case <-a.resumeReconciliation:
a.Lock()
a.reconcile()
a.Unlock()
case linkUpdate, ok := <-linkChan:
if !ok {
log.Warn("Link subscription was closed")
linkChan = a.linkSubscribe(a.ctx.Done())
continue
}
// If an interface appeared or disappeared, refresh the current
// state graph and potentially reconcile.
_, found := a.macLookup.GetInterfaceByIndex(int(linkUpdate.Index))
added := !found
deleted := linkUpdate.Header.Type == syscall.RTM_DELLINK
if added || deleted {
log.Debugf("Important link change: %+v", linkUpdate)
a.Lock()
a.macLookup.RefreshCache()
changed := a.updateCurrentState()
mac := linkUpdate.Attrs().HardwareAddr
if bytes.HasPrefix(mac, hostPortMACPrefix) {
// Intended state for SDN<->Host connectivity changes
// when the "host port" (dis)appears.
a.updateIntendedState()
changed = true
}
if changed {
a.reconcile()
}
a.Unlock()
}
case <-a.ctx.Done():
a.Lock()
if a.cancelAsyncOps != nil {
a.cancelAsyncOps()
a.waitForAsyncOps()
log.Warn("Some asynchronous operations were canceled!")
}
a.Unlock()
return
}
}
}
// Called with agent in locked state.
func (a *agent) reconcile() {
reconcileStartTime := time.Now()
r := reconciler.New(a.registry)
status := r.Reconcile(a.ctx, a.currentState, a.intendedState)
a.currentState = status.NewCurrentState
// Update variables needed to resume reconciliation
// after async operation(s).
if status.AsyncOpsInProgress {
log.Debug("Some config operations continue in the background")
}
a.cancelAsyncOps = status.CancelAsyncOps
a.resumeReconciliation = status.ReadyToResume
a.waitForAsyncOps = status.WaitForAsyncOps
// Log every executed operation.
for _, opLog := range status.OperationLog {
var withErr string
if opLog.Err != nil {
withErr = fmt.Sprintf(" with error: %v", opLog.Err)
}
var verb string
if opLog.InProgress {
verb = "started async execution of"
} else {
if opLog.StartTime.Before(reconcileStartTime) {
verb = "finalized async execution of"
} else {
// synchronous operation
verb = "executed"
}
}
log.Infof("State Reconciler %s %v for %v%s, content: %s",
verb, opLog.Operation, dg.Reference(opLog.Item),
withErr, opLog.Item.String())
}
// Log transitions from no-error to error and vice-versa.
var failed, fixed []string
for _, opLog := range status.OperationLog {
itemRef := dg.Reference(opLog.Item)
if opLog.Err != nil {
a.failingItems[itemRef] = opLog.Err
} else {
delete(a.failingItems, itemRef)
}
if opLog.PrevErr == nil && opLog.Err != nil {
failed = append(failed, fmt.Sprintf("%v (err: %v)", itemRef, opLog.Err))
}
if opLog.PrevErr != nil && opLog.Err == nil {
fixed = append(fixed, itemRef.String())
}
}
if len(failed) > 0 {
log.Errorf("Newly failed config items: %s",
strings.Join(failed, ", "))
}
if len(fixed) > 0 {
log.Infof("Fixed config items: %s",
strings.Join(fixed, ", "))
}
}
func (a *agent) linkSubscribe(doneChan <-chan struct{}) chan netlink.LinkUpdate {
linkChan := make(chan netlink.LinkUpdate, 64)
linkErrFunc := func(err error) {
log.Errorf("LinkSubscribe failed %s\n", err)
}
linkOpts := netlink.LinkSubscribeOptions{
ErrorCallback: linkErrFunc,
}
if err := netlink.LinkSubscribeWithOptions(
linkChan, doneChan, linkOpts); err != nil {
log.Fatal(err)
}
return linkChan
}
func (a *agent) allocNetworkIndexes() {
if a.networkIndex == nil {
a.networkIndex = make(map[string]int)
}
// Allocate new indexes where needed.
for _, network := range a.netModel.Networks {
index, hasIndex := a.networkIndex[network.LogicalLabel]
if hasIndex {
// Keep already allocated index.
continue
}
index = 0
for a.isNetworkIndexUsed(index) {
index++
}
a.networkIndex[network.LogicalLabel] = index
}
}
func (a *agent) isNetworkIndexUsed(index int) bool {
for _, val := range a.networkIndex {
if val == index {
return true
}
}
return false
}
func (a *agent) getNetModel(w http.ResponseWriter, r *http.Request) {
a.Lock()
resp, err := json.Marshal(a.netModel.NetworkModel)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("failed to marshal network model to JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
if _, err = w.Write(resp); err != nil {
log.Errorf("Failed to write network model to HTTP response: %v", err)
}
}
func (a *agent) applyNetModel(w http.ResponseWriter, r *http.Request) {
var netModel api.NetworkModel
body, err := ioutil.ReadAll(r.Body)
if err != nil {
errMsg := fmt.Sprintf("Failed to read network model from HTTP request: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
err = json.Unmarshal(body, &netModel)
if err != nil {
errMsg := fmt.Sprintf("Failed to unmarshal network model from JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
parsedNetModel, err := a.parseNetModel(netModel)
if err != nil {
errMsg := fmt.Sprintf("Network model is invalid: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
log.Debugf("Parsed network model: %+v", parsedNetModel)
a.newNetModel <- parsedNetModel
w.WriteHeader(http.StatusOK)
}
func (a *agent) getNetConfig(w http.ResponseWriter, r *http.Request) {
dotExporter := &dg.DotExporter{CheckDeps: true}
a.Lock()
dot, err := dotExporter.ExportTransition(a.currentState, a.intendedState)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("Failed to export network config to DOT: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "text/vnd.graphviz")
w.WriteHeader(http.StatusOK)
if _, err = w.Write([]byte(dot)); err != nil {
log.Errorf("Failed to write network config to HTTP response: %v", err)
}
}
func (a *agent) getSDNStatus(w http.ResponseWriter, r *http.Request) {
status := api.SDNStatus{
MgmtIPs: a.getMgmtIPs(),
}
a.Lock()
for itemRef, err := range a.failingItems {
status.ConfigErrors = append(status.ConfigErrors, api.ConfigError{
ItemRef: itemRef,
ErrMsg: err.Error(),
})
}
a.Unlock()
resp, err := json.Marshal(status)
if err
|
{
a.ctx = context.Background()
linkChan := a.linkSubscribe(a.ctx.Done())
a.macLookup = &maclookup.MacLookup{}
a.macLookup.RefreshCache()
registry := &reconciler.DefaultRegistry{}
if err := configitems.RegisterItems(registry, a.macLookup); err != nil {
return err
}
a.registry = registry
a.newNetModel = make(chan parsedNetModel, 10)
a.failingItems = make(map[dg.ItemRef]error)
// Initially start with an empty network model.
// Ever-present config items will get created.
// (e.g. DHCP client for the interface connecting SDN with the host)
a.netModel = parsedNetModel{}
a.updateCurrentState()
a.updateIntendedState()
a.reconcile()
go a.run(linkChan)
|
identifier_body
|
agent.go
|
: network logical label
// Asynchronous operations
resumeReconciliation <-chan string // nil if no async ops
cancelAsyncOps context.CancelFunc // nil if no async ops
waitForAsyncOps func() // NOOP if no async ops
}
func (a *agent) init() error {
a.ctx = context.Background()
linkChan := a.linkSubscribe(a.ctx.Done())
a.macLookup = &maclookup.MacLookup{}
a.macLookup.RefreshCache()
registry := &reconciler.DefaultRegistry{}
if err := configitems.RegisterItems(registry, a.macLookup); err != nil {
return err
}
a.registry = registry
a.newNetModel = make(chan parsedNetModel, 10)
a.failingItems = make(map[dg.ItemRef]error)
// Initially start with an empty network model.
// Ever-present config items will get created.
// (e.g. DHCP client for the interface connecting SDN with the host)
a.netModel = parsedNetModel{}
a.updateCurrentState()
a.updateIntendedState()
a.reconcile()
go a.run(linkChan)
return nil
}
func (a *agent) run(linkChan chan netlink.LinkUpdate) {
for {
select {
case netModel := <-a.newNetModel:
// Network model is already validated, applying...
a.Lock()
a.netModel = netModel
a.updateCurrentState()
a.updateIntendedState()
a.reconcile()
a.Unlock()
case <-a.resumeReconciliation:
a.Lock()
a.reconcile()
a.Unlock()
case linkUpdate, ok := <-linkChan:
if !ok {
log.Warn("Link subscription was closed")
linkChan = a.linkSubscribe(a.ctx.Done())
continue
}
// If an interface appeared or disappeared, refresh the current
// state graph and potentially reconcile.
_, found := a.macLookup.GetInterfaceByIndex(int(linkUpdate.Index))
added := !found
deleted := linkUpdate.Header.Type == syscall.RTM_DELLINK
if added || deleted {
log.Debugf("Important link change: %+v", linkUpdate)
a.Lock()
a.macLookup.RefreshCache()
changed := a.updateCurrentState()
mac := linkUpdate.Attrs().HardwareAddr
if bytes.HasPrefix(mac, hostPortMACPrefix) {
// Intended state for SDN<->Host connectivity changes
// when the "host port" (dis)appears.
a.updateIntendedState()
changed = true
}
if changed {
a.reconcile()
}
a.Unlock()
}
case <-a.ctx.Done():
a.Lock()
if a.cancelAsyncOps != nil {
a.cancelAsyncOps()
a.waitForAsyncOps()
log.Warn("Some asynchronous operations were canceled!")
}
a.Unlock()
return
}
}
}
// Called with agent in locked state.
func (a *agent) reconcile() {
reconcileStartTime := time.Now()
r := reconciler.New(a.registry)
status := r.Reconcile(a.ctx, a.currentState, a.intendedState)
a.currentState = status.NewCurrentState
// Update variables needed to resume reconciliation
// after async operation(s).
if status.AsyncOpsInProgress {
log.Debug("Some config operations continue in the background")
}
a.cancelAsyncOps = status.CancelAsyncOps
a.resumeReconciliation = status.ReadyToResume
a.waitForAsyncOps = status.WaitForAsyncOps
// Log every executed operation.
for _, opLog := range status.OperationLog {
var withErr string
if opLog.Err != nil {
withErr = fmt.Sprintf(" with error: %v", opLog.Err)
}
var verb string
if opLog.InProgress {
verb = "started async execution of"
} else {
if opLog.StartTime.Before(reconcileStartTime) {
verb = "finalized async execution of"
} else {
// synchronous operation
verb = "executed"
}
}
log.Infof("State Reconciler %s %v for %v%s, content: %s",
verb, opLog.Operation, dg.Reference(opLog.Item),
withErr, opLog.Item.String())
}
// Log transitions from no-error to error and vice-versa.
var failed, fixed []string
for _, opLog := range status.OperationLog {
itemRef := dg.Reference(opLog.Item)
if opLog.Err != nil {
a.failingItems[itemRef] = opLog.Err
} else {
delete(a.failingItems, itemRef)
}
if opLog.PrevErr == nil && opLog.Err != nil {
failed = append(failed, fmt.Sprintf("%v (err: %v)", itemRef, opLog.Err))
}
if opLog.PrevErr != nil && opLog.Err == nil {
fixed = append(fixed, itemRef.String())
}
}
if len(failed) > 0 {
log.Errorf("Newly failed config items: %s",
strings.Join(failed, ", "))
}
if len(fixed) > 0 {
log.Infof("Fixed config items: %s",
strings.Join(fixed, ", "))
}
}
func (a *agent)
|
(doneChan <-chan struct{}) chan netlink.LinkUpdate {
linkChan := make(chan netlink.LinkUpdate, 64)
linkErrFunc := func(err error) {
log.Errorf("LinkSubscribe failed %s\n", err)
}
linkOpts := netlink.LinkSubscribeOptions{
ErrorCallback: linkErrFunc,
}
if err := netlink.LinkSubscribeWithOptions(
linkChan, doneChan, linkOpts); err != nil {
log.Fatal(err)
}
return linkChan
}
func (a *agent) allocNetworkIndexes() {
if a.networkIndex == nil {
a.networkIndex = make(map[string]int)
}
// Allocate new indexes where needed.
for _, network := range a.netModel.Networks {
index, hasIndex := a.networkIndex[network.LogicalLabel]
if hasIndex {
// Keep already allocated index.
continue
}
index = 0
for a.isNetworkIndexUsed(index) {
index++
}
a.networkIndex[network.LogicalLabel] = index
}
}
func (a *agent) isNetworkIndexUsed(index int) bool {
for _, val := range a.networkIndex {
if val == index {
return true
}
}
return false
}
func (a *agent) getNetModel(w http.ResponseWriter, r *http.Request) {
a.Lock()
resp, err := json.Marshal(a.netModel.NetworkModel)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("failed to marshal network model to JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
if _, err = w.Write(resp); err != nil {
log.Errorf("Failed to write network model to HTTP response: %v", err)
}
}
func (a *agent) applyNetModel(w http.ResponseWriter, r *http.Request) {
var netModel api.NetworkModel
body, err := ioutil.ReadAll(r.Body)
if err != nil {
errMsg := fmt.Sprintf("Failed to read network model from HTTP request: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
err = json.Unmarshal(body, &netModel)
if err != nil {
errMsg := fmt.Sprintf("Failed to unmarshal network model from JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
parsedNetModel, err := a.parseNetModel(netModel)
if err != nil {
errMsg := fmt.Sprintf("Network model is invalid: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
log.Debugf("Parsed network model: %+v", parsedNetModel)
a.newNetModel <- parsedNetModel
w.WriteHeader(http.StatusOK)
}
func (a *agent) getNetConfig(w http.ResponseWriter, r *http.Request) {
dotExporter := &dg.DotExporter{CheckDeps: true}
a.Lock()
dot, err := dotExporter.ExportTransition(a.currentState, a.intendedState)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("Failed to export network config to DOT: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "text/vnd.graphviz")
w.WriteHeader(http.StatusOK)
if _, err = w.Write([]byte(dot)); err != nil {
log.Errorf("Failed to write network config to HTTP response: %v", err)
}
}
func (a *agent) getSDNStatus(w http.ResponseWriter, r *http.Request) {
status := api.SDNStatus{
MgmtIPs: a.getMgmtIPs(),
}
a.Lock()
for itemRef, err := range a.failingItems {
status.ConfigErrors = append(status.ConfigErrors, api.ConfigError{
ItemRef: itemRef,
ErrMsg: err.Error(),
})
}
a.Unlock()
resp, err := json.Marshal(status)
if err
|
linkSubscribe
|
identifier_name
|
agent.go
|
:= linkUpdate.Header.Type == syscall.RTM_DELLINK
if added || deleted {
log.Debugf("Important link change: %+v", linkUpdate)
a.Lock()
a.macLookup.RefreshCache()
changed := a.updateCurrentState()
mac := linkUpdate.Attrs().HardwareAddr
if bytes.HasPrefix(mac, hostPortMACPrefix) {
// Intended state for SDN<->Host connectivity changes
// when the "host port" (dis)appears.
a.updateIntendedState()
changed = true
}
if changed {
a.reconcile()
}
a.Unlock()
}
case <-a.ctx.Done():
a.Lock()
if a.cancelAsyncOps != nil {
a.cancelAsyncOps()
a.waitForAsyncOps()
log.Warn("Some asynchronous operations were canceled!")
}
a.Unlock()
return
}
}
}
// Called with agent in locked state.
func (a *agent) reconcile() {
reconcileStartTime := time.Now()
r := reconciler.New(a.registry)
status := r.Reconcile(a.ctx, a.currentState, a.intendedState)
a.currentState = status.NewCurrentState
// Update variables needed to resume reconciliation
// after async operation(s).
if status.AsyncOpsInProgress {
log.Debug("Some config operations continue in the background")
}
a.cancelAsyncOps = status.CancelAsyncOps
a.resumeReconciliation = status.ReadyToResume
a.waitForAsyncOps = status.WaitForAsyncOps
// Log every executed operation.
for _, opLog := range status.OperationLog {
var withErr string
if opLog.Err != nil {
withErr = fmt.Sprintf(" with error: %v", opLog.Err)
}
var verb string
if opLog.InProgress {
verb = "started async execution of"
} else {
if opLog.StartTime.Before(reconcileStartTime) {
verb = "finalized async execution of"
} else {
// synchronous operation
verb = "executed"
}
}
log.Infof("State Reconciler %s %v for %v%s, content: %s",
verb, opLog.Operation, dg.Reference(opLog.Item),
withErr, opLog.Item.String())
}
// Log transitions from no-error to error and vice-versa.
var failed, fixed []string
for _, opLog := range status.OperationLog {
itemRef := dg.Reference(opLog.Item)
if opLog.Err != nil {
a.failingItems[itemRef] = opLog.Err
} else {
delete(a.failingItems, itemRef)
}
if opLog.PrevErr == nil && opLog.Err != nil {
failed = append(failed, fmt.Sprintf("%v (err: %v)", itemRef, opLog.Err))
}
if opLog.PrevErr != nil && opLog.Err == nil {
fixed = append(fixed, itemRef.String())
}
}
if len(failed) > 0 {
log.Errorf("Newly failed config items: %s",
strings.Join(failed, ", "))
}
if len(fixed) > 0 {
log.Infof("Fixed config items: %s",
strings.Join(fixed, ", "))
}
}
func (a *agent) linkSubscribe(doneChan <-chan struct{}) chan netlink.LinkUpdate {
linkChan := make(chan netlink.LinkUpdate, 64)
linkErrFunc := func(err error) {
log.Errorf("LinkSubscribe failed %s\n", err)
}
linkOpts := netlink.LinkSubscribeOptions{
ErrorCallback: linkErrFunc,
}
if err := netlink.LinkSubscribeWithOptions(
linkChan, doneChan, linkOpts); err != nil {
log.Fatal(err)
}
return linkChan
}
func (a *agent) allocNetworkIndexes() {
if a.networkIndex == nil {
a.networkIndex = make(map[string]int)
}
// Allocate new indexes where needed.
for _, network := range a.netModel.Networks {
index, hasIndex := a.networkIndex[network.LogicalLabel]
if hasIndex {
// Keep already allocated index.
continue
}
index = 0
for a.isNetworkIndexUsed(index) {
index++
}
a.networkIndex[network.LogicalLabel] = index
}
}
func (a *agent) isNetworkIndexUsed(index int) bool {
for _, val := range a.networkIndex {
if val == index {
return true
}
}
return false
}
func (a *agent) getNetModel(w http.ResponseWriter, r *http.Request) {
a.Lock()
resp, err := json.Marshal(a.netModel.NetworkModel)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("failed to marshal network model to JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
if _, err = w.Write(resp); err != nil {
log.Errorf("Failed to write network model to HTTP response: %v", err)
}
}
func (a *agent) applyNetModel(w http.ResponseWriter, r *http.Request) {
var netModel api.NetworkModel
body, err := ioutil.ReadAll(r.Body)
if err != nil {
errMsg := fmt.Sprintf("Failed to read network model from HTTP request: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
err = json.Unmarshal(body, &netModel)
if err != nil {
errMsg := fmt.Sprintf("Failed to unmarshal network model from JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
parsedNetModel, err := a.parseNetModel(netModel)
if err != nil {
errMsg := fmt.Sprintf("Network model is invalid: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusBadRequest)
return
}
log.Debugf("Parsed network model: %+v", parsedNetModel)
a.newNetModel <- parsedNetModel
w.WriteHeader(http.StatusOK)
}
func (a *agent) getNetConfig(w http.ResponseWriter, r *http.Request) {
dotExporter := &dg.DotExporter{CheckDeps: true}
a.Lock()
dot, err := dotExporter.ExportTransition(a.currentState, a.intendedState)
a.Unlock()
if err != nil {
errMsg := fmt.Sprintf("Failed to export network config to DOT: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "text/vnd.graphviz")
w.WriteHeader(http.StatusOK)
if _, err = w.Write([]byte(dot)); err != nil {
log.Errorf("Failed to write network config to HTTP response: %v", err)
}
}
func (a *agent) getSDNStatus(w http.ResponseWriter, r *http.Request) {
status := api.SDNStatus{
MgmtIPs: a.getMgmtIPs(),
}
a.Lock()
for itemRef, err := range a.failingItems {
status.ConfigErrors = append(status.ConfigErrors, api.ConfigError{
ItemRef: itemRef,
ErrMsg: err.Error(),
})
}
a.Unlock()
resp, err := json.Marshal(status)
if err != nil {
errMsg := fmt.Sprintf("failed to marshal SDN status to JSON: %v", err)
log.Error(errMsg)
http.Error(w, errMsg, http.StatusInternalServerError)
return
}
w.Header().Set("Content-Type", "application/json")
w.WriteHeader(http.StatusOK)
if _, err = w.Write(resp); err != nil {
log.Errorf("Failed to write SDN status to HTTP response: %v", err)
}
}
func (a *agent) getMgmtIPs() (ips []string) {
hostNetIf, found := a.macLookup.GetInterfaceByMAC(hostPortMACPrefix, true)
if !found {
log.Warnf("failed to find port connecting SDN with the host")
return
}
ifName := hostNetIf.IfName
link, err := netlink.LinkByName(ifName)
if err != nil {
log.Warnf("Failed to get link for interface %s: %v", ifName, err)
return
}
ips4, err := netlink.AddrList(link, netlink.FAMILY_V4)
if err != nil {
log.Warnf("Failed to get IPv4 addresses for interface %s: %v", ifName, err)
}
ips6, err := netlink.AddrList(link, netlink.FAMILY_V6)
if err != nil {
log.Errorf("Failed to get IPv6 addresses for interface %s: %v", ifName, err)
}
for _, ip := range ips4 {
if ip.IP.IsGlobalUnicast() {
ips = append(ips, ip.IP.String())
}
}
for _, ip := range ips6 {
if ip.IP.IsGlobalUnicast() {
ips = append(ips, ip.IP.String())
}
}
return
}
// Gateway to use to route traffic towards host OS.
|
func (a *agent) getHostGwIP(ipv6 bool) net.IP {
hostPort, found := a.macLookup.GetInterfaceByMAC(hostPortMACPrefix, true)
|
random_line_split
|
|
client.go
|
getLogger = func(ctx context.Context) log.Logger {
return log.NewNopLogger()
}
}
if setLogger == nil {
setLogger = func(ctx context.Context, _ log.Logger) context.Context {
return ctx
}
}
tokenAcquirer, err := buildTokenAcquirer(&config.Auth)
if err != nil {
return nil, err
}
clientStore := &Client{
client: config.HTTPClient,
auth: tokenAcquirer,
logger: config.Logger,
observer: newObserver(config.Logger, config.Listen, measures),
bucket: config.Bucket,
storeBaseURL: config.Address + storeAPIPath,
getLogger: getLogger,
setLogger: setLogger,
}
return clientStore, nil
}
// translateNonSuccessStatusCode returns a specific error
// for known Argus status codes.
func translateNonSuccessStatusCode(code int) error {
switch code {
case http.StatusBadRequest:
return ErrBadRequest
case http.StatusUnauthorized, http.StatusForbidden:
return ErrFailedAuthentication
default:
return errNonSuccessResponse
}
}
func newObserver(logger log.Logger, config ListenerConfig, measures *Measures) *observerConfig {
if config.Listener == nil {
return nil
}
return &observerConfig{
listener: config.Listener,
ticker: time.NewTicker(config.PullInterval),
pullInterval: config.PullInterval,
measures: measures,
shutdown: make(chan struct{}),
}
}
func validateConfig(config *ClientConfig) error {
if config.Address == "" {
return ErrAddressEmpty
}
if config.Bucket == "" {
return ErrBucketEmpty
}
if config.HTTPClient == nil {
config.HTTPClient = http.DefaultClient
}
if config.Listen.PullInterval == 0 {
config.Listen.PullInterval = time.Second * 5
}
if config.Logger == nil {
config.Logger = log.NewNopLogger()
}
return nil
}
func isEmpty(options acquire.RemoteBearerTokenAcquirerOptions) bool {
return len(options.AuthURL) < 1 || options.Buffer == 0 || options.Timeout == 0
}
func buildTokenAcquirer(auth *Auth) (acquire.Acquirer, error) {
if !isEmpty(auth.JWT) {
return acquire.NewRemoteBearerTokenAcquirer(auth.JWT)
} else if len(auth.Basic) > 0 {
return acquire.NewFixedAuthAcquirer(auth.Basic)
}
return &acquire.DefaultAcquirer{}, nil
}
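// A minimal sketch of selecting an auth mode (placeholder credentials; the
// variable names are illustrative only). JWT configuration takes precedence
// over Basic; with neither set, the DefaultAcquirer leaves requests
// unauthenticated:
//
//	auth := Auth{Basic: "Basic dXNlcjpwYXNz"} // placeholder for base64("user:pass")
//	acquirer, err := buildTokenAcquirer(&auth) // yields a fixed-value Authorization header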
func (c *Client) sendRequest(ctx context.Context, owner, method, url string, body io.Reader) (response, error) {
r, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errNewRequestFailure, err.Error())
}
err = acquire.AddAuth(r, c.auth)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, ErrAuthAcquirerFailure, err.Error())
}
if len(owner) > 0 {
r.Header.Set(store.ItemOwnerHeaderKey, owner)
}
resp, err := c.client.Do(r)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errDoRequestFailure, err.Error())
}
defer resp.Body.Close()
var sqResp = response{
Code: resp.StatusCode,
ArgusErrorHeader: resp.Header.Get(store.XmidtErrorHeaderKey),
}
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return sqResp, fmt.Errorf(errWrappedFmt, errReadingBodyFailure, err.Error())
}
sqResp.Body = bodyBytes
return sqResp, nil
}
// GetItems fetches all items that belong to a given owner.
func (c *Client) GetItems(ctx context.Context, owner string) (Items, error) {
response, err := c.sendRequest(ctx, owner, http.MethodGet, fmt.Sprintf("%s/%s", c.storeBaseURL, c.bucket), nil)
if err != nil {
return nil, err
}
if response.Code != http.StatusOK {
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with non-200 response for GetItems request",
"code", response.Code, "ErrorHeader", response.ArgusErrorHeader)
return nil, fmt.Errorf(errStatusCodeFmt, response.Code, translateNonSuccessStatusCode(response.Code))
}
var items Items
err = json.Unmarshal(response.Body, &items)
if err != nil {
return nil, fmt.Errorf("GetItems: %w: %s", errJSONUnmarshal, err.Error())
}
return items, nil
}
// PushItem creates a new item if one doesn't already exist. If an item exists
// and the ownership matches, the item is simply updated.
func (c *Client) PushItem(ctx context.Context, owner string, item model.Item) (PushResult, error) {
err := validatePushItemInput(owner, item)
if err != nil {
return "", err
}
data, err := json.Marshal(item)
if err != nil {
return "", fmt.Errorf(errWrappedFmt, errJSONMarshal, err.Error())
}
response, err := c.sendRequest(ctx, owner, http.MethodPut, fmt.Sprintf("%s/%s/%s", c.storeBaseURL, c.bucket, item.ID), bytes.NewReader(data))
if err != nil {
return "", err
}
if response.Code == http.StatusCreated {
return CreatedPushResult, nil
}
if response.Code == http.StatusOK {
return UpdatedPushResult, nil
}
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with a non-successful status code for a PushItem request",
"code", response.Code, "ErrorHeader", response.ArgusErrorHeader)
return "", fmt.Errorf(errStatusCodeFmt, response.Code, translateNonSuccessStatusCode(response.Code))
}
// RemoveItem removes the item if it exists and returns the data associated with it.
func (c *Client) RemoveItem(ctx context.Context, id, owner string) (model.Item, error) {
if len(id) < 1 {
return model.Item{}, ErrItemIDEmpty
}
resp, err := c.sendRequest(ctx, owner, http.MethodDelete, fmt.Sprintf("%s/%s/%s", c.storeBaseURL, c.bucket, id), nil)
if err != nil {
return model.Item{}, err
}
if resp.Code != http.StatusOK {
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with a non-successful status code for a RemoveItem request",
"code", resp.Code, "ErrorHeader", resp.ArgusErrorHeader)
return model.Item{}, fmt.Errorf(errStatusCodeFmt, resp.Code, translateNonSuccessStatusCode(resp.Code))
}
var item model.Item
err = json.Unmarshal(resp.Body, &item)
if err != nil {
return item, fmt.Errorf("RemoveItem: %w: %s", errJSONUnmarshal, err.Error())
}
return item, nil
}
// Start begins listening for updates on an interval given that client configuration
// is set up correctly. If a listener process is already in progress, calling Start()
// is a NoOp. If you want to restart the current listener process, call Stop() first.
func (c *Client) Start(ctx context.Context) error {
if c.observer == nil || c.observer.listener == nil {
level.Warn(c.logger).Log(xlog.MessageKey(), "No listener was setup to receive updates.")
return nil
}
if c.observer.ticker == nil {
level.Error(c.logger).Log(xlog.MessageKey(), "Observer ticker is nil")
return ErrUndefinedIntervalTicker
}
if !atomic.CompareAndSwapInt32(&c.observer.state, stopped, transitioning) {
level.Error(c.logger).Log(xlog.MessageKey(), "Start called when a listener was not in stopped state", "err", ErrListenerNotStopped)
return ErrListenerNotStopped
}
c.observer.ticker.Reset(c.observer.pullInterval)
go func() {
for {
select {
case <-c.observer.shutdown:
return
case <-c.observer.ticker.C:
outcome := SuccessOutcome
ctx := c.setLogger(context.Background(), c.logger)
items, err := c.GetItems(ctx, "")
if err == nil {
c.observer.listener.Update(items)
} else {
outcome = FailureOutcome
level.Error(c.logger).Log(xlog.MessageKey(), "Failed to get items for listeners", xlog.ErrorKey(), err)
}
c.observer.measures.Polls.With(prometheus.Labels{
OutcomeLabel: outcome}).Add(1)
}
}
}()
atomic.SwapInt32(&c.observer.state, running)
return nil
}
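// A usage sketch under stated assumptions (myListener implements Listener and
// measures is a pre-built *Measures; both names are hypothetical here, as is
// the bucket name):
//
//	func runArgusListener(ctx context.Context, myListener Listener, measures *Measures) error {
//		client, err := NewClient(ClientConfig{
//			Address: "https://example-argus.io:8090",
//			Bucket:  "webhooks", // hypothetical bucket name
//			Listen:  ListenerConfig{Listener: myListener},
//		}, measures, nil, nil) // nil logger hooks fall back to no-op defaults
//		if err != nil {
//			return err
//		}
//		if err := client.Start(ctx); err != nil {
//			return err
//		}
//		<-ctx.Done()
//		return client.Stop(context.Background())
//	}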
// Stop requests the current listener process to stop and waits for its goroutine to complete.
// Calling Stop() when a listener is not running (or while one is getting stopped) returns an
// error.
func (c *Client) Stop(ctx context.Context) error
|
{
if c.observer == nil || c.observer.ticker == nil {
return nil
}
if !atomic.CompareAndSwapInt32(&c.observer.state, running, transitioning) {
level.Error(c.logger).Log(xlog.MessageKey(), "Stop called when a listener was not in running state", "err", ErrListenerNotStopped)
return ErrListenerNotRunning
}
c.observer.ticker.Stop()
c.observer.shutdown <- struct{}{}
atomic.SwapInt32(&c.observer.state, stopped)
return nil
}
|
identifier_body
|
|
client.go
|
client.
Listen ListenerConfig
}
type response struct {
Body []byte
ArgusErrorHeader string
Code int
}
type Auth struct {
JWT acquire.RemoteBearerTokenAcquirerOptions
Basic string
}
type Items []model.Item
type Client struct {
client *http.Client
auth acquire.Acquirer
storeBaseURL string
logger log.Logger
bucket string
observer *observerConfig
getLogger func(context.Context) log.Logger
setLogger func(context.Context, log.Logger) context.Context
}
// listening states
const (
stopped int32 = iota
running
transitioning
)
// ListenerConfig contains config data to enable listening for the Argus client.
type ListenerConfig struct {
// Listener provides a mechanism to fetch a copy of all items within a bucket on
// an interval.
// (Optional). If not provided, listening won't be enabled for this client.
Listener Listener
// PullInterval is how often listeners should get updates.
// (Optional). Defaults to 5 seconds.
PullInterval time.Duration
}
type observerConfig struct {
listener Listener
ticker *time.Ticker
pullInterval time.Duration
measures *Measures
shutdown chan struct{}
state int32
}
func NewClient(config ClientConfig, measures *Measures,
getLogger func(context.Context) log.Logger,
setLogger func(context.Context, log.Logger) context.Context,
) (*Client, error) {
if measures == nil {
return nil, ErrNilMeasures
}
err := validateConfig(&config)
if err != nil {
return nil, err
}
if getLogger == nil {
getLogger = func(ctx context.Context) log.Logger {
return log.NewNopLogger()
}
}
if setLogger == nil {
setLogger = func(ctx context.Context, _ log.Logger) context.Context {
return ctx
}
}
tokenAcquirer, err := buildTokenAcquirer(&config.Auth)
if err != nil {
return nil, err
}
clientStore := &Client{
client: config.HTTPClient,
auth: tokenAcquirer,
logger: config.Logger,
observer: newObserver(config.Logger, config.Listen, measures),
bucket: config.Bucket,
storeBaseURL: config.Address + storeAPIPath,
getLogger: getLogger,
setLogger: setLogger,
}
return clientStore, nil
}
// translateNonSuccessStatusCode returns a specific error
// for known Argus status codes.
func translateNonSuccessStatusCode(code int) error {
switch code {
case http.StatusBadRequest:
return ErrBadRequest
case http.StatusUnauthorized, http.StatusForbidden:
return ErrFailedAuthentication
default:
return errNonSuccessResponse
}
}
func newObserver(logger log.Logger, config ListenerConfig, measures *Measures) *observerConfig {
if config.Listener == nil {
return nil
}
return &observerConfig{
listener: config.Listener,
ticker: time.NewTicker(config.PullInterval),
pullInterval: config.PullInterval,
measures: measures,
shutdown: make(chan struct{}),
}
}
func validateConfig(config *ClientConfig) error {
if config.Address == "" {
return ErrAddressEmpty
}
if config.Bucket == "" {
return ErrBucketEmpty
}
if config.HTTPClient == nil {
config.HTTPClient = http.DefaultClient
}
if config.Listen.PullInterval == 0 {
config.Listen.PullInterval = time.Second * 5
}
if config.Logger == nil {
config.Logger = log.NewNopLogger()
}
return nil
}
func isEmpty(options acquire.RemoteBearerTokenAcquirerOptions) bool {
return len(options.AuthURL) < 1 || options.Buffer == 0 || options.Timeout == 0
}
func buildTokenAcquirer(auth *Auth) (acquire.Acquirer, error) {
if !isEmpty(auth.JWT) {
return acquire.NewRemoteBearerTokenAcquirer(auth.JWT)
} else if len(auth.Basic) > 0 {
return acquire.NewFixedAuthAcquirer(auth.Basic)
}
return &acquire.DefaultAcquirer{}, nil
}
func (c *Client) sendRequest(ctx context.Context, owner, method, url string, body io.Reader) (response, error) {
r, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errNewRequestFailure, err.Error())
}
err = acquire.AddAuth(r, c.auth)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, ErrAuthAcquirerFailure, err.Error())
}
if len(owner) > 0 {
r.Header.Set(store.ItemOwnerHeaderKey, owner)
}
resp, err := c.client.Do(r)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errDoRequestFailure, err.Error())
}
defer resp.Body.Close()
var sqResp = response{
Code: resp.StatusCode,
ArgusErrorHeader: resp.Header.Get(store.XmidtErrorHeaderKey),
}
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return sqResp, fmt.Errorf(errWrappedFmt, errReadingBodyFailure, err.Error())
}
sqResp.Body = bodyBytes
return sqResp, nil
}
// GetItems fetches all items that belong to a given owner.
func (c *Client) GetItems(ctx context.Context, owner string) (Items, error) {
response, err := c.sendRequest(ctx, owner, http.MethodGet, fmt.Sprintf("%s/%s", c.storeBaseURL, c.bucket), nil)
if err != nil {
return nil, err
}
if response.Code != http.StatusOK {
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with non-200 response for GetItems request",
"code", response.Code, "ErrorHeader", response.ArgusErrorHeader)
return nil, fmt.Errorf(errStatusCodeFmt, response.Code, translateNonSuccessStatusCode(response.Code))
}
var items Items
err = json.Unmarshal(response.Body, &items)
if err != nil {
return nil, fmt.Errorf("GetItems: %w: %s", errJSONUnmarshal, err.Error())
}
return items, nil
}
// PushItem creates a new item if one doesn't already exist. If an item exists
// and the ownership matches, the item is simply updated.
func (c *Client) PushItem(ctx context.Context, owner string, item model.Item) (PushResult, error) {
err := validatePushItemInput(owner, item)
if err != nil {
return "", err
}
data, err := json.Marshal(item)
if err != nil {
return "", fmt.Errorf(errWrappedFmt, errJSONMarshal, err.Error())
}
response, err := c.sendRequest(ctx, owner, http.MethodPut, fmt.Sprintf("%s/%s/%s", c.storeBaseURL, c.bucket, item.ID), bytes.NewReader(data))
if err != nil {
return "", err
}
if response.Code == http.StatusCreated {
return CreatedPushResult, nil
}
if response.Code == http.StatusOK {
return UpdatedPushResult, nil
}
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with a non-successful status code for a PushItem request",
"code", response.Code, "ErrorHeader", response.ArgusErrorHeader)
return "", fmt.Errorf(errStatusCodeFmt, response.Code, translateNonSuccessStatusCode(response.Code))
}
// RemoveItem removes the item if it exists and returns the data associated with it.
func (c *Client) RemoveItem(ctx context.Context, id, owner string) (model.Item, error) {
if len(id) < 1 {
return model.Item{}, ErrItemIDEmpty
}
resp, err := c.sendRequest(ctx, owner, http.MethodDelete, fmt.Sprintf("%s/%s/%s", c.storeBaseURL, c.bucket, id), nil)
if err != nil {
return model.Item{}, err
}
if resp.Code != http.StatusOK {
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with a non-successful status code for a RemoveItem request",
"code", resp.Code, "ErrorHeader", resp.ArgusErrorHeader)
return model.Item{}, fmt.Errorf(errStatusCodeFmt, resp.Code, translateNonSuccessStatusCode(resp.Code))
}
var item model.Item
err = json.Unmarshal(resp.Body, &item)
if err != nil {
return item, fmt.Errorf("RemoveItem: %w: %s", errJSONUnmarshal, err.Error())
}
return item, nil
}
// Start begins listening for updates on an interval given that client configuration
// is set up correctly. If a listener process is already in progress, calling Start()
// is a NoOp. If you want to restart the current listener process, call Stop() first.
func (c *Client) Start(ctx context.Context) error {
if c.observer == nil || c.observer.listener == nil {
level.Warn(c.logger).Log(xlog.MessageKey(), "No listener was setup to receive updates.")
return nil
}
if c.observer.ticker == nil {
level.Error(c.logger).Log(xlog.MessageKey(), "Observer ticker is nil")
return ErrUndefinedIntervalTicker
|
}
if !atomic.CompareAndSwapInt32(&c.observer.state, stopped, transitioning) {
level.Error(c.logger).Log(xlog.MessageKey(), "Start called when a listener was not in stopped state", "err", ErrListenerNotStopped)
|
random_line_split
|
|
client.go
|
.New("argus address is required")
ErrBucketEmpty = errors.New("bucket name is required")
ErrItemIDEmpty = errors.New("item ID is required")
ErrItemDataEmpty = errors.New("data field in item is required")
ErrUndefinedIntervalTicker = errors.New("interval ticker is nil. Can't listen for updates")
ErrAuthAcquirerFailure = errors.New("failed acquiring auth token")
ErrFailedAuthentication = errors.New("failed to authenticate with argus")
ErrBadRequest = errors.New("argus rejected the request as invalid")
ErrListenerNotStopped = errors.New("listener is either running or starting")
ErrListenerNotRunning = errors.New("listener is either stopped or stopping")
)
var (
errNonSuccessResponse = errors.New("argus responded with a non-success status code")
errNewRequestFailure = errors.New("failed creating an HTTP request")
errDoRequestFailure = errors.New("http client failed while sending request")
errReadingBodyFailure = errors.New("failed while reading http response body")
errJSONUnmarshal = errors.New("failed unmarshaling JSON response payload")
errJSONMarshal = errors.New("failed marshaling item as JSON payload")
)
// PushResult is a simple type to indicate the result type for the
// PushItem operation.
type PushResult string
// Types of successful PushItem results.
const (
CreatedPushResult PushResult = "created"
UpdatedPushResult PushResult = "ok"
)
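// Interpreting the result (a sketch; ctx, owner, and item are caller-supplied
// and c is an initialized *Client):
//
//	res, err := c.PushItem(ctx, owner, item)
//	if err == nil && res == CreatedPushResult {
//		// the item did not exist before this call
//	}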
type ClientConfig struct {
// Address is the Argus URL (i.e. https://example-argus.io:8090)
Address string
// Bucket partition to be used by this client.
Bucket string
// HTTPClient refers to the client that will be used to send requests.
// (Optional) Defaults to http.DefaultClient.
HTTPClient *http.Client
// Auth provides the mechanism to add auth headers to outgoing requests.
// (Optional) If not provided, no auth headers are added.
Auth Auth
// Logger to be used by the client.
// (Optional). By default a no op logger will be used.
Logger log.Logger
// Listen helps enable and configure the listener feature of the client.
// (Optional) If section is not provided with Listener, this
// feature will be disabled for the client.
Listen ListenerConfig
}
type response struct {
Body []byte
ArgusErrorHeader string
Code int
}
type Auth struct {
JWT acquire.RemoteBearerTokenAcquirerOptions
Basic string
}
type Items []model.Item
type Client struct {
client *http.Client
auth acquire.Acquirer
storeBaseURL string
logger log.Logger
bucket string
observer *observerConfig
getLogger func(context.Context) log.Logger
setLogger func(context.Context, log.Logger) context.Context
}
// listening states
const (
stopped int32 = iota
running
transitioning
)
// ListenerConfig contains config data to enable listening for the Argus client.
type ListenerConfig struct {
// Listener provides a mechanism to fetch a copy of all items within a bucket on
// an interval.
// (Optional). If not provided, listening won't be enabled for this client.
Listener Listener
// PullInterval is how often listeners should get updates.
// (Optional). Defaults to 5 seconds.
PullInterval time.Duration
}
type observerConfig struct {
listener Listener
ticker *time.Ticker
pullInterval time.Duration
measures *Measures
shutdown chan struct{}
state int32
}
func NewClient(config ClientConfig, measures *Measures,
getLogger func(context.Context) log.Logger,
setLogger func(context.Context, log.Logger) context.Context,
) (*Client, error) {
if measures == nil {
return nil, ErrNilMeasures
}
err := validateConfig(&config)
if err != nil {
return nil, err
}
if getLogger == nil {
getLogger = func(ctx context.Context) log.Logger {
return log.NewNopLogger()
}
}
if setLogger == nil {
setLogger = func(ctx context.Context, _ log.Logger) context.Context {
return ctx
}
}
tokenAcquirer, err := buildTokenAcquirer(&config.Auth)
if err != nil {
return nil, err
}
clientStore := &Client{
client: config.HTTPClient,
auth: tokenAcquirer,
logger: config.Logger,
observer: newObserver(config.Logger, config.Listen, measures),
bucket: config.Bucket,
storeBaseURL: config.Address + storeAPIPath,
getLogger: getLogger,
setLogger: setLogger,
}
return clientStore, nil
}
// translateNonSuccessStatusCode returns a specific error
// for known Argus status codes.
func translateNonSuccessStatusCode(code int) error {
switch code {
case http.StatusBadRequest:
return ErrBadRequest
case http.StatusUnauthorized, http.StatusForbidden:
return ErrFailedAuthentication
default:
return errNonSuccessResponse
}
}
func newObserver(logger log.Logger, config ListenerConfig, measures *Measures) *observerConfig {
if config.Listener == nil {
return nil
}
return &observerConfig{
listener: config.Listener,
ticker: time.NewTicker(config.PullInterval),
pullInterval: config.PullInterval,
measures: measures,
shutdown: make(chan struct{}),
}
}
func validateConfig(config *ClientConfig) error {
if config.Address == "" {
return ErrAddressEmpty
}
if config.Bucket == "" {
return ErrBucketEmpty
}
if config.HTTPClient == nil {
config.HTTPClient = http.DefaultClient
}
if config.Listen.PullInterval == 0 {
config.Listen.PullInterval = time.Second * 5
}
if config.Logger == nil {
config.Logger = log.NewNopLogger()
}
return nil
}
func
|
(options acquire.RemoteBearerTokenAcquirerOptions) bool {
return len(options.AuthURL) < 1 || options.Buffer == 0 || options.Timeout == 0
}
func buildTokenAcquirer(auth *Auth) (acquire.Acquirer, error) {
if !isEmpty(auth.JWT) {
return acquire.NewRemoteBearerTokenAcquirer(auth.JWT)
} else if len(auth.Basic) > 0 {
return acquire.NewFixedAuthAcquirer(auth.Basic)
}
return &acquire.DefaultAcquirer{}, nil
}
func (c *Client) sendRequest(ctx context.Context, owner, method, url string, body io.Reader) (response, error) {
r, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errNewRequestFailure, err.Error())
}
err = acquire.AddAuth(r, c.auth)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, ErrAuthAcquirerFailure, err.Error())
}
if len(owner) > 0 {
r.Header.Set(store.ItemOwnerHeaderKey, owner)
}
resp, err := c.client.Do(r)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errDoRequestFailure, err.Error())
}
defer resp.Body.Close()
var sqResp = response{
Code: resp.StatusCode,
ArgusErrorHeader: resp.Header.Get(store.XmidtErrorHeaderKey),
}
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return sqResp, fmt.Errorf(errWrappedFmt, errReadingBodyFailure, err.Error())
}
sqResp.Body = bodyBytes
return sqResp, nil
}
// GetItems fetches all items that belong to a given owner.
func (c *Client) GetItems(ctx context.Context, owner string) (Items, error) {
response, err := c.sendRequest(ctx, owner, http.MethodGet, fmt.Sprintf("%s/%s", c.storeBaseURL, c.bucket), nil)
if err != nil {
return nil, err
}
if response.Code != http.StatusOK {
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with non-200 response for GetItems request",
"code", response.Code, "ErrorHeader", response.ArgusErrorHeader)
return nil, fmt.Errorf(errStatusCodeFmt, response.Code, translateNonSuccessStatusCode(response.Code))
}
var items Items
err = json.Unmarshal(response.Body, &items)
if err != nil {
return nil, fmt.Errorf("GetItems: %w: %s", errJSONUnmarshal, err.Error())
}
return items, nil
}
// PushItem creates a new item if one doesn't already exist. If an item exists
// and the ownership matches, the item is simply updated.
func (c *Client) PushItem(ctx context.Context, owner string, item model.Item) (PushResult, error) {
err := validatePushItemInput(owner, item)
if err != nil {
return "", err
}
data, err := json.Marshal(item)
if err != nil {
return "", fmt.Errorf(errWrappedFmt, errJSONMarshal, err.Error())
}
response, err := c.sendRequest(ctx, owner, http.MethodPut, fmt.Sprintf("%s/%s/%s", c.storeBaseURL, c.bucket, item.ID), bytes.NewReader(data))
if err != nil {
return "", err
}
if response.Code == http.StatusCreated {
return CreatedPushResult, nil
}
if response.Code == http.StatusOK {
return UpdatedPushResult, nil
}
level.Error(c.getLogger(ctx)).Log(xlog
|
isEmpty
|
identifier_name
|
client.go
|
.New("argus address is required")
ErrBucketEmpty = errors.New("bucket name is required")
ErrItemIDEmpty = errors.New("item ID is required")
ErrItemDataEmpty = errors.New("data field in item is required")
ErrUndefinedIntervalTicker = errors.New("interval ticker is nil. Can't listen for updates")
ErrAuthAcquirerFailure = errors.New("failed acquiring auth token")
ErrFailedAuthentication = errors.New("failed to authenticate with argus")
ErrBadRequest = errors.New("argus rejected the request as invalid")
ErrListenerNotStopped = errors.New("listener is either running or starting")
ErrListenerNotRunning = errors.New("listener is either stopped or stopping")
)
var (
errNonSuccessResponse = errors.New("argus responded with a non-success status code")
errNewRequestFailure = errors.New("failed creating an HTTP request")
errDoRequestFailure = errors.New("http client failed while sending request")
errReadingBodyFailure = errors.New("failed while reading http response body")
errJSONUnmarshal = errors.New("failed unmarshaling JSON response payload")
errJSONMarshal = errors.New("failed marshaling item as JSON payload")
)
// PushResult is a simple type to indicate the result type for the
// PushItem operation.
type PushResult string
// Types of successful PushItem results.
const (
CreatedPushResult PushResult = "created"
UpdatedPushResult PushResult = "ok"
)
type ClientConfig struct {
// Address is the Argus URL (i.e. https://example-argus.io:8090)
Address string
// Bucket partition to be used by this client.
Bucket string
// HTTPClient refers to the client that will be used to send requests.
// (Optional) Defaults to http.DefaultClient.
HTTPClient *http.Client
// Auth provides the mechanism to add auth headers to outgoing requests.
// (Optional) If not provided, no auth headers are added.
Auth Auth
// Logger to be used by the client.
// (Optional). By default a no op logger will be used.
Logger log.Logger
// Listen helps enable and configure the listener feature of the client.
// (Optional) If section is not provided with Listener, this
// feature will be disabled for the client.
Listen ListenerConfig
}
type response struct {
Body []byte
ArgusErrorHeader string
Code int
}
type Auth struct {
JWT acquire.RemoteBearerTokenAcquirerOptions
Basic string
}
type Items []model.Item
type Client struct {
client *http.Client
auth acquire.Acquirer
storeBaseURL string
logger log.Logger
bucket string
observer *observerConfig
getLogger func(context.Context) log.Logger
setLogger func(context.Context, log.Logger) context.Context
}
// listening states
const (
stopped int32 = iota
running
transitioning
)
// ListenerConfig contains config data to enable listening for the Argus client.
type ListenerConfig struct {
// Listener provides a mechanism to fetch a copy of all items within a bucket on
// an interval.
// (Optional). If not provided, listening won't be enabled for this client.
Listener Listener
// PullInterval is how often listeners should get updates.
// (Optional). Defaults to 5 seconds.
PullInterval time.Duration
}
type observerConfig struct {
listener Listener
ticker *time.Ticker
pullInterval time.Duration
measures *Measures
shutdown chan struct{}
state int32
}
func NewClient(config ClientConfig, measures *Measures,
getLogger func(context.Context) log.Logger,
setLogger func(context.Context, log.Logger) context.Context,
) (*Client, error) {
if measures == nil
|
err := validateConfig(&config)
if err != nil {
return nil, err
}
if getLogger == nil {
getLogger = func(ctx context.Context) log.Logger {
return log.NewNopLogger()
}
}
if setLogger == nil {
setLogger = func(ctx context.Context, _ log.Logger) context.Context {
return ctx
}
}
tokenAcquirer, err := buildTokenAcquirer(&config.Auth)
if err != nil {
return nil, err
}
clientStore := &Client{
client: config.HTTPClient,
auth: tokenAcquirer,
logger: config.Logger,
observer: newObserver(config.Logger, config.Listen, measures),
bucket: config.Bucket,
storeBaseURL: config.Address + storeAPIPath,
getLogger: getLogger,
setLogger: setLogger,
}
return clientStore, nil
}
// translateNonSuccessStatusCode returns a specific error
// for known Argus status codes.
func translateNonSuccessStatusCode(code int) error {
switch code {
case http.StatusBadRequest:
return ErrBadRequest
case http.StatusUnauthorized, http.StatusForbidden:
return ErrFailedAuthentication
default:
return errNonSuccessResponse
}
}
func newObserver(logger log.Logger, config ListenerConfig, measures *Measures) *observerConfig {
if config.Listener == nil {
return nil
}
return &observerConfig{
listener: config.Listener,
ticker: time.NewTicker(config.PullInterval),
pullInterval: config.PullInterval,
measures: measures,
shutdown: make(chan struct{}),
}
}
func validateConfig(config *ClientConfig) error {
if config.Address == "" {
return ErrAddressEmpty
}
if config.Bucket == "" {
return ErrBucketEmpty
}
if config.HTTPClient == nil {
config.HTTPClient = http.DefaultClient
}
if config.Listen.PullInterval == 0 {
config.Listen.PullInterval = time.Second * 5
}
if config.Logger == nil {
config.Logger = log.NewNopLogger()
}
return nil
}
func isEmpty(options acquire.RemoteBearerTokenAcquirerOptions) bool {
return len(options.AuthURL) < 1 || options.Buffer == 0 || options.Timeout == 0
}
func buildTokenAcquirer(auth *Auth) (acquire.Acquirer, error) {
if !isEmpty(auth.JWT) {
return acquire.NewRemoteBearerTokenAcquirer(auth.JWT)
} else if len(auth.Basic) > 0 {
return acquire.NewFixedAuthAcquirer(auth.Basic)
}
return &acquire.DefaultAcquirer{}, nil
}
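// Illustrative sketch (not part of the original file): buildTokenAcquirer picks
// an acquirer purely from which Auth field is set. The endpoint and credential
// values below are hypothetical, and the duration-typed fields are an
// assumption based on the zero-value checks in isEmpty above.
//
//	jwtAuth := Auth{JWT: acquire.RemoteBearerTokenAcquirerOptions{
//		AuthURL: "https://auth.example.com/token",
//		Buffer:  time.Minute,
//		Timeout: 10 * time.Second,
//	}}
//	basicAuth := Auth{Basic: "Basic dXNlcjpwYXNz"}
//	// jwtAuth yields a remote bearer acquirer; basicAuth a fixed one;
//	// an empty Auth falls through to acquire.DefaultAcquirer.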
func (c *Client) sendRequest(ctx context.Context, owner, method, url string, body io.Reader) (response, error) {
r, err := http.NewRequestWithContext(ctx, method, url, body)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errNewRequestFailure, err.Error())
}
err = acquire.AddAuth(r, c.auth)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, ErrAuthAcquirerFailure, err.Error())
}
if len(owner) > 0 {
r.Header.Set(store.ItemOwnerHeaderKey, owner)
}
resp, err := c.client.Do(r)
if err != nil {
return response{}, fmt.Errorf(errWrappedFmt, errDoRequestFailure, err.Error())
}
defer resp.Body.Close()
var sqResp = response{
Code: resp.StatusCode,
ArgusErrorHeader: resp.Header.Get(store.XmidtErrorHeaderKey),
}
bodyBytes, err := ioutil.ReadAll(resp.Body)
if err != nil {
return sqResp, fmt.Errorf(errWrappedFmt, errReadingBodyFailure, err.Error())
}
sqResp.Body = bodyBytes
return sqResp, nil
}
// GetItems fetches all items that belong to a given owner.
func (c *Client) GetItems(ctx context.Context, owner string) (Items, error) {
response, err := c.sendRequest(ctx, owner, http.MethodGet, fmt.Sprintf("%s/%s", c.storeBaseURL, c.bucket), nil)
if err != nil {
return nil, err
}
if response.Code != http.StatusOK {
level.Error(c.getLogger(ctx)).Log(xlog.MessageKey(), "Argus responded with non-200 response for GetItems request",
"code", response.Code, "ErrorHeader", response.ArgusErrorHeader)
return nil, fmt.Errorf(errStatusCodeFmt, response.Code, translateNonSuccessStatusCode(response.Code))
}
var items Items
err = json.Unmarshal(response.Body, &items)
if err != nil {
return nil, fmt.Errorf("GetItems: %w: %s", errJSONUnmarshal, err.Error())
}
return items, nil
}
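// Illustrative sketch (not part of the original file): typical use of GetItems
// and PushItem together; the owner and item values are hypothetical.
//
//	items, err := client.GetItems(ctx, "owner-id")
//	if err != nil {
//		// handle the error
//	}
//	result, err := client.PushItem(ctx, "owner-id", item)
//	if err == nil && result == CreatedPushResult {
//		// the item was newly created rather than updated
//	}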
// PushItem creates a new item if one doesn't already exist. If an item exists
// and the ownership matches, the item is simply updated.
func (c *Client) PushItem(ctx context.Context, owner string, item model.Item) (PushResult, error) {
err := validatePushItemInput(owner, item)
if err != nil {
return "", err
}
data, err := json.Marshal(item)
if err != nil {
return "", fmt.Errorf(errWrappedFmt, errJSONMarshal, err.Error())
}
response, err := c.sendRequest(ctx, owner, http.MethodPut, fmt.Sprintf("%s/%s/%s", c.storeBaseURL, c.bucket, item.ID), bytes.NewReader(data))
if err != nil {
return "", err
}
if response.Code == http.StatusCreated {
return CreatedPushResult, nil
}
if response.Code == http.StatusOK {
return UpdatedPushResult, nil
}
level.Error(c.getLogger(ctx)).Log(x
|
{
return nil, ErrNilMeasures
}
|
conditional_block
|
shader.rs
|
use_program(&self) {
unsafe {
gl::UseProgram(self.gl_handle);
}
}
pub fn is_bound(&self) -> bool {
self.gl_handle == Shader::get_currently_bound_raw()
}
pub fn get_uniform_loc(&self, uniform: &str) -> i32 {
use std::ffi::CString;
unsafe {
let cstr = CString::new(uniform).unwrap();
gl::GetUniformLocation(self.gl_handle, cstr.as_ptr())
}
}
pub fn set_uniform_vec2(&self, uniform: &str, v: Vec2) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform2f(self.get_uniform_loc(&uniform), v.x, v.y);
}
}
pub fn set_uniform_vec3<V>(&self, uniform: &str, v: V) where V: Into<Vec3> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform3f(self.get_uniform_loc(&uniform), v.x, v.y, v.z);
}
}
pub fn set_uniform_vec4<V>(&self, uniform: &str, v: V) where V: Into<Vec4> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform4f(self.get_uniform_loc(&uniform), v.x, v.y, v.z, v.w);
}
}
pub fn set_uniform_i32(&self, uniform: &str, v: i32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1i(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_f32(&self, uniform: &str, v: f32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1f(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_mat_raw(&self, uniform: i32, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform on unbound shader");
unsafe {
gl::UniformMatrix4fv(uniform, 1, 0, mat.transpose().rows.as_ptr() as *const f32);
}
}
pub fn set_uniform_mat(&self, uniform: &str, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
self.set_uniform_mat_raw(self.get_uniform_loc(&uniform), &mat);
}
pub fn set_proj(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_proj' on unbound shader");
self.set_uniform_mat_raw(self.proj_loc, &mat);
}
pub fn set_view(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_view' on unbound shader");
self.set_uniform_mat_raw(self.view_loc, &mat);
}
}
pub struct ShaderBuilder {
attributes: Vec<String>,
varyings: Vec<String>,
uniforms: Vec<String>,
vertex_body: String,
fragment_body: String,
use_3d: bool,
use_proj: bool,
use_view: bool,
use_highp: bool,
}
impl ShaderBuilder {
pub fn new() -> Self {
ShaderBuilder {
attributes: Vec::new(),
varyings: Vec::new(),
uniforms: Vec::new(),
vertex_body: String::new(),
fragment_body: String::new(),
use_3d: false,
use_proj: false,
use_view: false,
use_highp: false,
}
}
pub fn use_3d(mut self) -> Self { self.use_3d = true; self }
pub fn use_proj(mut self) -> Self { self.use_proj = true; self.uniform("proj", "mat4") }
pub fn use_view(mut self) -> Self { self.use_view = true; self.uniform("view", "mat4") }
pub fn use_highp(mut self) -> Self { self.use_highp = true; self }
pub fn vertex(mut self, data: &str) -> Self {
write!(&mut self.vertex_body, "{};\n", data).unwrap(); self
}
pub fn fragment(mut self, data: &str) -> Self {
write!(&mut self.fragment_body, "{};\n", data).unwrap(); self
}
pub fn uniform(mut self, name: &str, ty: &str) -> Self {
self.uniforms.push(format!("{} u_{}", ty, name)); self
}
pub fn attribute(mut self, name: &str, ty: &str) -> Self {
if name == "position" {
println!("Tried to overwrite 'position' attribute while building shader - ignoring");
return self
}
self.attributes.push(format!("{} {}", ty, name)); self
}
pub fn varying(mut self, name: &str, ty: &str) -> Self {
self.varyings.push(format!("{} v_{}", ty, name)); self
}
pub fn frag_attribute(mut self, name: &str, ty: &str) -> Self {
self.attributes.push(format!("{} {}", ty, name));
self.varyings.push(format!("{} v_{}", ty, name));
write!(&mut self.vertex_body, "v_{} = {};\n", name, name).unwrap();
self
}
pub fn output(mut self, expr: &str) -> Self {
write!(&mut self.fragment_body, "gl_FragColor = {};\n", expr).unwrap();
self
}
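// Illustrative usage sketch (not from the original source): the builder methods
// above each take `self` by value and return it, so a vertex/fragment source
// pair can be assembled by chaining:
//
//     let (vert, frag) = ShaderBuilder::new()
//         .use_proj()
//         .use_view()
//         .frag_attribute("color", "vec3")
//         .output("vec4(v_color, 1.0)")
//         .finalize_source();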
pub fn finalize_source(mut self) -> (String, String) {
let mut varyings_and_uniforms = String::new();
for v in self.varyings.iter() { write!(&mut varyings_and_uniforms, "varying {};\n", v).unwrap(); }
for u in self.uniforms.iter() { write!(&mut varyings_and_uniforms, "uniform {};\n", u).unwrap(); }
let mut vert_src = String::new();
let mut frag_src = String::new();
let precision = if self.use_highp { "precision highp float;" } else { "precision mediump float;" };
write!(&mut vert_src, "{}\n", precision).unwrap();
write!(&mut frag_src, "{}\n", precision).unwrap();
let position_attr_ty = if self.use_3d { "vec3" } else { "vec2" };
write!(&mut vert_src, "attribute {} position;\n", position_attr_ty).unwrap();
for a in self.attributes.iter() { write!(&mut vert_src, "attribute {};\n", a).unwrap(); }
let mut gl_position = String::from("gl_Position = ");
if self.use_proj { gl_position.push_str("u_proj * "); }
if self.use_view { gl_position.push_str("u_view * "); }
if self.use_3d {
gl_position.push_str("vec4(position, 1.0);\n");
} else {
gl_position.push_str("vec4(position, 0.0, 1.0);\n");
}
self.vertex_body = format!("{}{}", gl_position, self.vertex_body);
let mut bodies = [&mut self.vertex_body, &mut self.fragment_body];
for (sh, body) in [&mut vert_src, &mut frag_src].iter_mut().zip(bodies.iter_mut()) {
write!(sh, "\n{}\n", varyings_and_uniforms).unwrap();
let mut position = 0;
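// The scan below walks the remaining body character by character, tracking
// brace nesting so it can locate the end of a `func ...{...}` block: it stops
// (returns None) either at a '}' seen before any '{' has opened, or at the '}'
// closing the outermost block, and `count()` then yields the number of
// characters consumed, i.e. the block length.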
while let Some(found) = body[position..].find("func ") {
let start = position + found;
let length = body[start..].chars()
.scan((false, 0), |acc, c| {
let (body, nesting) = *acc;
*acc = match (body, nesting, c) {
(false, _, '}') => return None,
(true, 1, '}') => return None,
(false, 0, '{') => (true, 1),
(true, x, '{') => (true, x+1),
(true, x, '}') => (true, x-1),
_ => *acc,
};
Some(*acc)
})
.count();
let end = start + length + 1;
write!(sh, "{}\n", &body[start+5..end]).unwrap();
body.replace_range(start..end, "");
position = start;
}
write!(sh, "void main() {{\n{}}}\n", body).unwrap();
}
(vert_src, frag_src)
}
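// Illustrative sketch (an assumption; exact whitespace may differ) of the
// vertex source finalize_source emits for a 2D shader with no extra
// attributes, varyings, or uniforms:
//
//     precision mediump float;
//     attribute vec2 position;
//
//     void main() {
//     gl_Position = vec4(position, 0.0, 1.0);
//     }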
pub fn finalize(self) -> Result<Shader, String> {
use std::ffi::CString;
let attributes = self.attributes.iter()
.map(|a| CString::new(a.split(' ').nth(1).unwrap()).unwrap())
|
.collect::<Vec<_>>();
|
random_line_split
|
|
shader.rs
|
this please
gl::BindAttribLocation(program, 0, b"position\0".as_ptr() as _);
gl::LinkProgram(program);
let mut status = 0i32;
gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);
if status == 0 {
let mut buf = [0u8; 1024];
let mut len = 0;
gl::GetProgramInfoLog(program, buf.len() as _, &mut len, buf.as_mut_ptr() as _);
return Err(CStr::from_bytes_with_nul_unchecked(&buf[..len as usize]).to_string_lossy().into());
}
gl::DeleteShader(vs);
gl::DeleteShader(fs);
Ok(Shader {
gl_handle: program,
proj_loc: gl::GetUniformLocation(program, b"u_proj\0".as_ptr() as _),
view_loc: gl::GetUniformLocation(program, b"u_view\0".as_ptr() as _),
})
}
}
pub const fn invalid() -> Shader {
Shader {
gl_handle: 0,
proj_loc: 0,
view_loc: 0,
}
}
fn get_currently_bound_raw() -> u32 {
unsafe {
let mut handle = 0;
gl::GetIntegerv(gl::CURRENT_PROGRAM, &mut handle);
handle as u32
}
}
pub fn use_program(&self) {
unsafe {
gl::UseProgram(self.gl_handle);
}
}
pub fn is_bound(&self) -> bool {
self.gl_handle == Shader::get_currently_bound_raw()
}
pub fn get_uniform_loc(&self, uniform: &str) -> i32 {
use std::ffi::CString;
unsafe {
let cstr = CString::new(uniform).unwrap();
gl::GetUniformLocation(self.gl_handle, cstr.as_ptr())
}
}
pub fn set_uniform_vec2(&self, uniform: &str, v: Vec2) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform2f(self.get_uniform_loc(&uniform), v.x, v.y);
}
}
pub fn set_uniform_vec3<V>(&self, uniform: &str, v: V) where V: Into<Vec3> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform3f(self.get_uniform_loc(&uniform), v.x, v.y, v.z);
}
}
pub fn set_uniform_vec4<V>(&self, uniform: &str, v: V) where V: Into<Vec4> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform4f(self.get_uniform_loc(&uniform), v.x, v.y, v.z, v.w);
}
}
pub fn set_uniform_i32(&self, uniform: &str, v: i32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1i(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_f32(&self, uniform: &str, v: f32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1f(self.get_uniform_loc(&uniform), v);
}
}
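// Illustrative usage sketch (not from the original source): every setter above
// asserts is_bound(), so the program must be made current first. Vec3::new is
// an assumed constructor here.
//
//     shader.use_program();
//     shader.set_uniform_f32("time", t);
//     shader.set_uniform_vec3("light_dir", Vec3::new(0.0, 1.0, 0.0));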
pub fn set_uniform_mat_raw(&self, uniform: i32, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform on unbound shader");
unsafe {
gl::UniformMatrix4fv(uniform, 1, 0, mat.transpose().rows.as_ptr() as *const f32);
}
}
pub fn set_uniform_mat(&self, uniform: &str, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
self.set_uniform_mat_raw(self.get_uniform_loc(&uniform), &mat);
}
pub fn set_proj(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_proj' on unbound shader");
self.set_uniform_mat_raw(self.proj_loc, &mat);
}
pub fn set_view(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_view' on unbound shader");
self.set_uniform_mat_raw(self.view_loc, &mat);
}
}
pub struct ShaderBuilder {
attributes: Vec<String>,
varyings: Vec<String>,
uniforms: Vec<String>,
vertex_body: String,
fragment_body: String,
use_3d: bool,
use_proj: bool,
use_view: bool,
use_highp: bool,
}
impl ShaderBuilder {
pub fn new() -> Self {
ShaderBuilder {
attributes: Vec::new(),
varyings: Vec::new(),
uniforms: Vec::new(),
vertex_body: String::new(),
fragment_body: String::new(),
use_3d: false,
use_proj: false,
use_view: false,
use_highp: false,
}
}
pub fn use_3d(mut self) -> Self { self.use_3d = true; self }
pub fn use_proj(mut self) -> Self { self.use_proj = true; self.uniform("proj", "mat4") }
pub fn use_view(mut self) -> Self { self.use_view = true; self.uniform("view", "mat4") }
pub fn use_highp(mut self) -> Self { self.use_highp = true; self }
pub fn vertex(mut self, data: &str) -> Self {
write!(&mut self.vertex_body, "{};\n", data).unwrap(); self
}
pub fn fragment(mut self, data: &str) -> Self {
write!(&mut self.fragment_body, "{};\n", data).unwrap(); self
}
pub fn uniform(mut self, name: &str, ty: &str) -> Self
|
pub fn attribute(mut self, name: &str, ty: &str) -> Self {
if name == "position" {
println!("Tried to overwrite 'position' attribute while building shader - ignoring");
return self
}
self.attributes.push(format!("{} {}", ty, name)); self
}
pub fn varying(mut self, name: &str, ty: &str) -> Self {
self.varyings.push(format!("{} v_{}", ty, name)); self
}
pub fn frag_attribute(mut self, name: &str, ty: &str) -> Self {
self.attributes.push(format!("{} {}", ty, name));
self.varyings.push(format!("{} v_{}", ty, name));
write!(&mut self.vertex_body, "v_{} = {};\n", name, name).unwrap();
self
}
pub fn output(mut self, expr: &str) -> Self {
write!(&mut self.fragment_body, "gl_FragColor = {};\n", expr).unwrap();
self
}
pub fn finalize_source(mut self) -> (String, String) {
let mut varyings_and_uniforms = String::new();
for v in self.varyings.iter() { write!(&mut varyings_and_uniforms, "varying {};\n", v).unwrap(); }
for u in self.uniforms.iter() { write!(&mut varyings_and_uniforms, "uniform {};\n", u).unwrap(); }
let mut vert_src = String::new();
let mut frag_src = String::new();
let precision = if self.use_highp { "precision highp float;" } else { "precision mediump float;" };
write!(&mut vert_src, "{}\n", precision).unwrap();
write!(&mut frag_src, "{}\n", precision).unwrap();
let position_attr_ty = if self.use_3d { "vec3" } else { "vec2" };
write!(&mut vert_src, "attribute {} position;\n", position_attr_ty).unwrap();
for a in self.attributes.iter() { write!(&mut vert_src, "attribute {};\n", a).unwrap(); }
let mut gl_position = String::from("gl_Position = ");
if self.use_proj { gl_position.push_str("u_proj * "); }
if self.use_view { gl_position.push_str("u_view * "); }
if self.use_3d {
gl_position.push_str("vec4(position, 1.0);\n");
} else {
gl_position.push_str("vec4(position, 0.0, 1.0);\n");
}
self.vertex_body = format!("{}{}", gl_position, self.vertex_body);
let mut bodies = [&mut self.vertex_body, &mut self.fragment_body];
for (sh, body) in [&mut vert_src, &mut frag_src].iter_mut().zip(bodies.iter_mut()) {
write!(sh, "\n{}\n", vary
|
{
self.uniforms.push(format!("{} u_{}", ty, name)); self
}
|
identifier_body
|
shader.rs
|
of this please
gl::BindAttribLocation(program, 0, b"position\0".as_ptr() as _);
gl::LinkProgram(program);
let mut status = 0i32;
gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);
if status == 0 {
let mut buf = [0u8; 1024];
let mut len = 0;
gl::GetProgramInfoLog(program, buf.len() as _, &mut len, buf.as_mut_ptr() as _);
return Err(CStr::from_bytes_with_nul_unchecked(&buf[..len as usize]).to_string_lossy().into());
}
gl::DeleteShader(vs);
gl::DeleteShader(fs);
Ok(Shader {
gl_handle: program,
proj_loc: gl::GetUniformLocation(program, b"u_proj\0".as_ptr() as _),
view_loc: gl::GetUniformLocation(program, b"u_view\0".as_ptr() as _),
})
}
}
pub const fn invalid() -> Shader {
Shader {
gl_handle: 0,
proj_loc: 0,
view_loc: 0,
}
}
fn get_currently_bound_raw() -> u32 {
unsafe {
let mut handle = 0;
gl::GetIntegerv(gl::CURRENT_PROGRAM, &mut handle);
handle as u32
}
}
pub fn use_program(&self) {
unsafe {
gl::UseProgram(self.gl_handle);
}
}
pub fn is_bound(&self) -> bool {
self.gl_handle == Shader::get_currently_bound_raw()
}
pub fn get_uniform_loc(&self, uniform: &str) -> i32 {
use std::ffi::CString;
unsafe {
let cstr = CString::new(uniform).unwrap();
gl::GetUniformLocation(self.gl_handle, cstr.as_ptr())
}
}
pub fn set_uniform_vec2(&self, uniform: &str, v: Vec2) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform2f(self.get_uniform_loc(&uniform), v.x, v.y);
}
}
pub fn
|
<V>(&self, uniform: &str, v: V) where V: Into<Vec3> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform3f(self.get_uniform_loc(&uniform), v.x, v.y, v.z);
}
}
pub fn set_uniform_vec4<V>(&self, uniform: &str, v: V) where V: Into<Vec4> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform4f(self.get_uniform_loc(&uniform), v.x, v.y, v.z, v.w);
}
}
pub fn set_uniform_i32(&self, uniform: &str, v: i32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1i(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_f32(&self, uniform: &str, v: f32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1f(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_mat_raw(&self, uniform: i32, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform on unbound shader");
unsafe {
gl::UniformMatrix4fv(uniform, 1, 0, mat.transpose().rows.as_ptr() as *const f32);
}
}
pub fn set_uniform_mat(&self, uniform: &str, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
self.set_uniform_mat_raw(self.get_uniform_loc(&uniform), &mat);
}
pub fn set_proj(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_proj' on unbound shader");
self.set_uniform_mat_raw(self.proj_loc, &mat);
}
pub fn set_view(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_view' on unbound shader");
self.set_uniform_mat_raw(self.view_loc, &mat);
}
}
pub struct ShaderBuilder {
attributes: Vec<String>,
varyings: Vec<String>,
uniforms: Vec<String>,
vertex_body: String,
fragment_body: String,
use_3d: bool,
use_proj: bool,
use_view: bool,
use_highp: bool,
}
impl ShaderBuilder {
pub fn new() -> Self {
ShaderBuilder {
attributes: Vec::new(),
varyings: Vec::new(),
uniforms: Vec::new(),
vertex_body: String::new(),
fragment_body: String::new(),
use_3d: false,
use_proj: false,
use_view: false,
use_highp: false,
}
}
pub fn use_3d(mut self) -> Self { self.use_3d = true; self }
pub fn use_proj(mut self) -> Self { self.use_proj = true; self.uniform("proj", "mat4") }
pub fn use_view(mut self) -> Self { self.use_view = true; self.uniform("view", "mat4") }
pub fn use_highp(mut self) -> Self { self.use_highp = true; self }
pub fn vertex(mut self, data: &str) -> Self {
write!(&mut self.vertex_body, "{};\n", data).unwrap(); self
}
pub fn fragment(mut self, data: &str) -> Self {
write!(&mut self.fragment_body, "{};\n", data).unwrap(); self
}
pub fn uniform(mut self, name: &str, ty: &str) -> Self {
self.uniforms.push(format!("{} u_{}", ty, name)); self
}
pub fn attribute(mut self, name: &str, ty: &str) -> Self {
if name == "position" {
println!("Tried to overwrite 'position' attribute while building shader - ignoring");
return self
}
self.attributes.push(format!("{} {}", ty, name)); self
}
pub fn varying(mut self, name: &str, ty: &str) -> Self {
self.varyings.push(format!("{} v_{}", ty, name)); self
}
pub fn frag_attribute(mut self, name: &str, ty: &str) -> Self {
self.attributes.push(format!("{} {}", ty, name));
self.varyings.push(format!("{} v_{}", ty, name));
write!(&mut self.vertex_body, "v_{} = {};\n", name, name).unwrap();
self
}
pub fn output(mut self, expr: &str) -> Self {
write!(&mut self.fragment_body, "gl_FragColor = {};\n", expr).unwrap();
self
}
pub fn finalize_source(mut self) -> (String, String) {
let mut varyings_and_uniforms = String::new();
for v in self.varyings.iter() { write!(&mut varyings_and_uniforms, "varying {};\n", v).unwrap(); }
for u in self.uniforms.iter() { write!(&mut varyings_and_uniforms, "uniform {};\n", u).unwrap(); }
let mut vert_src = String::new();
let mut frag_src = String::new();
let precision = if self.use_highp { "precision highp float;" } else { "precision mediump float;" };
write!(&mut vert_src, "{}\n", precision).unwrap();
write!(&mut frag_src, "{}\n", precision).unwrap();
let position_attr_ty = if self.use_3d { "vec3" } else { "vec2" };
write!(&mut vert_src, "attribute {} position;\n", position_attr_ty).unwrap();
for a in self.attributes.iter() { write!(&mut vert_src, "attribute {};\n", a).unwrap(); }
let mut gl_position = String::from("gl_Position = ");
if self.use_proj { gl_position.push_str("u_proj * "); }
if self.use_view { gl_position.push_str("u_view * "); }
if self.use_3d {
gl_position.push_str("vec4(position, 1.0);\n");
} else {
gl_position.push_str("vec4(position, 0.0, 1.0);\n");
}
self.vertex_body = format!("{}{}", gl_position, self.vertex_body);
let mut bodies = [&mut self.vertex_body, &mut self.fragment_body];
for (sh, body) in [&mut vert_src, &mut frag_src].iter_mut().zip(bodies.iter_mut()) {
write!(sh, "\n{}\n", vary
|
set_uniform_vec3
|
identifier_name
|
shader.rs
|
of this please
gl::BindAttribLocation(program, 0, b"position\0".as_ptr() as _);
gl::LinkProgram(program);
let mut status = 0i32;
gl::GetProgramiv(program, gl::LINK_STATUS, &mut status);
if status == 0 {
let mut buf = [0u8; 1024];
let mut len = 0;
gl::GetProgramInfoLog(program, buf.len() as _, &mut len, buf.as_mut_ptr() as _);
return Err(CStr::from_bytes_with_nul_unchecked(&buf[..len as usize]).to_string_lossy().into());
}
gl::DeleteShader(vs);
gl::DeleteShader(fs);
Ok(Shader {
gl_handle: program,
proj_loc: gl::GetUniformLocation(program, b"u_proj\0".as_ptr() as _),
view_loc: gl::GetUniformLocation(program, b"u_view\0".as_ptr() as _),
})
}
}
pub const fn invalid() -> Shader {
Shader {
gl_handle: 0,
proj_loc: 0,
view_loc: 0,
}
}
fn get_currently_bound_raw() -> u32 {
unsafe {
let mut handle = 0;
gl::GetIntegerv(gl::CURRENT_PROGRAM, &mut handle);
handle as u32
}
}
pub fn use_program(&self) {
unsafe {
gl::UseProgram(self.gl_handle);
}
}
pub fn is_bound(&self) -> bool {
self.gl_handle == Shader::get_currently_bound_raw()
}
pub fn get_uniform_loc(&self, uniform: &str) -> i32 {
use std::ffi::CString;
unsafe {
let cstr = CString::new(uniform).unwrap();
gl::GetUniformLocation(self.gl_handle, cstr.as_ptr())
}
}
pub fn set_uniform_vec2(&self, uniform: &str, v: Vec2) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform2f(self.get_uniform_loc(&uniform), v.x, v.y);
}
}
pub fn set_uniform_vec3<V>(&self, uniform: &str, v: V) where V: Into<Vec3> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform3f(self.get_uniform_loc(&uniform), v.x, v.y, v.z);
}
}
pub fn set_uniform_vec4<V>(&self, uniform: &str, v: V) where V: Into<Vec4> {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
let v = v.into();
gl::Uniform4f(self.get_uniform_loc(&uniform), v.x, v.y, v.z, v.w);
}
}
pub fn set_uniform_i32(&self, uniform: &str, v: i32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1i(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_f32(&self, uniform: &str, v: f32) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
unsafe {
gl::Uniform1f(self.get_uniform_loc(&uniform), v);
}
}
pub fn set_uniform_mat_raw(&self, uniform: i32, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform on unbound shader");
unsafe {
gl::UniformMatrix4fv(uniform, 1, 0, mat.transpose().rows.as_ptr() as *const f32);
}
}
pub fn set_uniform_mat(&self, uniform: &str, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform '{}' on unbound shader", uniform);
self.set_uniform_mat_raw(self.get_uniform_loc(&uniform), &mat);
}
pub fn set_proj(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_proj' on unbound shader");
self.set_uniform_mat_raw(self.proj_loc, &mat);
}
pub fn set_view(&self, mat: &Mat4) {
assert!(self.is_bound(), "Tried to set uniform 'u_view' on unbound shader");
self.set_uniform_mat_raw(self.view_loc, &mat);
}
}
pub struct ShaderBuilder {
attributes: Vec<String>,
varyings: Vec<String>,
uniforms: Vec<String>,
vertex_body: String,
fragment_body: String,
use_3d: bool,
use_proj: bool,
use_view: bool,
use_highp: bool,
}
impl ShaderBuilder {
pub fn new() -> Self {
ShaderBuilder {
attributes: Vec::new(),
varyings: Vec::new(),
uniforms: Vec::new(),
vertex_body: String::new(),
fragment_body: String::new(),
use_3d: false,
use_proj: false,
use_view: false,
use_highp: false,
}
}
pub fn use_3d(mut self) -> Self { self.use_3d = true; self }
pub fn use_proj(mut self) -> Self { self.use_proj = true; self.uniform("proj", "mat4") }
pub fn use_view(mut self) -> Self { self.use_view = true; self.uniform("view", "mat4") }
pub fn use_highp(mut self) -> Self { self.use_highp = true; self }
pub fn vertex(mut self, data: &str) -> Self {
write!(&mut self.vertex_body, "{};\n", data).unwrap(); self
}
pub fn fragment(mut self, data: &str) -> Self {
write!(&mut self.fragment_body, "{};\n", data).unwrap(); self
}
pub fn uniform(mut self, name: &str, ty: &str) -> Self {
self.uniforms.push(format!("{} u_{}", ty, name)); self
}
pub fn attribute(mut self, name: &str, ty: &str) -> Self {
if name == "position" {
println!("Tried to overwrite 'position' attribute while building shader - ignoring");
return self
}
self.attributes.push(format!("{} {}", ty, name)); self
}
pub fn varying(mut self, name: &str, ty: &str) -> Self {
self.varyings.push(format!("{} v_{}", ty, name)); self
}
pub fn frag_attribute(mut self, name: &str, ty: &str) -> Self {
self.attributes.push(format!("{} {}", ty, name));
self.varyings.push(format!("{} v_{}", ty, name));
write!(&mut self.vertex_body, "v_{} = {};\n", name, name).unwrap();
self
}
pub fn output(mut self, expr: &str) -> Self {
write!(&mut self.fragment_body, "gl_FragColor = {};\n", expr).unwrap();
self
}
pub fn finalize_source(mut self) -> (String, String) {
let mut varyings_and_uniforms = String::new();
for v in self.varyings.iter() { write!(&mut varyings_and_uniforms, "varying {};\n", v).unwrap(); }
for u in self.uniforms.iter() { write!(&mut varyings_and_uniforms, "uniform {};\n", u).unwrap(); }
let mut vert_src = String::new();
let mut frag_src = String::new();
let precision = if self.use_highp { "precision highp float;" } else { "precision mediump float;" };
write!(&mut vert_src, "{}\n", precision).unwrap();
write!(&mut frag_src, "{}\n", precision).unwrap();
let position_attr_ty = if self.use_3d { "vec3" } else { "vec2" };
write!(&mut vert_src, "attribute {} position;\n", position_attr_ty).unwrap();
for a in self.attributes.iter() { write!(&mut vert_src, "attribute {};\n", a).unwrap(); }
let mut gl_position = String::from("gl_Position = ");
if self.use_proj { gl_position.push_str("u_proj * "); }
if self.use_view { gl_position.push_str("u_view * "); }
if self.use_3d
|
else {
gl_position.push_str("vec4(position, 0.0, 1.0);\n");
}
self.vertex_body = format!("{}{}", gl_position, self.vertex_body);
let mut bodies = [&mut self.vertex_body, &mut self.fragment_body];
for (sh, body) in [&mut vert_src, &mut frag_src].iter_mut().zip(bodies.iter_mut()) {
write!(sh, "\n{}\n", vary
|
{
gl_position.push_str("vec4(position, 1.0);\n");
}
|
conditional_block
|
console.js
|
of the GNU General Public License
* as published by the Free Software Foundation; either version 2
* of the License, or any later version.
*
* Aloha Editor is distributed in the hope that it will be useful,
|
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program; if not, write to the Free Software
* Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*
* As an additional permission to the GNU GPL version 2, you may distribute
* non-source (e.g., minimized or compacted) forms of the Aloha-Editor
* source code without the copy of the GNU GPL normally required,
* provided you include this license notice and a URL through which
* recipients can access the Corresponding Source.
*/
define([
'aloha/core',
'util/class',
'jquery'
], function (Aloha, Class, jQuery) {
"use strict";
/**
* This is the Aloha Log
* @namespace Aloha
* @class Log
* @singleton
*/
var AlohaConsole = Class.extend({
/**
* Initialize the logging
* @hide
*/
init:function () {
// initialize the logging settings (if not present)
if (typeof Aloha.settings.logLevels === 'undefined' || !Aloha.settings.logLevels) {
Aloha.settings.logLevels = {
'error':true,
'warn':true
};
}
// initialize the logHistory settings (if not present)
if (typeof Aloha.settings.logHistory === 'undefined' || !Aloha.settings.logHistory) {
Aloha.settings.logHistory = {};
}
// set the default values for the loghistory
if (!Aloha.settings.logHistory.maxEntries) {
Aloha.settings.logHistory.maxEntries = 100;
}
if (!Aloha.settings.logHistory.highWaterMark) {
Aloha.settings.logHistory.highWaterMark = 90;
}
if (!Aloha.settings.logHistory.levels) {
Aloha.settings.logHistory.levels = {
'error':true,
'warn':true
};
}
this.flushLogHistory();
Aloha.trigger('aloha-logger-ready');
},
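/**
 * Illustrative settings sketch (not from the original source): values an
 * integrator might set before init() runs, overriding the defaults above.
 *
 *   Aloha.settings.logLevels = { 'error': true, 'warn': true, 'info': true };
 *   Aloha.settings.logHistory = { maxEntries: 200, highWaterMark: 80 };
 */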
/**
* Log History as array of Message Objects. Every object has the properties
* 'level', 'component', 'message' and 'date'
* @property
* @type Array
* @hide
*/
logHistory:[],
/**
* Flag, which is set as soon as the highWaterMark for the log history is reached.
* This flag is reset on every call of flushLogHistory()
* @hide
*/
highWaterMarkReached:false,
/**
* Logs a message to the console
* @method
* @param {String} level Level of the log ('error', 'warn', 'info' or 'debug')
* @param {String} component Component that calls the log
* @param {String} message log message
*/
log:function (level, component, message) {
// log ('Logging message');
if (typeof component === 'undefined') {
message = level;
}
if (typeof component !== 'string' && component && component.toString) {
component = component.toString();
}
// log ('warn', 'Warning message');
if (typeof message === 'undefined') {
message = component;
component = undefined;
}
if (typeof level === 'undefined' || !level) {
level = 'log';
}
level = level.toLowerCase();
if (typeof Aloha.settings.logLevels === "undefined") {
return;
}
// now check whether the log level is activated
if (!Aloha.settings.logLevels[level]) {
return;
}
component = component || "Unkown Aloha Component";
this.addToLogHistory({
'level':level,
'component':component,
'message':message,
'date':new Date()
});
var console = window.console;
switch (level) {
case 'error':
if (window.console && console.error) {
// FIXME:
// Using console.error rather than throwing an error is very
// problematic because we get no stack trace.
// We ought to consider doing the following:
// throw component + ': ' + message;
if (!component && !message) {
console.error("Error occured without message and component");
} else {
console.error(component + ': ' + message);
}
}
break;
case 'warn':
if (window.console && console.warn) {
console.warn(component + ': ' + message);
}
break;
case 'info':
if (window.console && console.info) {
console.info(component + ': ' + message);
}
break;
case 'debug':
if (window.console && console.log) {
console.log(component + ' [' + level + ']: ' + message);
}
break;
default:
if (window.console && console.log) {
console.log(component + ' [' + level + ']: ' + message);
}
break;
}
},
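/**
 * Illustrative usage sketch (not from the original source); 'Console' is a
 * hypothetical name for this singleton instance:
 *
 *   Console.log('warn', 'myplugin', 'Something looks off');
 *   Console.error('myplugin', 'Request failed');
 */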
/**
* Log a message of log level 'error'
* @method
* @param {String} component Component that calls the log
* @param {String} message log message
*/
error:function (component, message) {
this.log('error', component, message);
},
/**
* Log a message of log level 'warn'
* @method
* @param {String} component Component that calls the log
* @param {String} message log message
*/
warn:function (component, message) {
this.log('warn', component, message);
},
/**
* Log a message of log level 'info'
* @method
* @param {String} component Component that calls the log
* @param {String} message log message
*/
info:function (component, message) {
this.log('info', component, message);
},
/**
* Log a message of log level 'debug'
* @param {String} component Component that calls the log
* @param {String} message log message
*/
debug:function (component, message) {
this.log('debug', component, message);
},
/**
* Method to mark a function as deprecated for developers.
* @param {String} component String that calls the log
* @param {String} message log message
*/
deprecated:function (component, message) {
this.log('warn', component, message);
// help the developer to locate the call.
if (Aloha.settings.logLevels.deprecated) {
throw new Error(message);
}
},
/**
* Check whether the given log level is currently enabled
* @param {String} level
* @return true when log level is enabled, false if not
*/
isLogLevelEnabled:function (level) {
return Aloha.settings && Aloha.settings.logLevels && Aloha.settings.logLevels[level];
},
/**
* Check whether error logging is enabled
* @return true if error logging is enabled, false if not
*/
isErrorEnabled:function () {
return this.isLogLevelEnabled('error');
},
/**
* Check whether warn logging is enabled
* @return true if warn logging is enabled, false if not
*/
isWarnEnabled:function () {
return this.isLogLevelEnabled('warn');
},
/**
* Check whether info logging is enabled
* @return true if info logging is enabled, false if not
*/
isInfoEnabled:function () {
return this.isLogLevelEnabled('info');
},
/**
* Check whether debug logging is enabled
* @return true if debug logging is enabled, false if not
*/
isDebugEnabled:function () {
return this.isLogLevelEnabled('debug');
},
/**
* Add the given entry to the log history. Check whether the highWaterMark has been reached, and fire an event if so.
* @param {Object} entry entry to be added to the log history
* @hide
*/
addToLogHistory:function (entry) {
if (!Aloha.settings.logHistory) {
this.init();
}
// when maxEntries is set to something illegal, we do nothing (log history is disabled)
// check whether the level is one we like to have logged
if (Aloha.settings.logHistory.maxEntries <= 0 || !Aloha.settings.logHistory.levels[entry.level]) {
return;
}
// first add the entry as last element to the history array
this.logHistory.push(entry);
// check whether the highWaterMark was reached, if so, fire an event
if (!this.highWaterMarkReached) {
if (this.logHistory.length >= Aloha.settings.logHistory.maxEntries * Aloha.settings.logHistory.highWaterMark / 100) {
// fire the event
Aloha.trigger('aloha-log-full');
// set the flag (so we will not fire the event again until the logHistory is flushed)
this.highWaterMarkReached = true
|
* but WITHOUT ANY WARRANTY; without even the implied warranty of
|
random_line_split
|