adding projects to repo
4
.gitignore
vendored
Normal file
@@ -0,0 +1,4 @@
symlink.sh
symlink.sh
symlink.sh
symlink.sh
File diff suppressed because one or more lines are too long
BIN
Image Classifier (NN) (Udacity)/assets/Flowers.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 715 KiB
BIN
Image Classifier (NN) (Udacity)/assets/inference_example.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 152 KiB
2296
Image Classifier (NN) (Udacity)/neural_network_project.ipynb
Normal file
File diff suppressed because one or more lines are too long
157
Image Classifier (NN) (Udacity)/predict.py
Normal file
@@ -0,0 +1,157 @@
#importing necessary libraries
import matplotlib.pyplot as plt
import torch
import numpy as np
from torch import nn
from torch import optim
from torchvision import datasets, models, transforms
import torch.nn.functional as F
import torch.utils.data
import pandas as pd
from collections import OrderedDict
from PIL import Image
import argparse
import json

# define mandatory and optional arguments for the script
parser = argparse.ArgumentParser(description="Parser of prediction script")

parser.add_argument('image_dir', help='Provide path to image. Mandatory argument', type=str)
parser.add_argument('load_dir', help='Provide path to checkpoint. Mandatory argument', type=str)
parser.add_argument('--top_k', help='Top K most likely classes. Optional', type=int)
parser.add_argument('--category_names', help='Mapping of categories to real names. JSON file name to be provided. Optional', type=str)
parser.add_argument('--GPU', help='Option to use GPU. Optional', type=str)


# a function that loads a checkpoint and rebuilds the model
def loading_model(file_path):
    checkpoint = torch.load(file_path)  # loading checkpoint from a file
    if checkpoint['arch'] == 'alexnet':
        model = models.alexnet(pretrained=True)
    else:  # vgg13, as only 2 options are available
        model = models.vgg13(pretrained=True)
    model.classifier = checkpoint['classifier']
    model.load_state_dict(checkpoint['state_dict'])
    model.class_to_idx = checkpoint['mapping']

    for param in model.parameters():
        param.requires_grad = False  # turning off tuning of the model

    return model


# function to process a PIL image for use in a PyTorch model
def process_image(image):
    ''' Scales, crops, and normalizes a PIL image for a PyTorch model,
        returns a NumPy array
    '''
    im = Image.open(image)  # loading image
    width, height = im.size  # original size

    # the smaller side (width or height) should be kept at no more than 256
    if width > height:
        height = 256
        im.thumbnail((50000, height), Image.ANTIALIAS)
    else:
        width = 256
        im.thumbnail((width, 50000), Image.ANTIALIAS)

    width, height = im.size  # new size of im
    # crop 224x224 in the center
    reduce = 224
    left = (width - reduce) / 2
    top = (height - reduce) / 2
    right = left + 224
    bottom = top + 224
    im = im.crop((left, top, right, bottom))

    # preparing numpy array
    np_image = np.array(im) / 255  # to scale values from 0 to 1
    np_image -= np.array([0.485, 0.456, 0.406])
    np_image /= np.array([0.229, 0.224, 0.225])

    # PyTorch expects the color channel to be the first dimension, but it's the
    # third dimension in the PIL image and NumPy array. Move the color channel
    # first while retaining the order of the other two dimensions.
    np_image = np_image.transpose((2, 0, 1))
    return np_image


# defining the prediction function
def predict(image_path, model, topkl, device):
    ''' Predict the class (or classes) of an image using a trained deep learning model.
    '''
    image = process_image(image_path)  # loading image and processing it using the function defined above

    # we cannot pass the image to model.forward as-is: it expects a tensor, not a numpy array
    # converting to tensor
    if device == 'cuda':
        im = torch.from_numpy(image).type(torch.cuda.FloatTensor)
    else:
        im = torch.from_numpy(image).type(torch.FloatTensor)

    im = im.unsqueeze(dim=0)  # the forward method works with batches, so this gives us batch size = 1

    # enabling GPU/CPU
    model.to(device)
    im = im.to(device)  # .to() returns a new tensor, so the result must be assigned

    with torch.no_grad():
        output = model.forward(im)
    output_prob = torch.exp(output)  # converting log-probabilities into probabilities

    probs, indices = output_prob.topk(topkl)
    probs = probs.cpu()
    indices = indices.cpu()
    probs = probs.numpy()  # converting both to numpy arrays
    indices = indices.numpy()

    probs = probs.tolist()[0]  # converting both to lists
    indices = indices.tolist()[0]

    mapping = {val: key for key, val in
               model.class_to_idx.items()
               }

    classes = [mapping[item] for item in indices]
    classes = np.array(classes)  # converting to a NumPy array

    return probs, classes


# setting values for data loading
args = parser.parse_args()
file_path = args.image_dir

# defining device: either cuda or cpu
if args.GPU == 'GPU':
    device = 'cuda'
else:
    device = 'cpu'

# loading the JSON file if provided, else loading the default file name
if args.category_names:
    with open(args.category_names, 'r') as f:
        cat_to_name = json.load(f)
else:
    with open('cat_to_name.json', 'r') as f:
        cat_to_name = json.load(f)

# loading the model from the checkpoint provided
model = loading_model(args.load_dir)

# defining number of classes to be predicted. Default = 1
if args.top_k:
    nm_cl = args.top_k
else:
    nm_cl = 1

# calculating probabilities and classes
probs, classes = predict(file_path, model, nm_cl, device)

# preparing class_names using the mapping with cat_to_name
class_names = [cat_to_name[item] for item in classes]

for l in range(nm_cl):
    print("Number: {}/{}.. ".format(l + 1, nm_cl),
          "Class name: {}.. ".format(class_names[l]),
          "Probability: {:.3f}% ".format(probs[l] * 100),
          )
220
Image Classifier (NN) (Udacity)/train.py
Normal file
@@ -0,0 +1,220 @@
#importing necessary libraries
import matplotlib.pyplot as plt
import torch
import numpy as np
from torch import nn
from torch import optim
from torchvision import datasets, models, transforms
import torch.nn.functional as F
import torch.utils.data
import pandas as pd
from collections import OrderedDict
from PIL import Image
import argparse
import json

# define mandatory and optional arguments for the script
parser = argparse.ArgumentParser(description="Parser of training script")

parser.add_argument('data_dir', help='Provide data directory. Mandatory argument', type=str)
parser.add_argument('--save_dir', help='Provide saving directory. Optional argument', type=str)
parser.add_argument('--arch', help='vgg13 can be used if this argument is specified, otherwise alexnet will be used', type=str)
parser.add_argument('--lrn', help='Learning rate, default value 0.001', type=float)
parser.add_argument('--hidden_units', help='Hidden units in the classifier. Default value is 2048', type=int)
parser.add_argument('--epochs', help='Number of epochs', type=int)
parser.add_argument('--GPU', help='Option to use GPU', type=str)

# setting values for data loading
args = parser.parse_args()

data_dir = args.data_dir
train_dir = data_dir + '/train'
valid_dir = data_dir + '/valid'
test_dir = data_dir + '/test'

# defining device: either cuda or cpu
if args.GPU == 'GPU':
    device = 'cuda'
else:
    device = 'cpu'

# data loading
if data_dir:  # making sure we do have a value for data_dir
    # define transforms for the training, validation, and testing sets
    train_data_transforms = transforms.Compose([transforms.RandomRotation(30),
                                                transforms.RandomResizedCrop(224),
                                                transforms.RandomHorizontalFlip(),
                                                transforms.ToTensor(),
                                                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                                                ])

    valid_data_transforms = transforms.Compose([transforms.Resize(255),
                                                transforms.CenterCrop(224),
                                                transforms.ToTensor(),
                                                transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                                                ])

    test_data_transforms = transforms.Compose([transforms.Resize(255),
                                               transforms.CenterCrop(224),
                                               transforms.ToTensor(),
                                               transforms.Normalize([0.485, 0.456, 0.406], [0.229, 0.224, 0.225])
                                               ])

    # load the datasets with ImageFolder
    train_image_datasets = datasets.ImageFolder(train_dir, transform=train_data_transforms)
    valid_image_datasets = datasets.ImageFolder(valid_dir, transform=valid_data_transforms)
    test_image_datasets = datasets.ImageFolder(test_dir, transform=test_data_transforms)

    # using the image datasets and the transforms, define the dataloaders
    train_loader = torch.utils.data.DataLoader(train_image_datasets, batch_size=64, shuffle=True)
    valid_loader = torch.utils.data.DataLoader(valid_image_datasets, batch_size=64, shuffle=True)
    test_loader = torch.utils.data.DataLoader(test_image_datasets, batch_size=64, shuffle=True)
# end of data loading block

# mapping from category label to category name
with open('cat_to_name.json', 'r') as f:
    cat_to_name = json.load(f)


def load_model(arch, hidden_units):
    if arch == 'vgg13':  # setting model based on vgg13
        model = models.vgg13(pretrained=True)
        for param in model.parameters():
            param.requires_grad = False
        if hidden_units:  # in case hidden_units were given
            classifier = nn.Sequential(OrderedDict([
                ('fc1', nn.Linear(25088, 4096)),
                ('relu1', nn.ReLU()),
                ('dropout1', nn.Dropout(p=0.3)),
                ('fc2', nn.Linear(4096, hidden_units)),
                ('relu2', nn.ReLU()),
                ('dropout2', nn.Dropout(p=0.3)),
                ('fc3', nn.Linear(hidden_units, 102)),
                ('output', nn.LogSoftmax(dim=1))
            ]))
        else:  # if hidden_units not given
            classifier = nn.Sequential(OrderedDict([
                ('fc1', nn.Linear(25088, 4096)),
                ('relu1', nn.ReLU()),
                ('dropout1', nn.Dropout(p=0.3)),
                ('fc2', nn.Linear(4096, 2048)),
                ('relu2', nn.ReLU()),
                ('dropout2', nn.Dropout(p=0.3)),
                ('fc3', nn.Linear(2048, 102)),
                ('output', nn.LogSoftmax(dim=1))
            ]))
    else:  # setting model based on the default alexnet
        arch = 'alexnet'  # will be used for checkpoint saving, so should be explicitly defined
        model = models.alexnet(pretrained=True)
        for param in model.parameters():
            param.requires_grad = False
        if hidden_units:  # in case hidden_units were given
            classifier = nn.Sequential(OrderedDict([
                ('fc1', nn.Linear(9216, 4096)),
                ('relu1', nn.ReLU()),
                ('dropout1', nn.Dropout(p=0.3)),
                ('fc2', nn.Linear(4096, hidden_units)),
                ('relu2', nn.ReLU()),
                ('dropout2', nn.Dropout(p=0.3)),
                ('fc3', nn.Linear(hidden_units, 102)),
                ('output', nn.LogSoftmax(dim=1))
            ]))
        else:  # if hidden_units not given
            classifier = nn.Sequential(OrderedDict([
                ('fc1', nn.Linear(9216, 4096)),
                ('relu1', nn.ReLU()),
                ('dropout1', nn.Dropout(p=0.3)),
                ('fc2', nn.Linear(4096, 2048)),
                ('relu2', nn.ReLU()),
                ('dropout2', nn.Dropout(p=0.3)),
                ('fc3', nn.Linear(2048, 102)),
                ('output', nn.LogSoftmax(dim=1))
            ]))
    model.classifier = classifier  # we can set the classifier once here, as the if/else branches are mutually exclusive
    return model, arch


# defining the validation function; it will be used during training
def validation(model, valid_loader, criterion):
    model.to(device)

    valid_loss = 0
    accuracy = 0
    for inputs, labels in valid_loader:

        inputs, labels = inputs.to(device), labels.to(device)
        output = model.forward(inputs)
        valid_loss += criterion(output, labels).item()

        ps = torch.exp(output)
        equality = (labels.data == ps.max(dim=1)[1])
        accuracy += equality.type(torch.FloatTensor).mean()

    return valid_loss, accuracy


# loading the model using the function defined above
model, arch = load_model(args.arch, args.hidden_units)

# actual training of the model
# initializing criterion and optimizer
criterion = nn.NLLLoss()
if args.lrn:  # if a learning rate was provided
    optimizer = optim.Adam(model.classifier.parameters(), lr=args.lrn)
else:
    optimizer = optim.Adam(model.classifier.parameters(), lr=0.001)


model.to(device)  # device can be either cuda or cpu
# setting the number of epochs to be run
if args.epochs:
    epochs = args.epochs
else:
    epochs = 7

print_every = 40
steps = 0

# running through epochs
for e in range(epochs):
    running_loss = 0
    for ii, (inputs, labels) in enumerate(train_loader):
        steps += 1
        inputs, labels = inputs.to(device), labels.to(device)
        optimizer.zero_grad()  # the optimizer is working on classifier parameters only

        # Forward and backward passes
        outputs = model.forward(inputs)  # calculating output
        loss = criterion(outputs, labels)  # calculating loss (cost function)
        loss.backward()
        optimizer.step()  # performs a single optimization step
        running_loss += loss.item()  # loss.item() returns the scalar value of the loss function

        if steps % print_every == 0:
            model.eval()  # switching to evaluation mode so that dropout is turned off
            # turn off gradients for validation to save memory and computation
            with torch.no_grad():
                valid_loss, accuracy = validation(model, valid_loader, criterion)

            print("Epoch: {}/{}.. ".format(e + 1, epochs),
                  "Training Loss: {:.3f}.. ".format(running_loss / print_every),
                  "Valid Loss: {:.3f}.. ".format(valid_loss / len(valid_loader)),
                  "Valid Accuracy: {:.3f}%".format(accuracy / len(valid_loader) * 100))

            running_loss = 0
            # make sure training is back on
            model.train()

# saving the trained model
model.to('cpu')  # no need to use cuda for saving/loading the model
# save the checkpoint
model.class_to_idx = train_image_datasets.class_to_idx  # saving the mapping between predicted class
                                                        # and class name (the latter is numeric)

# creating a dictionary for model saving
checkpoint = {'classifier': model.classifier,
              'state_dict': model.state_dict(),
              'arch': arch,
              'mapping': model.class_to_idx
              }
# saving the trained model for future use
if args.save_dir:
    torch.save(checkpoint, args.save_dir + '/checkpoint.pth')
else:
    torch.save(checkpoint, 'checkpoint.pth')
68
README.md
@@ -1,3 +1,69 @@
# dtomlinson-cv

Repo showcasing example work

## Image Classifier using a Neural network

This is an example of the Udacity project `Machine Learning - Introduction Nanodegree Program` I completed, showing the steps taken to train an image classifier neural network on images of flowers.

The data set is too large for git, but examples of the images are available in the Jupyter workbook. We create a neural network to identify 102 different categories of flowers.

We then create utility functions `train.py` and `predict.py` to update the model and predict a flower from the command line, without having to retrain the whole model.

### `train.py`

You can select either Alexnet or VGG13 as a base model.

Arguments:

1. `data_dir: str` - Specify path to data directory
2. `save_dir: str` - Specify path to output directory
3. `arch: str` - Choose the base model
4. `lrn: float` - Specify the learning rate
5. `hidden_units: int` - Specify the hidden layers
6. `epochs: int` - Specify the epochs
7. `GPU: str` - If a GPU is available set this to true

An example invocation is shown below.
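A minimal sketch of running `train.py`, assuming the flower images live in `./flowers` and checkpoints should be written to `./checkpoints` (both paths are illustrative; the flags themselves come from the script's argument parser):

```bash
# train a vgg13-based classifier on the GPU for 5 epochs (paths are illustrative)
python train.py ./flowers --save_dir ./checkpoints --arch vgg13 --lrn 0.001 --hidden_units 2048 --epochs 5 --GPU GPU
```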
### `predict.py`

Arguments:

1. `image_dir: str` - Specify path to image
2. `load_dir: str` - Specify path to load in
3. `top_k: int` - Specify top k classes that are most likely
4. `category_names: str` - Specify JSON of mapping of categories to real names
5. `GPU: str` - If a GPU is available set this to true

An example invocation is shown below.
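A matching sketch of a `predict.py` call, assuming a checkpoint produced by the training command above (the image file name is a hypothetical example):

```bash
# report the top 3 most likely classes for one image, using the saved checkpoint
python predict.py ./flowers/test/1/image_06743.jpg ./checkpoints/checkpoint.pth --top_k 3 --category_names cat_to_name.json --GPU GPU
```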
## Anomaly Detection using a Neural network

Included is a jupyter notebook outlining the general steps during the development of a Neural Network to identify anomalies in key business metrics.

The general idea is:

1. Fit a distribution to the data; once we have this we can find the probability of a value being an anomaly. This turns our problem from unsupervised to supervised, as we can say (with a certain probability) whether or not we think a value is an anomaly. This supervised set is passed on.
2. Create a baseline Neural Network, and experiment with the hyperparameters, Stochastic Gradient Descent and a Gradient Boosting classifier.

A sketch of step 1 is shown below.
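The notebook itself is not reproduced here, but a minimal sketch of step 1 — fitting a distribution and labelling low-density points — might look like this (the generated metric values and the `1e-3` cutoff are illustrative assumptions, not the notebook's actual data or threshold):

```python
import numpy as np
from scipy import stats

# hypothetical business-metric samples; in practice these come from the notebook's dataset
values = np.random.normal(loc=100, scale=15, size=1000)

# step 1: fit a distribution to the data
mu, sigma = stats.norm.fit(values)

# points with very low probability density are labelled anomalies,
# turning the unsupervised problem into a supervised one
density = stats.norm.pdf(values, mu, sigma)
labels = density < 1e-3  # illustrative threshold

print(f"flagged {labels.sum()} of {len(values)} points as anomalies")
```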
## Vrops API

A python wrapper for the VROPS api. No such wrapper exists publicly to automate pulling statistics from VMs using VROPS. See the `README.md` inside the project for detailed documentation.

This will be released publicly as a module available on pypi.org (`pip`).

## Emoji finder

A simple script (for fun) that can look up an emoji on the command line by name, tag or description.

## (Udacity) Supervised Learning Project

Supervised learning project from the Udacity Machine Learning course. This showcases an example of fitting several supervised learning models to a given dataset, where the aim is to classify people on how likely they are to donate to a charity based on their income, job, marital status etc. See the `finding_donors.ipynb` Jupyter workbook for a walkthrough of the project.

## Splunk dashboard screenshotter

For Splunk there is no easy way to automate screenshots of dashboards. You can set a report to send a single graph as a PDF, or you can use the API to pull your data and use matplotlib (for example) to graph and send your data via email.

This script uses Selenium to log in, hide certain elements, and screenshot the body. It uses postfix to send an email directly from the server.

The script is complete - I will write this as a custom splunk command `(| dashboardscreenshot $NAME $EMAIL )` and bundle it into a Splunk app to release publicly on SplunkBase.

Included is an example PNG of a random website to showcase the functionality.
File diff suppressed because one or more lines are too long
56
Supervised Learning Project/README.md
Normal file
@@ -0,0 +1,56 @@
# Data Scientist Nanodegree
# Supervised Learning
## Project: Finding Donors for CharityML

### Install

This project requires **Python 3.x** and the following Python libraries installed:

- [NumPy](http://www.numpy.org/)
- [Pandas](http://pandas.pydata.org)
- [matplotlib](http://matplotlib.org/)
- [scikit-learn](http://scikit-learn.org/stable/)

You will also need to have software installed to run and execute an [iPython Notebook](http://ipython.org/notebook.html)

We recommend students install [Anaconda](https://www.continuum.io/downloads), a pre-packaged Python distribution that contains all of the necessary libraries and software for this project.

### Code

Template code is provided in the `finding_donors.ipynb` notebook file. You will also be required to use the included `visuals.py` Python file and the `census.csv` dataset file to complete your work. While some code has already been implemented to get you started, you will need to implement additional functionality when requested to successfully complete the project. Note that the code included in `visuals.py` is meant to be used out-of-the-box and not intended for students to manipulate. If you are interested in how the visualizations are created in the notebook, please feel free to explore this Python file.

### Run

In a terminal or command window, navigate to the top-level project directory `finding_donors/` (that contains this README) and run one of the following commands:

```bash
ipython notebook finding_donors.ipynb
```

or

```bash
jupyter notebook finding_donors.ipynb
```

This will open the iPython Notebook software and project file in your browser.

### Data

The modified census dataset consists of approximately 32,000 data points, with each datapoint having 13 features. This dataset is a modified version of the dataset published in the paper *"Scaling Up the Accuracy of Naive-Bayes Classifiers: a Decision-Tree Hybrid",* by Ron Kohavi. You may find this paper [online](https://www.aaai.org/Papers/KDD/1996/KDD96-033.pdf), with the original dataset hosted on [UCI](https://archive.ics.uci.edu/ml/datasets/Census+Income).

**Features**
- `age`: Age
- `workclass`: Working Class (Private, Self-emp-not-inc, Self-emp-inc, Federal-gov, Local-gov, State-gov, Without-pay, Never-worked)
- `education_level`: Level of Education (Bachelors, Some-college, 11th, HS-grad, Prof-school, Assoc-acdm, Assoc-voc, 9th, 7th-8th, 12th, Masters, 1st-4th, 10th, Doctorate, 5th-6th, Preschool)
- `education-num`: Number of educational years completed
- `marital-status`: Marital status (Married-civ-spouse, Divorced, Never-married, Separated, Widowed, Married-spouse-absent, Married-AF-spouse)
- `occupation`: Work Occupation (Tech-support, Craft-repair, Other-service, Sales, Exec-managerial, Prof-specialty, Handlers-cleaners, Machine-op-inspct, Adm-clerical, Farming-fishing, Transport-moving, Priv-house-serv, Protective-serv, Armed-Forces)
- `relationship`: Relationship Status (Wife, Own-child, Husband, Not-in-family, Other-relative, Unmarried)
- `race`: Race (White, Asian-Pac-Islander, Amer-Indian-Eskimo, Other, Black)
- `sex`: Sex (Female, Male)
- `capital-gain`: Monetary Capital Gains
- `capital-loss`: Monetary Capital Losses
- `hours-per-week`: Average Hours Per Week Worked
- `native-country`: Native Country (United-States, Cambodia, England, Puerto-Rico, Canada, Germany, Outlying-US(Guam-USVI-etc), India, Japan, Greece, South, China, Cuba, Iran, Honduras, Philippines, Italy, Poland, Jamaica, Vietnam, Mexico, Portugal, Ireland, France, Dominican-Republic, Laos, Ecuador, Taiwan, Haiti, Columbia, Hungary, Guatemala, Nicaragua, Scotland, Thailand, Yugoslavia, El-Salvador, Trinadad&Tobago, Peru, Hong, Holand-Netherlands)

**Target Variable**
- `income`: Income Class (<=50K, >50K)
BIN
Supervised Learning Project/__pycache__/visuals.cpython-37.pyc
Normal file
Binary file not shown.
45223
Supervised Learning Project/census.csv
Normal file
File diff suppressed because it is too large
45223
Supervised Learning Project/example_submission.csv
Normal file
File diff suppressed because it is too large
2500
Supervised Learning Project/finding_donors.ipynb
Normal file
File diff suppressed because one or more lines are too long
45223
Supervised Learning Project/test_census.csv
Normal file
File diff suppressed because it is too large
143
Supervised Learning Project/visuals.py
Normal file
@@ -0,0 +1,143 @@
###########################################
# Suppress matplotlib user warnings
# Necessary for newer version of matplotlib
import warnings
warnings.filterwarnings("ignore", category = UserWarning, module = "matplotlib")
#
# Display inline matplotlib plots with IPython
from IPython import get_ipython
get_ipython().run_line_magic('matplotlib', 'inline')
###########################################

import matplotlib.pyplot as pl
import matplotlib.patches as mpatches
import numpy as np
import pandas as pd
from time import time
from sklearn.metrics import f1_score, accuracy_score


def distribution(data, transformed = False):
    """
    Visualization code for displaying skewed distributions of features
    """

    # Create figure
    fig = pl.figure(figsize = (11,5));

    # Skewed feature plotting
    for i, feature in enumerate(['capital-gain','capital-loss']):
        ax = fig.add_subplot(1, 2, i+1)
        ax.hist(data[feature], bins = 25, color = '#00A0A0')
        ax.set_title("'%s' Feature Distribution"%(feature), fontsize = 14)
        ax.set_xlabel("Value")
        ax.set_ylabel("Number of Records")
        ax.set_ylim((0, 2000))
        ax.set_yticks([0, 500, 1000, 1500, 2000])
        ax.set_yticklabels([0, 500, 1000, 1500, ">2000"])

    # Plot aesthetics
    if transformed:
        fig.suptitle("Log-transformed Distributions of Continuous Census Data Features", \
            fontsize = 16, y = 1.03)
    else:
        fig.suptitle("Skewed Distributions of Continuous Census Data Features", \
            fontsize = 16, y = 1.03)

    fig.tight_layout()
    fig.show()


def evaluate(results, accuracy, f1):
    """
    Visualization code to display results of various learners.

    inputs:
      - learners: a list of supervised learners
      - stats: a list of dictionaries of the statistic results from 'train_predict()'
      - accuracy: The score for the naive predictor
      - f1: The score for the naive predictor
    """

    # Create figure
    fig, ax = pl.subplots(2, 3, figsize = (11,7))

    # Constants
    bar_width = 0.3
    colors = ['#A00000','#00A0A0','#00A000']

    # Super loop to plot the six panels of data
    for k, learner in enumerate(results.keys()):
        for j, metric in enumerate(['train_time', 'acc_train', 'f_train', 'pred_time', 'acc_test', 'f_test']):
            for i in np.arange(3):

                # Creative plot code
                ax[j//3, j%3].bar(i+k*bar_width, results[learner][i][metric], width = bar_width, color = colors[k])
                ax[j//3, j%3].set_xticks([0.45, 1.45, 2.45])
                ax[j//3, j%3].set_xticklabels(["1%", "10%", "100%"])
                ax[j//3, j%3].set_xlabel("Training Set Size")
                ax[j//3, j%3].set_xlim((-0.1, 3.0))

    # Add unique y-labels
    ax[0, 0].set_ylabel("Time (in seconds)")
    ax[0, 1].set_ylabel("Accuracy Score")
    ax[0, 2].set_ylabel("F-score")
    ax[1, 0].set_ylabel("Time (in seconds)")
    ax[1, 1].set_ylabel("Accuracy Score")
    ax[1, 2].set_ylabel("F-score")

    # Add titles
    ax[0, 0].set_title("Model Training")
    ax[0, 1].set_title("Accuracy Score on Training Subset")
    ax[0, 2].set_title("F-score on Training Subset")
    ax[1, 0].set_title("Model Predicting")
    ax[1, 1].set_title("Accuracy Score on Testing Set")
    ax[1, 2].set_title("F-score on Testing Set")

    # Add horizontal lines for naive predictors
    ax[0, 1].axhline(y = accuracy, xmin = -0.1, xmax = 3.0, linewidth = 1, color = 'k', linestyle = 'dashed')
    ax[1, 1].axhline(y = accuracy, xmin = -0.1, xmax = 3.0, linewidth = 1, color = 'k', linestyle = 'dashed')
    ax[0, 2].axhline(y = f1, xmin = -0.1, xmax = 3.0, linewidth = 1, color = 'k', linestyle = 'dashed')
    ax[1, 2].axhline(y = f1, xmin = -0.1, xmax = 3.0, linewidth = 1, color = 'k', linestyle = 'dashed')

    # Set y-limits for score panels
    ax[0, 1].set_ylim((0, 1))
    ax[0, 2].set_ylim((0, 1))
    ax[1, 1].set_ylim((0, 1))
    ax[1, 2].set_ylim((0, 1))

    # Create patches for the legend
    patches = []
    for i, learner in enumerate(results.keys()):
        patches.append(mpatches.Patch(color = colors[i], label = learner))
    pl.legend(handles = patches, bbox_to_anchor = (-.80, 2.53), \
        loc = 'upper center', borderaxespad = 0., ncol = 3, fontsize = 'x-large')

    # Aesthetics
    pl.suptitle("Performance Metrics for Three Supervised Learning Models", fontsize = 16, y = 1.10)
    pl.tight_layout()
    pl.show()


def feature_plot(importances, X_train, y_train):

    # Display the five most important features
    indices = np.argsort(importances)[::-1]
    columns = X_train.columns.values[indices[:5]]
    values = importances[indices][:5]

    # Create the plot
    fig = pl.figure(figsize = (9,5))
    pl.title("Normalized Weights for First Five Most Predictive Features", fontsize = 16)
    pl.bar(np.arange(5), values, width = 0.6, align="center", color = '#00A000', \
        label = "Feature Weight")
    pl.bar(np.arange(5) - 0.3, np.cumsum(values), width = 0.2, align = "center", color = '#00A0A0', \
        label = "Cumulative Feature Weight")
    pl.xticks(np.arange(5), columns)
    pl.xlim((-0.5, 4.5))
    pl.ylabel("Weight", fontsize = 12)
    pl.xlabel("Feature", fontsize = 12)

    pl.legend(loc = 'upper center')
    pl.tight_layout()
    pl.show()
58
emojis/dev/emoji.py
Normal file
@@ -0,0 +1,58 @@
#!//Users/dtomlinson/.virtualenvs/emojis-3.7.3/bin/python
import emojis
import argparse
from texttable import Texttable


def main(*args, **kwargs):
    t = Texttable()

    searchOne = [y for y in [x for x in emojis.db.get_tags()] if emoji in y]

    emojiResults = []

    emojiResults.append((['tag/alias', 'emoji']))

    for item in searchOne:
        listOne = [x for x in emojis.db.get_emojis_by_tag(item)]
        for i in range(0, len(listOne)):
            emojiResults.append([
                (listOne[i][0][0]),
                (listOne[i][1])
            ])

    for alias, emojiAlias in zip(
        emojis.db.get_emoji_aliases().keys(),
        emojis.db.get_emoji_aliases().values()
    ):
        if emoji in alias:
            emojiResults.append([alias, emojiAlias])

    t.add_rows(emojiResults)

    if len(emojiResults) - 1 == 0:
        print(f'I found {len(emojiResults)-1} emojis for {emoji}! 😢')
    else:
        print(f'I found {len(emojiResults)-1} emojis for {emoji}! 🥳')

    print(t.draw())


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="""
    emoji lookup script to be run on the shell
    """)

    parser.add_argument('emoji', type=str,
                        help='the emoji name, tag or description to look up',
                        )

    args = parser.parse_args()

    emoji = vars(args)['emoji']

    main(emoji)
58
emojis/dev/lists.py
Executable file
@@ -0,0 +1,58 @@
#!//Users/dtomlinson/.virtualenvs/emojis-3.7.3/bin/python
import emojis
import argparse
from texttable import Texttable


parser = argparse.ArgumentParser(description="""
emoji lookup script to be run on the shell
""")

parser.add_argument('emoji', type=str,
                    help='the emoji name, tag or description to look up',
                    )

args = parser.parse_args()

emoji = vars(args)['emoji']

t = Texttable()

# t.add_row(['tag/alias', 'emoji'])

searchOne = [y for y in [x for x in emojis.db.get_tags()] if emoji in y]

emojiResults = []

emojiResults.append((['tag/alias', 'emoji']))

for item in searchOne:
    listOne = [x for x in emojis.db.get_emojis_by_tag(item)]
    counter = 0
    for i in range(0, len(listOne)):
        if counter == 3:
            break
        # print(f'{listOne[i][0][0]} - {listOne[i][1]}')
        emojiResults.append([(listOne[i][0][0]).strip(), (listOne[i][1]).strip()])
        # counter += 1

for alias, emojiAlias in zip(
    emojis.db.get_emoji_aliases().keys(),
    emojis.db.get_emoji_aliases().values()
):
    if emoji in alias:
        emojiResults.append([alias.strip(), emojiAlias.strip()])

# print(emojiResults)

t.add_rows(emojiResults)

if len(emojiResults) - 1 == 0:
    print(f'I found {len(emojiResults)-1} emojis for {emoji}! 😢')
else:
    print(f'I found {len(emojiResults)-1} emojis for {emoji}! 🥳')

print(t.draw())
58
emojis/emoji.py
Executable file
@@ -0,0 +1,58 @@
#!//Users/dtomlinson/.virtualenvs/emojis-3.7.3/bin/python
import emojis
import argparse
from texttable import Texttable


def main(*args, **kwargs):
    t = Texttable()

    searchOne = [y for y in [x for x in emojis.db.get_tags()] if emoji in y]

    emojiResults = []

    emojiResults.append((['tag/alias', 'emoji']))

    for item in searchOne:
        listOne = [x for x in emojis.db.get_emojis_by_tag(item)]
        for i in range(0, len(listOne)):
            emojiResults.append([
                (listOne[i][0][0]).strip(),
                (listOne[i][1]).strip()
            ])

    for alias, emojiAlias in zip(
        emojis.db.get_emoji_aliases().keys(),
        emojis.db.get_emoji_aliases().values()
    ):
        if emoji in alias:
            emojiResults.append([alias.strip(), emojiAlias.strip()])

    t.add_rows(emojiResults)

    if len(emojiResults) - 1 == 0:
        print(f'I found {len(emojiResults)-1} emojis for {emoji}! 😢')
    else:
        print(f'I found {len(emojiResults)-1} emojis for {emoji}! 🥳')
    print(t.draw())


if __name__ == '__main__':
    parser = argparse.ArgumentParser(description="""
    emoji lookup script to be run on the shell
    """)

    parser.add_argument('emoji', type=str,
                        help='the emoji name, tag or description to look up',
                        )

    args = parser.parse_args()

    emoji = vars(args)['emoji']

    main(emoji)
96
emojis/readme.md
Normal file
@@ -0,0 +1,96 @@
# Emoji finder on the command line! 🥳

*Author: Daniel Tomlinson*

*Team: Capacity & Monitoring*

*Date: October 2019*

## Requirements

**python 3**

```
emojis==0.4.0
texttable==1.6.2
```

## Usage

### Add to path

```
ln -s emoji.py /usr/local/bin/emoji
```

### Usage

```shell
emoji heart
```

### Output

```shell
[dtomlinson@WHM0004885 $] emoji heart
I found 28 emojis for heart! 🥳
+----------------------------------+----------+
| tag/alias                        | emoji    |
+==================================+==========+
| cupid                            | 💘       |
+----------------------------------+----------+
| :smiling_face_with_three_hearts: | 🥰       |
+----------------------------------+----------+
| :heart_eyes:                     | 😍       |
+----------------------------------+----------+
| :kissing_heart:                  | 😘       |
+----------------------------------+----------+
| :heart_eyes_cat:                 | 😻       |
+----------------------------------+----------+
| :gift_heart:                     | 💝       |
+----------------------------------+----------+
| :sparkling_heart:                | 💖       |
+----------------------------------+----------+
| :heartpulse:                     | 💗       |
+----------------------------------+----------+
| :heartbeat:                      | 💓       |
+----------------------------------+----------+
| :revolving_hearts:               | 💞       |
+----------------------------------+----------+
| :two_hearts:                     | 💕       |
+----------------------------------+----------+
| :heart_decoration:               | 💟       |
+----------------------------------+----------+
| :heavy_heart_exclamation:        | ❣️       |
+----------------------------------+----------+
| :broken_heart:                   | 💔       |
+----------------------------------+----------+
| :heart:                          | ❤️       |
+----------------------------------+----------+
| :orange_heart:                   | 🧡       |
+----------------------------------+----------+
| :yellow_heart:                   | 💛       |
+----------------------------------+----------+
| :green_heart:                    | 💚       |
+----------------------------------+----------+
| :blue_heart:                     | 💙       |
+----------------------------------+----------+
| :purple_heart:                   | 💜       |
+----------------------------------+----------+
| :brown_heart:                    | 🤎       |
+----------------------------------+----------+
| :black_heart:                    | 🖤       |
+----------------------------------+----------+
| :white_heart:                    | 🤍       |
+----------------------------------+----------+
| :couple_with_heart:              | 💑       |
+----------------------------------+----------+
| :couple_with_heart_woman_man:    | 👩❤️👨    |
+----------------------------------+----------+
| :couple_with_heart_man_man:      | 👨❤️👨    |
+----------------------------------+----------+
| :couple_with_heart_woman_woman:  | 👩❤️👩    |
+----------------------------------+----------+
| :hearts:                         | ♥️       |
+----------------------------------+----------+
```
1097
jupyter-workbooks/Keras, MLP-scikit - Neural Network.ipynb
Normal file
File diff suppressed because it is too large
2
objects/67/28b1ecf3641cbadffd1ed056db496ad54cfb28
Normal file
Binary file not shown.
BIN
objects/74/801214438a6927c550468d0d1e1a0799ea4a91
Normal file
Binary file not shown.
BIN
objects/8b/7b27d670239a8d16ca2b3d384a7a0c305d2faf
Normal file
Binary file not shown.
BIN
splunk-dashboard-screenshot/.DS_Store
vendored
Normal file
Binary file not shown.
19
splunk-dashboard-screenshot/__dev/notes.md
Normal file
@@ -0,0 +1,19 @@
# Selenium

## Move the mouse to element(s) then click at the end

```python
e1 = driver.find_element_by_xpath('//*[@id="nav"]/ol/li[5]/a')
e2 = e1.find_element_by_xpath('../ul/li[1]/a')
actions.move_to_element(e1).move_to_element(e2).perform()
e2.click()
```

https://stackoverflow.com/questions/43161987/python-selenium-how-to-move-mouse-on-element-which-shows-drop-down-menu

https://stackoverflow.com/questions/920910/sending-multipart-html-emails-which-contain-embedded-images

class this all up!

add image to a class and iterate through adding each one
BIN
splunk-dashboard-screenshot/example_screenshot.png
Normal file
Binary file not shown.
After Width: | Height: | Size: 149 KiB
72
splunk-dashboard-screenshot/getDashboard.py
Normal file
@@ -0,0 +1,72 @@
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from time import sleep
from selenium.common.exceptions import NoSuchElementException

# from PIL import Image

URL = 'https://sc1uxpremn81:8000/en-US/app/wh_netacea_blocking/byo_decoder_wip'
URL2 = (
    'https://sc1uxpremn81:8000/en-US/app/wh_netacea_blocking/'
    'blocking_visibility?form.time_tok.earliest=%40d&form.time_tok.latest=now'
)

options = webdriver.ChromeOptions()
options.headless = True

driver = webdriver.Chrome(options=options)
driver.get(URL2)
driver.implicitly_wait(10)
driver.maximize_window()

action = ActionChains(driver)

username = driver.find_element_by_xpath('//*[@id="username"]')
password = driver.find_element_by_xpath('//*[@id="password"]')

username.send_keys('admin')
password.send_keys('fWgbz6AU')
driver.find_element_by_xpath(
    '/html/body/div[2]/div/div/div[1]/form/fieldset/input[1]'
).click()


def setWidth(var, adj=0):
    # read the page's full scroll width/height and apply an optional adjustment
    script = "return document.body.parentNode.scroll" + (var)
    return driver.execute_script(script) + adj


driver.set_window_size(setWidth('Width', 1000), setWidth('Height', 10000))

sleep(2)


try:
    driver.find_element_by_xpath(
        '/html/body/div[2]/div/div[3]/div[1]/a'
    ).click()

except NoSuchElementException:
    pass

try:
    element = driver.find_element_by_class_name('dashboard-menu')
    driver.execute_script("arguments[0].style.visibility = 'hidden';", element)

except NoSuchElementException:
    pass

try:
    action.move_to_element(
        driver.find_element_by_class_name('dashboard-title')
    ).perform()
    sleep(1)

except NoSuchElementException:
    pass

driver.find_element_by_xpath('/html/body/div[2]').screenshot(
    'web_screenshot.png'
)

driver.quit()
137
splunk-dashboard-screenshot/getDashboardFinal.py
Normal file
@@ -0,0 +1,137 @@
from selenium import webdriver
from selenium.webdriver.common.action_chains import ActionChains
from selenium.common.exceptions import NoSuchElementException
from selenium.common.exceptions import ElementNotInteractableException
from time import sleep
import os


class getDashboardScreenshot(object):
    def __init__(
        self,
        driver: webdriver,
        url: str,
        username: str,
        password: str,
        width: int = 0,
        height: int = 10000,
        path: str = os.getcwd(),
        delay: int = 1,
    ):
        super(getDashboardScreenshot, self).__init__()
        self.driver = driver
        self.action = ActionChains(driver)
        self.url = url
        self.username = username
        self.password = password
        self.width = width
        self.height = height
        self.path = self.checkPath(path)
        self.delay = delay

    def __enter__(self):
        return self

    def __exit__(self, exception_type, exception_value, traceback):
        self.driver.quit()

    @classmethod
    def createDriver(
        cls,
        url: str,
        username: str,
        password: str,
        width: int = 0,
        height: int = 0,
        type: str = 'chromium',
        path: str = os.getcwd(),
        delay: int = 1,
    ):
        if type == 'chromium':
            options = webdriver.ChromeOptions()
            options.headless = True
            driver = webdriver.Chrome(options=options)
        else:
            # fail early: driver would otherwise be undefined below
            raise ValueError(f'No supported browser {type}')
        return cls(driver, url, username, password, width, height, path, delay)

    @staticmethod
    def checkPath(path: str) -> str:
        if path[-1] != '/':
            path += '/'
        return path

    def _setWidth(self, jsProperty: str, override: int):
        script = 'return document.body.parentNode.scroll' + (jsProperty)
        return self.driver.execute_script(script) + override

    def _logIn(self):
        self.usernameElement = self.driver.find_element_by_xpath(
            '//*[@id="username"]'
        )
        self.passwordElement = self.driver.find_element_by_xpath(
            '//*[@id="password"]'
        )
        self.usernameElement.send_keys(self.username)
        self.passwordElement.send_keys(self.password)
        self.driver.find_element_by_xpath(
            '/html/body/div[2]/div/div/div[1]/form/fieldset/input[1]'
        ).click()
        sleep(2)
        return self

    def _hideElements(self):
        try:
            self.driver.find_element_by_class_name(
                'hide-global-filters'
            ).click()
        except (NoSuchElementException, ElementNotInteractableException):
            pass
        try:
            element = self.driver.find_element_by_class_name('dashboard-menu')
            self.driver.execute_script(
                "arguments[0].style.visibility = 'hidden';", element
            )
        except NoSuchElementException:
            pass
        try:
            self.action.move_to_element(
                self.driver.find_element_by_class_name('dashboard-title')
            ).perform()
            sleep(1)
        except NoSuchElementException:
            pass
        return self

    def _goToDashboard(self):
        self.driver.get(self.url)
        self.driver.implicitly_wait(10)
        self.driver.maximize_window()
        self._logIn()
        return self

    def takeScreenshot(self):
        self._goToDashboard()
        self.driver.set_window_size(
            self._setWidth('Width', self.width),
            self._setWidth('Height', self.height),
        )
        self._hideElements()
        sleep(self.delay)
        self.driver.find_element_by_class_name('dashboard-body').screenshot(
            f'{self.path}web_screenshot.png'
        )
        return self


with getDashboardScreenshot.createDriver(
    url='https://sc1uxpremn81:8000/en-US/app/wh_weekly_standup/weekly_standup_'
    'data?form.time_tok.earliest=-7d%40h&form.time_tok.latest=now&hideFilters='
    'true',
    username='admin',
    password='fWgbz6AU',
    width=1000,
    height=10000,
    delay=5,
) as screenShot:
    screenShot.takeScreenshot()
6
splunk-dashboard-screenshot/screenshot.py
Normal file
@@ -0,0 +1,6 @@
from selenium import webdriver

driver = webdriver.Firefox()
driver.get('https://www.python.org')
driver.save_screenshot('screenshot.png')
driver.quit()
32
splunk-dashboard-screenshot/screenshotFull.py
Normal file
@@ -0,0 +1,32 @@
# https://stackoverflow.com/questions/41721734/take-screenshot-of-full-page-with-selenium-python-with-chromedriver/57338909#57338909

from selenium import webdriver
from PIL import Image

URL = 'https://www.trafficdelays.co.uk/m62-traffic-delays/'

options = webdriver.ChromeOptions()
options.headless = True

driver = webdriver.Chrome(options=options)
driver.get(URL)


def setWidth(var, adj=0):
    # read the page's full scroll width/height and apply an optional adjustment
    script = "return document.body.parentNode.scroll" + (var)
    return driver.execute_script(script) + adj


driver.set_window_size(setWidth('Width'), setWidth('Height'))


driver.find_element_by_tag_name('body').screenshot('web_screenshot.png')
# driver.find_element_by_css_selector('#post-4706').screenshot('web_screenshot.png')

# print(driver.find_element_by_css_selector('#post-4706').text)

im = Image.open('web_screenshot.png')
width, height = im.size
region = im.crop((0, 0, width, 880))
region.save('cropped.png')
driver.quit()
BIN
vrops-api/.DS_Store
vendored
Normal file
Binary file not shown.
4
vrops-api/.gitignore
vendored
Executable file
@@ -0,0 +1,4 @@
*.json
!approach1-output-example.json
!approach1-vms-example.json
!approach2-output-example.json
BIN
vrops-api/__dev/__pycache__/vropsAPI.cpython-37.pyc
Normal file
Binary file not shown.
8
vrops-api/__dev/devNotes.txt
Normal file
@@ -0,0 +1,8 @@
vrops._vmsResourcesRaw[0]['values'][0]['resourceList'][100]['links'][0]['href']

'/suite-api/api/resources/d38f3933-d432-4b62-9211-da6243fcfcaa'

vrops._vmsResourcesRaw is the raw response, with [i] being all VMs under each host

the ['values'] comes from when you pull back VM data, not when you ask for the relationships
23
vrops-api/__dev/notes.md
Normal file
@@ -0,0 +1,23 @@
<!-- MarkdownTOC -->

- [Added](#added)
- [Info](#info)

<!-- /MarkdownTOC -->


# Added

- self.chosenCluster in getHostsFromCluster


# Info

Host info comes back in ```{'pageInfo': {'totalCount': 37, 'page': 0, 'pageSize': 1000}, 'links': [{'href': '/suite-api/api/resources/15b3ea0c-9f62-4fc2-93b8-d4281196043e/relationships?page=0&pageSize=1000', 'rel': 'SELF', 'name': 'current'}, ```

in `self._vmsResources[i]['resourceList']`


If an entry looks like ```'resourceKey': {'name': 'SC1PRCONTXWHCUXCCL01', 'adapterKindKey': 'VMWARE', 'resourceKindKey': 'ClusterComputeResource', ```

i.e. it is a ClusterComputeResource, extract the name.
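A minimal sketch of that extraction, assuming a parsed relationships response shaped like the snippets above (a `resourceList` of entries carrying a `resourceKey`); the data here is illustrative, not live output:

```python
# Hypothetical parsed response; only the fields used below are included.
relationships = {
    'resourceList': [
        {'resourceKey': {'name': 'SC1PRCONTXWHCUXCCL01',
                         'resourceKindKey': 'ClusterComputeResource'}},
        {'resourceKey': {'name': 'sc1hsesx156.prod.williamhill.plc',
                         'resourceKindKey': 'HostSystem'}},
    ]
}

# Keep only ClusterComputeResource entries and pull out their names.
clusterNames = [
    item['resourceKey']['name']
    for item in relationships['resourceList']
    if item['resourceKey']['resourceKindKey'] == 'ClusterComputeResource'
]
print(clusterNames)  # ['SC1PRCONTXWHCUXCCL01']
```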
91
vrops-api/__dev/old/addhostcluster.py
Normal file
@@ -0,0 +1,91 @@
import base64

# import os
import sys

sys.path.append(
    '/Users/dtomlinson/OneDrive - William Hill Organisation Limited/Mac/'
    'projects/vrops-api/__dev'
)
# sys.path.remove(
#     '/Users/dtomlinson/OneDrive - William Hill Organisation Limited/Mac/'
#     'projects/vrops-api/'
# )
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Get all clusters and store a list of Names:
vrops.getClusters()
vrops.getClusterIdentifiers()
allClustersList = vropsAPI.getKeysList(vrops.allClusters)


# Print all these clusters
print(allClustersList)


# Get all hosts and store a list of Names:
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
vrops.getHostIdentifiers()
allHostsList = vrops.getKeysList(vrops.allHosts)


# Print all these hosts
print(allHostsList)

# # Add just 2 hosts
# counter = 0
# hostList = []
# for host in allHostsList:
#     if counter == 2:
#         break
#     else:
#         hostList.append(host)
#         counter += 1


# Get all VMs and store a list of IDs
vrops.getVMSFromHost(allHostsList)
vrops.getVMSIdentifiers()
allVMSIdList = vrops.getValuesList(vrops.allVMS)


# Save all VMs to disk
vrops.saveToDisk(vrops.allVMS, indent=4, filePrefix='approach1-vms')


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-11),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='5',
    rollUpType='AVG',
    resourceId=allVMSIdList,
    statKey=['cpu|usage_average', 'config|hardware|num_Cpu'],
)


# Export the data into readable format
vrops.exportVMData()


# Save to disk
vrops.saveToDisk(
    vrops.export,
    indent=4,
    filePrefix='approach1-export',
    breakLine=True,
    path='/Users/dtomlinson/OneDrive - William Hill Organisation Limited'
    '/Mac/projects/vrops-api/__dev',
)
22
vrops-api/__dev/old/authenticate.py
Normal file
@@ -0,0 +1,22 @@
import requests

url = "https://sc1prapvro01/suite-api/api/auth/token/acquire"

payload = "{\n\"username\" : \"dtomlinson@Group.WilliamHill.PLC\",\
\n\"authSource\" : \"whgroup\",\n\"password\" : \"yi3Uw9Mw\"\n}"

headers = {
    'Accept': "application/json",
    'Content-Type': "application/json",
    'Cache-Control': "no-cache",
    'Host': "sc1prapvro01",
    'Accept-Encoding': "gzip, deflate",
    'Content-Length': "102",
    'Connection': "keep-alive",
    'cache-control': "no-cache"
}

response = requests.request("POST", url, data=payload, headers=headers,
                            verify=False)

print(response.text)
87
vrops-api/__dev/old/getHostInfo.py
Normal file
@@ -0,0 +1,87 @@
import base64
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Get all clusters and store a list of Names:
vrops.getClusters()
vrops.getClusterIdentifiers()
allClustersList = vrops.getKeysList(vrops.allClusters)


# Print all these clusters
print(allClustersList)

# Get all hosts and store a list of Names:
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
vrops.getHostIdentifiers()
allHostsList = vrops.getValuesList(vrops.allHosts)


# # Print all these hosts
# print(allHostsList)


# # Get all VMs and store a list of IDs:
# vrops.getVMSFromHost(allHostsList)
# vrops.getVMSIdentifiers()
# allVMSIdList = vrops.getValuesList(vrops.allVMS)

# # Save all VMs to disk
# vrops.saveToDisk(vrops.allVMS, indent=4, filePrefix='approach1-vms')

# # Save all VMs:Hosts to disk
# vrops.saveToDisk(
#     vrops.VMSHostsNames, indent=4, filePrefix='approach1-vms_hosts'
# )


# # Add a single VM to a list to pull back
# myList = []
# myList.append(allVMSIdList[0])


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(
        vrops.epochNow, days=-3, hour=15, minute=0, second=0
    ),
    end=vrops.epochRelativeTime(
        vrops.epochNow, days=-3, hour=17, minute=0, second=0
    ),
    intervalType='MINUTES',
    intervalQuantifier='5',
    rollUpType='AVG',
    resourceId=allHostsList,
    statKey=['cpu|corecount_provisioned'],
    vropsType='host',
)


# Export the data into readable format
vrops.exportVMData()

# Save to disk
vrops.saveToDisk(
    vrops._vmsResources,
    indent=4,
    filePrefix='reddit-help-before',
    breakLine=True,
)

vrops.saveToDisk(
    vrops.export, indent=4, filePrefix='reddit-help-after', breakLine=True
)
26
vrops-api/__dev/old/getMetrics.py
Normal file
@@ -0,0 +1,26 @@
import requests

url = "https://sc1prapvro01/suite-api/api/resources/stats"

querystring = {"resourceId": "0389a5e4-9b4c-49aa-8139-92ef54f8dd9f",
               "begin": "1564587600000", "end": "1564588200000",
               "statKey":
               ["cpu|workload", "mem|usage_average", "mem|swapped_average"],
               "currentOnly": "True"}

headers = {
    'Authorization': "vRealizeOpsToken f72a2910-88c3-442d-9dfb-58f61aa833fe"
    "::10e2fe6d-55dc-416e-ad3c-9177901f9b3a",
    'Content-Type': "application/json",
    'Accept': "application/json",
    'Cache-Control': "no-cache",
    'Host': "sc1prapvro01",
    'Accept-Encoding': "gzip, deflate",
    'Connection': "keep-alive",
    'cache-control': "no-cache"
}

response = requests.request("GET", url, headers=headers, params=querystring,
                            verify=False)

print(response.text)
42
vrops-api/__dev/old/getVms.py
Normal file
@@ -0,0 +1,42 @@
import requests
import json
import math

url = "https://sc1prapvro01/suite-api/api/resources"

querystring = {"page": "0", "regex": "^prdx*"}

headers = {
    'Content-Type': "application/json",
    'Authorization': "vRealizeOpsToken f72a2910-88c3-442d-9dfb-58f61aa833fe"
    "::29abe2bf-a1f7-464d-b48a-508320a98627",
    'Accept': "application/json",
    'Cache-Control': "no-cache",
    'Host': "sc1prapvro01",
    'Accept-Encoding': "gzip, deflate",
    'Connection': "keep-alive",
    'cache-control': "no-cache"
}

response = requests.request("GET", url, headers=headers,
                            params=querystring, verify=False)

t = json.loads(response.text)
listNames = []
totalVms = t['pageInfo']['totalCount']
vmsPerPage = t['pageInfo']['pageSize']
pages = math.ceil(totalVms / vmsPerPage)

queryList = [i for i in range(0, pages)]

for page in queryList:
    querystring = {'page': page}
    response = requests.request("GET", url, headers=headers,
                                params=querystring, verify=False)
    t = json.loads(response.text)
    # Iterate over what the page actually returned; the last page can hold
    # fewer than pageSize entries, so a fixed range(0, 1000) would IndexError.
    for item in t['resourceList']:
        listNames.append(item['resourceKey']['name'])
        print(listNames[-1])

print(listNames)
134
vrops-api/__dev/old/getoutput.py
Normal file
@@ -0,0 +1,134 @@
import base64
import json
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)

# Load VM dict

with open('vms.json', 'r') as file:
    vrops.allVMS = json.load(file)

# Add a single VM to a list to pull back
myList = []
for i in range(0, len(vrops.allVMS)):
    myList.append(list(vrops.allVMS.values())[i])

testList = []
testList.append(myList[10])
testList.append(myList[11])

# print(myList)

# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-20),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='5',
    rollUpType='AVG',
    resourceId=testList,
    statKey=['cpu|usage_average', 'config|hardware|num_Cpu'],
)

# print(vrops.vmsResources)

# Save output to disk
# vrops.saveToDisk(vrops.vmsResources, indent=4, fileName='approach2-output')

export, metric = {}, {}

# All vms to loop through
print(len(vrops.vmsResources['values']))


# vm name
print(vrops.vmsResources['values'][0]['name'])


# data stored in
print(vrops.vmsResources['values'][0]['stat-list']['stat'])

# how many keys there are
print(len(vrops.vmsResources['values'][0]['stat-list']['stat']))

# timestamp earliest, -1 latest?
print(vrops.vmsResources['values'][0]['stat-list']['stat'][0]['timestamps'][0])

# statkey
print(
    vrops.vmsResources['values'][0]['stat-list']['stat'][0]['statKey']['key']
)

# Rolluptype avg etc
print(vrops.vmsResources['values'][0]['stat-list']['stat'][0]['rollUpType'])

# time interval
print(vrops.vmsResources['values'][0]['stat-list']['stat'][0]['intervalUnit'][
    'quantifier'
])

# intervaltype minutes etc
print(vrops.vmsResources['values'][0]['stat-list']['stat'][0]['intervalUnit'][
    'intervalType'
])

# data earliest, -1 latest?
print(vrops.vmsResources['values'][0]['stat-list']['stat'][0]['data'][0])


loopLength = len(vrops.vmsResources['values'])
print(f'loop length - {loopLength}')

timeLength = len(
    vrops.vmsResources['values'][0]['stat-list']['stat'][0]['timestamps']
)

metricLength = len(vrops.vmsResources['values'][0]['stat-list']['stat'])
print(metricLength)

print('\n')

for i in range(0, loopLength):
    # timeLength = len(
    #     vrops.vmsResources['values'][1]['stat-list']['stat'][0]['timestamps']
    # )
    # print(json.dumps({'name': vrops.vmsResources['values'][i]['name']}))
    pass

print(
    len(vrops.vmsResources['values'][0]['stat-list']['stat'][0]['timestamps']))

for i in range(0, loopLength):
    statKeyLength = len(vrops.vmsResources['values'][i]['stat-list']['stat'])
    timeLength = len(
        vrops.vmsResources['values'][i]['stat-list']['stat'][0]['timestamps']
    )
    for k in range(0, statKeyLength):
        for j in range(0, timeLength):
            print(type(
                json.dumps({
                    'name': vrops.vmsResources['values'][i]['name'],
                    'timestamp': vrops.vmsResources['values'][i]['stat-list']
                    ['stat'][0]['timestamps'][j],
                    'data': vrops.vmsResources['values'][i]['stat-list']
                    ['stat'][k]['data'][j],
                    'statKey': vrops.vmsResources['values'][i]['stat-list']
                    ['stat'][k]['statKey']['key'],
                    'rollUpType': vrops.vmsResources['values'][i]['stat-list']
                    ['stat'][k]['rollUpType'],
                    'intervalUnit': vrops.vmsResources['values'][i]
                    ['stat-list']['stat'][k]['intervalUnit']['quantifier']
                }),
            ))
12
vrops-api/__dev/old/regex.py
Normal file
@@ -0,0 +1,12 @@
import re
import json

data = "host info in {'pageInfo': {'totalCount': 37, 'page': 0, 'pageSize': 1000}, 'links': [{'href': '/suite-api/api/resources/15b3ea0c-9f62-4fc2-93b8-d4281196043e/relationships?page=0&pageSize=1000', 'rel': 'SELF', 'name': 'current'},"

pattern = r'(?:.*resources\/)(?P<host_id_response>[^\/]+)'

match = re.findall(pattern, data)

host = [x for x in match]

print(host)
Binary file not shown.
76
vrops-api/__dev/spaghetti_code/redditHelp.py
Normal file
@@ -0,0 +1,76 @@
import base64
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Get all clusters and store a list of Names:
vrops.getClusters()
vrops.getClusterIdentifiers()
allClustersList = vrops.getKeysList(vrops.allClusters)


# Print all these clusters
print(allClustersList)

# Get all hosts and store a list of Names:
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
vrops.getHostIdentifiers()
allHostsList = vrops.getKeysList(vrops.allHosts)
allHostsIdList = vrops.getValuesList(vrops.allHosts)

# Print all these hosts
print(allHostsList)


# Get all VMs and store a list of IDs:
vrops.getVMSFromHost('sc1hsesx156.prod.williamhill.plc')
vrops.getVMSIdentifiers()
allVMSIdList = vrops.getValuesList(vrops.allVMS)

# Save all VMs to disk
vrops.saveToDisk(vrops.allVMS, indent=4, filePrefix='approach1-vms')

# Save all VMs:Hosts to disk
vrops.saveToDisk(
    vrops.VMSHostsNames, indent=4, filePrefix='approach1-vms_hosts'
)


# Add a single VM to a list to pull back
# myList = []
# myList.append(allVMSIdList[0])


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='5',
    rollUpType='AVG',
    resourceId=allVMSIdList,
    statKey=['cpu|usage_average', 'config|hardware|num_Cpu'],
    vropsType='virtualmachine'
)


# # Export the data into readable format
# vrops.exportVMData()

# # Save to disk
# vrops.saveToDisk(
#     vrops.export, indent=4, filePrefix='approach1-export', breakLine=True
# )
65
vrops-api/__dev/spaghetti_code/spaghettiFixer.py
Normal file
@@ -0,0 +1,65 @@
import os
import sys
import json

sys.path.append(os.getcwd())
# from vropsAPI import vropsAPI


# Open files

with open('_vmsResourcesaim.json', 'r+') as jsonFile:
    myDict = json.load(jsonFile)

with open('_vmsResources.json', 'r+') as jsonFile:
    myDictBefore = json.load(jsonFile)

with open('vmid.json', 'r+') as jsonFile:
    vmId = json.load(jsonFile)

with open('vmname.json', 'r+') as jsonFile:
    vmName = json.load(jsonFile)

print(myDict['values'][0])
print()

# Playground

output0 = map(lambda x: x in 'values', myDict)

output1 = map(lambda i: i[0:], myDict)


output2 = [item['name'] for item in myDict['values']]

print(myDictBefore['values'][0])

print()

# output2 = (x for x in myDict(iter(output1)))

print(output2)

print()

# Build a generator of VM names by matching each result's resourceId back
# to the name -> id mapping loaded from vmid.json.
name = (
    key
    for item in [item['resourceId'] for item in myDictBefore['values']]
    for key, value in vmId.items()
    if item == value
)


for entry, name in zip(myDictBefore['values'], name):
    entry['name'] = name

print()
print(name)
print()
print(myDictBefore['values'][0])
print(myDictBefore['values'][-2])

"""
names_res = map(lambda i: i['name'][0], data)
ids = [item['owner']['id'] for owner in results['playlists']['items']]
"""
767
vrops-api/__dev/spaghetti_code/vropsAPI.py
Normal file
@@ -0,0 +1,767 @@
import requests
import urllib3
import json
from typing import Union
import copy
from datetime import datetime
from dateutil.relativedelta import relativedelta
import os
import math
import time
from collections import OrderedDict
import re
from deprecated import deprecated

# warnings.filterwarnings('ignore')
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)


class vropsAPI(object):
    """Module for the vropsAPI for Capacity Management:
    Author: Daniel Tomlinson
    Team: Capacity & Monitoring
    Date: October 2019

    VROPS documentation: https://sc1prapvro01/suite-api/docs/rest/
        index.html
    StatKeys for VMS:
        https://docs.vmware.com/en/vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html

    Usage
    -----
    Authenticate a session:
        vrops = vropsAPI.authenticate(
            'https://sc1prapvro01/',
            'username',
            'authSource',
            'password',
            verify=False,
        )


    Get all clusters:
        Query VROPS for available clusters:
            vrops.getClusters()
            vrops.getClusterIdentifiers()

        Get dict of all cluster IDs and cluster names:
            allClustersDict = vrops.allClusters

        Get list of all cluster names:
            allClustersList = vrops.getList(vrops.allClusters)


    Get all hosts:
        Query VROPS for available hosts:
            From a single cluster:
                vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
                vrops.getHostIdentifiers()
            From a list of clusters:
                <Not implemented>

        Get dict of all host IDs and host names:
            allHostsDict = vrops.allHosts

        Get list of host names:
            allHostsList = vrops.getList(vrops.allHosts)


    Get all VMs:
        Query VROPS for available VMs:
            For a single host:
                vrops.getVMSFromHost('sc1hsesx148.prod.williamhill.plc')
                vrops.getVMSIdentifiers()

            For a list of hosts:
                vrops.getVMSFromHost(allHostsList)
                vrops.getVMSIdentifiers()

        Get dict of all VM IDs and VM names:
            allVMSDict = vrops.allVMS

        Get list of all VMs:
            allVMSList = vrops.getList(vrops.allVMS)


    Get epoch time relative to another time:
        Similar to Splunk's relative_time command:
            1. Can go back N hours/minutes etc.
            2. Can set the hour/minute etc. to a specified value (snapping)

        vrops.epochRelativeTime(epochTime, **kwargs)

        **kwargs:
            epochTime: int - start time

            year: int = datetime.now().year  # set year
            month: int = datetime.now().month  # set month
            day: int = datetime.now().day  # set day
            hour: int = datetime.now().hour  # set hour
            minute: int = datetime.now().minute  # set minute
            second: int = datetime.now().second  # set second

            years: int = 0  # go back/forward N years
            months: int = 0  # go back/forward N months
            days: int = 0  # go back/forward N days
            hours: int = 0  # go back/forward N hours
            minutes: int = 0  # go back/forward N minutes
            seconds: int = 0  # go back/forward N seconds
        Usage:

            Get epoch 5 minutes ago:
                vrops.epochRelativeTime(vrops.epochNow, minutes=-5)

            Get epoch at start of current hour:
                vrops.epochRelativeTime(
                    vrops.epochNow,
                    hour=0,
                    minute=0,
                    second=0,
                )

            Get epoch 1 week ago at start of day:
                vrops.epochRelativeTime(
                    vrops.epochNow,
                    days=-7,
                    hour=0,
                    minute=0,
                    second=0,
                )


    Get stats from VMs:
        Pull back results:
            Last 30 minutes, 5 minute intervals, average
            for CPU average and ready %:

                vrops.getStatsFromVMS(
                    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
                    end=vrops.epochNow,
                    intervalType='MINUTES',
                    intervalQuantifier='5',
                    rollUpType='AVG',
                    resourceId=list(vrops.allVMS.values()),
                    statKey=['cpu|usage_average', 'cpu|readyPct'],
                )

            Attributes:
                intervalType: <ns3:intervalType> (see
                    https://sc1prapvro01/suite-api/docs/rest/
                    models.html#repr-1190589417)

                intervalQuantifier: int

                rollUpType: <ns3:rollUpType> (see:
                    https://sc1prapvro01/suite-api/
                    docs/rest/models.html#repr-1735704374)

                resourceId: string or list of vrops resourceIds (not names)

                statKey: vrops api metrics (see https://docs.vmware.com/en/
                    vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/
                    GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html)


        Print results:
            for i in range(0, vrops.totalVMS):
                print(vrops.vmsResources['values'][i])

        Save to disk as json:
            vrops.saveToDisk(vrops.vmsResources)


    Attributes
    ----------
    vropsURL: str
        URL of the VROPS instance
        "https://sc1prapvro01/"

    """

    defaultHeaders = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Cache-Control': 'no-cache',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'keep-alive',
        'cache-control': 'no-cache',
    }

    def __init__(self, vropsURL: str, authToken: str, verify: bool = True):
        super(vropsAPI, self).__init__()
        self.vropsURL = vropsURL
        self.authToken = authToken
        self.verify = verify
        # Copy so adding Authorization doesn't mutate the shared class-level
        # defaults for every other instance.
        self.headers = vropsAPI.defaultHeaders.copy()
        self.headers['Authorization'] = f'vRealizeOpsToken {self.authToken}'

    @classmethod
    def authenticate(
        cls,
        vropsURL: str,
        username: str,
        authSource: str,
        password: str,
        verify: bool = True,
    ):
        vropsURLauth = vropsAPI.getVropsURL(vropsURL, 'authenticationURL')
        payload = {}
        for key, value in zip(
            ['username', 'authSource', 'password'],
            [username, authSource, password],
        ):
            payload[key] = value
        authToken = vropsAPI.getAuthenticationToken(
            vropsURLauth, payload, verify
        )
        return cls(vropsURL, authToken, verify)

    @staticmethod
    def getVropsURL(vropsURL: str, endpointKey: str) -> str:
        endpoints = {
            'authenticationURL': 'suite-api/api/auth/token/acquire',
            'resourcesURL': 'suite-api/api/resources',
            'statsURL': 'suite-api/api/resources/stats/query',
        }
        if endpoints[endpointKey] not in vropsURL:
            if vropsURL[-1] != '/':
                vropsURL = vropsURL + '/'
            vropsURL = vropsURL + endpoints[endpointKey]
        else:
            vropsURL = vropsURL + endpoints[endpointKey]
        return vropsURL

    @staticmethod
    def pythonToJSON(pythonObject: any, indent=4) -> str:
        return json.dumps(pythonObject, indent=indent)

    @staticmethod
    def jsonToPython(jsonObject: str) -> any:
        return json.loads(jsonObject)

    @staticmethod
    def getAuthenticationToken(
        vropsURL: str, payload: dict, verify=True
    ) -> str:

        payload = vropsAPI.pythonToJSON(payload)
        vropsURL = vropsAPI.getVropsURL(vropsURL, 'authenticationURL')
        response = requests.request(
            'POST',
            vropsURL,
            data=payload,
            headers=vropsAPI.defaultHeaders,
            verify=verify,
        )
        print(response)
        authToken = vropsAPI.jsonToPython(response.text)['token']
        return authToken

    @staticmethod
    def getIdentifiers(
        identifierDict: dict,
        vropsJSON: dict,
        length: int,
        resourceKindKey: str,
    ) -> dict:
        for i in range(0, length):
            if (
                vropsJSON['resourceList'][i]['resourceKey']['resourceKindKey']
                == resourceKindKey
            ):
                identifierDict[
                    vropsJSON['resourceList'][i]['resourceKey']['name']
                ] = vropsJSON['resourceList'][i]['identifier']
            else:
                pass
        return identifierDict

    @staticmethod
    def getKeysList(pythonDict: dict) -> list:
        pythonList = []
        for i in pythonDict.keys():
            pythonList.append(i)
        return pythonList

    @staticmethod
    def getValuesList(pythonDict: dict) -> list:
        pythonList = []
        for i in pythonDict.values():
            pythonList.append(i)
        return pythonList

    @staticmethod
    def epochRelativeTime(
        epochTime: int,
        year: int = datetime.now().year,
        month: int = datetime.now().month,
        day: int = datetime.now().day,
        hour: int = datetime.now().hour,
        minute: int = datetime.now().minute,
        second: int = datetime.now().second,
        years: int = 0,
        months: int = 0,
        days: int = 0,
        hours: int = 0,
        minutes: int = 0,
        seconds: int = 0,
    ) -> int:
        delta = relativedelta(
            year=year,
            month=month,
            day=day,
            hour=hour,
            minute=minute,
            second=second,
            years=years,
            months=months,
            days=days,
            hours=hours,
            minutes=minutes,
            seconds=seconds,
        )
        if len(str(epochTime)) >= 12:
            epochTime /= 1000
        relativeTime = datetime.fromtimestamp(epochTime) + delta
        relativeTime = math.ceil(relativeTime.timestamp() * 1000)
        return relativeTime

    def getClusters(self):
        queryString = {'resourceKind': 'ClusterComputeResource'}
        vropsURL = vropsAPI.getVropsURL(self.vropsURL, 'resourcesURL')
        response = requests.request(
            "GET",
            vropsURL,
            headers=self.headers,
            params=queryString,
            verify=self.verify,
        )
        print(response)
        self._clusterResources = vropsAPI.jsonToPython(response.text)
        return self

    def getClusterIdentifiers(self):
        self.totalClusters = len(self._clusterResources['resourceList'])
        self.allClusters = {}
        self.getIdentifiers(
            self.allClusters,
            self._clusterResources,
            self.totalClusters,
            'ClusterComputeResource',
        )
        return self

    def getHostsFromCluster(self, cluster='SC1PRCONTXWHCUXCCL01'):
        self.cluster = cluster
        url = (
            f'{self.vropsURL}/suite-api/api/resources/'
            f'{self.allClusters[cluster]}/relationships'
        )
        self.chosenCluster = cluster
        response = requests.request(
            "GET", url, headers=self.headers, verify=self.verify
        )
        print(response)
        self._hostResources = vropsAPI.jsonToPython(response.text)
        return self

    def getHostIdentifiers(self):
        self.totalHosts = len(self._hostResources['resourceList'])
        self.allHosts = {}
        self.getIdentifiers(
            self.allHosts, self._hostResources, self.totalHosts, 'HostSystem'
        )
        return self

    def getVMSFromHost(self, host: Union[str, list]):
        if isinstance(host, list):
            self.vmType = 'list'
            self._vmsResourcesRelationships = []
            self.urlList = []
            response = []
            for item in host:
                self.urlList.append(
                    (
                        f'{self.vropsURL}suite-api/api/resources/'
                        f'{self.allHosts[item]}/relationships'
                    )
                )
            for urlItem in self.urlList:
                response.append(
                    requests.request(
                        'GET',
                        urlItem,
                        headers=self.headers,
                        verify=self.verify,
                    )
                )
            print(response)
            for i in range(0, len(response)):
                self._vmsResourcesRelationships.append(
                    vropsAPI.jsonToPython(response[i].text)
                )
        if isinstance(host, str):
            hostToList = []
            hostToList.append(host)
            print(hostToList)
            return self.getVMSFromHost(host=hostToList)
        return self

    def _getHostInformation(self, i: int, j: int):
        pattern = r'(?:.*resources\/)([^\/]+)'
        vm = self._vmsResourcesRelationships[i]['resourceList'][j][
            'resourceKey'
        ]['name']
        host = self.urlList[i]
        match = re.findall(pattern, host)
        for key, value in self.allHosts.items():
            if match[0] == value:
                self.VMSHostsNames[vm] = key
        return self

    def getVMSIdentifiers(self):
        self.VMSHostsNames = {}
        self.allVMS = OrderedDict()
        if self.vmType == 'list':
            self.countVMS = []
            self.countVMSFiltered = []
            for i in range(0, len(self._vmsResourcesRelationships)):
                counter = 0
                for j in range(
                    0, len(self._vmsResourcesRelationships[i]['resourceList'])
                ):
                    if (
                        self._vmsResourcesRelationships[i]['resourceList'][j][
                            'resourceKey'
                        ]['resourceKindKey']
                    ) == 'VirtualMachine':
                        counter += 1
                        self._getHostInformation(i, j)
                self.countVMS.append(
                    len(self._vmsResourcesRelationships[i]['resourceList'])
                )
                self.countVMSFiltered.append(counter)
            for i in range(0, len(self._vmsResourcesRelationships)):
                self.getIdentifiers(
                    self.allVMS,
                    self._vmsResourcesRelationships[i],
                    self.countVMS[i],
                    'VirtualMachine',
                )
        if self.vmType == 'string':
            counter = 0
            self.countVMS = len(
                self._vmsResourcesRelationships['resourceList']
            )
            for j in range(0, self.countVMS):
                if (
                    self._vmsResourcesRelationships['resourceList'][j][
                        'resourceKey'
                    ]['resourceKindKey']
                ) == 'VirtualMachine':
                    counter += 1
            self.countVMSFiltered = counter
            self.getIdentifiers(
                self.allVMS,
                self._vmsResourcesRelationships,
                self.countVMS,
                'VirtualMachine',
            )
        return self

    def getStats(
        self,
        begin: int,
        end: int,
        intervalType: str,
        intervalQuantifier: str,
        rollUpType: str,
        resourceId: list,
        statKey: Union[str, list],
        vropsType: str,
        *args,
        **kwargs,
    ):
        self.vropsType = vropsType
        argList = copy.deepcopy(locals())
        # Drop everything that isn't part of the stats-query payload,
        # including the unused *args/**kwargs picked up by locals().
        for i in ('self', 'vropsType', 'args', 'kwargs'):
            del argList[i]
        vropsURL = self.getVropsURL(self.vropsURL, 'statsURL')
        payload = self.pythonToJSON(argList, indent=0)
        response = requests.request(
            'POST',
            vropsURL,
            headers=self.headers,
            data=payload,
            verify=self.verify,
        )
        self._vmsResources = OrderedDict(self.jsonToPython(response.text))
        if self.vropsType == 'virtualmachine':
            for key, value in self.allVMS.items():
                for i in range(0, len(self._vmsResources['values'])):
                    if self._vmsResources['values'][i]['resourceId'] == value:
                        self._vmsResources['values'][i] = OrderedDict(
                            self._vmsResources['values'][i]
                        )
                        self._vmsResources['values'][i]['name'] = key
                        self._vmsResources['values'][i][
                            'host'
                        ] = self.VMSHostsNames[key]
                        self._vmsResources['values'][i][
                            'cluster'
                        ] = self.chosenCluster
                        for item in ['cluster', 'host', 'name']:
                            self._vmsResources['values'][i].move_to_end(
                                item, last=False
                            )
        if self.vropsType == 'host':
            for key, value in self.allHosts.items():
                for i in range(0, len(self._vmsResources['values'])):
                    if self._vmsResources['values'][i]['resourceId'] == value:
                        self._vmsResources['values'][i] = OrderedDict(
                            self._vmsResources['values'][i]
                        )
                        self._vmsResources['values'][i]['name'] = key
                        self._vmsResources['values'][i][
                            'cluster'
                        ] = self.chosenCluster
                        for item in ['cluster', 'name']:
                            self._vmsResources['values'][i].move_to_end(
                                item, last=False
                            )

    @deprecated(
        version='2.1',
        reason='this method is deprecated, use getStats() instead with the'
        ' same functionality',
    )
    def getStatsFromVMS(
        self,
        begin: int,
        end: int,
        intervalType: str,
        intervalQuantifier: str,
        rollUpType: str,
        resourceId: list,
        statKey: Union[str, list],
        vropsType: str,
    ):
        argList = copy.deepcopy(locals())
        del argList['self']
        self.getStats(**argList)
        return self

    @staticmethod
    def saveToDisk(
        pythonObject: any,
        path: str = os.getcwd(),
        filePrefix: str = '',
        type: str = 'json',
        indent: int = 4,
        breakLine: bool = False,
    ) -> None:
        timeNow = datetime.now().strftime('%d-%m-%Y_%H-%M-%S')
        fileName = f'{path}/{filePrefix}-{timeNow}.json'
        if breakLine:
            if not isinstance(pythonObject, list):
                raise TypeError(
                    'You must pass a list when using' ' breakLine=True'
                )
            else:
                with open(fileName, 'a+') as outputFile:
                    for item in pythonObject:
                        try:
                            # json.dump writes straight to the file handle;
                            # skip items that aren't JSON strings.
                            json.dump(
                                json.loads(item), outputFile, indent=indent
                            )
                        except TypeError:
                            pass
                        outputFile.write('\n')
        else:
            with open(fileName, 'w+') as outputFile:
                json.dump(pythonObject, outputFile, indent=indent)

    def exportVMData(self):
        # Flatten the nested stats response into a list of JSON strings,
        # one record per (resource, statKey, timestamp) combination.
        self.export = []
        for value in self._vmsResources['values']:
            statList = value['stat-list']['stat']
            timestamps = statList[0]['timestamps']
            for stat in statList:
                for j in range(0, len(timestamps)):
                    record = {
                        'type': self.vropsType,
                        'name': value['name'],
                    }
                    if self.vropsType == 'virtualmachine':
                        record['host'] = value['host']
                    record.update(
                        {
                            'cluster': self.chosenCluster,
                            'timestamp': timestamps[j],
                            'value': stat['data'][j],
                            'statKey': stat['statKey']['key'],
                            'rollUpType': stat['rollUpType'],
                            'intervalQuantifier': stat['intervalUnit'][
                                'quantifier'
                            ],
                            'intervalType': statList[0]['intervalUnit'][
                                'intervalType'
                            ],
                        }
                    )
                    if self.vropsType in ('virtualmachine', 'host'):
                        self.export.append(json.dumps(record))
        return self

    @property
    def totalVMS(self):
        if isinstance(self.countVMSFiltered, list):
            self.__totalVMS = sum(self.countVMSFiltered)
        elif isinstance(self.countVMSFiltered, int):
            self.__totalVMS = self.countVMSFiltered
        return self.__totalVMS

    @totalVMS.setter
    def totalVMS(self, totalVMS):
        self.__totalVMS = totalVMS
        return self.__totalVMS

    # @property
    # def totalHosts(self):
    #     if isinstance(self.countVMSFiltered, list):
    #         self.__totalVMS = sum(self.countVMSFiltered)
    #     elif isinstance(self.countVMSFiltered, int):
    #         self.__totalVMS = self.countVMSFiltered
    #     return self.__totalVMS

    # @totalHosts.setter
    # def totalHosts(self, totalHosts):
    #     self.__totalHosts = totalHosts
    #     return self.__totalHosts

    @property
    def epochNow(self):
        self.__epochNow = math.ceil(time.time() * 1000)
        return self.__epochNow

    @property
    def epochToday(self):
        # Snap to the start of the current day before converting to epoch ms.
        now = datetime.now()
        startOfDay = now + relativedelta(hour=0, minute=0, second=0)
        self.__epochToday = math.ceil(startOfDay.timestamp() * 1000)
        return self.__epochToday

    @property
    def allVMS(self):
        return self.__allVMS

    @allVMS.setter
    def allVMS(self, allVMS):
        if not isinstance(allVMS, dict):
            raise TypeError(
                'You must pass a dictionary with a key of the name'
                f' and a value of the VROPS ID, not {type(allVMS)}.'
            )
        else:
            pass
        self.__allVMS = allVMS
        print('Successfully imported the dictionary.')
        return self.__allVMS

    @property
    def VMSHostsNames(self):
        return self.__VMSHostsNames

    @VMSHostsNames.setter
    def VMSHostsNames(self, VMSHostsNames):
        if not isinstance(VMSHostsNames, dict):
            raise TypeError(
                'You must pass a dictionary with a key of the name'
                f' and a value of the VROPS ID, not {type(VMSHostsNames)}.'
            )
        else:
            pass
        self.__VMSHostsNames = VMSHostsNames
        print('Successfully imported the dictionary.')
        return self.__VMSHostsNames

    @property
    def chosenCluster(self):
        return self.__chosenCluster

    @chosenCluster.setter
    def chosenCluster(self, chosenCluster):
        if not isinstance(chosenCluster, str):
            raise TypeError(
                'You must pass a string with the cluster name,'
                f' not {type(chosenCluster)}.'
            )
        else:
            pass
        self.__chosenCluster = chosenCluster
        return self.__chosenCluster
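The snap-then-shift behaviour of `epochRelativeTime` can be checked in isolation; a minimal sketch using only `dateutil`, under the semantics the docstring above describes:

```python
from datetime import datetime
from dateutil.relativedelta import relativedelta

# Singular kwargs (hour=, minute=, second=) snap to absolute values;
# plural kwargs (days=) shift relative to the input. Together they give
# 'one week ago at the start of the day'.
start = datetime(2019, 10, 14, 16, 45, 30)
print(start + relativedelta(days=-7, hour=0, minute=0, second=0))
# 2019-10-07 00:00:00
```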
767
vrops-api/__dev/vropsAPI.py
Normal file
@@ -0,0 +1,767 @@
|
|||||||
|
import requests
|
||||||
|
import urllib3
|
||||||
|
import json
|
||||||
|
from typing import Union
|
||||||
|
import copy
|
||||||
|
from datetime import datetime
|
||||||
|
from dateutil.relativedelta import relativedelta
|
||||||
|
import os
|
||||||
|
import math
|
||||||
|
import time
|
||||||
|
from collections import OrderedDict
|
||||||
|
import re
|
||||||
|
from deprecated import deprecated
|
||||||
|
|
||||||
|
# warnings.filterwarnings('ignore')
|
||||||
|
urllib3.disable_warnings(urllib3.exceptions.InsecureRequestWarning)
|
||||||
|
|
||||||
|
|
||||||
|
class vropsAPI(object):
|
||||||
|
"""Module for the vropsAPI for Capacity Management:
|
||||||
|
Author: Daniel Tomlinson
|
||||||
|
Team: Capacity & Monitoring
|
||||||
|
Date: October 2019
|
||||||
|
|
||||||
|
VROPS documentation: https://sc1prapvro01/suite-api/docs/rest/
|
||||||
|
index.html
|
||||||
|
StatKeys for VMS:
|
||||||
|
https://docs.vmware.com/en/vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
Authenticate a session:
|
||||||
|
vrops = vropsAPI.authenticate(
|
||||||
|
'https://sc1prapvro01/',
|
||||||
|
'username',
|
||||||
|
'authSource',
|
||||||
|
'password',
|
||||||
|
verify=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
Get all clusters:
|
||||||
|
Query VROPS for available clusters:
|
||||||
|
vrops.getClusters()
|
||||||
|
vrops.getClusterIdentifiers()
|
||||||
|
|
||||||
|
Get dict of all cluster IDs and cluster names:
|
||||||
|
allClustersDict = vrops.allClusters
|
||||||
|
|
||||||
|
Get list of all cluster names:
|
||||||
|
allClustersList = vrops.getList(vrops.allClusters)
|
||||||
|
|
||||||
|
|
||||||
|
Get all hosts:
|
||||||
|
Query VROPS for available hosts:
|
||||||
|
From a single cluster:
|
||||||
|
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
|
||||||
|
vrops.getHostIdentifiers()
|
||||||
|
From a list of clusters:
|
||||||
|
<Not implemented>
|
||||||
|
|
||||||
|
Get dict of all host IDs and host names:
|
||||||
|
allHostsDisct =vrops.allHosts
|
||||||
|
|
||||||
|
Get list of host names:
|
||||||
|
allHostsList = vrops.getList(vrops.allHosts)
|
||||||
|
|
||||||
|
|
||||||
|
Get all VMs:
|
||||||
|
Query VROPS for available VMs:
|
||||||
|
For a single host:
|
||||||
|
vrops.getVMSFromHost('sc1hsesx148.prod.williamhill.plc')
|
||||||
|
vrops.getVMSIdentifiers()
|
||||||
|
|
||||||
|
For a list of hosts:
|
||||||
|
vrops.getVMSFromHost(allHostsList)
|
||||||
|
vrops.getVMSIdentifiers()
|
||||||
|
|
||||||
|
Get dict of all VM IDs and VM names:
|
||||||
|
allVMSDict = vrops.allVMS
|
||||||
|
|
||||||
|
Get list of all VMs:
|
||||||
|
allVMSList = vrops.getList(vrops.allVMS)
|
||||||
|
|
||||||
|
|
||||||
|
Get epoch time relative to another time:
|
||||||
|
Similar to Splunks relative_time command:
|
||||||
|
1. Can go back N hours/minutes etc.
|
||||||
|
2. Can set the hour/minute etc. to a specified value (snapping)
|
||||||
|
|
||||||
|
vrops.epochRelativeTime(epochTime, **kwargs)
|
||||||
|
|
||||||
|
**kwargs:
|
||||||
|
epochTime: int - start time
|
||||||
|
|
||||||
|
year: int = datetime.now().year # set year
|
||||||
|
month: int = datetime.now().month # set month
|
||||||
|
day: int = datetime.now().day # set day
|
||||||
|
hour: int = datetime.now().hour # set hour
|
||||||
|
minute: int = datetime.now().minute # set minute
|
||||||
|
second: int = datetime.now().second # set second
|
||||||
|
|
||||||
|
years: int = 0 # go back/forward N years
|
||||||
|
months: int = 0 # go back/forward N months
|
||||||
|
days: int = 0 # go back/forward N days
|
||||||
|
hours: int = 0 # go back/forward N hours
|
||||||
|
minutes: int = 0 # go back/forward N minutes
|
||||||
|
seconds: int = 0 # go back/forward N seconds
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
Get epoch 5 minutes ago:
|
||||||
|
vrops.epochRelativeTime(vrops.epochNow, minutes=-5)
|
||||||
|
|
||||||
|
Get epoch at start of current hour:
|
||||||
|
vrops.epochRelativeTime(
|
||||||
|
vrops.epochNow,
|
||||||
|
hour=0,
|
||||||
|
minute=0,
|
||||||
|
second=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
Get epoch 1 week ago at start of day:
|
||||||
|
vrops.epochRelativeTime(
|
||||||
|
vrops.epochNow,
|
||||||
|
days=-7
|
||||||
|
hour=0,
|
||||||
|
minute=0,
|
||||||
|
second=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
Get stats from VMs:
|
||||||
|
Pull back results:
|
||||||
|
Last 30 minutes, 5 minute intervals, average
|
||||||
|
for CPU average and ready %:
|
||||||
|
|
||||||
|
vrops.getStatsFromVMS(
|
||||||
|
begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
|
||||||
|
end=vrops.epochNow,
|
||||||
|
intervalType='MINUTES',
|
||||||
|
intervalQuantifier='5',
|
||||||
|
rollUpType='AVG',
|
||||||
|
resourceId=list(vrops.allVMS.values()),
|
||||||
|
statKey=['cpu|usage_average', 'cpu|readyPct'],
|
||||||
|
)
|
||||||
|
|
||||||
|
Attributes:
|
||||||
|
intervalType: <ns3:intervalType> (see
|
||||||
|
https://sc1prapvro01/suite-api/docs/rest/
|
||||||
|
models.html#repr-1190589417)
|
||||||
|
|
||||||
|
intervalQuantifier: int
|
||||||
|
|
||||||
|
rollUpType: <ns3:rollUpType> (see:
|
||||||
|
https://sc1prapvro01/suite-api/
|
||||||
|
docs/rest/models.html#repr-1735704374)
|
||||||
|
|
||||||
|
resourceId: string or list of vrops resourceIds (not names)
|
||||||
|
|
||||||
|
statKey: vrops api metrics (see https://docs.vmware.com/en/
|
||||||
|
vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/
|
||||||
|
GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html)
|
||||||
|
|
||||||
|
|
||||||
|
Print results:
|
||||||
|
for i in range(0, vrops.totalVMS):
|
||||||
|
print(vrops.vmsResources['values'][i])
|
||||||
|
|
||||||
|
Save to disk as json:
|
||||||
|
vrops.saveToDisk(vrops.vmsResources)
|
||||||
|
|
||||||
|
|
||||||
|
Attributes
|
||||||
|
----------
|
||||||
|
vropsURL: str
|
||||||
|
URL of the VROPS instance
|
||||||
|
"https://sc1prapvro01/"
|
||||||
|
|
||||||
|
"""
|
||||||
|
|
||||||
|
defaultHeaders = {
|
||||||
|
'Accept': 'application/json',
|
||||||
|
'Content-Type': 'application/json',
|
||||||
|
'Cache-Control': 'no-cache',
|
||||||
|
'Accept-Encoding': 'gzip, deflate',
|
||||||
|
'Connection': 'keep-alive',
|
||||||
|
'cache-control': 'no-cache',
|
||||||
|
}
|
||||||
|
|
||||||
|
def __init__(self, vropsURL: str, authToken: str, verify: bool = True):
|
||||||
|
super(vropsAPI, self).__init__()
|
||||||
|
self.vropsURL = vropsURL
|
||||||
|
self.authToken = authToken
|
||||||
|
self.verify = verify
|
||||||
|
self.headers = vropsAPI.defaultHeaders
|
||||||
|
self.headers['Authorization'] = f'vRealizeOpsToken {self.authToken}'
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def authenticate(
|
||||||
|
cls,
|
||||||
|
vropsURL: str,
|
||||||
|
username: str,
|
||||||
|
authSource: str,
|
||||||
|
password: str,
|
||||||
|
verify: bool = True,
|
||||||
|
):
|
||||||
|
vropsURLauth = vropsAPI.getVropsURL(vropsURL, 'authenticationURL')
|
||||||
|
payload = {}
|
||||||
|
for key, value in zip(
|
||||||
|
['username', 'authSource', 'password'],
|
||||||
|
[username, authSource, password],
|
||||||
|
):
|
||||||
|
payload[key] = value
|
||||||
|
authToken = vropsAPI.getAuthenticationToken(
|
||||||
|
vropsURLauth, payload, verify
|
||||||
|
)
|
||||||
|
return cls(vropsURL, authToken, verify)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getVropsURL(vropsURL: str, endpointKey: str) -> str:
|
||||||
|
endpoints = {
|
||||||
|
'authenticationURL': 'suite-api/api/auth/token/acquire',
|
||||||
|
'resourcesURL': 'suite-api/api/resources',
|
||||||
|
'statsURL': 'suite-api/api/resources/stats/query',
|
||||||
|
}
|
||||||
|
if endpoints[endpointKey] not in vropsURL:
|
||||||
|
if vropsURL[-1] != '/':
|
||||||
|
vropsURL = vropsURL + '/'
|
||||||
|
vropsURL = vropsURL + endpoints[endpointKey]
|
||||||
|
else:
|
||||||
|
vropsURL = vropsURL + endpoints[endpointKey]
|
||||||
|
return vropsURL
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def pythonToJSON(pythonObject: any, indent=4) -> str:
|
||||||
|
return json.dumps(pythonObject, indent=indent)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def jsonToPython(jsonObject: str) -> any:
|
||||||
|
return json.loads(jsonObject)
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getAuthenticationToken(
|
||||||
|
vropsURL: str, payload: dict, verify=True
|
||||||
|
) -> str:
|
||||||
|
|
||||||
|
payload = vropsAPI.pythonToJSON(payload)
|
||||||
|
vropsURL = vropsAPI.getVropsURL(vropsURL, 'authenticationURL')
|
||||||
|
response = requests.request(
|
||||||
|
'POST',
|
||||||
|
vropsURL,
|
||||||
|
data=payload,
|
||||||
|
headers=vropsAPI.defaultHeaders,
|
||||||
|
verify=verify,
|
||||||
|
)
|
||||||
|
print(response)
|
||||||
|
authToken = vropsAPI.jsonToPython(response.text)['token']
|
||||||
|
return authToken
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getIdentifiers(
|
||||||
|
identifierDict: dict,
|
||||||
|
vropsJSON: dict,
|
||||||
|
length: int,
|
||||||
|
resourceKindKey: str,
|
||||||
|
) -> dict:
|
||||||
|
for i in range(0, length):
|
||||||
|
if (
|
||||||
|
vropsJSON['resourceList'][i]['resourceKey']['resourceKindKey']
|
||||||
|
== resourceKindKey
|
||||||
|
):
|
||||||
|
identifierDict[
|
||||||
|
vropsJSON['resourceList'][i]['resourceKey']['name']
|
||||||
|
] = vropsJSON['resourceList'][i]['identifier']
|
||||||
|
else:
|
||||||
|
pass
|
||||||
|
return identifierDict
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getKeysList(pythonDict: dict) -> list:
|
||||||
|
pythonList = []
|
||||||
|
for i in pythonDict.keys():
|
||||||
|
pythonList.append(i)
|
||||||
|
return pythonList
|
||||||
|
|
||||||
|
@staticmethod
|
||||||
|
def getValuesList(pythonDict: dict) -> list:
|
||||||
|
pythonList = []
|
||||||
|
for i in pythonDict.values():
|
||||||
|
pythonList.append(i)
|
||||||
|
return pythonList
|
||||||
|
|
||||||
|
    @staticmethod
    def epochRelativeTime(
        epochTime: int,
        year: int = datetime.now().year,
        month: int = datetime.now().month,
        day: int = datetime.now().day,
        hour: int = datetime.now().hour,
        minute: int = datetime.now().minute,
        second: int = datetime.now().second,
        years: int = 0,
        months: int = 0,
        days: int = 0,
        hours: int = 0,
        minutes: int = 0,
        seconds: int = 0,
    ) -> int:
        delta = relativedelta(
            year=year,
            month=month,
            day=day,
            hour=hour,
            minute=minute,
            second=second,
            years=years,
            months=months,
            days=days,
            hours=hours,
            minutes=minutes,
            seconds=seconds,
        )
        if len(str(epochTime)) >= 12:
            epochTime /= 1000
        relativeTime = datetime.fromtimestamp(epochTime) + delta
        relativeTime = math.ceil(relativeTime.timestamp() * 1000)
        return relativeTime

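    # The get* methods below return self so calls can be chained; each raw
    # VROPS response is cached on a private attribute before the matching
    # *Identifiers method reduces it to a name:id dictionary.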
    def getClusters(self):
        queryString = {'resourceKind': 'ClusterComputeResource'}
        vropsURL = vropsAPI.getVropsURL(self.vropsURL, 'resourcesURL')
        response = requests.request(
            "GET",
            vropsURL,
            headers=self.headers,
            params=queryString,
            verify=self.verify,
        )
        print(response)
        self._clusterResources = vropsAPI.jsonToPython(response.text)
        return self

    def getClusterIdentifiers(self):
        self.totalClusters = len(self._clusterResources['resourceList'])
        self.allClusters = {}
        self.getIdentifiers(
            self.allClusters,
            self._clusterResources,
            self.totalClusters,
            'ClusterComputeResource',
        )
        return self

    def getHostsFromCluster(self, cluster='SC1PRCONTXWHCUXCCL01'):
        self.cluster = cluster
        url = (
            f'{self.vropsURL}/suite-api/api/resources/'
            f'{self.allClusters[cluster]}/relationships'
        )
        self.chosenCluster = cluster
        response = requests.request(
            "GET", url, headers=self.headers, verify=self.verify
        )
        print(response)
        self._hostResources = vropsAPI.jsonToPython(response.text)
        return self

    def getHostIdentifiers(self):
        self.totalHosts = len(self._hostResources['resourceList'])
        self.allHosts = {}
        self.getIdentifiers(
            self.allHosts, self._hostResources, self.totalHosts, 'HostSystem'
        )
        return self

    def getVMSFromHost(self, host: Union[str, list]):
        if isinstance(host, list):
            self.vmType = 'list'
            self._vmsResourcesRelationships = []
            self.urlList = []
            response = []
            for item in host:
                self.urlList.append(
                    (
                        f'{self.vropsURL}suite-api/api/resources/'
                        f'{self.allHosts[item]}/relationships'
                    )
                )
            for urlItem in self.urlList:
                response.append(
                    requests.request(
                        'GET',
                        urlItem,
                        headers=self.headers,
                        verify=self.verify,
                    )
                )
            print(response)
            for i in range(0, len(response)):
                self._vmsResourcesRelationships.append(
                    vropsAPI.jsonToPython(response[i].text)
                )
        if isinstance(host, str):
            hostToList = []
            hostToList.append(host)
            print(hostToList)
            return self.getVMSFromHost(host=hostToList)
        return self

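    # _getHostInformation maps a VM back to its parent host: the host's VROPS
    # identifier is pulled out of the relationships URL with a regex, then
    # matched against the allHosts name:id dictionary.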
    def _getHostInformation(self, i: int, j: int):
        pattern = r'(?:.*resources\/)([^\/]+)'
        vm = self._vmsResourcesRelationships[i]['resourceList'][j][
            'resourceKey'
        ]['name']
        host = self.urlList[i]
        match = re.findall(pattern, host)
        for key, value in self.allHosts.items():
            if match[0] == value:
                self.VMSHostsNames[vm] = key
        return self

    def getVMSIdentifiers(self):
        self.VMSHostsNames = {}
        self.allVMS = OrderedDict()
        if self.vmType == 'list':
            self.countVMS = []
            self.countVMSFiltered = []
            for i in range(0, len(self._vmsResourcesRelationships)):
                counter = 0
                for j in range(
                    0, len(self._vmsResourcesRelationships[i]['resourceList'])
                ):
                    if (
                        self._vmsResourcesRelationships[i]['resourceList'][j][
                            'resourceKey'
                        ]['resourceKindKey']
                    ) == 'VirtualMachine':
                        counter += 1
                        self._getHostInformation(i, j)
                self.countVMS.append(
                    len(self._vmsResourcesRelationships[i]['resourceList'])
                )
                self.countVMSFiltered.append(counter)
            for i in range(0, len(self._vmsResourcesRelationships)):
                self.getIdentifiers(
                    self.allVMS,
                    self._vmsResourcesRelationships[i],
                    self.countVMS[i],
                    'VirtualMachine',
                )
        if self.vmType == 'string':
            counter = 0
            self.countVMS = len(
                self._vmsResourcesRelationships['resourceList']
            )
            for j in range(0, self.countVMS):
                if (
                    self._vmsResourcesRelationships['resourceList'][j][
                        'resourceKey'
                    ]['resourceKindKey']
                ) == 'VirtualMachine':
                    counter += 1
            self.countVMSFiltered = counter
            self.getIdentifiers(
                self.allVMS,
                self._vmsResourcesRelationships,
                self.countVMS,
                'VirtualMachine',
            )
        return self

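    # getStats POSTs a stats/query payload assembled from the call arguments,
    # then rewrites each returned resource so it carries its name, host and
    # cluster alongside the raw stats.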
    def getStats(
        self,
        begin: int,
        end: int,
        intervalType: str,
        intervalQuantifier: str,
        rollUpType: str,
        resourceId: list,
        statKey: Union[str, list],
        vropsType: str,
        *args,
        **kwargs,
    ):
        self.vropsType = vropsType
        # locals() captures every argument passed in; 'self' and 'vropsType'
        # are stripped and the remainder is serialised as the query payload.
        argList = copy.deepcopy(locals())
        for i in ('self', 'vropsType'):
            del argList[i]
        vropsURL = self.getVropsURL(self.vropsURL, 'statsURL')
        payload = self.pythonToJSON(argList, indent=0)
        response = requests.request(
            'POST',
            vropsURL,
            headers=self.headers,
            data=payload,
            verify=self.verify,
        )
        self._vmsResources = OrderedDict(self.jsonToPython(response.text))
        if self.vropsType == 'virtualmachine':
            for key, value in self.allVMS.items():
                for i in range(0, len(self._vmsResources['values'])):
                    if self._vmsResources['values'][i]['resourceId'] == value:
                        self._vmsResources['values'][i] = OrderedDict(
                            self._vmsResources['values'][i]
                        )
                        self._vmsResources['values'][i]['name'] = key
                        self._vmsResources['values'][i][
                            'host'
                        ] = self.VMSHostsNames[key]
                        self._vmsResources['values'][i][
                            'cluster'
                        ] = self.chosenCluster
                        for item in ['cluster', 'host', 'name']:
                            self._vmsResources['values'][i].move_to_end(
                                item, last=False
                            )
        if self.vropsType == 'host':
            for key, value in self.allHosts.items():
                for i in range(0, len(self._vmsResources['values'])):
                    if self._vmsResources['values'][i]['resourceId'] == value:
                        self._vmsResources['values'][i] = OrderedDict(
                            self._vmsResources['values'][i]
                        )
                        self._vmsResources['values'][i]['name'] = key
                        self._vmsResources['values'][i][
                            'cluster'
                        ] = self.chosenCluster
                        for item in ['cluster', 'name']:
                            self._vmsResources['values'][i].move_to_end(
                                item, last=False
                            )
        # Return self so getStats() chains like the other get* methods.
        return self

    @deprecated(
        version='2.1',
        reason='this method is deprecated, use getStats() instead with the'
        ' same functionality',
    )
    def getStatsFromVMS(
        self,
        begin: int,
        end: int,
        intervalType: str,
        intervalQuantifier: str,
        rollUpType: str,
        resourceId: list,
        statKey: Union[str, list],
        vropsType: str,
    ):
        argList = copy.deepcopy(locals())
        del argList['self']
        self.getStats(**argList)
        return self

    @staticmethod
    def saveToDisk(
        pythonObject: any,
        path: str = os.getcwd(),
        filePrefix: str = '',
        type: str = 'json',
        indent: int = 4,
        breakLine: bool = False,
    ) -> None:
        timeNow = datetime.now().strftime('%d-%m-%Y_%H-%M-%S')
        fileName = f'{path}/{filePrefix}-{timeNow}.json'
        if breakLine:
            if not isinstance(pythonObject, list):
                raise TypeError(
                    'You must pass a list when using breakLine=True'
                )
            else:
                with open(fileName, 'a+') as outputFile:
                    for item in pythonObject:
                        # json.dump writes straight to the file handle and
                        # returns None, so it must not be wrapped in write().
                        try:
                            json.dump(
                                json.loads(item), outputFile, indent=indent
                            )
                        except TypeError:
                            pass
                        outputFile.write('\n')
        else:
            with open(fileName, 'w+') as outputFile:
                json.dump(pythonObject, outputFile, indent=indent)

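    # exportVMData flattens the nested stat-list structure into one JSON
    # string per (statKey, timestamp) pair, ready for line-oriented ingestion.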
    def exportVMData(self):
        self.export = []
        loopLength = len(self._vmsResources['values'])
        for i in range(0, loopLength):
            value = self._vmsResources['values'][i]
            statList = value['stat-list']['stat']
            statKeyLength = len(statList)
            timeLength = len(statList[0]['timestamps'])
            for k in range(0, statKeyLength):
                for j in range(0, timeLength):
                    if self.vropsType not in ('virtualmachine', 'host'):
                        continue
                    record = {'type': self.vropsType, 'name': value['name']}
                    if self.vropsType == 'virtualmachine':
                        record['host'] = value['host']
                    record['cluster'] = self.chosenCluster
                    record['timestamp'] = statList[0]['timestamps'][j]
                    record['value'] = statList[k]['data'][j]
                    record['statKey'] = statList[k]['statKey']['key']
                    record['rollUpType'] = statList[k]['rollUpType']
                    record['intervalQuantifier'] = statList[k]['intervalUnit'][
                        'quantifier'
                    ]
                    record['intervalType'] = statList[0]['intervalUnit'][
                        'intervalType'
                    ]
                    self.export.append(json.dumps(record))
        return self

    @property
    def totalVMS(self):
        if isinstance(self.countVMSFiltered, list):
            self.__totalVMS = sum(self.countVMSFiltered)
        elif isinstance(self.countVMSFiltered, int):
            self.__totalVMS = self.countVMSFiltered
        return self.__totalVMS

    @totalVMS.setter
    def totalVMS(self, totalVMS):
        self.__totalVMS = totalVMS
        return self.__totalVMS

    # @property
    # def totalHosts(self):
    #     if isinstance(self.countVMSFiltered, list):
    #         self.__totalVMS = sum(self.countVMSFiltered)
    #     elif isinstance(self.countVMSFiltered, int):
    #         self.__totalVMS = self.countVMSFiltered
    #     return self.__totalVMS

    # @totalHosts.setter
    # def totalHosts(self, totalHosts):
    #     self.__totalHosts = totalHosts
    #     return self.__totalHosts

    @property
    def epochNow(self):
        self.__epochNow = math.ceil(time.time() * 1000)
        return self.__epochNow

    @property
    def epochToday(self):
        # Snap the current time back to midnight, then convert to epoch ms.
        now = datetime.now()
        midnight = now + relativedelta(hour=0, minute=0, second=0)
        self.__epochToday = math.ceil(midnight.timestamp() * 1000)
        return self.__epochToday

    @property
    def allVMS(self):
        return self.__allVMS

    @allVMS.setter
    def allVMS(self, allVMS):
        if not isinstance(allVMS, dict):
            raise TypeError(
                'You must pass a dictionary with a key of the name'
                f' and a value of the VROPS ID, not {type(allVMS)}.'
            )
        else:
            pass
        self.__allVMS = allVMS
        print('Successfully imported the dictionary.')
        return self.__allVMS

    @property
    def VMSHostsNames(self):
        return self.__VMSHostsNames

    @VMSHostsNames.setter
    def VMSHostsNames(self, VMSHostsNames):
        if not isinstance(VMSHostsNames, dict):
            raise TypeError(
                'You must pass a dictionary with a key of the name'
                f' and a value of the VROPS ID, not {type(VMSHostsNames)}.'
            )
        else:
            pass
        self.__VMSHostsNames = VMSHostsNames
        print('Successfully imported the dictionary.')
        return self.__VMSHostsNames

    @property
    def chosenCluster(self):
        return self.__chosenCluster

    @chosenCluster.setter
    def chosenCluster(self, chosenCluster):
        if not isinstance(chosenCluster, str):
            raise TypeError(
                'You must pass a string containing the cluster name,'
                f' not {type(chosenCluster)}.'
            )
        else:
            pass
        self.__chosenCluster = chosenCluster
        return self.__chosenCluster
BIN
vrops-api/__pycache__/vropsAPI.cpython-37.pyc
Normal file
Binary file not shown.
BIN
vrops-api/__pycache__/vropsAPI.cpython-38.pyc
Normal file
Binary file not shown.
21
vrops-api/__todo/vrops.todo
Normal file
@@ -0,0 +1,21 @@
Documentation:
    ✔ Finish documenting attributes @created(19-10-20 07:06) @done (19-10-21 14:49)
    ✔ Change - to : in documentation for **kwargs @created(19-10-20 07:15) @done (19-10-20 20:53)
    ✔ Document how it can be run on a schedule, without having to query the hosts every time @created(19-10-21 03:44) @done (19-10-25 03:03)
    ✔ Tidy up documentation - with flow and clear examples on what to use and when and why. @created(19-10-21 14:49) @done (19-10-23 12:52)
    ✔ Document the attributes that are returned from the main methods, and ones that you can edit and see (totalvms, allvms etc.) @created(19-10-23 12:52) @done (19-10-25 03:03)
    ≡ Document the extra saving steps needed for vm/id information @created(19-10-26 18:49)
    ≡ Tidy up the documentation generally @created(19-10-26 18:49)
    ≡ Document how to send host metrics (need to change hostlist to hostidlist) @created(19-10-29 01:09)

Bugs:
    ✔ Fix single host query not having countVMSFiltered being set @created(19-10-20 07:06) @critical @bug @done (19-10-20 15:12)
    ✔ Allow the querying of VMs without a dict of their name/id beforehand; either allow the script to run without and have no name, or require a dict to be passed in that matches the name:id pulling back. @created(19-10-21 03:44) @bug @high @done (19-10-25 03:03)

Tasks:
    ✔ Check the time actually pulls back the range you give it @created(19-10-20 20:59) @done (19-10-20 22:04)

To Do:
    ✔ Add hosts and cluster to the results @created(19-10-25 03:34) @done (19-10-26 18:49)
    ✔ Add this functionality to the import method @created(19-10-25 03:34) @done (19-10-26 18:49)
    ✔ Add path option to savetodisk @created(19-10-25 03:35) @done (19-10-26 18:49)
11770
vrops-api/approach1-output-example.json
Normal file
File diff suppressed because it is too large
Load Diff
1180
vrops-api/approach1-vms-example.json
Normal file
File diff suppressed because it is too large
Load Diff
75
vrops-api/approach1.py
Normal file
@@ -0,0 +1,75 @@
import base64
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Get all clusters and store a list of Names:
vrops.getClusters()
vrops.getClusterIdentifiers()
allClustersList = vrops.getKeysList(vrops.allClusters)


# Print all these clusters
print(allClustersList)

# Get all hosts and store a list of Names:
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
vrops.getHostIdentifiers()
allHostsList = vrops.getKeysList(vrops.allHosts)


# Print all these hosts
print(allHostsList)


# Get all VMs and store a list of IDs:
vrops.getVMSFromHost(allHostsList)
vrops.getVMSIdentifiers()
allVMSIdList = vrops.getValuesList(vrops.allVMS)

# Save all VMs to disk
vrops.saveToDisk(vrops.allVMS, indent=4, filePrefix='approach1-vms')

# Save all VMs:Hosts to disk
vrops.saveToDisk(
    vrops.VMSHostsNames, indent=4, filePrefix='approach1-vms_hosts'
)


# # Add a single VM to a list to pull back
# myList = []
# myList.append(allVMSIdList[0])


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='5',
    rollUpType='AVG',
    resourceId=allVMSIdList,
    statKey=['cpu|usage_average', 'config|hardware|num_Cpu'],
    vropsType='virtualmachine',  # required by the getStatsFromVMS() signature
)


# Export the data into readable format
vrops.exportVMData()

# Save to disk
vrops.saveToDisk(
    vrops.export, indent=4, filePrefix='approach1-export', breakLine=True
)
5566
vrops-api/approach2-output-example.json
Normal file
File diff suppressed because it is too large
Load Diff
62
vrops-api/approach2.py
Normal file
@@ -0,0 +1,62 @@
import base64
import json
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Load VM dict
with open('approach1-vms-example.json', 'r') as vms:
    vrops.allVMS = json.load(vms)


# Get list of these VMS
allVMSIdList = vrops.getValuesList(vrops.allVMS)

# Load VM_Host dict
with open('approach1-vms_hosts-example.json', 'r') as vmsHosts:
    vrops.VMSHostsNames = json.load(vmsHosts)


# Set the cluster
vrops.chosenCluster = 'SC1PRCONTXWHCUXCCL01'


# # Add a single VM to a list to pull back (can add more VMs to this list if
# # needed)
# myList = []
# myList.append(list(vrops.allVMS.values())[100])


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='1',
    rollUpType='AVG',
    resourceId=allVMSIdList,
    statKey=['rescpu|actav1_latest'],
    vropsType='virtualmachine',  # required by the getStatsFromVMS() signature
)


# Export the data into readable format
vrops.exportVMData()

# Save to disk
vrops.saveToDisk(
    vrops.export, indent=4, filePrefix='approach2-export', breakLine=True
)
677
vrops-api/readme.md
Normal file
@@ -0,0 +1,677 @@
# VROPS API Module

_Author: Daniel Tomlinson_

_Team: Capacity & Monitoring_

_Date: October 2019_


See example.py for an example of the following methods.

**Requires**: python version 3.6+

For any bugs, create a GitLab issue and give a description of the error and sample code where necessary.

**To jump right in, clone the repo, install the dependencies and run `approach1.py` and `approach2.py` for a quick and easy demonstration.**

If you need VROPS api attributes (rollUpTypes, statKeys etc.) follow the links in [Get stats for VMs](#get-stats-for-vms).


<br/>

<!-- MarkdownTOC -->

- [Requirements](#requirements)
- [Modules](#modules)
- [Workflow](#workflow)
    * [Get stats for every VM in a cluster](#get-stats-for-every-vm-in-a-cluster)
        + [Approach 1: If you **do not** have a dictionary of name:id pairs for all VMs in a cluster.](#approach-1-if-you-do-not-have-a-dictionary-of-nameid-pairs-for-all-vms-in-a-cluster)
        + [Approach 2: If you have a dictionary of name:id pairs for all VMs from a previous run.](#approach-2-if-you-have-a-dictionary-of-nameid-pairs-for-all-vms-from-a-previous-run)
    * [Usage](#usage)
        + [Authenticate](#authenticate)
        + [Get all clusters](#get-all-clusters)
            - [Pull back clusters](#pull-back-clusters)
            - [Convert to list](#convert-to-list)
        + [Get all hosts](#get-all-hosts)
            - [Pull back hosts](#pull-back-hosts)
            - [Convert to list](#convert-to-list-1)
        + [Get VMs](#get-vms)
            - [Pull back VMs](#pull-back-vms)
            - [Convert to list](#convert-to-list-2)
            - [Save VM dictionary as json for import](#save-vm-dictionary-as-json-for-import)
            - [Load saved dictionary and import](#load-saved-dictionary-and-import)
            - [Load VM information](#load-vm-information)
            - [Set the VM dictionary](#set-the-vm-dictionary)
        + [Get stats for VMs](#get-stats-for-vms)
            - [Get results](#get-results)
            - [Save results to json file on disk](#save-results-to-json-file-on-disk)
        + [Get epoch time](#get-epoch-time)
            - [Get epoch time now](#get-epoch-time-now)
            - [Get epoch time relative to another time](#get-epoch-time-relative-to-another-time)

<!-- /MarkdownTOC -->

<br/>

## Requirements

See requirements.txt and install to a virtualenv:

```bash
python3 -m venv vrops-api
source vrops-api/bin/activate
pip install -r requirements.txt
```

## Modules

The current working directory has to be added to the system path for python to import a custom module.

```python
import sys
import os
sys.path.append(os.getcwd())
from vropsAPI import vropsAPI
```

## Workflow

The following documents how you can use the module to do common tasks with the VROPs api. We cover pulling VM metrics for each VM in a **cluster.**


### Get stats for every VM in a cluster

To pull back data from VMs you need two things:

1. A dictionary containing the name:id pairs for each VM.
2. A list of these id's to pass to the api call.

Step 1 is optional, but without it you won't have the name of the VM when you query the API.

The following approaches document how to use this module to accomplish this.

There are two approaches to do this using this module.

- The first approach is if you don't have a dictionary containing the name:id pairs for all VMs in the cluster. **Do this if you haven't run the script before.**
- The second approach can skip a lot of the steps and jump straight to querying the api using this dictionary of the name:id pairs from an earlier run. **Do this if you've saved the dictionary from a previous run.**

#### Approach 1: If you **do not** have a dictionary of name:id pairs for all VMs in a cluster.

*see ```approach1.py``` for an example of these steps*

1. Authenticate a session with VROPS api. - [Authenticate](#authenticate)
2. Get all cluster names and choose the one you require. - [Get all clusters](#get-all-clusters)
3. Get all hosts in this cluster + store these **names in a list**. - [Get all hosts](#get-all-hosts)
4. Get all VMs under these hosts and store the **names and id's in a dictionary.** - [Get VMs](#get-vms)
5. *(Optional) Dump this dictionary in a json file to be reused for future runs.* - [Save VM dictionary as json for import](#save-vm-dictionary-as-json-for-import)
6. Get data for these VMs by passing in a list of VM **id's.** - [Get stats for VMs](#get-stats-for-vms)
7. Save output to json. - [Save results to json file on disk](#save-results-to-json-file-on-disk)

#### Approach 2: If you have a dictionary of name:id pairs for all VMs from a previous run.

*see ```approach2.py``` for an example of these steps*

1. Authenticate a session with VROPS api. - [Authenticate](#authenticate)
2. Load this dictionary from a json file, and update the module's dictionary. - [Load saved dictionary and import](#load-saved-dictionary-and-import)
3. Get data for these VMs by passing in a list of VM **id's.** - [Get stats for VMs](#get-stats-for-vms)
4. Save output to json. - [Save results to json file on disk](#save-results-to-json-file-on-disk)

<br>

### Usage

#### Authenticate

Authenticates a session with VROPs and generates a token.

Definition:

```python
@classmethod
def authenticate(
    cls,
    vropsURL: str,
    username: str,
    authSource: str,
    password: str,
    verify: bool = True,
):
    ...
    return cls(vropsURL, authToken, verify)
```

Example:

```python
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)
```

#### Get all clusters

##### Pull back clusters

To get all clusters you need to do the following two methods: ```vropsAPI.getClusters()``` and ```vropsAPI.getClusterIdentifiers()```

You can then use the ```vropsAPI.getKeysList()``` method to get a list of names without the ids.

Definition:

```python
def getClusters(self):
    ...
    return self
```

```python
def getClusterIdentifiers(self):
    ...
    return self
```

Example:

```python
vropsAPI.getClusters()
vropsAPI.getClusterIdentifiers()
```

```python
print(vropsAPI.allClusters)
```

Attributes created:

- `vropsAPI.allClusters` - A dictionary containing the name:id pairs of all clusters.

Output:

```json
{
    "SC1PRINNOGEUXCCL01": "00276144-2eda-4f2b-9660-fa9f9cf3cd82",
    "BRSDRGEUX01": "037f8d5e-01d5-411b-afdc-c25868f04a27",
    "BRSDRCTXWN01": "05c2ba9a-d6d4-47aa-8fe5-855b40364625",
    "LD6-BRS-Hosts": "0612327c-d637-4e95-8782-97c97d1e99ed",
    "BRSPRPCMGEUXCCL01": "08b7d0fb-92ee-4cd9-ba7d-96f21965d7a4",
    "LD6DRGEUX01": "84f052a8-8aed-4efb-8c39-8bce0f4a3c54"
}
```

##### Convert to list

Optionally you can convert this to a list of names with

```python
allClustersList = vropsAPI.getKeysList(vropsAPI.allClusters)
print(allClustersList)
```

Output:

```python
[
    "SC1PRINNOGEUXCCL01",
    "BRSDRGEUX01",
    "BRSDRCTXWN01",
    "LD6-BRS-Hosts",
    "BRSPRPCMGEUXCCL01",
    "LD6DRGEUX01"
]
```

<br>

#### Get all hosts

##### Pull back hosts

To get all hosts for cluster ```SC1PRCONTXWHCUXCCL01``` you need to use the following two methods `getHostsFromCluster()` and `getHostIdentifiers()`:

Definition:

```python
def getHostsFromCluster(self, cluster='SC1PRCONTXWHCUXCCL01'):
    ...
    return self
```

```python
def getHostIdentifiers(self):
    ...
    return self
```

Attributes created:

- `vropsAPI.allHosts` - A dictionary containing the name:ID pairs for all hosts.

Example:

```python
vropsAPI.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
vropsAPI.getHostIdentifiers()
```

```python
print(vropsAPI.allHosts)
```

Output:

```json
{
    "sc1hsesx156.prod.williamhill.plc": "15b3ea0c-9f62-4fc2-93b8-d4281196043e",
    "sc1hsesx187.prod.williamhill.plc": "18ecb9d5-842f-4a4b-b43b-f8bbcdd54775",
    "sc1hsesx148.prod.williamhill.plc": "1bdf892a-121e-461e-8ef7-8dd174c4c01a",
    "sc1hsesx155.prod.williamhill.plc": "1ef01a7b-9e61-40b7-8d72-78363352fbfc"
}
```

##### Convert to list

You can convert this to a list of names with:

```python
allHostsList = vropsAPI.getKeysList(vropsAPI.allHosts)
print(allHostsList)
```

Output:

```python
[
    "sc1hsesx156.prod.williamhill.plc",
    "sc1hsesx187.prod.williamhill.plc",
    "sc1hsesx148.prod.williamhill.plc",
    "sc1hsesx155.prod.williamhill.plc",
    "sc1hsesx093.prod.williamhill.plc"
]
```

<br>

#### Get VMs

##### Pull back VMs

To get all VMs for a single host or a list of hosts (all hosts from a cluster) you need to use the following two methods `getVMSFromHost()` and `getVMSIdentifiers()`:

Definition:

```python
def getVMSFromHost(self, host: Union[str, list]):
    ...
    return self
```

```python
def getVMSIdentifiers(self):
    ...
    return self
```

Attributes created:

- `vropsAPI.allVMS` - A dictionary containing the name:ID pairs for all VMs.
- `vropsAPI.totalVMS` - An integer showing the total number of VMs.

Example (pulling back all VMs for all hosts in a cluster):

```python
vropsAPI.getVMSFromHost(allHostsList)
vropsAPI.getVMSIdentifiers()
```

Print total number of VMs

```python
print(vropsAPI.totalVMS)
```

Print VM IDs and names

```python
print(vropsAPI.allVMS)
```

Output:

```python
OrderedDict(
    [
        ('prdxinf21box001', '0981bfe1-b6ba-4a62-bfcc-39892fd038c6'),
        ('prdxcmr21ssb001', '3017e298-67e8-4a5f-bf10-f8c903e3d75f'),
        ('prdxtms14sln014', '35c5a694-2361-44dd-9e5e-893cea16119d'),
        ('prdxtms10sln014', '3b76b4b1-76b3-4fa7-a032-6448a60eded5'),
        ('prdxria01trd003', '458c11be-88ba-4301-aa32-3b748c92a47b')
    ]
)
```

##### Convert to list

**You will need a list of IDs, not names, to pull back data from the API**

You can convert this to a list of names with

```python
allVMSList = vropsAPI.getKeysList(vropsAPI.allVMS)
```

You can convert this to a list of IDs with

```python
allVMSIdList = vropsAPI.getValuesList(vropsAPI.allVMS)
```

##### Save VM dictionary as json for import

You can save this dictionary of VM names:ids for import in future runs.

```python
vropsAPI.saveToDisk(vropsAPI.allVMS, indent=4, filePrefix='vm-export')
```

##### Load saved dictionary and import

You can do the following if you already have a list of VMs for the cluster/hosts in question. This means you can import, and go straight to importing metrics, rather than querying the API for clusters/hosts each time.

Assuming file on disk is:

`vm-export-21-10-2019_09-12-53.json`

##### Load VM information

Load this .json into python as a dictionary:

```python
import json
with open('vm-export-21-10-2019_09-12-53.json', 'r') as file:
    myFile = json.load(file)
```

##### Set the VM dictionary

```python
vropsAPI.allVMS = myFile
```

You should see

```
Successfully imported the dictionary.
```

You now have the results from a previous run added to your instance.


<br>

#### Get stats for VMs

Uses POST to pull back data from VROPs for a list of VMs.

VROPs needs epoch time in ms format without the decimal (e.g. 1571607237000).

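For reference, this is the format `epochNow` and `epochRelativeTime` (documented below) produce; a standard-library-only sketch of the same value:

```python
import math
import time

# Epoch time in whole milliseconds, e.g. 1571607237000
print(math.ceil(time.time() * 1000))
```
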
You can use the following to pull data back for VMs:

Definition:

```python
def getStatsFromVMS(
    self,
    begin: int,
    end: int,
    intervalType: str,
    intervalQuantifier: str,
    rollUpType: str,
    resourceId: list,
    statKey: Union[str, list],
    vropsType: str,
):
    ...
    return self
```

Attributes:

- begin: epoch time for start
- end: epoch time for end
- intervalType: `<ns3:intervalType>` [see documentation](https://sc1prapvro01/suite-api/docs/rest/models.html#repr-1190589417)
- intervalQuantifier: int
- rollUpType: `<ns3:rollUpType>` [see documentation](https://sc1prapvro01/suite-api/docs/rest/models.html#repr-1735704374)
- resourceId: list of vrops resourceIds (**not Names!**)
- statKey: list containing vrops StatKey metrics [see documentation](https://docs.vmware.com/en/vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html)
- vropsType: the resource kind being queried, e.g. 'virtualmachine' or 'host'

##### Get results

Gives the 5 minute average for CPU usage % and the number of cores assigned over the past 10 minutes.

*For relative time, go back 11 minutes to ensure we have a complete 10 minute interval.*

**You need to pass in a list for resourceId. If you have a single VM this should be a list with one entry only.**

Example, passing in allVMSIdList from before:

```python
vropsAPI.getStatsFromVMS(
    begin=vropsAPI.epochRelativeTime(vropsAPI.epochNow, minutes=-11),
    end=vropsAPI.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='5',
    rollUpType='AVG',
    resourceId=list(allVMSIdList),
    statKey=['cpu|usage_average', 'config|hardware|num_Cpu'],
    vropsType='virtualmachine',
)
```

Attributes returned:

- `vropsAPI._vmsResources` - An OrderedDict containing the raw output from the VROPS api. You should use the method `vropsAPI.exportVMData()` for a more workable format.

Output:

prdxmes21zoo004 has 2 vCPUs and an average CPU utilisation of 1.2% for the past 10 minutes.

```json
{
    "values": [
        {
            "name": "prdxmes21zoo004",
            "resourceId": "528eb4db-813b-45a1-a456-ce5b44751da6",
            "stat-list": {
                "stat": [
                    {
                        "timestamps": [
                            1571608439999,
                            1571608739999
                        ],
                        "statKey": {
                            "key": "config|hardware|num_Cpu"
                        },
                        "rollUpType": "AVG",
                        "intervalUnit": {
                            "quantifier": 5,
                            "intervalType": "MINUTES"
                        },
                        "data": [
                            2.0,
                            2.0
                        ]
                    },
                    {
                        "timestamps": [
                            1571608439999,
                            1571608739999
                        ],
                        "statKey": {
                            "key": "cpu|usage_average"
                        },
                        "rollUpType": "AVG",
                        "intervalUnit": {
                            "quantifier": 5,
                            "intervalType": "MINUTES"
                        },
                        "data": [
                            1.218666672706604,
                            1.2406666278839111
                        ]
                    }
                ]
            }
        }
    ]
}
```

##### Save results to json file on disk

To save results to disk you should use `vropsAPI.exportVMData()` then use `vropsAPI.saveToDisk()`.

The method `vropsAPI.exportVMData()` will format the raw output from VROPS and create the attribute `vropsAPI.export`, which is a list that contains a json formatted string for each datapoint you requested. You can pass this list to ```vropsAPI.saveToDisk(breakLine=True)``` **(you should include `breakLine=True`)** which will save each item as a json formatted dictionary.

Definition:

```python
def exportVMData(self):
    ...
    return self
```

Attributes returned:

- `vropsAPI.export` - A python list where each entry is a json string containing the dictionary representation for each datapoint requested.


Example:

```python
vropsAPI.exportVMData()
vropsAPI.saveToDisk(
    vropsAPI.export, indent=4, filePrefix='SC1PRCONTXWHCUXCCL01', breakLine=True
)
```

Output:

```json
{
    "name": "prdxmes14sln010",
    "timestamp": 1571841899999,
    "value": 4.0,
    "statKey": "config|hardware|num_Cpu",
    "rollUpType": "AVG",
    "intervalUnit": 5
}
{
    "name": "prdxmes14sln010",
    "timestamp": 1571842199999,
    "value": 4.0,
    "statKey": "config|hardware|num_Cpu",
    "rollUpType": "AVG",
    "intervalUnit": 5
}
{
    "name": "prdxmes14sln010",
    "timestamp": 1571842499999,
    "value": 4.0,
    "statKey": "config|hardware|num_Cpu",
    "rollUpType": "AVG",
    "intervalUnit": 5
}
```

<br>

#### Get epoch time

The following method is built in to provide easy generation of epoch times. This functions similarly to Splunk's relative_time and time snapping.

##### Get epoch time now

- `vropsAPI.epochNow` - The following attribute is available instance-wide to get the current time in epoch, correctly formatted for the API.

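A minimal sketch of building a query window with it (assuming the authenticated `vrops` instance from the examples above):

```python
# Current time in the epoch-millisecond format the API expects.
end = vrops.epochNow

# Ten minutes earlier, using the relative-time helper documented below.
begin = vrops.epochRelativeTime(end, minutes=-10)

print(begin, end)
```
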
##### Get epoch time relative to another time

Get epoch time relative to another time:
1. Can go back N hours/minutes etc.
2. Can set the hour/minute etc. to a specified value (snapping)

Definition:

```python
def epochRelativeTime(
    epochTime: int,
    year: int = datetime.now().year,
    month: int = datetime.now().month,
    day: int = datetime.now().day,
    hour: int = datetime.now().hour,
    minute: int = datetime.now().minute,
    second: int = datetime.now().second,
    years: int = 0,
    months: int = 0,
    days: int = 0,
    hours: int = 0,
    minutes: int = 0,
    seconds: int = 0,
) -> int:
    ...
    return relativeTime
```

Attributes:

```
**kwargs:
    epochTime: epoch time for start

    year: int = datetime.now().year      # set year
    month: int = datetime.now().month    # set month
    day: int = datetime.now().day        # set day
    hour: int = datetime.now().hour      # set hour
    minute: int = datetime.now().minute  # set minute
    second: int = datetime.now().second  # set second

    years: int = 0    # go back/forward N years
    months: int = 0   # go back/forward N months
    days: int = 0     # go back/forward N days
    hours: int = 0    # go back/forward N hours
    minutes: int = 0  # go back/forward N minutes
    seconds: int = 0  # go back/forward N seconds
```

Examples:

Get epoch 5 minutes ago.

```python
vropsAPI.epochRelativeTime(vropsAPI.epochNow, minutes=-5)
```

Get epoch at start of current hour.

```python
vropsAPI.epochRelativeTime(
    vropsAPI.epochNow,
    hour=0,
    minute=0,
    second=0,
)
```

Get epoch 1 week ago at start of day.

```python
vropsAPI.epochRelativeTime(
    vropsAPI.epochNow,
    days=-7,
    hour=0,
    minute=0,
    second=0,
)
```

<br>
7
vrops-api/requirements.txt
Normal file
@@ -0,0 +1,7 @@
certifi==2019.9.11
chardet==3.0.4
idna==2.8
python-dateutil==2.8.0
requests==2.22.0
six==1.12.0
urllib3==1.25.6
75
vrops-api/test.py
Normal file
@@ -0,0 +1,75 @@
import base64
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Get all clusters and store a list of Names:
vrops.getClusters()
vrops.getClusterIdentifiers()
allClustersList = vrops.getKeysList(vrops.allClusters)


# Print all these clusters
print(allClustersList)

# Get all hosts and store a list of Names:
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
vrops.getHostIdentifiers()
allHostsList = vrops.getKeysList(vrops.allHosts)


# Print all these hosts
print(allHostsList)


# Get all VMs and store a list of IDs:
vrops.getVMSFromHost(allHostsList)
vrops.getVMSIdentifiers()
allVMSIdList = vrops.getValuesList(vrops.allVMS)

# Save all VMs to disk
vrops.saveToDisk(vrops.allVMS, indent=4, filePrefix='approach1-vms')

# Save all VMs:Hosts to disk
vrops.saveToDisk(
    vrops.VMSHostsNames, indent=4, filePrefix='approach1-vms_hosts'
)


# # Add a single VM to a list to pull back
# myList = []
# myList.append(allVMSIdList[0])


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='1',
    rollUpType='AVG',
    resourceId=allVMSIdList,
    statKey=['rescpu|actav1_latest'],
    vropsType='virtualmachine',  # required by the getStatsFromVMS() signature
)


# Export the data into readable format
vrops.exportVMData()

# Save to disk
vrops.saveToDisk(
    vrops.export, indent=4, filePrefix='approach1-export', breakLine=True
)
64
vrops-api/testingOutput.py
Normal file
@@ -0,0 +1,64 @@
import base64
import json
import os
import sys

sys.path.append(os.getcwd())
from vropsAPI import vropsAPI


# Authenticate:
vrops = vropsAPI.authenticate(
    'https://sc1prapvro01/',
    'svc_splunkVROPS@Group.WilliamHill.PLC',
    'whgroup',
    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),
    verify=False,
)


# Load VM dict
with open('approach1-vms-example.json', 'r') as vms:
    vrops.allVMS = json.load(vms)


# Get list of these VMS
allVMSIdList = vrops.getValuesList(vrops.allVMS)

# Load VM_Host dict
with open('approach1-vms_hosts-example.json', 'r') as vmsHosts:
    vrops.VMSHostsNames = json.load(vmsHosts)


# Set the cluster
vrops.chosenCluster = 'SC1PRCONTXWHCUXCCL01'


# # Add a single VM to a list to pull back (can add more VMs to this list if
# # needed)
# myList = []
# myList.append(list(vrops.allVMS.values())[100])


# Get data for a vm
vrops.getStatsFromVMS(
    begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
    end=vrops.epochNow,
    intervalType='MINUTES',
    intervalQuantifier='1',
    rollUpType='AVG',
    resourceId=allVMSIdList,
    statKey=['rescpu|actav1_latest'],
    vropsType='virtualmachine',  # required by the getStatsFromVMS() signature
)


# Export the data into readable format
vrops.exportVMData()


print(vrops.export)

# Save to disk
vrops.saveToDisk(
    vrops.export, indent=4, filePrefix='approach2-export', breakLine=True
)
661
vrops-api/vropsAPI.py
Normal file
@@ -0,0 +1,661 @@
import requests
import json
import warnings
from typing import Union
import copy
from datetime import datetime
from dateutil.relativedelta import relativedelta
import os
import math
import time
from collections import OrderedDict
import re


warnings.filterwarnings('ignore')


class vropsAPI(object):
|
||||||
|
"""Module for the vropsAPI for Capacity Management:
|
||||||
|
Author: Daniel Tomlinson
|
||||||
|
Team: Capacity & Monitoring
|
||||||
|
Date: October 2019
|
||||||
|
|
||||||
|
VROPS documentation: https://sc1prapvro01/suite-api/docs/rest/
|
||||||
|
index.html
|
||||||
|
StatKeys for VMS:
|
||||||
|
https://docs.vmware.com/en/vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
Authenticate a session:
|
||||||
|
vrops = vropsAPI.authenticate(
|
||||||
|
'https://sc1prapvro01/',
|
||||||
|
'username',
|
||||||
|
'authSource',
|
||||||
|
'password',
|
||||||
|
verify=False,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
Get all clusters:
|
||||||
|
Query VROPS for available clusters:
|
||||||
|
vrops.getClusters()
|
||||||
|
vrops.getClusterIdentifiers()
|
||||||
|
|
||||||
|
Get dict of all cluster IDs and cluster names:
|
||||||
|
allClustersDict = vrops.allClusters
|
||||||
|
|
||||||
|
Get list of all cluster names:
|
||||||
|
allClustersList = vrops.getList(vrops.allClusters)
|
||||||
|
|
||||||
|
|
||||||
|
Get all hosts:
|
||||||
|
Query VROPS for available hosts:
|
||||||
|
From a single cluster:
|
||||||
|
vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')
|
||||||
|
vrops.getHostIdentifiers()
|
||||||
|
From a list of clusters:
|
||||||
|
<Not implemented>
|
||||||
|
|
||||||
|
Get dict of all host IDs and host names:
|
||||||
|
allHostsDisct =vrops.allHosts
|
||||||
|
|
||||||
|
Get list of host names:
|
||||||
|
allHostsList = vrops.getList(vrops.allHosts)
|
||||||
|
|
||||||
|
|
||||||
|
Get all VMs:
|
||||||
|
Query VROPS for available VMs:
|
||||||
|
For a single host:
|
||||||
|
vrops.getVMSFromHost('sc1hsesx148.prod.williamhill.plc')
|
||||||
|
vrops.getVMSIdentifiers()
|
||||||
|
|
||||||
|
For a list of hosts:
|
||||||
|
vrops.getVMSFromHost(allHostsList)
|
||||||
|
vrops.getVMSIdentifiers()
|
||||||
|
|
||||||
|
Get dict of all VM IDs and VM names:
|
||||||
|
allVMSDict = vrops.allVMS
|
||||||
|
|
||||||
|
Get list of all VMs:
|
||||||
|
allVMSList = vrops.getList(vrops.allVMS)
|
||||||
|
|
||||||
|
|
||||||
|
Get epoch time relative to another time:
|
||||||
|
Similar to Splunks relative_time command:
|
||||||
|
1. Can go back N hours/minutes etc.
|
||||||
|
2. Can set the hour/minute etc. to a specified value (snapping)
|
||||||
|
|
||||||
|
vrops.epochRelativeTime(epochTime, **kwargs)
|
||||||
|
|
||||||
|
**kwargs:
|
||||||
|
epochTime: int - start time
|
||||||
|
|
||||||
|
year: int = datetime.now().year # set year
|
||||||
|
month: int = datetime.now().month # set month
|
||||||
|
day: int = datetime.now().day # set day
|
||||||
|
hour: int = datetime.now().hour # set hour
|
||||||
|
minute: int = datetime.now().minute # set minute
|
||||||
|
second: int = datetime.now().second # set second
|
||||||
|
|
||||||
|
years: int = 0 # go back/forward N years
|
||||||
|
months: int = 0 # go back/forward N months
|
||||||
|
days: int = 0 # go back/forward N days
|
||||||
|
hours: int = 0 # go back/forward N hours
|
||||||
|
minutes: int = 0 # go back/forward N minutes
|
||||||
|
seconds: int = 0 # go back/forward N seconds
|
||||||
|
Usage:
|
||||||
|
|
||||||
|
Get epoch 5 minutes ago:
|
||||||
|
vrops.epochRelativeTime(vrops.epochNow, minutes=-5)
|
||||||
|
|
||||||
|
Get epoch at start of current hour:
|
||||||
|
vrops.epochRelativeTime(
|
||||||
|
vrops.epochNow,
|
||||||
|
hour=0,
|
||||||
|
minute=0,
|
||||||
|
second=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
Get epoch 1 week ago at start of day:
|
||||||
|
vrops.epochRelativeTime(
|
||||||
|
vrops.epochNow,
|
||||||
|
days=-7
|
||||||
|
hour=0,
|
||||||
|
minute=0,
|
||||||
|
second=0,
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
    Get stats from VMs:

        Pull back results - last 30 minutes, 5 minute intervals, averaged,
        for CPU usage and CPU ready %:

            vrops.getStatsFromVMS(
                begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-30),
                end=vrops.epochNow,
                intervalType='MINUTES',
                intervalQuantifier='5',
                rollUpType='AVG',
                resourceId=list(vrops.allVMS.values()),
                statKey=['cpu|usage_average', 'cpu|readyPct'],
            )

        Attributes:

            intervalType: <ns3:intervalType> (see
                https://sc1prapvro01/suite-api/docs/rest/models.html#repr-1190589417)

            intervalQuantifier: int

            rollUpType: <ns3:rollUpType> (see
                https://sc1prapvro01/suite-api/docs/rest/models.html#repr-1735704374)

            resourceId: string or list of vrops resourceIds (not names)

            statKey: vrops api metrics (see
                https://docs.vmware.com/en/vRealize-Operations-Manager/6.7/com.vmware.vcom.metrics.doc/GUID-1322F5A4-DA1D-481F-BBEA-99B228E96AF2.html)

        Print results:

            for i in range(0, vrops.totalVMS):
                print(vrops._vmsResources['values'][i])

        Save to disk as json:

            vrops.saveToDisk(vrops._vmsResources)
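        Flatten results for export (one record per VM, stat key and
        timestamp), then save:

            vrops.exportVMData()
            vrops.saveToDisk(vrops.export, filePrefix='vms')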
    Attributes
    ----------
    vropsURL: str
        URL of the VROPS instance
        "https://sc1prapvro01/"
    """
    defaultHeaders = {
        'Accept': 'application/json',
        'Content-Type': 'application/json',
        'Cache-Control': 'no-cache',
        'Accept-Encoding': 'gzip, deflate',
        'Connection': 'keep-alive',
        'cache-control': 'no-cache',
    }

    def __init__(self, vropsURL: str, authToken: str, verify: bool = True):
        super(vropsAPI, self).__init__()
        self.vropsURL = vropsURL
        self.authToken = authToken
        self.verify = verify
        self.headers = vropsAPI.defaultHeaders
        self.headers['Authorization'] = f'vRealizeOpsToken {self.authToken}'
    @classmethod
    def authenticate(
        cls,
        vropsURL: str,
        username: str,
        authSource: str,
        password: str,
        verify: bool = True,
    ):
        vropsURLauth = vropsAPI.getVropsURL(vropsURL, 'authenticationURL')
        payload = {}
        for key, value in zip(
            ['username', 'authSource', 'password'],
            [username, authSource, password],
        ):
            payload[key] = value
        authToken = vropsAPI.getAuthenticationToken(
            vropsURLauth, payload, verify
        )
        return cls(vropsURL, authToken, verify)
    @staticmethod
    def getVropsURL(vropsURL: str, endpointKey: str) -> str:
        endpoints = {
            'authenticationURL': 'suite-api/api/auth/token/acquire',
            'resourcesURL': 'suite-api/api/resources',
            'statsURL': 'suite-api/api/resources/stats/query',
        }
        # Only append the endpoint when it is not already part of the URL.
        if endpoints[endpointKey] not in vropsURL:
            if vropsURL[-1] != '/':
                vropsURL = vropsURL + '/'
            vropsURL = vropsURL + endpoints[endpointKey]
        return vropsURL
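    # Illustrative sketch (hypothetical base URL, not part of the class):
    # getVropsURL('https://vrops.example', 'statsURL') returns
    # 'https://vrops.example/suite-api/api/resources/stats/query'; a URL that
    # already contains the endpoint is returned unchanged.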
    @staticmethod
    def pythonToJSON(pythonObject: any, indent=4) -> str:
        return json.dumps(pythonObject, indent=indent)

    @staticmethod
    def jsonToPython(jsonObject: str) -> any:
        return json.loads(jsonObject)
    @staticmethod
    def getAuthenticationToken(
        vropsURL: str, payload: dict, verify=True
    ) -> str:
        payload = vropsAPI.pythonToJSON(payload)
        vropsURL = vropsAPI.getVropsURL(vropsURL, 'authenticationURL')
        response = requests.request(
            'POST',
            vropsURL,
            data=payload,
            headers=vropsAPI.defaultHeaders,
            verify=verify,
        )
        print(response)
        authToken = vropsAPI.jsonToPython(response.text)['token']
        return authToken
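    # Sketch of the token flow (placeholder values): the acquire endpoint is
    # POSTed {'username': ..., 'authSource': ..., 'password': ...} and returns
    # JSON whose 'token' field is then sent on every later request as
    # 'Authorization: vRealizeOpsToken <token>'.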
    @staticmethod
    def getIdentifiers(
        identifierDict: dict,
        vropsJSON: dict,
        length: int,
        resourceKindKey: str,
    ) -> dict:
        for i in range(0, length):
            if (
                vropsJSON['resourceList'][i]['resourceKey']['resourceKindKey']
                == resourceKindKey
            ):
                identifierDict[
                    vropsJSON['resourceList'][i]['resourceKey']['name']
                ] = vropsJSON['resourceList'][i]['identifier']
        return identifierDict
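    # What getIdentifiers does, sketched on an assumed API response: given a
    # resourceList entry such as
    #   {'identifier': 'abc-123',
    #    'resourceKey': {'name': 'host01', 'resourceKindKey': 'HostSystem'}},
    # a call with resourceKindKey='HostSystem' adds {'host01': 'abc-123'} to
    # identifierDict and skips entries of any other resource kind.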
    @staticmethod
    def getKeysList(pythonDict: dict) -> list:
        pythonList = []
        for i in pythonDict.keys():
            pythonList.append(i)
        return pythonList

    @staticmethod
    def getValuesList(pythonDict: dict) -> list:
        pythonList = []
        for i in pythonDict.values():
            pythonList.append(i)
        return pythonList
    @staticmethod
    def epochRelativeTime(
        epochTime: int,
        year: int = None,
        month: int = None,
        day: int = None,
        hour: int = None,
        minute: int = None,
        second: int = None,
        years: int = 0,
        months: int = 0,
        days: int = 0,
        hours: int = 0,
        minutes: int = 0,
        seconds: int = 0,
    ) -> int:
        # Resolve the snapping defaults at call time (a default argument of
        # datetime.now() would be evaluated only once, at import).
        now = datetime.now()
        year = now.year if year is None else year
        month = now.month if month is None else month
        day = now.day if day is None else day
        hour = now.hour if hour is None else hour
        minute = now.minute if minute is None else minute
        second = now.second if second is None else second
        delta = relativedelta(
            year=year,
            month=month,
            day=day,
            hour=hour,
            minute=minute,
            second=second,
            years=years,
            months=months,
            days=days,
            hours=hours,
            minutes=minutes,
            seconds=seconds,
        )
        # VROPS timestamps are in milliseconds; 12+ digits means milliseconds.
        if len(str(epochTime)) >= 12:
            epochTime /= 1000
        relativeTime = datetime.fromtimestamp(epochTime) + delta
        relativeTime = math.ceil(relativeTime.timestamp() * 1000)
        return relativeTime
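    # Worked example (assumed input): epochRelativeTime(1600000000000,
    # minutes=-5) sees 13 digits, so divides by 1000 into seconds, snaps the
    # date/time fields to their defaults, shifts five minutes back, and
    # returns the result in milliseconds again.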
    def getClusters(self):
        queryString = {'resourceKind': 'ClusterComputeResource'}
        vropsURL = vropsAPI.getVropsURL(self.vropsURL, 'resourcesURL')
        response = requests.request(
            "GET",
            vropsURL,
            headers=self.headers,
            params=queryString,
            verify=self.verify,
        )
        print(response)
        self._clusterResources = vropsAPI.jsonToPython(response.text)
        return self
    def getClusterIdentifiers(self):
        self.totalClusters = len(self._clusterResources['resourceList'])
        self.allClusters = {}
        self.getIdentifiers(
            self.allClusters,
            self._clusterResources,
            self.totalClusters,
            'ClusterComputeResource',
        )
        return self
    def getHostsFromCluster(self, cluster='SC1PRCONTXWHCUXCCL01'):
        self.cluster = cluster
        url = (
            f'{self.vropsURL}/suite-api/api/resources/'
            f'{self.allClusters[cluster]}/relationships'
        )
        self.chosenCluster = cluster
        response = requests.request(
            "GET", url, headers=self.headers, verify=self.verify
        )
        print(response)
        self._hostResources = vropsAPI.jsonToPython(response.text)
        return self
    def getHostIdentifiers(self):
        self.totalHosts = len(self._hostResources['resourceList'])
        self.allHosts = {}
        self.getIdentifiers(
            self.allHosts, self._hostResources, self.totalHosts, 'HostSystem'
        )
        return self
    def getVMSFromHost(self, host: Union[str, list]):
        if isinstance(host, list):
            self.vmType = 'list'
            self._vmsResourcesRelationships = []
            self.urlList = []
            response = []
            for item in host:
                self.urlList.append(
                    (
                        f'{self.vropsURL}suite-api/api/resources/'
                        f'{self.allHosts[item]}/relationships'
                    )
                )
            for urlItem in self.urlList:
                response.append(
                    requests.request(
                        'GET',
                        urlItem,
                        headers=self.headers,
                        verify=self.verify,
                    )
                )
            print(response)
            for i in range(0, len(response)):
                self._vmsResourcesRelationships.append(
                    vropsAPI.jsonToPython(response[i].text)
                )
        if isinstance(host, str):
            hostToList = []
            hostToList.append(host)
            print(hostToList)
            return self.getVMSFromHost(host=hostToList)
        return self
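    # Note on the str branch above: a single host name is wrapped in a
    # one-element list and the method recurses, so a call such as
    # getVMSFromHost('sc1hsesx148.prod.williamhill.plc') follows the same
    # code path as a call with a list of hosts.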
    def _getHostInformation(self, i: int, j: int):
        pattern = r'(?:.*resources\/)([^\/]+)'
        vm = self._vmsResourcesRelationships[i]['resourceList'][j][
            'resourceKey'
        ]['name']
        host = self.urlList[i]
        match = re.findall(pattern, host)
        for key, value in self.allHosts.items():
            if match[0] == value:
                self.VMSHostsNames[vm] = key
        return self
    def getVMSIdentifiers(self):
        self.VMSHostsNames = {}
        self.allVMS = OrderedDict()
        if self.vmType == 'list':
            self.countVMS = []
            self.countVMSFiltered = []
            for i in range(0, len(self._vmsResourcesRelationships)):
                counter = 0
                for j in range(
                    0, len(self._vmsResourcesRelationships[i]['resourceList'])
                ):
                    if (
                        self._vmsResourcesRelationships[i]['resourceList'][j][
                            'resourceKey'
                        ]['resourceKindKey']
                    ) == 'VirtualMachine':
                        counter += 1
                        self._getHostInformation(i, j)
                self.countVMS.append(
                    len(self._vmsResourcesRelationships[i]['resourceList'])
                )
                self.countVMSFiltered.append(counter)
            for i in range(0, len(self._vmsResourcesRelationships)):
                self.getIdentifiers(
                    self.allVMS,
                    self._vmsResourcesRelationships[i],
                    self.countVMS[i],
                    'VirtualMachine',
                )
        if self.vmType == 'string':
            counter = 0
            self.countVMS = len(
                self._vmsResourcesRelationships['resourceList']
            )
            for j in range(0, self.countVMS):
                if (
                    self._vmsResourcesRelationships['resourceList'][j][
                        'resourceKey'
                    ]['resourceKindKey']
                ) == 'VirtualMachine':
                    counter += 1
            self.countVMSFiltered = counter
            self.getIdentifiers(
                self.allVMS,
                self._vmsResourcesRelationships,
                self.countVMS,
                'VirtualMachine',
            )
        return self
    def getStatsFromVMS(
        self,
        begin: int,
        end: int,
        intervalType: str,
        intervalQuantifier: str,
        rollUpType: str,
        resourceId: list,
        statKey: Union[str, list],
    ):
        # The method's own arguments double as the API request body.
        argList = copy.deepcopy(locals())
        del argList['self']
        vropsURL = self.getVropsURL(self.vropsURL, 'statsURL')
        payload = self.pythonToJSON(argList, indent=0)
        response = requests.request(
            'POST',
            vropsURL,
            headers=self.headers,
            data=payload,
            verify=self.verify,
        )
        self._vmsResources = OrderedDict(self.jsonToPython(response.text))
        # Enrich each result with the VM name, host and cluster, and move
        # those keys to the front of the record.
        for key, value in self.allVMS.items():
            for i in range(0, len(self._vmsResources['values'])):
                if self._vmsResources['values'][i]['resourceId'] == value:
                    self._vmsResources['values'][i] = OrderedDict(
                        self._vmsResources['values'][i]
                    )
                    self._vmsResources['values'][i]['name'] = key
                    self._vmsResources['values'][i][
                        'host'
                    ] = self.VMSHostsNames[key]
                    self._vmsResources['values'][i][
                        'cluster'
                    ] = self.chosenCluster
                    for item in ['cluster', 'host', 'name']:
                        self._vmsResources['values'][i].move_to_end(
                            item, last=False
                        )
        return self
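    # Sketch of the request body getStatsFromVMS builds from its own
    # arguments (example values are assumed):
    #   {"begin": 1600000000000, "end": 1600001800000,
    #    "intervalType": "MINUTES", "intervalQuantifier": "5",
    #    "rollUpType": "AVG", "resourceId": ["<vm-id>", ...],
    #    "statKey": ["cpu|usage_average", "cpu|readyPct"]}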
    @staticmethod
    def saveToDisk(
        pythonObject: any,
        path: str = os.getcwd(),
        filePrefix: str = '',
        type: str = 'json',
        indent: int = 4,
        breakLine: bool = False,
    ) -> None:
        timeNow = datetime.now().strftime('%d-%m-%Y_%H-%M-%S')
        fileName = f'{path}/{filePrefix}-{timeNow}.json'
        if breakLine:
            if not isinstance(pythonObject, list):
                raise TypeError(
                    'You must pass a list when using breakLine=True'
                )
            with open(fileName, 'a+') as outputFile:
                # json.dump writes straight to the file and returns None, so
                # write one JSON document per list item, each followed by a
                # newline, appending to any existing file.
                for item in pythonObject:
                    json.dump(item, outputFile, indent=indent)
                    outputFile.write('\n')
        else:
            with open(fileName, 'w+') as outputFile:
                json.dump(pythonObject, outputFile, indent=indent)
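    # Hypothetical usage sketch: saveToDisk(vrops.export, filePrefix='vms',
    # breakLine=True) appends one JSON document per exported record to
    # 'vms-<timestamp>.json' in the current working directory.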
    def exportVMData(self):
        self.export = []
        loopLength = len(self._vmsResources['values'])
        for i in range(0, loopLength):
            statKeyLength = len(
                self._vmsResources['values'][i]['stat-list']['stat']
            )
            timeLength = len(
                self._vmsResources['values'][i]['stat-list']['stat'][0][
                    'timestamps'
                ]
            )
            for k in range(0, statKeyLength):
                for j in range(0, timeLength):
                    stat = self._vmsResources['values'][i]['stat-list'][
                        'stat'
                    ][k]
                    self.export.append(
                        {
                            'name': self._vmsResources['values'][i]['name'],
                            'host': self._vmsResources['values'][i]['host'],
                            'cluster': self.chosenCluster,
                            'timestamp': str(
                                self._vmsResources['values'][i]['stat-list'][
                                    'stat'
                                ][0]['timestamps'][j]
                            ),
                            'value': str(stat['data'][j]),
                            'statKey': stat['statKey']['key'],
                            'rollUpType': stat['rollUpType'],
                            'intervalQuantifier': str(
                                stat['intervalUnit']['quantifier']
                            ),
                            'intervalType': str(
                                self._vmsResources['values'][i]['stat-list'][
                                    'stat'
                                ][0]['intervalUnit']['intervalType']
                            ),
                        }
                    )
        return self
    @property
    def totalVMS(self):
        if isinstance(self.countVMSFiltered, list):
            self.__totalVMS = sum(self.countVMSFiltered)
        elif isinstance(self.countVMSFiltered, int):
            self.__totalVMS = self.countVMSFiltered
        return self.__totalVMS

    @property
    def epochNow(self):
        self.__epochNow = math.ceil(time.time() * 1000)
        return self.__epochNow
    @property
    def epochToday(self):
        # Midnight today, in epoch milliseconds.
        now = datetime.now()
        midnight = now + relativedelta(
            hour=0, minute=0, second=0, microsecond=0
        )
        self.__epochToday = math.ceil(midnight.timestamp() * 1000)
        return self.__epochToday
    @property
    def allVMS(self):
        return self.__allVMS

    @allVMS.setter
    def allVMS(self, allVMS):
        if not isinstance(allVMS, dict):
            raise TypeError(
                'You must pass a dictionary with a key of the name'
                f' and a value of the VROPS ID, not {type(allVMS)}.'
            )
        self.__allVMS = allVMS
        print('Successfully imported the dictionary.')
    @property
    def VMSHostsNames(self):
        return self.__VMSHostsNames

    @VMSHostsNames.setter
    def VMSHostsNames(self, VMSHostsNames):
        if not isinstance(VMSHostsNames, dict):
            raise TypeError(
                'You must pass a dictionary with a key of the name'
                f' and a value of the VROPS ID, not {type(VMSHostsNames)}.'
            )
        self.__VMSHostsNames = VMSHostsNames
        print('Successfully imported the dictionary.')
    @property
    def chosenCluster(self):
        return self.__chosenCluster

    @chosenCluster.setter
    def chosenCluster(self, chosenCluster):
        if not isinstance(chosenCluster, str):
            raise TypeError(
                'You must pass a string containing the cluster name,'
                f' not {type(chosenCluster)}.'
            )
        self.__chosenCluster = chosenCluster
@@ -0,0 +1,6 @@
{
 "cells": [],
 "metadata": {},
 "nbformat": 4,
 "nbformat_minor": 2
}
316
vrops-api/workbooks/Untitled.ipynb
Normal file
@@ -0,0 +1,316 @@
{
 "cells": [
  {
   "cell_type": "code",
   "execution_count": 1,
   "metadata": {},
   "outputs": [],
   "source": [
    "import os\n",
    "import sys\n",
    "\n",
    "workingDir = '/Users/dtomlinson/OneDrive - William Hill Organisation Limited/Mac/projects/vrops-api'\n",
    "sys.path.append(workingDir)\n",
    "\n",
    "import json\n",
    "import base64\n",
    "from vropsAPI import vropsAPI"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 2,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<Response [200]>\n"
     ]
    }
   ],
   "source": [
    "vrops = vropsAPI.authenticate(\n",
    "    'https://sc1prapvro01/',\n",
    "    'svc_splunkVROPS@Group.WilliamHill.PLC',\n",
    "    'whgroup',\n",
    "    base64.b64decode(b'UmFjaW5nMjEyMg==').decode(),\n",
    "    verify=False,\n",
    ")"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 3,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<Response [200]>\n"
     ]
    }
   ],
   "source": [
    "vrops.getClusters()\n",
    "vrops.getClusterIdentifiers()\n",
    "allClustersList = vrops.getKeysList(vrops.allClusters)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 4,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['SC1PRINNOGEUXCCL01',\n",
       " 'BRSDRGEUX01',\n",
       " 'BRSDRCTXWN01',\n",
       " 'LD6-BRS-Hosts',\n",
       " 'BRSPRPCMGEUXCCL01',\n",
       " 'LD6DRGEUX01',\n",
       " 'BRSPRRTRGEWNCCL01',\n",
       " 'BRSPRTSRGEUXCCL01',\n",
       " 'LD6PTWHCUX01',\n",
       " 'Cluster-2',\n",
       " 'BRSPRDVOGEUXCCL01',\n",
       " 'SC1PRPCM01',\n",
       " 'SC1PRGEWN01',\n",
       " 'Performance-Test-Cluster',\n",
       " 'NJ2PRGE01',\n",
       " 'BRSPRCTXGEWNCCL01',\n",
       " 'SC1PRRETWN01',\n",
       " 'SC1PRMGMT01',\n",
       " 'BRSPRPCM01',\n",
       " 'AutoDeployHosts',\n",
       " 'GIBPRCONTXWHCUXCCL01',\n",
       " 'AutoDeployHosts-DVS',\n",
       " 'STJPRGEUX01',\n",
       " 'Cluster-1',\n",
       " 'SC1PRGEUX01',\n",
       " 'LD6PTGEUX01',\n",
       " 'MNLPRGE01',\n",
       " 'BRSPRSPLGEUXCCL01',\n",
       " 'LD6PTGEWN01',\n",
       " 'BRSTSRGEWN01',\n",
       " 'BRSPRCONTXWHCUXCCL01',\n",
       " 'BRSDRRETWN01',\n",
       " 'BRSPRMGMT01',\n",
       " 'BRSDVGEUX01',\n",
       " 'BRSTSGEWN01',\n",
       " 'BRSPRDROGEWNCCL01',\n",
       " 'BRSDRGEWN01',\n",
       " 'BRSPRTSOGEUXCCL01',\n",
       " 'GIBACITEST',\n",
       " 'LD6DRGEWN01',\n",
       " 'SC1PRCTXWN01',\n",
       " 'GIBPRGEWN01',\n",
       " 'STJPRSHGEUXCCL01',\n",
       " 'GIBPRSPUX01',\n",
       " 'LD6PRMGMT01',\n",
       " 'SC1PRSPLGEUXCCL01',\n",
       " 'SC1PRCONTXWHCUXCCL01',\n",
       " 'GIBPRMGMT01',\n",
       " 'SOFPRGE01',\n",
       " 'LD6DRWHCUX01',\n",
       " 'BRSPRTSRGEWNCCL01',\n",
       " 'BRSPRSHGEMGMTPRC01',\n",
       " 'GIBPRGEUX01',\n",
       " 'INDPRGE01',\n",
       " 'KRAPRGE01',\n",
       " 'BRSPRDRRGEWNCCL01',\n",
       " 'BRSPRDROGEUXCCL01',\n",
       " 'BRSPRTSOGEWNCCL01',\n",
       " 'WV1PRGE01',\n",
       " 'BRSTSGEUX01']"
      ]
     },
     "execution_count": 4,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "allClustersList"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 6,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<Response [200]>\n"
     ]
    }
   ],
   "source": [
    "vrops.getHostsFromCluster(cluster='SC1PRCONTXWHCUXCCL01')\n",
    "vrops.getHostIdentifiers()\n",
    "allHostsList = vrops.getKeysList(vrops.allHosts)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 7,
   "metadata": {},
   "outputs": [
    {
     "data": {
      "text/plain": [
       "['sc1hsesx156.prod.williamhill.plc',\n",
       " 'sc1hsesx187.prod.williamhill.plc',\n",
       " 'sc1hsesx148.prod.williamhill.plc',\n",
       " 'sc1hsesx155.prod.williamhill.plc',\n",
       " 'sc1hsesx214.prod.williamhill.plc',\n",
       " 'sc1hsesx093.prod.williamhill.plc',\n",
       " 'sc1hsesx145.prod.williamhill.plc',\n",
       " 'sc1hsesx150.prod.williamhill.plc',\n",
       " 'sc1hsesx136.prod.williamhill.plc',\n",
       " 'sc1hsesx209.prod.williamhill.plc',\n",
       " 'sc1hsesx212.prod.williamhill.plc',\n",
       " 'sc1hsesx175.prod.williamhill.plc',\n",
       " 'sc1hsesx162.prod.williamhill.plc',\n",
       " 'sc1hsesx158.prod.williamhill.plc',\n",
       " 'sc1hsesx140.prod.williamhill.plc',\n",
       " 'sc1hsesx126.prod.williamhill.plc',\n",
       " 'sc1hsesx142.prod.williamhill.plc',\n",
       " 'sc1hsesx208.prod.williamhill.plc',\n",
       " 'sc1hsesx202.prod.williamhill.plc',\n",
       " 'sc1hsesx204.prod.williamhill.plc',\n",
       " 'sc1hsesx182.prod.williamhill.plc',\n",
       " 'sc1hsesx127.prod.williamhill.plc',\n",
       " 'sc1hsesx133.prod.williamhill.plc',\n",
       " 'sc1hsesx152.prod.williamhill.plc',\n",
       " 'sc1hsesx185.prod.williamhill.plc',\n",
       " 'sc1hsesx154.prod.williamhill.plc',\n",
       " 'sc1hsesx121.prod.williamhill.plc',\n",
       " 'sc1hsesx205.prod.williamhill.plc',\n",
       " 'sc1hsesx135.prod.williamhill.plc',\n",
       " 'sc1hsesx138.prod.williamhill.plc',\n",
       " 'sc1hsesx159.prod.williamhill.plc',\n",
       " 'sc1hsesx149.prod.williamhill.plc',\n",
       " 'sc1hsesx147.prod.williamhill.plc',\n",
       " 'sc1hsesx091.prod.williamhill.plc',\n",
       " 'sc1hsesx161.prod.williamhill.plc',\n",
       " 'sc1hsesx146.prod.williamhill.plc',\n",
       " 'sc1hsesx203.prod.williamhill.plc',\n",
       " 'sc1hsesx143.prod.williamhill.plc',\n",
       " 'sc1hsesx132.prod.williamhill.plc',\n",
       " 'sc1hsesx157.prod.williamhill.plc',\n",
       " 'sc1hsesx139.prod.williamhill.plc',\n",
       " 'sc1hsesx160.prod.williamhill.plc',\n",
       " 'sc1hsesx120.prod.williamhill.plc',\n",
       " 'sc1hsesx207.prod.williamhill.plc',\n",
       " 'sc1hsesx095.prod.williamhill.plc',\n",
       " 'sc1hsesx151.prod.williamhill.plc',\n",
       " 'sc1hsesx200.prod.williamhill.plc',\n",
       " 'sc1hsesx092.prod.williamhill.plc',\n",
       " 'sc1hsesx094.prod.williamhill.plc',\n",
       " 'sc1hsesx186.prod.williamhill.plc',\n",
       " 'sc1hsesx090.prod.williamhill.plc',\n",
       " 'sc1hsesx206.prod.williamhill.plc',\n",
       " 'sc1hsesx210.prod.williamhill.plc',\n",
       " 'sc1hsesx141.prod.williamhill.plc',\n",
       " 'sc1hsesx131.prod.williamhill.plc',\n",
       " 'sc1hsesx213.prod.williamhill.plc',\n",
       " 'sc1hsesx137.prod.williamhill.plc',\n",
       " 'sc1hsesx211.prod.williamhill.plc',\n",
       " 'sc1hsesx130.prod.williamhill.plc',\n",
       " 'sc1hsesx134.prod.williamhill.plc',\n",
       " 'sc1hsesx144.prod.williamhill.plc',\n",
       " 'sc1hsesx153.prod.williamhill.plc',\n",
       " 'sc1hsesx201.prod.williamhill.plc']"
      ]
     },
     "execution_count": 7,
     "metadata": {},
     "output_type": "execute_result"
    }
   ],
   "source": [
    "allHostsList"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": 17,
   "metadata": {},
   "outputs": [
    {
     "name": "stdout",
     "output_type": "stream",
     "text": [
      "<Response [200]>\n",
      "Successfully imported the dictionary.\n",
      "<Response [200]>\n"
     ]
    }
   ],
   "source": [
    "for host in allHostsList:\n",
    "    vrops.getVMSFromHost(host)\n",
    "    vrops.getVMSIdentifiers()\n",
    "    allVMSIdList = vrops.getValuesList(vrops.allVMS)\n",
    "    allVMSNameList = vrops.getKeysList(vrops.allVMS)\n",
    "    \n",
    "    vrops.getStatsFromVMS(\n",
    "        begin=vrops.epochRelativeTime(vrops.epochNow, minutes=-11),\n",
    "        end=vrops.epochNow,\n",
    "        intervalType='MINUTES',\n",
    "        intervalQuantifier='5',\n",
    "        rollUpType='AVG',\n",
    "        resourceId=allVMSIdList,\n",
    "        statKey=['cpu|usage_average', 'config|hardware|num_Cpu'],\n",
    "    )\n",
    "    \n",
    "    vrops.exportVMData()\n",
    "\n",
    "    vrops.export\n",
    "    break"
   ]
  },
  {
   "cell_type": "raw",
   "metadata": {},
   "source": [
    "print(vrops.export)"
   ]
  },
  {
   "cell_type": "code",
   "execution_count": null,
   "metadata": {},
   "outputs": [],
   "source": []
  }
 ],
 "metadata": {
  "kernelspec": {
   "display_name": "Python 3",
   "language": "python",
   "name": "python3"
  },
  "language_info": {
   "codemirror_mode": {
    "name": "ipython",
    "version": 3
   },
   "file_extension": ".py",
   "mimetype": "text/x-python",
   "name": "python",
   "nbconvert_exporter": "python",
   "pygments_lexer": "ipython3",
   "version": "3.7.3"
  }
 },
 "nbformat": 4,
 "nbformat_minor": 2
}