| column | dtype |
|---|---|
| hexsha | string |
| size | int64 |
| ext | string |
| lang | string |
| max_stars_repo_path | string |
| max_stars_repo_name | string |
| max_stars_repo_head_hexsha | string |
| max_stars_repo_licenses | list |
| max_stars_count | int64 |
| max_stars_repo_stars_event_min_datetime | string |
| max_stars_repo_stars_event_max_datetime | string |
| max_issues_repo_path | string |
| max_issues_repo_name | string |
| max_issues_repo_head_hexsha | string |
| max_issues_repo_licenses | list |
| max_issues_count | int64 |
| max_issues_repo_issues_event_min_datetime | string |
| max_issues_repo_issues_event_max_datetime | string |
| max_forks_repo_path | string |
| max_forks_repo_name | string |
| max_forks_repo_head_hexsha | string |
| max_forks_repo_licenses | list |
| max_forks_count | int64 |
| max_forks_repo_forks_event_min_datetime | string |
| max_forks_repo_forks_event_max_datetime | string |
| content | string |
| avg_line_length | float64 |
| max_line_length | int64 |
| alphanum_fraction | float64 |
| qsc_code_num_words_quality_signal | int64 |
| qsc_code_num_chars_quality_signal | float64 |
| qsc_code_mean_word_length_quality_signal | float64 |
| qsc_code_frac_words_unique_quality_signal | float64 |
| qsc_code_frac_chars_top_2grams_quality_signal | float64 |
| qsc_code_frac_chars_top_3grams_quality_signal | float64 |
| qsc_code_frac_chars_top_4grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_5grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_6grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_7grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_8grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_9grams_quality_signal | float64 |
| qsc_code_frac_chars_dupe_10grams_quality_signal | float64 |
| qsc_code_frac_chars_replacement_symbols_quality_signal | float64 |
| qsc_code_frac_chars_digital_quality_signal | float64 |
| qsc_code_frac_chars_whitespace_quality_signal | float64 |
| qsc_code_size_file_byte_quality_signal | float64 |
| qsc_code_num_lines_quality_signal | float64 |
| qsc_code_num_chars_line_max_quality_signal | float64 |
| qsc_code_num_chars_line_mean_quality_signal | float64 |
| qsc_code_frac_chars_alphabet_quality_signal | float64 |
| qsc_code_frac_chars_comments_quality_signal | float64 |
| qsc_code_cate_xml_start_quality_signal | float64 |
| qsc_code_frac_lines_dupe_lines_quality_signal | float64 |
| qsc_code_cate_autogen_quality_signal | float64 |
| qsc_code_frac_lines_long_string_quality_signal | float64 |
| qsc_code_frac_chars_string_length_quality_signal | float64 |
| qsc_code_frac_chars_long_word_length_quality_signal | float64 |
| qsc_code_frac_lines_string_concat_quality_signal | float64 |
| qsc_code_cate_encoded_data_quality_signal | float64 |
| qsc_code_frac_chars_hex_words_quality_signal | float64 |
| qsc_code_frac_lines_prompt_comments_quality_signal | float64 |
| qsc_code_frac_lines_assert_quality_signal | float64 |
| qsc_codepython_cate_ast_quality_signal | float64 |
| qsc_codepython_frac_lines_func_ratio_quality_signal | float64 |
| qsc_codepython_cate_var_zero_quality_signal | bool |
| qsc_codepython_frac_lines_pass_quality_signal | float64 |
| qsc_codepython_frac_lines_import_quality_signal | float64 |
| qsc_codepython_frac_lines_simplefunc_quality_signal | float64 |
| qsc_codepython_score_lines_no_logic_quality_signal | float64 |
| qsc_codepython_frac_lines_print_quality_signal | float64 |
| qsc_code_num_words | int64 |
| qsc_code_num_chars | int64 |
| qsc_code_mean_word_length | int64 |
| qsc_code_frac_words_unique | null |
| qsc_code_frac_chars_top_2grams | int64 |
| qsc_code_frac_chars_top_3grams | int64 |
| qsc_code_frac_chars_top_4grams | int64 |
| qsc_code_frac_chars_dupe_5grams | int64 |
| qsc_code_frac_chars_dupe_6grams | int64 |
| qsc_code_frac_chars_dupe_7grams | int64 |
| qsc_code_frac_chars_dupe_8grams | int64 |
| qsc_code_frac_chars_dupe_9grams | int64 |
| qsc_code_frac_chars_dupe_10grams | int64 |
| qsc_code_frac_chars_replacement_symbols | int64 |
| qsc_code_frac_chars_digital | int64 |
| qsc_code_frac_chars_whitespace | int64 |
| qsc_code_size_file_byte | int64 |
| qsc_code_num_lines | int64 |
| qsc_code_num_chars_line_max | int64 |
| qsc_code_num_chars_line_mean | int64 |
| qsc_code_frac_chars_alphabet | int64 |
| qsc_code_frac_chars_comments | int64 |
| qsc_code_cate_xml_start | int64 |
| qsc_code_frac_lines_dupe_lines | int64 |
| qsc_code_cate_autogen | int64 |
| qsc_code_frac_lines_long_string | int64 |
| qsc_code_frac_chars_string_length | int64 |
| qsc_code_frac_chars_long_word_length | int64 |
| qsc_code_frac_lines_string_concat | null |
| qsc_code_cate_encoded_data | int64 |
| qsc_code_frac_chars_hex_words | int64 |
| qsc_code_frac_lines_prompt_comments | int64 |
| qsc_code_frac_lines_assert | int64 |
| qsc_codepython_cate_ast | int64 |
| qsc_codepython_frac_lines_func_ratio | int64 |
| qsc_codepython_cate_var_zero | int64 |
| qsc_codepython_frac_lines_pass | int64 |
| qsc_codepython_frac_lines_import | int64 |
| qsc_codepython_frac_lines_simplefunc | int64 |
| qsc_codepython_score_lines_no_logic | int64 |
| qsc_codepython_frac_lines_print | int64 |
| effective | string |
| hits | int64 |
76dead288194d6b5e50fd079f21d614687299cb8
| 1,085
|
py
|
Python
|
src/lennybot/model/plan.py
|
raynigon/lenny-bot
|
d906a25dc28d9102829d3d6265d300f65406db02
|
[
"Apache-2.0"
] | 1
|
2021-12-15T14:03:54.000Z
|
2021-12-15T14:03:54.000Z
|
src/lennybot/model/plan.py
|
raynigon/lenny-bot
|
d906a25dc28d9102829d3d6265d300f65406db02
|
[
"Apache-2.0"
] | 1
|
2021-12-15T14:02:57.000Z
|
2021-12-15T17:44:26.000Z
|
src/lennybot/model/plan.py
|
raynigon/lennybot
|
79bee9a834f885a0da2484b239cf6efaf9cb9e4e
|
[
"Apache-2.0"
] | null | null | null |
from typing import Any, List
from ..actions.iaction import IAction
from ..model.state import LennyBotState
class LennyBotPlan:
def __init__(self, state: LennyBotState, actions: List[IAction]) -> None:
self._state = state
self._actions = actions
@property
def applications(self) -> List[str]:
result = []
for action in self._actions:
result.append(action.application)
return list(set(result))
@property
def actions(self) -> List[IAction]:
return self._actions
@property
def state(self) -> LennyBotState:
return self._state
def source_version(self, application: str) -> str:
for action in self._actions:
if action.application != application:
continue
return action.source_version
return None
def target_version(self, application: str) -> str:
for action in self._actions:
if action.application != application:
continue
return action.target_version
return None
| 25.232558
| 77
| 0.621198
| 115
| 1,085
| 5.730435
| 0.278261
| 0.08346
| 0.050076
| 0.068285
| 0.336874
| 0.30349
| 0.30349
| 0.30349
| 0.30349
| 0.30349
| 0
| 0
| 0.297696
| 1,085
| 42
| 78
| 25.833333
| 0.864829
| 0
| 0
| 0.387097
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.193548
| false
| 0
| 0.096774
| 0.064516
| 0.548387
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
76dfdcc4b341cedf794e7489e27908f2ae58e24b
| 10,024
|
py
|
Python
|
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
mllib/nlp/seq2seq.py
|
pmaxit/dlnotebooks
|
5e5a161bbd9d0753850029be29e1488b8858ecd5
|
[
"Apache-2.0"
] | null | null | null |
# AUTOGENERATED! DO NOT EDIT! File to edit: nbs/01_seq2seq.ipynb (unless otherwise specified).
__all__ = ['Encoder', 'NewDecoder', 'Seq2Seq']
# Cell
from torch import nn
from torch import optim
import torch
import torch.nn.functional as F
from torch.nn.utils.rnn import pack_padded_sequence, pad_packed_sequence
# Cell
class Encoder(nn.Module):
def __init__(self, input_size, embedding_size, hidden_size, num_layers=2, p=0.1):
super(Encoder, self).__init__()
self.hidden_size = hidden_size
self.num_layers = num_layers
self.dropout = nn.Dropout(p)
self.embedding = nn.Embedding(input_size, embedding_size)
self.rnn = nn.LSTM(embedding_size, hidden_size, num_layers, dropout=p,batch_first=False)
def forward(self, x, x_len):
# x shape (seq_length, N)
embedding = self.dropout(self.embedding(x))
# embedding shape : (seq_length, N, embedding_size)
x_packed = pack_padded_sequence(embedding, x_len.cpu(), batch_first=False, enforce_sorted=False)
output_packed, (hidden,cell) = self.rnn(x_packed)
# irrelevant because we are interested only in hidden state
#output_padded, output_lengths = pad_packed_sequence(output_packed, batch_first=True)
# output is irrelevant, context vector is important
return hidden,cell
# Cell
class NewDecoder(nn.Module):
def __init__(self, hidden_size, embedding_size, output_size, n_layers=1, dropout_p=0.1):
super(NewDecoder, self).__init__()
# Define parameters
self.hidden_size = hidden_size
self.output_size = output_size
self.n_layers =n_layers
self.dropout_p = dropout_p
# Define layers
self.embedding = nn.Embedding(output_size, embedding_size)
self.dropout=nn.Dropout(dropout_p)
self.rnn = nn.LSTM(embedding_size, hidden_size, n_layers, dropout=dropout_p, batch_first=False)
self.out = nn.Linear(hidden_size, output_size)
def forward(self, word_input, last_hidden, encoder_outputs):
# Note that we will only be running forward for a single decoder time step, but will
# use all encoder outputs
word_input = word_input.unsqueeze(0)
# we are not using encoder_outputs here
word_embedded = self.embedding(word_input) # 1 X B
word_embedded = self.dropout(word_embedded) # 1 X B X emb_length
# Combine embedded input word and hidden vector, run through RNN
output, hidden = self.rnn(word_embedded, last_hidden) # 1 X B X hidden
predictions = self.out(output) # 1, B, out
#output = F.log_softmax(predictions)
return predictions, hidden
# Cell
import random
from argparse import ArgumentParser  # needed by Seq2Seq.add_model_specific_args below
import pytorch_lightning as pl
import pytorch_lightning.metrics.functional as plfunc
from pytorch_lightning.loggers import TensorBoardLogger
# Cell
class Seq2Seq(pl.LightningModule):
""" Encoder decoder pytorch lightning module for training seq2seq model with teacher forcing
The module tries to learn a mapping from one sequence to another
"""
@staticmethod
def add_model_specific_args(parent_parser):
parser = ArgumentParser(parents=[parent_parser], add_help=False)
parser.add_argument("--emb_dim", type=int, default=32)
parser.add_argument('--hidden_dim', type=int, default=64)
parser.add_argument('--dropout', type=float, default=0.1)
return parser
def __init__(self,
input_vocab_size,
output_vocab_size,
padding_index = 0,
emb_dim = 8,
hidden_dim=32,
dropout=0.1,
max_length=20,
**kwargs):
super().__init__()
# dynamic, based on tokenizer vocab size defined in datamodule
self.input_dim = input_vocab_size
self.output_dim = output_vocab_size
self.enc_emb_dim = emb_dim
self.dec_emb_dim = emb_dim
self.enc_hid_dim = hidden_dim
self.dec_hid_dim = hidden_dim
self.enc_dropout = dropout
self.dec_dropout = dropout
self.pad_idx = padding_index
self.num_layers = 2
self.max_length =10
self.save_hyperparameters()
self.max_epochs= kwargs.get('max_epochs',5)
self.learning_rate = 0.0005
self._loss = nn.CrossEntropyLoss(ignore_index=self.pad_idx)
self.encoder = Encoder(
self.input_dim,
self.enc_emb_dim,
self.enc_hid_dim,
self.num_layers,
self.enc_dropout
)
self.decoder = NewDecoder(
self.enc_hid_dim,
self.dec_emb_dim,
self.output_dim,
self.num_layers,
self.dec_dropout
)
self._init_weights()
def _init_weights(self):
for name, param in self.named_parameters():
if "weight" in name:
nn.init.normal_(param.data, mean=0, std=0.01)
else:
nn.init.constant_(param.data, 0)
def create_mask(self, src):
mask = (src != self.pad_idx).permute(1, 0)
return mask
def forward(self, src_seq, source_len, trg_seq, teacher_force_ratio=0.5):
"""
teacher_force_ratio is used to help in decoding.
At the start, the original target token will be sent as the input token
"""
source = src_seq.transpose(0, 1)
target_len = self.max_length
if trg_seq is not None:
target = trg_seq.transpose(0, 1)
target_len = target.shape[0]
batch_size = source.shape[1]
target_vocab_size = self.output_dim
outputs = torch.zeros(target_len, batch_size, target_vocab_size).to(self.device)
encoder_hidden = self.encoder(source, source_len)
# mask = [batch_size, src len]
# without sos token at the beginning and eos token at the end
#x = target[0,:]
decoder_input = torch.ones(batch_size).long().to(self.device)
decoder_hidden = encoder_hidden
encoder_outputs = None
for t in range(target_len):
decoder_output, decoder_hidden = self.decoder(decoder_input, decoder_hidden, encoder_outputs)
outputs[t] = decoder_output
#(N, english_vocab_size)
#best_guess = output.argmax(1)
topv, topi = decoder_output.topk(1)
decoder_input = topi.squeeze().detach()
decoder_input = target[t] if trg_seq is not None and random.random() < teacher_force_ratio else decoder_input
return outputs
def loss(self, logits, target):
return self._loss(logits, target)
def configure_optimizers(self):
optimizer = optim.AdamW(self.parameters(), lr=self.learning_rate)
lr_scheduler = {
'scheduler': optim.lr_scheduler.OneCycleLR(
optimizer,
max_lr = self.learning_rate,
steps_per_epoch = 3379,
epochs=self.max_epochs,
anneal_strategy='linear',
final_div_factor=1000,
pct_start = 0.01
),
"name": "learning_rate",
"interval":"step",
"frequency": 1
}
return [optimizer],[lr_scheduler]
def training_step(self, batch, batch_idx):
src_seq, trg_seq, src_lengths = batch['src'],batch['trg'], batch['src_len']
output = self.forward(src_seq, src_lengths,trg_seq)
# do not know if this is a problem, loss will be computed with sos token
# without sos token at the beginning and eos token at the end
output = output.view(-1, self.output_dim)
trg_seq = trg_seq.transpose(0, 1)
trg = trg_seq.reshape(-1)
loss = self.loss(output, trg)
self.log('train_loss',loss.item(),
on_step = True,
on_epoch=True,
prog_bar = True,
logger=True)
return loss
def validation_step(self, batch,batch_idx):
""" validation is in eval model so we do not have to use placeholder input sensors"""
src_seq, trg_seq, src_lengths = batch['src'],batch['trg'], batch['src_len']
outputs = self.forward(src_seq, src_lengths, trg_seq, 0)
logits = outputs[1:].view(-1, self.output_dim)
trg = trg_seq[1:].reshape(-1)
loss = self.loss(logits, trg)
pred_seq = outputs[1:].argmax(2) # seq_len*batch_size*vocab_size -> seq_len * batch_size
# change layout: seq_len * batch_size -> batch_size * seq_len
pred_seq = pred_seq.T
# change layout: seq_len * batch_size -> batch_size * seq_len
trg_batch = trg_seq[1:].T
# compare list of predicted ids for all sequences in a batch to targets
acc = plfunc.accuracy(pred_seq.reshape(-1), trg_batch.reshape(-1))
# need to cast to list of predicted sequences ( as list of token ids ) [ seq_tok1, seqtok2]
predicted_ids = pred_seq.tolist()
# need to add an additional dim to each target reference sequence in order to
# convert to the format needed by the bleu_score function
# [seq1=[[reference1],[reference2]], seq2=[reference1]]
target_ids = torch.unsqueeze(trg_batch, 1).tolist()
bleu_score = plfunc.nlp.bleu_score(predicted_ids, target_ids, n_gram=3).to(self.device)
self.log(
'val_loss',
loss,
on_step=False,
on_epoch=True,
prog_bar=True,
logger=True,
sync_dist=True)
self.log(
"val_acc",
acc,
on_step=False,
on_epoch=True,
prog_bar=True,
logger=True,
sync_dist=True
)
self.log(
"val_bleu_idx",
bleu_score,
on_step=False,
on_epoch=True,
prog_bar=True,
logger=True,
sync_dist=True
)
return loss, acc, bleu_score
| 32.025559
| 120
| 0.621409
| 1,303
| 10,024
| 4.537222
| 0.23561
| 0.012179
| 0.01184
| 0.010149
| 0.207037
| 0.139716
| 0.101996
| 0.092862
| 0.064107
| 0.064107
| 0
| 0.013053
| 0.289206
| 10,024
| 312
| 121
| 32.128205
| 0.816702
| 0.189346
| 0
| 0.142105
| 1
| 0
| 0.023146
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.068421
| false
| 0
| 0.047368
| 0.005263
| 0.178947
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
76e58be1ebfa1f5a2978f0298b22ab49d27824a1
| 386
|
py
|
Python
|
initdb.py
|
dasmerlon/flunky-bot
|
19dff5a74bee6685e806f98c3f877216ef454a5d
|
[
"MIT"
] | null | null | null |
initdb.py
|
dasmerlon/flunky-bot
|
19dff5a74bee6685e806f98c3f877216ef454a5d
|
[
"MIT"
] | null | null | null |
initdb.py
|
dasmerlon/flunky-bot
|
19dff5a74bee6685e806f98c3f877216ef454a5d
|
[
"MIT"
] | null | null | null |
#!/bin/env python
"""Drop and create a new database with schema."""
from sqlalchemy_utils.functions import database_exists, create_database, drop_database
from flunkybot.db import engine, base
from flunkybot.models import * # noqa
db_url = engine.url
if database_exists(db_url):
drop_database(db_url)
create_database(db_url)
base.metadata.drop_all()
base.metadata.create_all()
| 22.705882
| 86
| 0.790155
| 58
| 386
| 5.034483
| 0.465517
| 0.068493
| 0.089041
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.11658
| 386
| 16
| 87
| 24.125
| 0.856305
| 0.168394
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.333333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
76ebcd294c425806f2a19ba5ab050dfad80e8987
| 826
|
py
|
Python
|
trabalho-numerico/tridimensional.py
|
heissonwillen/tcm
|
71da46489f12e64b50436b17447721cb8f7eaf09
|
[
"MIT"
] | null | null | null |
trabalho-numerico/tridimensional.py
|
heissonwillen/tcm
|
71da46489f12e64b50436b17447721cb8f7eaf09
|
[
"MIT"
] | null | null | null |
trabalho-numerico/tridimensional.py
|
heissonwillen/tcm
|
71da46489f12e64b50436b17447721cb8f7eaf09
|
[
"MIT"
] | null | null | null |
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
from matplotlib import cm
import numpy as np
import os
import contorno
from constantes import INTERVALOS, PASSOS, TAMANHO_BARRA, DELTA_T, DELTA_X
z_temp = contorno.p_3
TAMANHO_BARRA = 2
x = np.linspace(0.0, TAMANHO_BARRA, INTERVALOS+1)
y = np.linspace(0.0, DELTA_T, PASSOS+1)
z = []
for k in range(PASSOS+1):
z_k = np.copy(z_temp)
z.append(z_k)
for i in range(1, INTERVALOS):
z_temp[i] = z_k[i] + (DELTA_T/(DELTA_X**2)) * (z_k[i+1]-2*z_k[i]+z_k[i-1])
z = np.asarray(z)
x, y = np.meshgrid(x, y)
fig = plt.figure()
ax = fig.gca(projection='3d')
surf = ax.plot_surface(x, y, z, cmap=cm.coolwarm, antialiased=False)
ax.set_xlabel('x')
ax.set_ylabel('t')
ax.set_zlabel('T(x,t)')
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
| 22.944444
| 82
| 0.692494
| 160
| 826
| 3.43125
| 0.39375
| 0.021858
| 0.021858
| 0.043716
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.02845
| 0.14891
| 826
| 35
| 83
| 23.6
| 0.752489
| 0
| 0
| 0
| 0
| 0
| 0.012107
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.111111
| 0.259259
| 0
| 0.259259
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
76f0f94143a86c5bd1bdfebcc7fe3a026073720d
| 860
|
py
|
Python
|
SVM/SVM_12_Quiz.py
|
rohit517/Intro-to-machine-learning-Udacity
|
d0b2cc6cac1cb3408b274225cecd4afcea4ee30f
|
[
"MIT"
] | null | null | null |
SVM/SVM_12_Quiz.py
|
rohit517/Intro-to-machine-learning-Udacity
|
d0b2cc6cac1cb3408b274225cecd4afcea4ee30f
|
[
"MIT"
] | null | null | null |
SVM/SVM_12_Quiz.py
|
rohit517/Intro-to-machine-learning-Udacity
|
d0b2cc6cac1cb3408b274225cecd4afcea4ee30f
|
[
"MIT"
] | null | null | null |
import sys
from class_vis import prettyPicture
from prep_terrain_data import makeTerrainData
import matplotlib.pyplot as plt
import copy
import numpy as np
import pylab as pl
features_train, labels_train, features_test, labels_test = makeTerrainData()
########################## SVM #################################
### we handle the import statement and SVC creation for you here
from sklearn.svm import SVC
clf = SVC(kernel="linear")
#### now your job is to fit the classifier
#### using the training features/labels, and to
#### make a set of predictions on the test data
clf.fit(features_train,labels_train)
pred = clf.predict(features_test)
#### store your predictions in a list named pred
from sklearn.metrics import accuracy_score
acc = accuracy_score(pred, labels_test)
def submitAccuracy():
return acc
| 25.294118
| 77
| 0.696512
| 117
| 860
| 5.008547
| 0.564103
| 0.044369
| 0.064846
| 0.081911
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.175581
| 860
| 33
| 78
| 26.060606
| 0.826516
| 0.27093
| 0
| 0
| 0
| 0
| 0.01165
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.0625
| false
| 0
| 0.5625
| 0.0625
| 0.6875
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
76f2637d428beecc1c55ba4761f8ecce6c4c4884
| 26,267
|
py
|
Python
|
runtime/python/Lib/site-packages/isort/output.py
|
hwaipy/InteractionFreeNode
|
88642b68430f57b028fd0f276a5709f89279e30d
|
[
"MIT"
] | 4
|
2021-10-20T12:39:09.000Z
|
2022-02-26T15:02:08.000Z
|
runtime/python/Lib/site-packages/isort/output.py
|
hwaipy/InteractionFreeNode
|
88642b68430f57b028fd0f276a5709f89279e30d
|
[
"MIT"
] | 20
|
2021-05-03T18:02:23.000Z
|
2022-03-12T12:01:04.000Z
|
runtime/python/Lib/site-packages/isort/output.py
|
hwaipy/InteractionFreeNode
|
88642b68430f57b028fd0f276a5709f89279e30d
|
[
"MIT"
] | 3
|
2021-08-28T14:22:36.000Z
|
2021-10-06T18:59:41.000Z
|
import copy
import itertools
from functools import partial
from typing import Any, Iterable, List, Optional, Set, Tuple, Type
from isort.format import format_simplified
from . import parse, sorting, wrap
from .comments import add_to_line as with_comments
from .identify import STATEMENT_DECLARATIONS
from .settings import DEFAULT_CONFIG, Config
def sorted_imports(
parsed: parse.ParsedContent,
config: Config = DEFAULT_CONFIG,
extension: str = "py",
import_type: str = "import",
) -> str:
"""Adds the imports back to the file.
(at the index of the first import) sorted alphabetically and split between groups
"""
if parsed.import_index == -1:
return _output_as_string(parsed.lines_without_imports, parsed.line_separator)
formatted_output: List[str] = parsed.lines_without_imports.copy()
remove_imports = [format_simplified(removal) for removal in config.remove_imports]
sections: Iterable[str] = itertools.chain(parsed.sections, config.forced_separate)
if config.no_sections:
parsed.imports["no_sections"] = {"straight": {}, "from": {}}
base_sections: Tuple[str, ...] = ()
for section in sections:
if section == "FUTURE":
base_sections = ("FUTURE",)
continue
parsed.imports["no_sections"]["straight"].update(
parsed.imports[section].get("straight", {})
)
parsed.imports["no_sections"]["from"].update(parsed.imports[section].get("from", {}))
sections = base_sections + ("no_sections",)
output: List[str] = []
seen_headings: Set[str] = set()
pending_lines_before = False
for section in sections:
straight_modules = parsed.imports[section]["straight"]
if not config.only_sections:
straight_modules = sorting.sort(
config,
straight_modules,
key=lambda key: sorting.module_key(
key, config, section_name=section, straight_import=True
),
reverse=config.reverse_sort,
)
from_modules = parsed.imports[section]["from"]
if not config.only_sections:
from_modules = sorting.sort(
config,
from_modules,
key=lambda key: sorting.module_key(key, config, section_name=section),
reverse=config.reverse_sort,
)
if config.star_first:
star_modules = []
other_modules = []
for module in from_modules:
if "*" in parsed.imports[section]["from"][module]:
star_modules.append(module)
else:
other_modules.append(module)
from_modules = star_modules + other_modules
straight_imports = _with_straight_imports(
parsed, config, straight_modules, section, remove_imports, import_type
)
from_imports = _with_from_imports(
parsed, config, from_modules, section, remove_imports, import_type
)
lines_between = [""] * (
config.lines_between_types if from_modules and straight_modules else 0
)
if config.from_first:
section_output = from_imports + lines_between + straight_imports
else:
section_output = straight_imports + lines_between + from_imports
if config.force_sort_within_sections:
# collapse comments
comments_above = []
new_section_output: List[str] = []
for line in section_output:
if not line:
continue
if line.startswith("#"):
comments_above.append(line)
elif comments_above:
new_section_output.append(_LineWithComments(line, comments_above))
comments_above = []
else:
new_section_output.append(line)
# only_sections options is not imposed if force_sort_within_sections is True
new_section_output = sorting.sort(
config,
new_section_output,
key=partial(sorting.section_key, config=config),
reverse=config.reverse_sort,
)
# uncollapse comments
section_output = []
for line in new_section_output:
comments = getattr(line, "comments", ())
if comments:
section_output.extend(comments)
section_output.append(str(line))
section_name = section
no_lines_before = section_name in config.no_lines_before
if section_output:
if section_name in parsed.place_imports:
parsed.place_imports[section_name] = section_output
continue
section_title = config.import_headings.get(section_name.lower(), "")
if section_title and section_title not in seen_headings:
if config.dedup_headings:
seen_headings.add(section_title)
section_comment = f"# {section_title}"
if section_comment not in parsed.lines_without_imports[0:1]: # pragma: no branch
section_output.insert(0, section_comment)
if pending_lines_before or not no_lines_before:
output += [""] * config.lines_between_sections
output += section_output
pending_lines_before = False
else:
pending_lines_before = pending_lines_before or not no_lines_before
if config.ensure_newline_before_comments:
output = _ensure_newline_before_comment(output)
while output and output[-1].strip() == "":
output.pop() # pragma: no cover
while output and output[0].strip() == "":
output.pop(0)
if config.formatting_function:
output = config.formatting_function(
parsed.line_separator.join(output), extension, config
).splitlines()
output_at = 0
if parsed.import_index < parsed.original_line_count:
output_at = parsed.import_index
formatted_output[output_at:0] = output
if output:
imports_tail = output_at + len(output)
while [
character.strip() for character in formatted_output[imports_tail : imports_tail + 1]
] == [""]:
formatted_output.pop(imports_tail)
if len(formatted_output) > imports_tail:
next_construct = ""
tail = formatted_output[imports_tail:]
for index, line in enumerate(tail): # pragma: no branch
should_skip, in_quote, *_ = parse.skip_line(
line,
in_quote="",
index=len(formatted_output),
section_comments=config.section_comments,
needs_import=False,
)
if not should_skip and line.strip():
if (
line.strip().startswith("#")
and len(tail) > (index + 1)
and tail[index + 1].strip()
):
continue
next_construct = line
break
if in_quote: # pragma: no branch
next_construct = line
break
if config.lines_after_imports != -1:
formatted_output[imports_tail:0] = [
"" for line in range(config.lines_after_imports)
]
elif extension != "pyi" and next_construct.startswith(STATEMENT_DECLARATIONS):
formatted_output[imports_tail:0] = ["", ""]
else:
formatted_output[imports_tail:0] = [""]
if parsed.place_imports:
new_out_lines = []
for index, line in enumerate(formatted_output):
new_out_lines.append(line)
if line in parsed.import_placements:
new_out_lines.extend(parsed.place_imports[parsed.import_placements[line]])
if (
len(formatted_output) <= (index + 1)
or formatted_output[index + 1].strip() != ""
):
new_out_lines.append("")
formatted_output = new_out_lines
return _output_as_string(formatted_output, parsed.line_separator)
def _with_from_imports(
parsed: parse.ParsedContent,
config: Config,
from_modules: Iterable[str],
section: str,
remove_imports: List[str],
import_type: str,
) -> List[str]:
output: List[str] = []
for module in from_modules:
if module in remove_imports:
continue
import_start = f"from {module} {import_type} "
from_imports = list(parsed.imports[section]["from"][module])
if (
not config.no_inline_sort
or (config.force_single_line and module not in config.single_line_exclusions)
) and not config.only_sections:
from_imports = sorting.sort(
config,
from_imports,
key=lambda key: sorting.module_key(
key,
config,
True,
config.force_alphabetical_sort_within_sections,
section_name=section,
),
reverse=config.reverse_sort,
)
if remove_imports:
from_imports = [
line for line in from_imports if f"{module}.{line}" not in remove_imports
]
sub_modules = [f"{module}.{from_import}" for from_import in from_imports]
as_imports = {
from_import: [
f"{from_import} as {as_module}" for as_module in parsed.as_map["from"][sub_module]
]
for from_import, sub_module in zip(from_imports, sub_modules)
if sub_module in parsed.as_map["from"]
}
if config.combine_as_imports and not ("*" in from_imports and config.combine_star):
if not config.no_inline_sort:
for as_import in as_imports:
if not config.only_sections:
as_imports[as_import] = sorting.sort(config, as_imports[as_import])
for from_import in copy.copy(from_imports):
if from_import in as_imports:
idx = from_imports.index(from_import)
if parsed.imports[section]["from"][module][from_import]:
from_imports[(idx + 1) : (idx + 1)] = as_imports.pop(from_import)
else:
from_imports[idx : (idx + 1)] = as_imports.pop(from_import)
only_show_as_imports = False
comments = parsed.categorized_comments["from"].pop(module, ())
above_comments = parsed.categorized_comments["above"]["from"].pop(module, None)
while from_imports:
if above_comments:
output.extend(above_comments)
above_comments = None
if "*" in from_imports and config.combine_star:
import_statement = wrap.line(
with_comments(
_with_star_comments(parsed, module, list(comments or ())),
f"{import_start}*",
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
),
parsed.line_separator,
config,
)
from_imports = [
from_import for from_import in from_imports if from_import in as_imports
]
only_show_as_imports = True
elif config.force_single_line and module not in config.single_line_exclusions:
import_statement = ""
while from_imports:
from_import = from_imports.pop(0)
single_import_line = with_comments(
comments,
import_start + from_import,
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
comment = (
parsed.categorized_comments["nested"].get(module, {}).pop(from_import, None)
)
if comment:
single_import_line += (
f"{comments and ';' or config.comment_prefix} " f"{comment}"
)
if from_import in as_imports:
if (
parsed.imports[section]["from"][module][from_import]
and not only_show_as_imports
):
output.append(
wrap.line(single_import_line, parsed.line_separator, config)
)
from_comments = parsed.categorized_comments["straight"].get(
f"{module}.{from_import}"
)
if not config.only_sections:
output.extend(
with_comments(
from_comments,
wrap.line(
import_start + as_import, parsed.line_separator, config
),
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
for as_import in sorting.sort(config, as_imports[from_import])
)
else:
output.extend(
with_comments(
from_comments,
wrap.line(
import_start + as_import, parsed.line_separator, config
),
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
for as_import in as_imports[from_import]
)
else:
output.append(wrap.line(single_import_line, parsed.line_separator, config))
comments = None
else:
while from_imports and from_imports[0] in as_imports:
from_import = from_imports.pop(0)
if not config.only_sections:
as_imports[from_import] = sorting.sort(config, as_imports[from_import])
from_comments = (
parsed.categorized_comments["straight"].get(f"{module}.{from_import}") or []
)
if (
parsed.imports[section]["from"][module][from_import]
and not only_show_as_imports
):
specific_comment = (
parsed.categorized_comments["nested"]
.get(module, {})
.pop(from_import, None)
)
if specific_comment:
from_comments.append(specific_comment)
output.append(
wrap.line(
with_comments(
from_comments,
import_start + from_import,
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
),
parsed.line_separator,
config,
)
)
from_comments = []
for as_import in as_imports[from_import]:
specific_comment = (
parsed.categorized_comments["nested"]
.get(module, {})
.pop(as_import, None)
)
if specific_comment:
from_comments.append(specific_comment)
output.append(
wrap.line(
with_comments(
from_comments,
import_start + as_import,
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
),
parsed.line_separator,
config,
)
)
from_comments = []
if "*" in from_imports:
output.append(
with_comments(
_with_star_comments(parsed, module, []),
f"{import_start}*",
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
)
from_imports.remove("*")
for from_import in copy.copy(from_imports):
comment = (
parsed.categorized_comments["nested"].get(module, {}).pop(from_import, None)
)
if comment:
from_imports.remove(from_import)
if from_imports:
use_comments = []
else:
use_comments = comments
comments = None
single_import_line = with_comments(
use_comments,
import_start + from_import,
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
single_import_line += (
f"{use_comments and ';' or config.comment_prefix} " f"{comment}"
)
output.append(wrap.line(single_import_line, parsed.line_separator, config))
from_import_section = []
while from_imports and (
from_imports[0] not in as_imports
or (
config.combine_as_imports
and parsed.imports[section]["from"][module][from_import]
)
):
from_import_section.append(from_imports.pop(0))
if config.combine_as_imports:
comments = (comments or []) + list(
parsed.categorized_comments["from"].pop(f"{module}.__combined_as__", ())
)
import_statement = with_comments(
comments,
import_start + (", ").join(from_import_section),
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
if not from_import_section:
import_statement = ""
do_multiline_reformat = False
force_grid_wrap = config.force_grid_wrap
if force_grid_wrap and len(from_import_section) >= force_grid_wrap:
do_multiline_reformat = True
if len(import_statement) > config.line_length and len(from_import_section) > 1:
do_multiline_reformat = True
# If line too long AND have imports AND we are
# NOT using GRID or VERTICAL wrap modes
if (
len(import_statement) > config.line_length
and len(from_import_section) > 0
and config.multi_line_output
not in (wrap.Modes.GRID, wrap.Modes.VERTICAL) # type: ignore
):
do_multiline_reformat = True
if do_multiline_reformat:
import_statement = wrap.import_statement(
import_start=import_start,
from_imports=from_import_section,
comments=comments,
line_separator=parsed.line_separator,
config=config,
)
if config.multi_line_output == wrap.Modes.GRID: # type: ignore
other_import_statement = wrap.import_statement(
import_start=import_start,
from_imports=from_import_section,
comments=comments,
line_separator=parsed.line_separator,
config=config,
multi_line_output=wrap.Modes.VERTICAL_GRID, # type: ignore
)
if (
max(
len(import_line)
for import_line in import_statement.split(parsed.line_separator)
)
> config.line_length
):
import_statement = other_import_statement
if not do_multiline_reformat and len(import_statement) > config.line_length:
import_statement = wrap.line(import_statement, parsed.line_separator, config)
if import_statement:
output.append(import_statement)
return output
def _with_straight_imports(
parsed: parse.ParsedContent,
config: Config,
straight_modules: Iterable[str],
section: str,
remove_imports: List[str],
import_type: str,
) -> List[str]:
output: List[str] = []
as_imports = any((module in parsed.as_map["straight"] for module in straight_modules))
# combine_straight_imports only works for bare imports, 'as' imports not included
if config.combine_straight_imports and not as_imports:
if not straight_modules:
return []
above_comments: List[str] = []
inline_comments: List[str] = []
for module in straight_modules:
if module in parsed.categorized_comments["above"]["straight"]:
above_comments.extend(parsed.categorized_comments["above"]["straight"].pop(module))
if module in parsed.categorized_comments["straight"]:
inline_comments.extend(parsed.categorized_comments["straight"][module])
combined_straight_imports = ", ".join(straight_modules)
if inline_comments:
combined_inline_comments = " ".join(inline_comments)
else:
combined_inline_comments = ""
output.extend(above_comments)
if combined_inline_comments:
output.append(
f"{import_type} {combined_straight_imports} # {combined_inline_comments}"
)
else:
output.append(f"{import_type} {combined_straight_imports}")
return output
for module in straight_modules:
if module in remove_imports:
continue
import_definition = []
if module in parsed.as_map["straight"]:
if parsed.imports[section]["straight"][module]:
import_definition.append((f"{import_type} {module}", module))
import_definition.extend(
(f"{import_type} {module} as {as_import}", f"{module} as {as_import}")
for as_import in parsed.as_map["straight"][module]
)
else:
import_definition.append((f"{import_type} {module}", module))
comments_above = parsed.categorized_comments["above"]["straight"].pop(module, None)
if comments_above:
output.extend(comments_above)
output.extend(
with_comments(
parsed.categorized_comments["straight"].get(imodule),
idef,
removed=config.ignore_comments,
comment_prefix=config.comment_prefix,
)
for idef, imodule in import_definition
)
return output
def _output_as_string(lines: List[str], line_separator: str) -> str:
return line_separator.join(_normalize_empty_lines(lines))
def _normalize_empty_lines(lines: List[str]) -> List[str]:
while lines and lines[-1].strip() == "":
lines.pop(-1)
lines.append("")
return lines
class _LineWithComments(str):
comments: List[str]
def __new__(
cls: Type["_LineWithComments"], value: Any, comments: List[str]
) -> "_LineWithComments":
instance = super().__new__(cls, value)
instance.comments = comments
return instance
def _ensure_newline_before_comment(output: List[str]) -> List[str]:
new_output: List[str] = []
def is_comment(line: Optional[str]) -> bool:
return line.startswith("#") if line else False
for line, prev_line in zip(output, [None] + output): # type: ignore
if is_comment(line) and prev_line != "" and not is_comment(prev_line):
new_output.append("")
new_output.append(line)
return new_output
def _with_star_comments(parsed: parse.ParsedContent, module: str, comments: List[str]) -> List[str]:
star_comment = parsed.categorized_comments["nested"].get(module, {}).pop("*", None)
if star_comment:
return comments + [star_comment]
return comments
| 40.91433
| 100
| 0.516199
| 2,439
| 26,267
| 5.272653
| 0.082411
| 0.034992
| 0.031104
| 0.023328
| 0.477916
| 0.384059
| 0.331882
| 0.288802
| 0.214075
| 0.204355
| 0
| 0.001991
| 0.407317
| 26,267
| 641
| 101
| 40.978159
| 0.824062
| 0.019682
| 0
| 0.377495
| 0
| 0
| 0.034796
| 0.00832
| 0
| 0
| 0
| 0
| 0
| 1
| 0.016334
| false
| 0
| 0.30127
| 0.00363
| 0.344828
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
76f6512f7d0f9be2b22c77b6be1aa4a85a8c2498
| 1,530
|
py
|
Python
|
utils/setAddress.py
|
wedvjin/rs485-moist-sensor
|
90930a34d0e6ec977f6083e70cc4bd931d7453fb
|
[
"Apache-2.0"
] | 1
|
2019-03-04T13:24:42.000Z
|
2019-03-04T13:24:42.000Z
|
utils/setAddress.py
|
wedvjin/rs485-moist-sensor
|
90930a34d0e6ec977f6083e70cc4bd931d7453fb
|
[
"Apache-2.0"
] | null | null | null |
utils/setAddress.py
|
wedvjin/rs485-moist-sensor
|
90930a34d0e6ec977f6083e70cc4bd931d7453fb
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/python
"""Looks for sensor on the bus and changes it's address to the one specified on command line"""
import argparse
import minimalmodbus
import serial
from time import sleep
parser = argparse.ArgumentParser()
parser.add_argument('address', metavar='ADDR', type=int, choices=range(1, 248), help='An address to set')
args = parser.parse_args()
ADDRESS1 = 1
ADDRESS2 = args.address
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
minimalmodbus.PARITY=serial.PARITY_NONE
minimalmodbus.STOPBITS = 2
minimalmodbus.BAUDRATE=19200
minimalmodbus.CLOSE_PORT_AFTER_EACH_CALL = True
def scanModbus():
for i in range(1, 248):
try:
print('Trying address: ' + str(i))
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=i)
addressRead = sensor.read_register(0, functioncode=3)
if(i == addressRead):
print('FOUND!')
return (True, i)
except (IOError):
print("nope...")
pass
return (False, 0)
# sensor.debug=True
(found, i) = scanModbus()
if found:
print('Found sensor at address: ' + str(i))
try:
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=i)
print("writing new address: " + str(ADDRESS2))
sensor.write_register(0, value=ADDRESS2, functioncode=6)
sleep(0.2)
sensor = minimalmodbus.Instrument('/dev/ttyUSB5', slaveaddress=ADDRESS2)
print("reading address from holding register: ")
print(sensor.read_register(0, functioncode=3))
except:
print "Could not change the address. Check your connections"
else:
print('No sensor on the bus found')
| 27.321429
| 105
| 0.733333
| 207
| 1,530
| 5.352657
| 0.483092
| 0.027076
| 0.07852
| 0.086643
| 0.268051
| 0.268051
| 0.16426
| 0
| 0
| 0
| 0
| 0.02439
| 0.142484
| 1,530
| 56
| 106
| 27.321429
| 0.820122
| 0.022222
| 0
| 0.142857
| 0
| 0
| 0.182857
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.02381
| 0.095238
| null | null | 0.214286
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
76f7e1b302002b518c986240747a14b0f7bf282f
| 4,291
|
py
|
Python
|
src/manifest.py
|
silent1mezzo/lightsaber
|
e470be7fb84b810fe846ff0ede78d06bf69cd5e3
|
[
"MIT"
] | 13
|
2020-08-12T12:04:19.000Z
|
2022-03-12T03:53:07.000Z
|
src/manifest.py
|
silent1mezzo/lightsaber
|
e470be7fb84b810fe846ff0ede78d06bf69cd5e3
|
[
"MIT"
] | 46
|
2020-09-03T06:00:18.000Z
|
2022-03-25T10:03:53.000Z
|
src/manifest.py
|
silent1mezzo/lightsaber
|
e470be7fb84b810fe846ff0ede78d06bf69cd5e3
|
[
"MIT"
] | 3
|
2021-08-11T19:12:37.000Z
|
2021-11-09T15:19:59.000Z
|
MANIFEST = {
"hilt": {
"h1": {
"offsets": {"blade": 0, "button": {"x": (8, 9), "y": (110, 111)}},
"colours": {
"primary": (216, 216, 216), # d8d8d8
"secondary": (141, 141, 141), # 8d8d8d
"tertiary": (180, 97, 19), # b46113
},
"length": 24,
"materials": "Alloy metal/Salvaged materials",
},
"h2": {
"offsets": {"blade": 20, "button": {"x": (8, 8), "y": (100, 105)}},
"colours": {
"primary": (112, 112, 112), # 707070
"secondary": (0, 0, 0), # 000000
"tertiary": (212, 175, 55), # 000000
},
"length": 24,
"materials": "Alloy metal and carbon composite",
},
"h3": {
"offsets": {"blade": 0, "button": {"x": (10, 10), "y": (100, 118)}},
"colours": {
"primary": (157, 157, 157), # 707070
"secondary": (0, 0, 0), # 000000
"tertiary": (180, 97, 19), # b46113
},
"length": 24,
"materials": "Alloy metal",
},
"h4": {
"offsets": {"blade": 7, "button": {"x": (8, 9), "y": (92, 100)}},
"colours": {
"primary": (0, 0, 0), # 000000
"secondary": (157, 157, 157), # 9d9d9d
"tertiary": (180, 97, 19), # b46113
},
"length": 13,
"materials": "Alloy metal",
},
"h5": {
"offsets": {"blade": 0, "button": {"x": (8, 8), "y": (92, 105)}},
"colours": {
"primary": (111, 111, 111), # 6f6f6f
"secondary": (0, 0, 0), # 000000
"tertiary": (180, 97, 19), # b46113
},
"length": 24,
"materials": "Alloy metal",
},
"h6": {
"offsets": {"blade": 2, "button": {"x": (8, 9), "y": (112, 113)}},
"colours": {
"primary": (120, 120, 120), # 787878
"secondary": (0, 0, 0), # 000000
"tertiary": (180, 97, 19), # b46113
},
"length": 22,
"materials": "Alloy metal/Salvaged materials",
},
"h7": {
"offsets": {"blade": 0, "button": {"x": (8, 9), "y": (105, 113)}},
"colours": {
"primary": (192, 192, 192), # c0c0c0
"secondary": (255, 215, 0), # ffd700
"tertiary": (0, 0, 0), # 000000
},
"length": 22,
"materials": "Alloy metal and Gold",
},
"h8": {
"offsets": {"blade": 0, "button": {"x": (8, 9), "y": (100, 111)}},
"colours": {
"primary": (216, 216, 216), # d8d8d8
"secondary": (180, 97, 19), # b46113
"tertiary": (0, 0, 0), # 000000
},
"length": 24,
"materials": "Alloy metal/Copper",
},
},
"blade": {
"b1": {"colour": "Red", "crystal": "Adegan crystal", "type": "Sith"},
"b2": {"colour": "Blue", "crystal": "Zophis crystal", "type": "Jedi"},
"b3": {"colour": "Green", "crystal": "Nishalorite stone", "type": "Jedi"},
"b4": {"colour": "Yellow", "crystal": "Kimber stone", "type": "Jedi"},
"b5": {"colour": "White", "crystal": "Dragite gem", "type": "Jedi"},
"b6": {"colour": "Purple", "crystal": "Krayt dragon pearl", "type": "Jedi"},
"b7": {"colour": "Blue/Green", "crystal": "Dantari crystal", "type": "Jedi"},
"b8": {
"colour": "Orange",
"crystal": ["Ilum crystal", "Ultima Pearl"],
"type": "Sith",
},
"b9": {
"colour": "Black",
"crystal": "Obsidian",
"type": ["Jedi", "Mandalorian"],
},
},
"pommel": {
"p1": {"length": 5,},
"p2": {"length": 14,},
"p3": {"length": 3,},
"p4": {"length": 8,},
"p5": {"length": 5,},
"p6": {"length": 5,},
"p7": {"length": 8,},
},
# These are lightsabers for a specific Jedi or Sith. Should use their name instead of
"unique_urls": {""},
}
| 37.313043
| 89
| 0.381496
| 384
| 4,291
| 4.260417
| 0.346354
| 0.017115
| 0.09291
| 0.038509
| 0.42176
| 0.343521
| 0.253056
| 0.229829
| 0.137531
| 0.137531
| 0
| 0.154055
| 0.399441
| 4,291
| 114
| 90
| 37.640351
| 0.480792
| 0.058495
| 0
| 0.292035
| 0
| 0
| 0.296462
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
76f8632c56e75a6a31f710898b1568e855cfd849
| 9,238
|
py
|
Python
|
apps/interactor/tests/commander/commands/test_animations.py
|
Djelibeybi/photons
|
bc0aa91771d8e88fd3c691fb58f18cb876f292ec
|
[
"MIT"
] | 51
|
2020-07-03T08:34:48.000Z
|
2022-03-16T10:56:08.000Z
|
apps/interactor/tests/commander/commands/test_animations.py
|
delfick/photons
|
bc0aa91771d8e88fd3c691fb58f18cb876f292ec
|
[
"MIT"
] | 81
|
2020-07-03T08:13:59.000Z
|
2022-03-31T23:02:54.000Z
|
apps/interactor/tests/commander/commands/test_animations.py
|
Djelibeybi/photons
|
bc0aa91771d8e88fd3c691fb58f18cb876f292ec
|
[
"MIT"
] | 8
|
2020-07-24T23:48:20.000Z
|
2021-05-24T17:20:16.000Z
|
# coding: spec
from interactor.commander.store import store, load_commands
from photons_app.mimic.event import Events
from photons_app import helpers as hp
from photons_canvas.points.simple_messages import Set64
from unittest import mock
import pytest
@pytest.fixture()
def store_clone():
load_commands()
return store.clone()
@pytest.fixture()
def final_future():
fut = hp.create_future()
try:
yield fut
finally:
fut.cancel()
@pytest.fixture()
async def sender(devices, final_future):
async with devices.for_test(final_future) as sender:
yield sender
@pytest.fixture()
async def make_server(store_clone, server_wrapper, FakeTime, MockedCallLater, sender, final_future):
with FakeTime() as t:
async with MockedCallLater(t) as m:
async with server_wrapper(store_clone, sender, final_future) as server:
yield server, m
@pytest.fixture()
def server(make_server):
return make_server[0]
@pytest.fixture()
def m(make_server):
return make_server[1]
@pytest.fixture(autouse=True)
def set_async_timeout(request):
request.applymarker(pytest.mark.async_timeout(15))
describe "Animation Commands":
async it "can get info and help", server, m:
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/info"},
json_output={"animations": {}, "paused": []},
)
got = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/help"},
)
assert b"Available animations include" in got
assert b"* dice" in got
assert b"To see options for a particular animation, run this again" in got
got = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/help", "args": {"animation_name": "dice"}},
)
assert b"dice animation" in got
assert b"This animation has the following options:" in got
assert b"colour range options" in got
async it "can control an animation", server, m:
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/info"},
json_output={"animations": {}, "paused": []},
)
identity = "first"
got = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/start", "args": {"identity": identity}},
)
assert "animations" in got
assert got["animations"] == [identity]
assert got["started"] == identity
identity2 = "second"
got = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/start", "args": {"identity": identity2}},
)
assert "animations" in got
identities = [identity, identity2]
assert got["animations"] == identities
assert got["started"] == identity2
info = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/info"},
)
assert info == {"animations": {identity: mock.ANY, identity2: mock.ANY}, "paused": []}
# pause
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/pause", "args": {"pause": identity}},
json_output={"animations": identities, "paused": [identity], "pausing": [identity]},
)
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/pause", "args": {"pause": identity2}},
json_output={
"animations": identities,
"paused": identities,
"pausing": [identity2],
},
)
# resume
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/resume", "args": {"resume": identity2}},
json_output={
"animations": identities,
"paused": [identity],
"resuming": [identity2],
},
)
# pause multiple
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/pause", "args": {"pause": identities}},
json_output={"animations": identities, "paused": identities, "pausing": identities},
)
# resume
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/resume", "args": {"resume": identities}},
json_output={
"animations": identities,
"paused": [],
"resuming": identities,
},
)
# pause
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/pause", "args": {"pause": identity}},
json_output={"animations": identities, "paused": [identity], "pausing": [identity]},
)
# info
info = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/info"},
)
assert info["animations"] == {identity: mock.ANY, identity2: mock.ANY}
assert info["paused"] == [identity]
# stop
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/stop", "args": {"stop": identity}},
json_output={
"animations": [identity, identity2],
"paused": [identity],
"stopping": [identity],
},
)
await m.add(0.5)
# info
info = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/info"},
)
assert info["animations"] == {identity2: mock.ANY}
assert info["paused"] == []
async it "pausing an animation actually pauses the animation", devices, server, m:
tile = devices["tile"]
io = tile.io["MEMORY"]
store = devices.store(tile)
store.clear()
first_set_64 = tile.attrs.event_waiter.wait_for_incoming(io, Set64)
# start
got = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/start", "args": {"animations": [["balls", {"every": 3}]]}},
)
identity = got["started"]
await first_set_64
now = store.count(Events.INCOMING(tile, io, pkt=Set64))
assert now > 0
await m.add(5)
now2 = store.count(Events.INCOMING(tile, io, pkt=Set64))
assert now2 > now
identity = got["started"]
await m.add(5)
assert store.count(Events.INCOMING(tile, io, pkt=Set64)) > now
# pause
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/pause", "args": {"pause": [identity]}},
)
await m.add(5)
store.clear()
await m.add(5)
assert store.count(Events.INCOMING(tile, io, pkt=Set64)) == 0
# resume
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/resume", "args": {"resume": [identity]}},
)
await m.add(5)
assert store.count(Events.INCOMING(tile, io, pkt=Set64)) > 0
# stop
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/stop", "args": {"stop": [identity]}},
)
store.clear()
await m.add(5)
store.clear()
await m.add(5)
assert store.count(Events.INCOMING(tile, io, pkt=Set64)) == 0
# info
await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/info"},
json_output={"animations": {}, "paused": []},
)
async it "can get information", server, m:
# start
got = await server.assertCommand(
"/v1/lifx/command",
{"command": "animation/start", "args": {"animations": [["balls", {"every": 0.3}]]}},
)
identity = got["started"]
info = await server.assertCommand("/v1/lifx/command", {"command": "animation/info"})
assert info["paused"] == []
assert identity in info["animations"]
assert info["animations"][identity]["animations_ran"] == 1
assert info["animations"][identity]["current_animation"] == {
"name": "balls",
"options": {
"ball_colors": "<ManyColor:[((0, 360), (1000.0, 1000.0), (1000.0, 1000.0), (3500.0, 3500.0))]>",
"fade_amount": 0.02,
"num_balls": 5,
"rate": "<Rate 0.9 -> 1>",
},
"started": mock.ANY,
}
assert info["animations"][identity]["options"]["combined"]
assert "unlocked" in info["animations"][identity]["options"]["pauser"]
assert info["animations"][identity]["options"]["noisy_network"] == 0
specific = await server.assertCommand(
"/v1/lifx/command", {"command": "animation/info", "args": {"identity": identity}}
)
info["animations"][identity]["current_animation"]["started"] = mock.ANY
assert info["animations"][identity] == specific
| 31.209459
| 112
| 0.544923
| 904
| 9,238
| 5.512168
| 0.173673
| 0.05298
| 0.115593
| 0.125226
| 0.589203
| 0.553482
| 0.514148
| 0.476018
| 0.476018
| 0.44692
| 0
| 0.018894
| 0.306776
| 9,238
| 295
| 113
| 31.315254
| 0.759213
| 0.01115
| 0
| 0.408889
| 0
| 0.004444
| 0.230508
| 0
| 0
| 0
| 0
| 0
| 0.244444
| 0
| null | null | 0
| 0.026667
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
76f93238491c8f0f67d7813df6d0b4a6c7ed0a80
| 245
|
py
|
Python
|
.ipython/profile_pytube/startup/init.py
|
showa-yojyo/dotfiles
|
994cc7df0643d69f62cb59550bdd48a42751c345
|
[
"MIT"
] | null | null | null |
.ipython/profile_pytube/startup/init.py
|
showa-yojyo/dotfiles
|
994cc7df0643d69f62cb59550bdd48a42751c345
|
[
"MIT"
] | 3
|
2018-03-27T14:10:18.000Z
|
2018-03-30T14:06:11.000Z
|
.ipython/profile_pytube/startup/init.py
|
showa-yojyo/dotfiles
|
994cc7df0643d69f62cb59550bdd48a42751c345
|
[
"MIT"
] | null | null | null |
from pytube import YouTube
def download_video(watch_url):
yt = YouTube(watch_url)
(yt.streams
.filter(progressive=True, file_extension='mp4')
.order_by('resolution')
.desc()
.first()
.download())
| 22.272727
| 55
| 0.608163
| 27
| 245
| 5.333333
| 0.814815
| 0.111111
| 0.138889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005556
| 0.265306
| 245
| 10
| 56
| 24.5
| 0.794444
| 0
| 0
| 0
| 0
| 0
| 0.053061
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.111111
| false
| 0
| 0.111111
| 0
| 0.222222
| 0
| 1
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a017ba6441979fea8dcb4bd6912e6e472b2970d
| 456
|
py
|
Python
|
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | 1
|
2018-12-07T09:15:57.000Z
|
2018-12-07T09:15:57.000Z
|
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | null | null | null |
brokenChains/migrations/0003_auto_20181106_1819.py
|
bunya017/brokenChains
|
3e20c834efd7f0ade8e3abe7acf547c093f76758
|
[
"MIT"
] | null | null | null |
# Generated by Django 2.1.1 on 2018-11-06 17:19
from django.conf import settings
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('brokenChains', '0002_auto_20181106_1723'),
]
operations = [
migrations.AlterUniqueTogether(
name='habit',
unique_together={('owner', 'name')},
),
]
| 22.8
| 66
| 0.64693
| 48
| 456
| 6
| 0.770833
| 0.069444
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.089855
| 0.243421
| 456
| 19
| 67
| 24
| 0.744928
| 0.098684
| 0
| 0
| 1
| 0
| 0.119804
| 0.056235
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.153846
| 0
| 0.384615
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a03afbc022ab3ed1e3b4074455a3f3fdefc3a2e
| 1,189
|
py
|
Python
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 50
|
2019-04-04T17:50:00.000Z
|
2021-08-05T15:08:37.000Z
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nickmoreton/nhsx-website
|
2397d1308376c02b75323d30e6bc916af0daac9d
|
[
"MIT"
] | 434
|
2019-04-04T18:25:32.000Z
|
2022-03-31T18:23:37.000Z
|
app/modules/ai_lab/migrations/0003_ailabcasestudy.py
|
nhsx-mirror/nhsx-website
|
2133b4e275ca35ff77f7d6874e809f139ec4bf86
|
[
"MIT"
] | 23
|
2019-04-04T09:52:07.000Z
|
2021-04-11T07:41:47.000Z
|
# Generated by Django 3.0.4 on 2020-07-14 11:00
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
("core", "0026_auto_20200713_1535"),
("ai_lab", "0002_ailabusecase"),
]
operations = [
migrations.CreateModel(
name="AiLabCaseStudy",
fields=[
(
"articlepage_ptr",
models.OneToOneField(
auto_created=True,
on_delete=django.db.models.deletion.CASCADE,
parent_link=True,
primary_key=True,
serialize=False,
to="core.ArticlePage",
),
),
(
"use_case",
models.ForeignKey(
on_delete=django.db.models.deletion.PROTECT,
to="ai_lab.AiLabUseCase",
),
),
],
options={"abstract": False,},
bases=("core.articlepage", models.Model),
),
]
| 29
| 68
| 0.444071
| 91
| 1,189
| 5.659341
| 0.626374
| 0.062136
| 0.081553
| 0.128155
| 0.116505
| 0.116505
| 0
| 0
| 0
| 0
| 0
| 0.054348
| 0.458368
| 1,189
| 40
| 69
| 29.725
| 0.745342
| 0.037847
| 0
| 0.147059
| 1
| 0
| 0.127846
| 0.02014
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.058824
| 0
| 0.147059
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a03cda07d112635217a5bbdc7ec5274c0658a7a
| 3,258
|
py
|
Python
|
requests/UpdateWorkbookConnectionRequest.py
|
divinorum-webb/python-tableau-api
|
9d3f130d63b15307ad2b23e2273b52790b8d9018
|
[
"Apache-2.0"
] | 1
|
2019-06-08T22:19:40.000Z
|
2019-06-08T22:19:40.000Z
|
requests/UpdateWorkbookConnectionRequest.py
|
divinorum-webb/python-tableau-api
|
9d3f130d63b15307ad2b23e2273b52790b8d9018
|
[
"Apache-2.0"
] | null | null | null |
requests/UpdateWorkbookConnectionRequest.py
|
divinorum-webb/python-tableau-api
|
9d3f130d63b15307ad2b23e2273b52790b8d9018
|
[
"Apache-2.0"
] | null | null | null |
from .BaseRequest import BaseRequest
class UpdateWorkbookConnectionRequest(BaseRequest):
"""
Update workbook connection request for sending API requests to Tableau Server.
:param ts_connection: The Tableau Server connection object.
:type ts_connection: class
:param server_address: The new server for the connection.
:type server_address: string
:param port: The new port for the connection.
:type port: string
:param connection_username: The new username for the connection.
:type connection_username: string
:param connection_password: The new password for the connection.
:type connection_password: string
:param embed_password_flag: Boolean; True to embed the password in the connection, False otherwise.
:type embed_password_flag: boolean
"""
def __init__(self,
ts_connection,
server_address=None,
port=None,
connection_username=None,
connection_password=None,
embed_password_flag=None):
super().__init__(ts_connection)
self._server_address = server_address
self._port = port
self._connection_username = connection_username
self._connection_password = connection_password
self._embed_password_flag = embed_password_flag
self.base_update_workbook_connection_request
@property
def optional_parameter_keys(self):
return [
'serverAddress',
'serverPort',
'userName',
'password',
'embedPassword'
]
@property
def optional_parameter_values_exist(self):
return [
self._server_address,
self._port,
self._connection_username,
self._connection_password,
True if self._embed_password_flag is not None else None
]
@property
def optional_parameter_values(self):
return [
self._server_address,
self._port,
self._connection_username,
self._connection_password,
self._embed_password_flag
]
@property
def base_update_workbook_connection_request(self):
self._request_body.update({'connection': {}})
return self._request_body
@property
def modified_update_workbook_connection_request(self):
if any(self.optional_parameter_values_exist):
self._request_body['connection'].update(
self._get_parameters_dict(self.optional_parameter_keys,
self.optional_parameter_values))
return self._request_body
@staticmethod
def _get_parameters_dict(param_keys, param_values):
"""Override the inherited _get_parameters_dict() method to allow passing boolean values directly"""
params_dict = {}
for i, key in enumerate(param_keys):
if param_values[i] is not None:
params_dict.update({key: param_values[i]})
return params_dict
def get_request(self):
return self.modified_update_workbook_connection_request
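# Usage sketch (hedged): `ts_connection` stands in for whatever Tableau Server connection
# object the surrounding library expects; only the parameters that are not None end up in
# the request body.
#
#     request = UpdateWorkbookConnectionRequest(ts_connection,
#                                               server_address='tableau.example.com',
#                                               embed_password_flag=False)
#     body = request.get_request()
#     # -> {'connection': {'serverAddress': 'tableau.example.com', 'embedPassword': False}}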
| 36.2
| 107
| 0.634131
| 331
| 3,258
| 5.885196
| 0.214502
| 0.046715
| 0.061088
| 0.079569
| 0.297228
| 0.11191
| 0.081109
| 0.081109
| 0.081109
| 0.081109
| 0
| 0
| 0.305402
| 3,258
| 89
| 108
| 36.606742
| 0.860804
| 0.261203
| 0
| 0.285714
| 0
| 0
| 0.030796
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.126984
| false
| 0.15873
| 0.015873
| 0.063492
| 0.269841
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a0e5c306cd6cb5140e3d9096d9aec435b5e905a
| 637
|
py
|
Python
|
src/plat/index_news_remove.py
|
jack139/cnnc
|
c32611ec01af50bedb67dcd4c8a28e4b0c7a9aef
|
[
"BSD-2-Clause"
] | null | null | null |
src/plat/index_news_remove.py
|
jack139/cnnc
|
c32611ec01af50bedb67dcd4c8a28e4b0c7a9aef
|
[
"BSD-2-Clause"
] | null | null | null |
src/plat/index_news_remove.py
|
jack139/cnnc
|
c32611ec01af50bedb67dcd4c8a28e4b0c7a9aef
|
[
"BSD-2-Clause"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
import web
import time
from bson.objectid import ObjectId
from config import setting
import helper
db = setting.db_web
# Delete a chat rule
url = ('/plat/index_news_remove')
class handler:
def GET(self):
if not helper.logged(helper.PRIV_USER, 'TALKBOT'):
raise web.seeother('/')
render = helper.create_render()
user_data = web.input(news_id='')
if user_data.news_id == '':
return render.info('参数错误!')
db.index_news.delete_one({'_id':ObjectId(user_data.news_id)})
return render.info('成功删除!', '/plat/index_news')
| 19.90625
| 69
| 0.629513
| 85
| 637
| 4.541176
| 0.552941
| 0.069948
| 0.067358
| 0.072539
| 0.15544
| 0.15544
| 0.15544
| 0
| 0
| 0
| 0
| 0.002037
| 0.229199
| 637
| 31
| 70
| 20.548387
| 0.784114
| 0.078493
| 0
| 0
| 0
| 0
| 0.103093
| 0.039519
| 0
| 0
| 0
| 0
| 0
| 1
| 0.058824
| false
| 0
| 0.294118
| 0
| 0.529412
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
0a114ea68c2fa1e2738f0d3ff99019e72e2ea941
| 1,074
|
py
|
Python
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 2
|
2020-12-05T05:34:56.000Z
|
2020-12-09T10:27:43.000Z
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 3
|
2021-06-28T16:47:23.000Z
|
2021-06-28T16:48:51.000Z
|
sitewebapp/migrations/0011_auto_20210130_0150.py
|
deucaleon18/debsoc-nitdgp-website
|
41bd6ade7f4af143ef34aff01848f830cc533add
|
[
"MIT"
] | 9
|
2021-01-29T17:06:30.000Z
|
2021-08-21T18:23:26.000Z
|
# Generated by Django 2.2.15 on 2021-01-29 20:20
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('sitewebapp', '0010_auditionanswers_auditionquestions_audtionrounds_candidates'),
]
operations = [
migrations.CreateModel(
name='auditionRounds',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('roundno', models.IntegerField(default=1)),
('candidate', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='inductees', to='sitewebapp.Candidates')),
],
),
migrations.AlterField(
model_name='auditionquestions',
name='round',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='round', to='sitewebapp.auditionRounds'),
),
migrations.DeleteModel(
name='audtionRounds',
),
]
| 34.645161
| 148
| 0.634078
| 102
| 1,074
| 6.558824
| 0.54902
| 0.047833
| 0.06278
| 0.098655
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0.19133
| 0
| 0.025862
| 0.243948
| 1,074
| 30
| 149
| 35.8
| 0.79803
| 0.042831
| 0
| 0.125
| 1
| 0
| 0.196881
| 0.106238
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.208333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a1a359a4636f368d0f28057e4bf1af274c7fb79
| 3,332
|
py
|
Python
|
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | 5
|
2019-07-31T04:11:05.000Z
|
2021-01-07T03:23:20.000Z
|
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
influxdb_service_sdk/model/container/resource_requirements_pb2.py
|
easyopsapis/easyops-api-python
|
adf6e3bad33fa6266b5fa0a449dd4ac42f8447d0
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: resource_requirements.proto
import sys
_b=sys.version_info[0]<3 and (lambda x:x) or (lambda x:x.encode('latin1'))
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from influxdb_service_sdk.model.container import resource_list_pb2 as influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='resource_requirements.proto',
package='container',
syntax='proto3',
serialized_options=_b('ZCgo.easyops.local/contracts/protorepo-models/easyops/model/container'),
serialized_pb=_b('\n\x1bresource_requirements.proto\x12\tcontainer\x1a\x38influxdb_service_sdk/model/container/resource_list.proto\"j\n\x14ResourceRequirements\x12\'\n\x06limits\x18\x01 \x01(\x0b\x32\x17.container.ResourceList\x12)\n\x08requests\x18\x02 \x01(\x0b\x32\x17.container.ResourceListBEZCgo.easyops.local/contracts/protorepo-models/easyops/model/containerb\x06proto3')
,
dependencies=[influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2.DESCRIPTOR,])
_RESOURCEREQUIREMENTS = _descriptor.Descriptor(
name='ResourceRequirements',
full_name='container.ResourceRequirements',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='limits', full_name='container.ResourceRequirements.limits', index=0,
number=1, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='requests', full_name='container.ResourceRequirements.requests', index=1,
number=2, type=11, cpp_type=10, label=1,
has_default_value=False, default_value=None,
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=100,
serialized_end=206,
)
_RESOURCEREQUIREMENTS.fields_by_name['limits'].message_type = influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2._RESOURCELIST
_RESOURCEREQUIREMENTS.fields_by_name['requests'].message_type = influxdb__service__sdk_dot_model_dot_container_dot_resource__list__pb2._RESOURCELIST
DESCRIPTOR.message_types_by_name['ResourceRequirements'] = _RESOURCEREQUIREMENTS
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
ResourceRequirements = _reflection.GeneratedProtocolMessageType('ResourceRequirements', (_message.Message,), {
'DESCRIPTOR' : _RESOURCEREQUIREMENTS,
'__module__' : 'resource_requirements_pb2'
# @@protoc_insertion_point(class_scope:container.ResourceRequirements)
})
_sym_db.RegisterMessage(ResourceRequirements)
DESCRIPTOR._options = None
# @@protoc_insertion_point(module_scope)
| 40.144578
| 380
| 0.801921
| 396
| 3,332
| 6.358586
| 0.325758
| 0.02224
| 0.035743
| 0.038126
| 0.305798
| 0.289118
| 0.289118
| 0.250993
| 0.250993
| 0.250993
| 0
| 0.024228
| 0.095738
| 3,332
| 82
| 381
| 40.634146
| 0.811484
| 0.076831
| 0
| 0.258065
| 1
| 0.016129
| 0.165526
| 0.121538
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.096774
| 0
| 0.096774
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a1cc533cda21da8b86ba8309652b8179ef12637
| 1,371
|
py
|
Python
|
Episode11-Menu/Pygame/explosion.py
|
Inksaver/Shmup_With_Pygame_Love2D_Monogame
|
84838516d9dd9d6639b1b699dca546bfdfec73dc
|
[
"CC0-1.0"
] | 1
|
2022-02-01T04:05:04.000Z
|
2022-02-01T04:05:04.000Z
|
Episode11-Menu/Pygame/explosion.py
|
Inksaver/Shmup_With_Pygame_Love2D_Monogame
|
84838516d9dd9d6639b1b699dca546bfdfec73dc
|
[
"CC0-1.0"
] | null | null | null |
Episode11-Menu/Pygame/explosion.py
|
Inksaver/Shmup_With_Pygame_Love2D_Monogame
|
84838516d9dd9d6639b1b699dca546bfdfec73dc
|
[
"CC0-1.0"
] | null | null | null |
import pygame
import shared
class Explosion():
def __init__(self, images:list, centre:tuple, key:str) -> None:
''' Class variables. key: 'sm', 'lg', 'player '''
self.images = images # list of 8 images
self.centre = centre # use for all frames
self.key = key # key used later
self.image = images[key][0] # set to first image in the sequence
self.rect = self.image.get_rect() # define rectangle from image size
self.rect.center = self.centre # set centre for all frames
self.frame = 0 # no of first frame
self.time_passed = 0 # set timer to 0
self.frame_rate = 0.1 # 8 images played at 1 frame per 0.1 secs = 0.8 seconds
self.active = True
def update(self, dt):
self.time_passed += dt
if self.time_passed >= self.frame_rate: # 0.1 seconds has passed
self.time_passed = 0 # reset timer
self.frame += 1 # increase frame number
if self.frame >= len(self.images[self.key]): # check if end of list?
self.active = False # animation finished
else:
self.image = self.images[self.key][self.frame] # next frame
self.rect = self.image.get_rect() # new rectangle
self.rect.center = self.centre # set centre to parameter value
return self.active
def draw(self):
shared.screen.blit(self.image, self.rect) # draw current frame
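# Usage sketch (assumptions: `images` is the dict of pre-loaded pygame surfaces keyed by
# 'sm'/'lg'/'player' that the rest of the game builds, and `dt` is the frame time in
# seconds from the main loop):
#
#     boom = Explosion(images, centre=(100, 100), key='sm')
#     ...
#     if boom.update(dt):   # returns False once the 8-frame sequence has finished
#         boom.draw()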
| 41.545455
| 84
| 0.644055
| 206
| 1,371
| 4.228155
| 0.383495
| 0.061998
| 0.064294
| 0.036739
| 0.165327
| 0.130884
| 0.075775
| 0
| 0
| 0
| 0
| 0.016569
| 0.251641
| 1,371
| 33
| 85
| 41.545455
| 0.832359
| 0.326039
| 0
| 0.214286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.107143
| false
| 0.142857
| 0.071429
| 0
| 0.25
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a21ba878c2e6396a56688811ff51897970088c4
| 3,361
|
py
|
Python
|
tinc/tests/parameter_space_test.py
|
AlloSphere-Research-Group/tinc-python
|
4c3390df9911a391833244de1eb1d33a2e19d330
|
[
"BSD-3-Clause"
] | 1
|
2020-11-23T22:42:50.000Z
|
2020-11-23T22:42:50.000Z
|
tinc/tests/parameter_space_test.py
|
AlloSphere-Research-Group/tinc-python
|
4c3390df9911a391833244de1eb1d33a2e19d330
|
[
"BSD-3-Clause"
] | null | null | null |
tinc/tests/parameter_space_test.py
|
AlloSphere-Research-Group/tinc-python
|
4c3390df9911a391833244de1eb1d33a2e19d330
|
[
"BSD-3-Clause"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created on Mon Jun 14 11:49:43 2021
@author: Andres
"""
import sys,time
import unittest
from tinc import *
class ParameterSpaceTest(unittest.TestCase):
def test_parameter(self):
p1 = Parameter("param1")
p2 = Parameter("param2")
ps = ParameterSpace("ps")
ps.register_parameters([p1, p2])
def test_process(self):
p1 = Parameter("param1")
p1.values = [0, 1,2,3,4]
p2 = Parameter("param2")
p2.values = [-0.3,-0.2, -0.1, 0]
ps = ParameterSpace("ps")
ps.register_parameters([p1, p2])
def func(param1, param2):
return param1 * param2
result = ps.run_process(func)
self.assertAlmostEqual(result, p1.value * p2.value)
p1.value = 3
p2.value = -0.1
result = ps.run_process(func)
self.assertAlmostEqual(result, p1.value * p2.value)
p1.value = 3
p2.value = -0.1
def test_sweep_cache(self):
p1 = Parameter("param1")
p1.values = [0, 1,2,3,4]
p2 = Parameter("param2")
p2.values = [-0.3,-0.2, -0.1, 0]
ps = ParameterSpace("ps")
ps.register_parameters([p1, p2])
ps.enable_cache("ps_test")
def func(param1, param2):
return param1 * param2
ps.sweep(func)
def test_data_directories(self):
dim1 = Parameter("dim1")
dim1.values = [0.1,0.2,0.3,0.4, 0.5]
dim2 = Parameter("dim2")
dim2.set_space_representation_type(parameter_space_representation_types.INDEX)
dim2.values = [0.1,0.2,0.3,0.4, 0.5]
dim3 = Parameter("dim3")
dim3.set_space_representation_type(parameter_space_representation_types.ID)
dim2.values = [0.1,0.2,0.3,0.4, 0.5]
ps = ParameterSpace("ps")
ps.register_parameters([dim1, dim2, dim3])
ps.set_current_path_template("file_%%dim1%%_%%dim2:INDEX%%")
dim1.value=0.2
dim2.value=0.2
self.assertEqual(ps.get_current_relative_path(), 'file_0.2_1')
# TODO ML complete tests see C++ tests for parameter space
def test_common_id(self):
dim1 = Parameter("dim1")
dim1.values = [0.1, 0.1, 0.2, 0.2, 0.3, 0.3]
dim1.ids = ["0.1_1" ,"0.1_2","0.2_1" ,"0.2_2", "0.3_1" ,"0.3_2"]
dim2 = Parameter("dim2")
dim2.set_space_representation_type(parameter_space_representation_types.INDEX)
dim2.values = [1,1,1,2,2,2]
dim2.ids = ["0.1_1", "0.2_1", "0.3_1", "0.1_2", "0.2_2", "0.3_2"]
ps = ParameterSpace("ps")
ps.register_parameters([dim1, dim2])
dim1.value = 0.1
dim2.value = 1
self.assertEqual(ps.get_common_id([dim1, dim2]), "0.1_1")
dim1.value = 0.2
dim2.value = 1
self.assertEqual(ps.get_common_id([dim1, dim2]), "0.2_1")
dim1.value = 0.1
dim2.value = 2
self.assertEqual(ps.get_common_id([dim1, dim2]), "0.1_2")
dim1.value = 0.2
dim2.value = 2
self.assertEqual(ps.get_common_id([dim1, dim2]), "0.2_2")
dim1.value = 0.3
dim2.value = 2
self.assertEqual(ps.get_common_id([dim1, dim2]), "0.3_2")
if __name__ == '__main__':
unittest.main()
| 28.974138
| 86
| 0.555489
| 476
| 3,361
| 3.754202
| 0.17437
| 0.021265
| 0.011752
| 0.067152
| 0.730274
| 0.702854
| 0.660325
| 0.620593
| 0.536094
| 0.476217
| 0
| 0.105818
| 0.294258
| 3,361
| 115
| 87
| 29.226087
| 0.647555
| 0.039274
| 0
| 0.607595
| 0
| 0
| 0.063354
| 0.008696
| 0
| 0
| 0
| 0.008696
| 0.101266
| 1
| 0.088608
| false
| 0
| 0.037975
| 0.025316
| 0.164557
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a2ad964a50ee086e447a623b3863c7fbb9ef26a
| 1,977
|
py
|
Python
|
src/com/python/email/send_mail.py
|
Leeo1124/pythonDemo
|
72e2209c095301a3f1f61edfe03ea69c3c05be40
|
[
"Apache-2.0"
] | null | null | null |
src/com/python/email/send_mail.py
|
Leeo1124/pythonDemo
|
72e2209c095301a3f1f61edfe03ea69c3c05be40
|
[
"Apache-2.0"
] | null | null | null |
src/com/python/email/send_mail.py
|
Leeo1124/pythonDemo
|
72e2209c095301a3f1f61edfe03ea69c3c05be40
|
[
"Apache-2.0"
] | null | null | null |
'''
Created on 2016-08-10
@author: Administrator
'''
from email import encoders
from email.header import Header
from email.mime.text import MIMEText
from email.mime.multipart import MIMEMultipart
from email.mime.multipart import MIMEBase
from email.utils import parseaddr, formataddr
import smtplib
def _format_addr(s):
name, addr = parseaddr(s)
return formataddr((Header(name, 'utf-8').encode(), addr))
from_addr = '[email protected]'#input('From: ')
password = input('Password: ')
to_addr = '[email protected]'#input('To: ')
smtp_server = 'smtp.163.com'#input('SMTP server: ')
# Send a plain-text email
# msg = MIMEText('hello, send by Python...', 'plain', 'utf-8')
# Send an HTML email
# msg = MIMEText('<html><body><h1>Hello</h1>' +
# '<p>send by <a href="http://www.python.org">Python</a>...</p>' +
# '</body></html>', 'html', 'utf-8')
# Send an email with an attachment
# The email message object:
msg = MIMEMultipart()
msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr)
msg['To'] = _format_addr('管理员 <%s>' % to_addr)
msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode()
# The message body is a MIMEText:
msg.attach(MIMEText('send with file...', 'plain', 'utf-8'))
# Attaching a file means adding a MIMEBase part; read an image from local disk:
with open('D:/pythonWorkspace/pthonDemo/src/com/python/email/test.jpg', 'rb') as f:
# Set the attachment's MIME type and filename (png here):
mime = MIMEBase('image', 'png', filename='test.png')
# Add the required headers:
mime.add_header('Content-Disposition', 'attachment', filename='test.png')
mime.add_header('Content-ID', '<0>')
mime.add_header('X-Attachment-Id', '0')
# Read in the attachment content:
mime.set_payload(f.read())
# Encode it with Base64:
encoders.encode_base64(mime)
# Attach it to the MIMEMultipart message:
msg.attach(mime)
msg['From'] = _format_addr('Python爱好者 <%s>' % from_addr)
msg['To'] = _format_addr('管理员 <%s>' % to_addr)
msg['Subject'] = Header('来自SMTP的问候……', 'utf-8').encode()
server = smtplib.SMTP(smtp_server, 25)
server.set_debuglevel(1)
server.login(from_addr, password)
server.sendmail(from_addr, [to_addr], msg.as_string())
server.quit()
| 29.073529
| 83
| 0.676277
| 267
| 1,977
| 4.947566
| 0.397004
| 0.040878
| 0.029523
| 0.033308
| 0.195307
| 0.152914
| 0.152914
| 0.152914
| 0.152914
| 0.152914
| 0
| 0.025015
| 0.130501
| 1,977
| 68
| 84
| 29.073529
| 0.736475
| 0.233687
| 0
| 0.171429
| 0
| 0
| 0.221328
| 0.0389
| 0
| 0
| 0
| 0
| 0
| 1
| 0.028571
| false
| 0.057143
| 0.2
| 0
| 0.257143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a36ce830d4011a6336f73093bb61b54abdb2cbd
| 7,782
|
py
|
Python
|
pypy/interpreter/test/test_generator.py
|
m4sterchain/mesapy
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
[
"Apache-2.0",
"OpenSSL"
] | 381
|
2018-08-18T03:37:22.000Z
|
2022-02-06T23:57:36.000Z
|
pypy/interpreter/test/test_generator.py
|
m4sterchain/mesapy
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
[
"Apache-2.0",
"OpenSSL"
] | 16
|
2018-09-22T18:12:47.000Z
|
2022-02-22T20:03:59.000Z
|
pypy/interpreter/test/test_generator.py
|
m4sterchain/mesapy
|
ed546d59a21b36feb93e2309d5c6b75aa0ad95c9
|
[
"Apache-2.0",
"OpenSSL"
] | 30
|
2018-08-20T03:16:34.000Z
|
2022-01-12T17:39:22.000Z
|
class AppTestGenerator:
def test_generator(self):
def f():
yield 1
assert f().next() == 1
def test_generator2(self):
def f():
yield 1
g = f()
assert g.next() == 1
raises(StopIteration, g.next)
def test_attributes(self):
def f():
yield 1
assert g.gi_running
g = f()
assert g.gi_code is f.__code__
assert g.__name__ == 'f'
assert g.gi_frame is not None
assert not g.gi_running
g.next()
assert not g.gi_running
raises(StopIteration, g.next)
assert not g.gi_running
assert g.gi_frame is None
assert g.gi_code is f.__code__
assert g.__name__ == 'f'
def test_generator3(self):
def f():
yield 1
g = f()
assert list(g) == [1]
def test_generator4(self):
def f():
yield 1
g = f()
assert [x for x in g] == [1]
def test_generator5(self):
d = {}
exec """if 1:
def f():
v = (yield )
yield v
g = f()
g.next()
""" in d
g = d['g']
assert g.send(42) == 42
def test_throw1(self):
def f():
yield 2
g = f()
# two arguments version
raises(NameError, g.throw, NameError, "Error")
def test_throw2(self):
def f():
yield 2
g = f()
# single argument version
raises(NameError, g.throw, NameError("Error"))
def test_throw3(self):
def f():
try:
yield 1
yield 2
except:
yield 3
g = f()
assert g.next() == 1
assert g.throw(NameError("Error")) == 3
raises(StopIteration, g.next)
def test_throw4(self):
d = {}
exec """if 1:
def f():
try:
yield 1
v = (yield 2)
except:
yield 3
g = f()
""" in d
g = d['g']
assert g.next() == 1
assert g.next() == 2
assert g.throw(NameError("Error")) == 3
raises(StopIteration, g.next)
def test_throw5(self):
def f():
try:
yield 1
except:
x = 3
try:
yield x
except:
pass
g = f()
g.next()
# String exceptions are not allowed anymore
raises(TypeError, g.throw, "Error")
assert g.throw(Exception) == 3
raises(StopIteration, g.throw, Exception)
def test_throw6(self):
def f():
yield 2
g = f()
raises(NameError, g.throw, NameError, "Error", None)
def test_throw_fail(self):
def f():
yield 1
g = f()
raises(TypeError, g.throw, NameError("Error"), "error")
def test_throw_fail2(self):
def f():
yield 1
g = f()
raises(TypeError, g.throw, list())
def test_throw_fail3(self):
def f():
yield 1
g = f()
raises(TypeError, g.throw, NameError("Error"), None, "not tb object")
def test_throw_finishes_generator(self):
def f():
yield 1
g = f()
assert g.gi_frame is not None
raises(ValueError, g.throw, ValueError)
assert g.gi_frame is None
def test_throw_bug(self):
def f():
try:
x.throw(IndexError) # => "generator already executing"
except ValueError:
yield 1
x = f()
res = list(x)
assert res == [1]
def test_throw_on_finished_generator(self):
def f():
yield 1
g = f()
res = g.next()
assert res == 1
raises(StopIteration, g.next)
raises(NameError, g.throw, NameError)
def test_close(self):
def f():
yield 1
g = f()
assert g.close() is None
def test_close2(self):
def f():
try:
yield 1
except GeneratorExit:
raise StopIteration
g = f()
g.next()
assert g.close() is None
def test_close3(self):
def f():
try:
yield 1
except GeneratorExit:
raise NameError
g = f()
g.next()
raises(NameError, g.close)
def test_close_fail(self):
def f():
try:
yield 1
except GeneratorExit:
yield 2
g = f()
g.next()
raises(RuntimeError, g.close)
def test_close_on_collect(self):
## we need to exec it, else it won't run on python2.4
d = {}
exec """
def f():
try:
yield
finally:
f.x = 42
""".strip() in d
g = d['f']()
g.next()
del g
import gc
gc.collect()
assert d['f'].x == 42
def test_generator_raises_typeerror(self):
def f():
yield 1
g = f()
raises(TypeError, g.send) # one argument required
raises(TypeError, g.send, 1) # not started, must send None
def test_generator_explicit_stopiteration(self):
def f():
yield 1
raise StopIteration
g = f()
assert [x for x in g] == [1]
def test_generator_propagate_stopiteration(self):
def f():
it = iter([1])
while 1: yield it.next()
g = f()
assert [x for x in g] == [1]
def test_generator_restart(self):
def g():
i = me.next()
yield i
me = g()
raises(ValueError, me.next)
def test_generator_expression(self):
exec "res = sum(i*i for i in range(5))"
assert res == 30
def test_generator_expression_2(self):
d = {}
exec """
def f():
total = sum(i for i in [x for x in z])
return total, x
z = [1, 2, 7]
res = f()
""" in d
assert d['res'] == (10, 7)
def test_repr(self):
def myFunc():
yield 1
g = myFunc()
r = repr(g)
assert r.startswith("<generator object myFunc at 0x")
assert list(g) == [1]
assert repr(g) == r
def test_unpackiterable_gen(self):
g = (i*i for i in range(-5, 3))
assert set(g) == set([0, 1, 4, 9, 16, 25])
assert set(g) == set()
assert set(i for i in range(0)) == set()
def test_explicit_stop_iteration_unpackiterable(self):
def f():
yield 1
raise StopIteration
assert tuple(f()) == (1,)
def test_exception_is_cleared_by_yield(self):
def f():
try:
foobar
except NameError:
yield 5
raise # should raise "no active exception to re-raise"
gen = f()
next(gen) # --> 5
try:
next(gen)
except TypeError:
pass
def test_multiple_invalid_sends(self):
def mygen():
yield 42
g = mygen()
raises(TypeError, g.send, 2)
raises(TypeError, g.send, 2)
def test_should_not_inline(space):
from pypy.interpreter.generator import should_not_inline
w_co = space.appexec([], '''():
def g(x):
yield x + 5
return g.__code__
''')
assert should_not_inline(w_co) == False
w_co = space.appexec([], '''():
def g(x):
yield x + 5
yield x + 6
return g.__code__
''')
assert should_not_inline(w_co) == True
| 24.092879
| 77
| 0.467489
| 945
| 7,782
| 3.730159
| 0.167196
| 0.069504
| 0.056738
| 0.062695
| 0.495035
| 0.428936
| 0.371915
| 0.272057
| 0.230922
| 0.140426
| 0
| 0.023841
| 0.423285
| 7,782
| 322
| 78
| 24.167702
| 0.761586
| 0.035209
| 0
| 0.592857
| 0
| 0
| 0.104575
| 0
| 0
| 0
| 0
| 0
| 0.146429
| 0
| null | null | 0.007143
| 0.007143
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a3a46f51a8f874a867b535822da740830faf6e6
| 966
|
py
|
Python
|
cybox/common/location.py
|
tirkarthi/python-cybox
|
a378deb68b3ac56360c5cc35ff5aad1cd3dcab83
|
[
"BSD-3-Clause"
] | 40
|
2015-03-05T18:22:51.000Z
|
2022-03-06T07:29:25.000Z
|
cybox/common/location.py
|
tirkarthi/python-cybox
|
a378deb68b3ac56360c5cc35ff5aad1cd3dcab83
|
[
"BSD-3-Clause"
] | 106
|
2015-01-12T18:52:20.000Z
|
2021-04-25T22:57:52.000Z
|
cybox/common/location.py
|
tirkarthi/python-cybox
|
a378deb68b3ac56360c5cc35ff5aad1cd3dcab83
|
[
"BSD-3-Clause"
] | 30
|
2015-03-25T07:24:40.000Z
|
2021-07-23T17:10:11.000Z
|
# Copyright (c) 2017, The MITRE Corporation. All rights reserved.
# See LICENSE.txt for complete terms.
from mixbox import entities, fields
import cybox
import cybox.bindings.cybox_common as common_binding
class LocationFactory(entities.EntityFactory):
@classmethod
def entity_class(cls, key):
return cybox.lookup_extension(key, default=Location)
class Location(entities.Entity):
_binding = common_binding
_binding_class = common_binding.LocationType
_namespace = 'http://cybox.mitre.org/common-2'
_XSI_TYPE = None # overridden by subclasses
id_ = fields.IdrefField("id")
idref = fields.IdrefField("idref")
name = fields.TypedField("Name")
def to_dict(self):
d = super(Location, self).to_dict()
if self._XSI_TYPE:
d["xsi:type"] = self._XSI_TYPE
return d
@staticmethod
def lookup_class(xsi_type):
return cybox.lookup_extension(xsi_type, default=Location)
| 26.108108
| 65
| 0.704969
| 120
| 966
| 5.483333
| 0.508333
| 0.06383
| 0.051672
| 0.079027
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.006477
| 0.200828
| 966
| 36
| 66
| 26.833333
| 0.845855
| 0.128364
| 0
| 0
| 0
| 0
| 0.059666
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.130435
| false
| 0
| 0.130435
| 0.086957
| 0.782609
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
0a3bec6c960ec5a80b8e4e32d4669b80255b605f
| 1,114
|
py
|
Python
|
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
app/rss_feeder_api/migrations/0003_auto_20200813_1623.py
|
RSaab/rss-scraper
|
9bf608878e7d08fea6508ae90b27f1c226b313f1
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.1 on 2020-08-13 16:23
from django.db import migrations, models
import django.utils.timezone
class Migration(migrations.Migration):
dependencies = [
('rss_feeder_api', '0002_feed_subtitle'),
]
operations = [
migrations.AlterModelOptions(
name='entry',
options={'ordering': ('-updated_at',), 'verbose_name_plural': 'entries'},
),
migrations.AlterModelOptions(
name='feed',
options={'ordering': ('-updated_at',), 'verbose_name': 'Feed', 'verbose_name_plural': 'Feeds'},
),
migrations.AddField(
model_name='entry',
name='created_at',
field=models.DateTimeField(auto_now_add=True, default=django.utils.timezone.now),
preserve_default=False,
),
migrations.AddField(
model_name='entry',
name='updated_at',
field=models.DateTimeField(auto_now=True),
),
migrations.AlterUniqueTogether(
name='entry',
unique_together={('guid',)},
),
]
| 29.315789
| 107
| 0.577199
| 104
| 1,114
| 5.990385
| 0.528846
| 0.057785
| 0.060995
| 0.077047
| 0.333868
| 0.333868
| 0
| 0
| 0
| 0
| 0
| 0.022785
| 0.290844
| 1,114
| 37
| 108
| 30.108108
| 0.765823
| 0.0386
| 0
| 0.419355
| 1
| 0
| 0.172123
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.064516
| 0
| 0.16129
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4049bea9cce33edfb9f0362df0cd2e91b7aa1a
| 335
|
py
|
Python
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 7
|
2022-01-29T12:10:10.000Z
|
2022-03-28T13:45:20.000Z
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/reopt_api
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 12
|
2022-02-01T18:23:18.000Z
|
2022-03-31T17:22:17.000Z
|
reo/migrations/0121_merge_20211001_1841.py
|
NREL/REopt_API
|
fbc70f3b0cdeec9ee220266d6b3b0c5d64f257a6
|
[
"BSD-3-Clause"
] | 3
|
2022-02-08T19:44:40.000Z
|
2022-03-12T11:05:36.000Z
|
# Generated by Django 3.1.13 on 2021-10-01 18:41
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('reo', '0117_financialmodel_generator_fuel_escalation_pct'),
('reo', '0120_auto_20210927_2046'),
('reo', '0121_auto_20211012_0305')
]
operations = [
]
| 20.9375
| 69
| 0.662687
| 40
| 335
| 5.275
| 0.85
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.198473
| 0.21791
| 335
| 15
| 70
| 22.333333
| 0.60687
| 0.137313
| 0
| 0
| 1
| 0
| 0.362369
| 0.33101
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.111111
| 0
| 0.444444
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a42fad82c7026120ddbfdc222f7f45f5ba001fc
| 8,219
|
py
|
Python
|
seqenv/ontology.py
|
xapple/seqenv
|
a898b936b64b51340f439b05fc8909f4ed826247
|
[
"MIT"
] | 7
|
2016-12-02T09:28:00.000Z
|
2021-11-04T13:47:16.000Z
|
seqenv/ontology.py
|
xapple/seqenv
|
a898b936b64b51340f439b05fc8909f4ed826247
|
[
"MIT"
] | 7
|
2016-04-07T17:00:50.000Z
|
2018-05-14T12:16:06.000Z
|
seqenv/ontology.py
|
xapple/seqenv
|
a898b936b64b51340f439b05fc8909f4ed826247
|
[
"MIT"
] | 4
|
2016-03-15T16:41:12.000Z
|
2021-12-06T09:30:35.000Z
|
# Built-in modules #
# Internal modules #
from seqenv import module_dir
from seqenv.common.cache import property_cached
# Third party modules #
import sh, networkx
import matplotlib.colors
# A list of envos to help test this module #
test_envos = [
"ENVO:00000033",
"ENVO:00000043",
"ENVO:00000067",
"ENVO:00000143",
"ENVO:00000210",
"ENVO:00000215",
"ENVO:00000475",
]
################################################################################
class Ontology(object):
"""A object that gives you access to the graph (network with nodes and edges)
of the ENVO ontology from the OBO file's path.
Other libraries not used here that could be added:
* graphviz: http://graphviz.readthedocs.org/en/latest/api.html#digraph
* pydot: https://github.com/erocarrera/pydot
"""
def __init__(self, path=None):
"""Give the path to the OBO file"""
if path is None: path = module_dir + 'data_envo/envo.obo'
self.path = path
# --------------------------- In this section --------------------------- #
# orange_obo
# goatools
# orange_network
# pygraphviz
# networkx
@property_cached
def orange_obo(self):
"""The ontology loaded by the `orange` library.
* http://orange.biolab.si
* http://orange-bioinformatics.readthedocs.org/en/latest/
* https://github.com/biolab/orange-bio
* https://bitbucket.org/biolab/orange-bioinformatics
To install: $ pip install Orange-Bioinformatics
"""
from orangecontrib.bio.ontology import OBOOntology
return OBOOntology(self.path)
@property_cached
def goatools(self):
"""The network loaded into goatools' format.
* https://github.com/tanghaibao/goatools
To install: $ pip install goatools
"""
from goatools import obo_parser
return obo_parser.GODag(self.path)
@property_cached
def orange_network(self):
"""The network converted to `orange network` format.
Doesn't seem to work until they update PyPI.
* https://bitbucket.org/biolab/orange-network/
* http://orange-network.readthedocs.org/en/latest/
To install: $ pip install orange-network
"""
return self.orange_obo.to_network()
@property_cached
def pygraphviz(self):
"""The network converted to `pygraphviz` format.
* http://pygraphviz.github.io/documentation/pygraphviz-1.3rc1/
To install: $ pip install pygraphviz
"""
g = self.orange_obo.to_graphviz()
assert g.is_directed()
assert g.is_strict()
return g
@property_cached
def networkx(self):
"""The network converted to `networkx` format.
Seems like it looses directionality.
* https://networkx.readthedocs.org/en/stable/
To install: $ pip install networkx
"""
g = self.orange_obo.to_networkx()
assert networkx.is_directed_acyclic_graph(g)
return g
# --------------------------- In this section --------------------------- #
# test
# get_subgraph
# add_weights
# draw_to_pdf
# write_to_dot
def get_subgraph(self, envos=None):
"""Given a list of ENVO terms, get the subgraph that contains them all
and all their ancestors, up to the root.
Outputs a networkx DiGraph object."""
# Testing mode #
if envos is None: envos = test_envos
# All nodes #
nodes = set(n for e in envos for n in networkx.descendants(self.networkx, e))
nodes.update(envos)
nodes = list(nodes)
# Return #
return self.networkx.subgraph(nodes)
def add_weights(self, g, weights=None):
"""Input a networkx DiGraph object.
Outputs a pygraphviz AGraph object."""
g = networkx.nx_agraph.to_agraph(g)
if weights is None: return g
for envo in weights:
node = g.get_node(envo)
weight = weights[envo]
color = matplotlib.colors.rgb2hex((1.0, 1.0 - weight, 0.0))
node.attr['fillcolor'] = color
return g
def add_style(self, g):
"""Input a pygraphviz AGraph object.
Outputs a pygraphviz AGraph object."""
for node in g.nodes():
text = node.attr['name']
node.attr['label'] = text.replace(' ','\\n')
node.attr['name'] = ''
node.attr['shape'] = 'Mrecord'
node.attr['style'] = 'filled'
# To add the envo id to each node, uncomment:
#envo = node.attr['label']
#node.attr['label'] = "{<f0> %s|<f1> %s}" % (envo, text)
for edge in g.edges():
if edge.attr['label'] == 'located_in': edge.attr['color'] = 'turquoise4'
edge.attr['label'] = ''
return g
def write_to_dot(self, g, path):
"""Input a pygraphviz AGraph object."""
with open(path, 'w') as handle: handle.write(g.to_string())
def add_legend(self, path):
"""Input the path to a dot file."""
legend_txt = """
digraph {
rankdir=LR
node [shape=plaintext,fontname="helvetica"]
subgraph cluster_01 {
label = "NB: darker nodes weigh more";
key [label=<<table border="0" cellpadding="2" cellspacing="0" cellborder="0">
<tr><td align="right" port="i1">Is</td></tr>
<tr><td align="right" port="i2">Part</td></tr>
<tr><td align="right" port="i3">Located</td></tr>
</table>>];
key2 [label=<<table border="0" cellpadding="2" cellspacing="0" cellborder="0">
<tr><td port="i1">a</td></tr>
<tr><td port="i2">of</td></tr>
<tr><td port="i3">in</td></tr>
</table>>];
key:i1:e -> key2:i1:w [color=red];
key:i2:e -> key2:i2:w [color=blue];
key:i3:e -> key2:i3:w [color=turquoise4];
}"""
orig_txt = [line.rstrip('\n') for line in open(path, 'r') if line]
new_text = [line.lstrip() for line in legend_txt.split('\n') if line]
new_text = '\n'.join(new_text + orig_txt[2:])
with open(path, 'w') as handle: handle.write(new_text)
def draw_to_pdf(self, in_path, out_path):
"""Input a path to a dot file."""
sh.dot(in_path, '-Tpdf', '-o', out_path)
# --------------------------- In this section --------------------------- #
# descends
def descends(self, e, root):
"""Does the envo term `e` descend from the node `root`?
Returns True or False."""
# Auto conversion #
if isinstance(e, int): e = "ENVO:%08d" % e
if isinstance(root, int): root = "ENVO:%08d" % root
# Return #
return e in networkx.ancestors(self.networkx, root)
# --------------------------- In this section --------------------------- #
# print_test
# draw_with_networkx
# draw_with_pygraphviz
def print_test(self, e=None):
"""Just a method to see a bit how the different libraries work."""
# Test node #
if e is None: e = test_envos[0]
# Goa #
print "Goa: "
print self.goatools[e]
# Pygraphviz #
print "pygraphviz: "
print self.pygraphviz[e]
print self.pygraphviz.successors(e)
print self.pygraphviz.predecessors(e)
print self.pygraphviz.get_node(e)
# Networkx #
import networkx
print "networkx: "
print self.networkx[e]
print self.networkx.successors(e)
print self.networkx.predecessors(e)
print networkx.ancestors(self.networkx, e) # same as predecessors
print networkx.descendants(self.networkx, e) # almost as child_to_parents
def draw_with_networkx(self, g, path):
"""Input a networkx DiGraph object."""
from matplotlib import pyplot
networkx.draw(g)
pyplot.savefig(path)
pyplot.close()
def draw_with_pygraphviz(self, g, path):
"""Input a pygraphviz AGraph object."""
with open(path, 'w') as handle:
handle.write(g.to_string())
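# Usage sketch (hedged; output paths are illustrative and the bundled envo.obo file must be
# present for the default constructor to work):
#
#     onto = Ontology()                 # loads data_envo/envo.obo by default
#     g = onto.get_subgraph()           # falls back to the built-in test_envos list
#     g = onto.add_style(onto.add_weights(g))
#     onto.write_to_dot(g, 'envo_subgraph.dot')
#     onto.draw_to_pdf('envo_subgraph.dot', 'envo_subgraph.pdf')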
| 35.426724
| 90
| 0.565397
| 1,002
| 8,219
| 4.558882
| 0.257485
| 0.021016
| 0.018608
| 0.020797
| 0.20359
| 0.091068
| 0.075306
| 0.065674
| 0.058669
| 0.058669
| 0
| 0.01723
| 0.279718
| 8,219
| 231
| 91
| 35.580087
| 0.754392
| 0.100742
| 0
| 0.090164
| 0
| 0.016393
| 0.223596
| 0.03455
| 0
| 0
| 0
| 0
| 0.02459
| 0
| null | null | 0
| 0.065574
| null | null | 0.114754
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4ab6a6c7a8f22ae4262d99f43041e035e6b535
| 602
|
py
|
Python
|
project/settings/production.py
|
chiehtu/kissaten
|
a7aad01de569107d5fd5ed2cd781bca6e5750871
|
[
"MIT"
] | null | null | null |
project/settings/production.py
|
chiehtu/kissaten
|
a7aad01de569107d5fd5ed2cd781bca6e5750871
|
[
"MIT"
] | null | null | null |
project/settings/production.py
|
chiehtu/kissaten
|
a7aad01de569107d5fd5ed2cd781bca6e5750871
|
[
"MIT"
] | null | null | null |
from .base import *
SECRET_KEY = get_env_var('SECRET_KEY')
CSRF_COOKIE_SECURE = True
SESSION_COOKIE_SECURE = True
TEMPLATE_LOADERS = (
('django.template.loaders.cached.Loader', (
'django.template.loaders.filesystem.Loader',
'django.template.loaders.app_directories.Loader',
)),
)
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.gmail.com'
EMAIL_HOST_USER = get_env_var('EMAIL_HOST_USER')
EMAIL_HOST_PASSWORD = get_env_var('EMAIL_HOST_PASSWORD')
EMAIL_PORT = 587
EMAIL_USE_TLS = True
DEFAULT_FROM_EMAIL = ''
USERENA_USE_HTTPS = True
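# Deployment note (hedged): this settings module reads its secrets from the environment via
# get_env_var, so SECRET_KEY, EMAIL_HOST_USER and EMAIL_HOST_PASSWORD must be exported before
# Django starts, e.g.
#
#     export SECRET_KEY=... EMAIL_HOST_USER=... EMAIL_HOST_PASSWORD=...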
| 18.8125
| 61
| 0.750831
| 81
| 602
| 5.197531
| 0.493827
| 0.106888
| 0.064133
| 0.128266
| 0.085511
| 0
| 0
| 0
| 0
| 0
| 0
| 0.00578
| 0.137874
| 602
| 31
| 62
| 19.419355
| 0.805395
| 0
| 0
| 0
| 0
| 0
| 0.373754
| 0.277409
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.055556
| 0.055556
| 0
| 0.055556
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4b453e9f68bd48c8b434b43c7c61e7c47c248d
| 3,400
|
py
|
Python
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 6
|
2020-07-28T19:58:28.000Z
|
2021-05-01T18:51:37.000Z
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | 81
|
2020-07-30T07:08:10.000Z
|
2021-07-28T02:17:43.000Z
|
modelflow/graph_viz_from_outputs.py
|
ModelFlow/modelflow
|
c2b720b2da8bb17462baff5c00bbe942644474b0
|
[
"MIT"
] | null | null | null |
import pandas as pd
import argparse
import json
try:
from graphviz import Digraph
except:
print("Note: Optional graphviz not installed")
def generate_graph(df, graph_format='pdf'):
g = Digraph('ModelFlow', filename='modelflow.gv', engine='neato', format=graph_format)
g.attr(overlap='false')
g.attr(splines='true')
column_names = df.columns
states = []
g.attr('node', shape='ellipse')
for column_name in column_names:
if column_name[:6] == 'state_':
states.append((column_name[6:], column_name))
g.node(column_name[6:])
models = []
g.attr('node', shape='box')
for column_name in column_names:
if column_name[:6] != 'state_':
models.append((column_name.split('_')[0], column_name))
g.node(column_name.split('_')[0])
for column_name in column_names:
if column_name[:6] != 'state_':
parts = column_name.split('_')
state = '_'.join(parts[1:])[6:-7]
print(parts[0], state, df[column_name].min(),
df[column_name].max())
if df[column_name].min() < 0 and df[column_name].max() <= 0:
g.edge(state, parts[0])
elif df[column_name].min() >= 0 and df[column_name].max() > 0:
g.edge(parts[0], state)
else:
g.edge(parts[0], state)
g.edge(state, parts[0])
if graph_format == 'json':
# TODO: THIS DOES NOT WORK FOR MULTIPLE MODELFLOWS
with open('modelflow.gv.json', 'r') as f:
return json.load(f)
else:
g.view()
def generate_react_flow_chart(outputs):
df = pd.DataFrame()
for key, value in outputs['output_states'].items():
df[key] = value['data']
return generate_react_flow_chart_from_df(df)
def generate_react_flow_chart_from_df(df):
column_names = df.columns
nodes = {}
# Elipses
for column_name in column_names:
if column_name[:6] == 'state_':
nodes[column_name[6:]] = dict(name=column_name[6:], kind='elipse')
# Boxes
for column_name in column_names:
if column_name[:6] != 'state_':
nodes[column_name.split('_')[0]] = dict(name=column_name.split('_')[0], kind='box')
edges = []
for column_name in column_names:
if column_name[:6] != 'state_':
parts = column_name.split('_')
name1 = parts[0]
state = '_'.join(parts[1:])[6:-7]
# print(name1, state, df[column_name].min(),
# df[column_name].max())
if df[column_name].min() < 0 and df[column_name].max() <= 0:
edges.append([state, name1, 'one_way'])
elif df[column_name].min() >= 0 and df[column_name].max() > 0:
edges.append([name1, state, 'one_way'])
else:
edges.append([name1, state, 'both'])
return dict(nodes=list(nodes.values()), edges=edges)
def main(args):
df = pd.read_csv(args.output_file)
# generate_graph(df)
generate_react_flow_chart_from_df(df)
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Generate Graph Viz')
parser.add_argument('-f', '--output_file', type=str,
help='The output file to generate a graph of', required=True)
args = parser.parse_args()
main(args)
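# Usage sketch (hedged): run against the CSV produced by a ModelFlow simulation, e.g.
#
#     python graph_viz_from_outputs.py -f model_outputs.csv
#
# Rendering with generate_graph() additionally needs the optional graphviz dependency
# noted in the import guard at the top of the file.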
| 32.380952
| 95
| 0.577941
| 446
| 3,400
| 4.188341
| 0.262332
| 0.192719
| 0.077088
| 0.04818
| 0.457173
| 0.413276
| 0.38651
| 0.314775
| 0.314775
| 0.314775
| 0
| 0.015783
| 0.273235
| 3,400
| 104
| 96
| 32.692308
| 0.740186
| 0.045
| 0
| 0.371795
| 1
| 0
| 0.087037
| 0
| 0
| 0
| 0
| 0.009615
| 0
| 1
| 0.051282
| false
| 0
| 0.051282
| 0
| 0.141026
| 0.025641
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a4ed29474e7c8d2e3be0b36b2cae77e32eb65c8
| 376
|
py
|
Python
|
controller/base_service.py
|
oopsteams/pansite
|
11896842da66efc72c26eab071f7f802b982f435
|
[
"MIT"
] | null | null | null |
controller/base_service.py
|
oopsteams/pansite
|
11896842da66efc72c26eab071f7f802b982f435
|
[
"MIT"
] | 1
|
2021-06-02T01:00:41.000Z
|
2021-06-02T01:00:41.000Z
|
controller/base_service.py
|
oopsteams/pansite
|
11896842da66efc72c26eab071f7f802b982f435
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Created by susy at 2019/11/8
"""
from dao.dao import DataDao
import pytz
from dao.models import PanAccounts
from cfg import PAN_SERVICE, MASTER_ACCOUNT_ID
class BaseService:
def __init__(self):
self.default_tz = pytz.timezone('Asia/Chongqing')
# self.pan_acc: PanAccounts = DataDao.pan_account_list(MASTER_ACCOUNT_ID, False)
| 23.5
| 88
| 0.726064
| 54
| 376
| 4.814815
| 0.648148
| 0.053846
| 0.115385
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.025559
| 0.167553
| 376
| 15
| 89
| 25.066667
| 0.805112
| 0.345745
| 0
| 0
| 0
| 0
| 0.059072
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.142857
| false
| 0
| 0.571429
| 0
| 0.857143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
0a554fb894afeaf01a54f7e6b34139ca26334475
| 862
|
py
|
Python
|
dbschema/revertDBinstall.py
|
leschzinerlab/myami-3.2-freeHand
|
974b8a48245222de0d9cfb0f433533487ecce60d
|
[
"MIT"
] | null | null | null |
dbschema/revertDBinstall.py
|
leschzinerlab/myami-3.2-freeHand
|
974b8a48245222de0d9cfb0f433533487ecce60d
|
[
"MIT"
] | null | null | null |
dbschema/revertDBinstall.py
|
leschzinerlab/myami-3.2-freeHand
|
974b8a48245222de0d9cfb0f433533487ecce60d
|
[
"MIT"
] | 1
|
2019-09-05T20:58:37.000Z
|
2019-09-05T20:58:37.000Z
|
#!/usr/bin/env python
from sinedon import dbupgrade, dbconfig
import updatelib
project_dbupgrade = dbupgrade.DBUpgradeTools('projectdata', drop=True)
if __name__ == "__main__":
updatelib_inst = updatelib.UpdateLib(project_dbupgrade)
checkout_version = raw_input('Revert to checkout version, for example, 2.1 -->')
if checkout_version != 'trunk':
try:
map((lambda x:int(x)),checkout_version.split('.')[:2])
except:
print "valid versions are 'trunk', '2.1', or '2.1.2' etc"
raise
checkout_revision = int(raw_input('Revert to checkout revision, for example, 16500 -->'))
updatelib_inst.updateDatabaseVersion(checkout_version)
print "\033[35mVersion Updated in the database %s\033[0m" % checkout_version
updatelib_inst.updateDatabaseRevision(checkout_revision)
print "\033[35mRevision Updated in the database as %d\033[0m" % checkout_revision
| 41.047619
| 90
| 0.759861
| 114
| 862
| 5.54386
| 0.517544
| 0.142405
| 0.079114
| 0.050633
| 0.075949
| 0
| 0
| 0
| 0
| 0
| 0
| 0.040789
| 0.118329
| 862
| 20
| 91
| 43.1
| 0.790789
| 0.023202
| 0
| 0
| 0
| 0
| 0.326992
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.117647
| null | null | 0.176471
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a56c8065ff434f391ba424536df2984e5ef9221
| 3,396
|
py
|
Python
|
notebooks/classical_clustering.py
|
prise6/smart-iss-posts
|
fc913078e7fbe6343fd36ec6ca9852322247da5d
|
[
"MIT"
] | null | null | null |
notebooks/classical_clustering.py
|
prise6/smart-iss-posts
|
fc913078e7fbe6343fd36ec6ca9852322247da5d
|
[
"MIT"
] | 10
|
2020-01-28T23:15:20.000Z
|
2022-03-12T00:12:31.000Z
|
notebooks/classical_clustering.py
|
prise6/smart-iss-posts
|
fc913078e7fbe6343fd36ec6ca9852322247da5d
|
[
"MIT"
] | null | null | null |
#%% [markdown]
# # Classical clustering
#%% [markdown]
# ## Standard imports
import os
#%%
%load_ext autoreload
%autoreload 2
os.chdir('/home/jovyan/work')
#%% [markdown]
# ## Import iss
#%%
from iss.tools import Config
from iss.tools import Tools
from iss.models import SimpleConvAutoEncoder
from iss.clustering import ClassicalClustering
from iss.clustering import AdvancedClustering
from dotenv import find_dotenv, load_dotenv
import numpy as np
import matplotlib.pyplot as plt  # used by the plt.figure / plt.scatter calls below
#%% [markdown]
# ## Load the config
#%%
load_dotenv(find_dotenv())
cfg = Config(project_dir = os.getenv("PROJECT_DIR"), mode = os.getenv("MODE"))
#%% [markdown]
# ## Load the model
#%%
## load the model
model_type = 'simple_conv'
cfg.get('models')[model_type]['model_name'] = 'model_colab'
model = SimpleConvAutoEncoder(cfg.get('models')[model_type])
#%% [markdown]
## Load the images
#%%
filenames = Tools.list_directory_filenames('data/processed/models/autoencoder/train/k/')
generator_imgs = Tools.generator_np_picture_from_filenames(filenames, target_size = (27, 48), batch = 496, nb_batch = 10)
#%%
pictures_id, pictures_preds = Tools.encoded_pictures_from_generator(generator_imgs, model)
#%%
intermediate_output = pictures_preds.reshape((pictures_preds.shape[0], 3*6*16))
#%% [markdown]
# ## PCA
# Dimensionality reduction
#%%
clustering = ClassicalClustering(cfg.get('clustering')['classical'], pictures_id, intermediate_output)
#%%
clustering.compute_pca()
#%% [markdown]
# ## Kmeans
# Initial clusters
#%%
clustering.compute_kmeans()
clustering.compute_kmeans_centers()
#%% [markdown]
# ## CAH
# Second-stage clusters
#%%
clustering.compute_cah()
clustering.compute_cah_labels()
#%% [markdown]
# ## Results
#%% [markdown]
# ### Intermediate clusters
#%%
fig = plt.figure(1, figsize=(12, 7))
plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.kmeans_labels)
#%% [markdown]
# ### Final clusters
#%%
plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = clustering.final_labels)
#%% [markdown]
# ### Save the models
#%%
clustering.save()
#%%
# clustering = ClassicalClustering(cfg.get('clustering')['classical'])
clustering.load()
#%% [markdown]
# ## Visualize the clusters
#%%
def select_cluster(clustering, id_cluster):
return [os.path.join('data/processed/models/autoencoder/train/k/', res[0] + '.jpg') for res in clustering.get_zip_results() if res[2] == id_cluster]
#%%
from IPython.display import Image
#%%
for cl in range(0,19):
print("Cluster %s" % (cl))
res_tmp = select_cluster(clustering, cl)
print(len(res_tmp))
image_array = [Tools.read_np_picture(f, target_size = (54, 96)) for f in res_tmp[:100]]
# img = Tools.display_mosaic(image_array, nrow = 10)
# fig = plt.figure(1, figsize=(12, 7))
# plt.imshow(img, aspect = 'auto')
# plt.show()
#%% [markdown]
# ## Zoom in on cluster 0
#%%
res_tmp = select_cluster(clustering, 1)
#%%
print(len(res_tmp))
image_array = [Tools.read_np_picture(f, target_size = (54, 96)) for f in res_tmp]
#%%
Tools.display_mosaic(image_array, nrow = 18)
#%%
col = [1 if l == 1 else 0 for l in clustering.kmeans_labels]
plt.scatter(clustering.pca_reduction[:, 0], clustering.pca_reduction[:, 1], c = col)
#%%
plt.scatter(clustering.pca_reduction[np.array(col) == 1, 0], clustering.pca_reduction[np.array(col) == 1, 1])
| 22.196078
| 152
| 0.707008
| 441
| 3,396
| 5.272109
| 0.342404
| 0.044731
| 0.075699
| 0.03957
| 0.343226
| 0.295914
| 0.190968
| 0.162581
| 0.141505
| 0.141505
| 0
| 0.019614
| 0.12927
| 3,396
| 152
| 153
| 22.342105
| 0.766655
| 0.232332
| 0
| 0.043478
| 0
| 0
| 0.076801
| 0.033426
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.195652
| null | null | 0.065217
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a57479ced46772f03d9c9dc023a3217a695d37d
| 345
|
py
|
Python
|
lambdataalchemani/lambda_test.py
|
Full-Data-Alchemist/lambdata-Mani-alch
|
90dcbc091d8f9841d5a1046e64437058a4156dc5
|
[
"MIT"
] | null | null | null |
lambdataalchemani/lambda_test.py
|
Full-Data-Alchemist/lambdata-Mani-alch
|
90dcbc091d8f9841d5a1046e64437058a4156dc5
|
[
"MIT"
] | null | null | null |
lambdataalchemani/lambda_test.py
|
Full-Data-Alchemist/lambdata-Mani-alch
|
90dcbc091d8f9841d5a1046e64437058a4156dc5
|
[
"MIT"
] | null | null | null |
"""
"""
import unittest
from example_module import COLORS, increment
class ExampleTest(unittest.TestCase):
"""
#TODO
"""
def test_increment(self):
x0 = 0
y0 = increment(x0) #y0 == 1
self.assertEqual(y0, 1)
x1 = 100
y1 = increment(x1) #y1 == 101
self.assertEqual(y1, 101)
| 15.681818
| 44
| 0.550725
| 39
| 345
| 4.820513
| 0.589744
| 0.031915
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.094421
| 0.324638
| 345
| 21
| 45
| 16.428571
| 0.712446
| 0.063768
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.047619
| 0.2
| 1
| 0.1
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a585a8c735b3266210fbee5416e533aa2feb0c6
| 8,847
|
py
|
Python
|
desktop/core/src/desktop/auth/views.py
|
bopopescu/hue-5
|
665c275d0c0570b1a4a34a293503cc72ec35695c
|
[
"Apache-2.0"
] | 1
|
2018-05-07T05:40:36.000Z
|
2018-05-07T05:40:36.000Z
|
desktop/core/src/desktop/auth/views.py
|
lockhart39/HueQualityAndIngestionApp
|
c75e55a43a8bdeb7aa0f5bf2101ec72b01dcac1c
|
[
"Apache-2.0"
] | null | null | null |
desktop/core/src/desktop/auth/views.py
|
lockhart39/HueQualityAndIngestionApp
|
c75e55a43a8bdeb7aa0f5bf2101ec72b01dcac1c
|
[
"Apache-2.0"
] | 1
|
2022-03-21T09:41:35.000Z
|
2022-03-21T09:41:35.000Z
|
#!/usr/bin/env python
# Licensed to Cloudera, Inc. under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. Cloudera, Inc. licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
try:
import oauth2 as oauth
except:
oauth = None
import cgi
import logging
import urllib
from datetime import datetime
from axes.decorators import watch_login
import django.contrib.auth.views
from django.core import urlresolvers
from django.core.exceptions import SuspiciousOperation
from django.contrib.auth import login, get_backends, authenticate
from django.contrib.auth.models import User
from django.contrib.sessions.models import Session
from django.http import HttpResponseRedirect
from django.utils.translation import ugettext as _
from desktop.auth import forms as auth_forms
from desktop.lib.django_util import render
from desktop.lib.django_util import login_notrequired
from desktop.lib.django_util import JsonResponse
from desktop.log.access import access_warn, last_access_map
from desktop.conf import LDAP, OAUTH, DEMO_ENABLED
from hadoop.fs.exceptions import WebHdfsException
from useradmin.models import get_profile
from useradmin.views import ensure_home_directory, require_change_password
LOG = logging.getLogger(__name__)
def get_current_users():
"""Return dictionary of User objects and
a dictionary of the user's IP address and last access time"""
current_users = { }
for session in Session.objects.all():
try:
uid = session.get_decoded().get(django.contrib.auth.SESSION_KEY)
except SuspiciousOperation:
# If secret_key changed, this resolution won't work.
uid = None
if uid is not None:
try:
userobj = User.objects.get(pk=uid)
current_users[userobj] = last_access_map.get(userobj.username, { })
except User.DoesNotExist:
LOG.debug("User with id=%d does not exist" % uid)
return current_users
def first_login_ever():
backends = get_backends()
for backend in backends:
if hasattr(backend, 'is_first_login_ever') and backend.is_first_login_ever():
return True
return False
def get_backend_names():
return get_backends and [backend.__class__.__name__ for backend in get_backends()]
@login_notrequired
@watch_login
def dt_login(request, from_modal=False):
redirect_to = request.REQUEST.get('next', '/')
is_first_login_ever = first_login_ever()
backend_names = get_backend_names()
is_active_directory = 'LdapBackend' in backend_names and ( bool(LDAP.NT_DOMAIN.get()) or bool(LDAP.LDAP_SERVERS.get()) )
if is_active_directory:
UserCreationForm = auth_forms.LdapUserCreationForm
AuthenticationForm = auth_forms.LdapAuthenticationForm
else:
UserCreationForm = auth_forms.UserCreationForm
AuthenticationForm = auth_forms.AuthenticationForm
if request.method == 'POST':
request.audit = {
'operation': 'USER_LOGIN',
'username': request.POST.get('username')
}
# For first login, need to validate user info!
first_user_form = is_first_login_ever and UserCreationForm(data=request.POST) or None
first_user = first_user_form and first_user_form.is_valid()
if first_user or not is_first_login_ever:
auth_form = AuthenticationForm(data=request.POST)
if auth_form.is_valid():
# Must login by using the AuthenticationForm.
# It provides 'backends' on the User object.
user = auth_form.get_user()
userprofile = get_profile(user)
login(request, user)
if request.session.test_cookie_worked():
request.session.delete_test_cookie()
auto_create_home_backends = ['AllowAllBackend', 'LdapBackend', 'SpnegoDjangoBackend']
if is_first_login_ever or any(backend in backend_names for backend in auto_create_home_backends):
# Create home directory for first user.
try:
ensure_home_directory(request.fs, user.username)
except (IOError, WebHdfsException), e:
LOG.error(_('Could not create home directory.'), exc_info=e)
request.error(_('Could not create home directory.'))
if require_change_password(userprofile):
return HttpResponseRedirect(urlresolvers.reverse('useradmin.views.edit_user', kwargs={'username': user.username}))
userprofile.first_login = False
userprofile.last_activity = datetime.now()
userprofile.save()
msg = 'Successful login for user: %s' % user.username
request.audit['operationText'] = msg
access_warn(request, msg)
if from_modal or request.REQUEST.get('fromModal', 'false') == 'true':
return JsonResponse({'auth': True})
else:
return HttpResponseRedirect(redirect_to)
else:
request.audit['allowed'] = False
msg = 'Failed login for user: %s' % request.POST.get('username')
request.audit['operationText'] = msg
access_warn(request, msg)
if from_modal or request.REQUEST.get('fromModal', 'false') == 'true':
return JsonResponse({'auth': False})
else:
first_user_form = None
auth_form = AuthenticationForm()
if DEMO_ENABLED.get() and not 'admin' in request.REQUEST:
user = authenticate(username=request.user.username, password='HueRocks')
login(request, user)
ensure_home_directory(request.fs, user.username)
return HttpResponseRedirect(redirect_to)
if not from_modal:
request.session.set_test_cookie()
renderable_path = 'login.mako'
if from_modal:
renderable_path = 'login_modal.mako'
return render(renderable_path, request, {
'action': urlresolvers.reverse('desktop.auth.views.dt_login'),
'form': first_user_form or auth_form,
'next': redirect_to,
'first_login_ever': is_first_login_ever,
'login_errors': request.method == 'POST',
'backend_names': backend_names,
'active_directory': is_active_directory
})
def dt_logout(request, next_page=None):
"""Log out the user"""
username = request.user.get_username()
request.audit = {
'username': username,
'operation': 'USER_LOGOUT',
'operationText': 'Logged out user: %s' % username
}
backends = get_backends()
if backends:
for backend in backends:
if hasattr(backend, 'logout'):
response = backend.logout(request, next_page)
if response:
return response
return django.contrib.auth.views.logout(request, next_page)
def profile(request):
"""
Dumps JSON for user-profile information.
"""
return render(None, request, _profile_dict(request.user))
def _profile_dict(user):
return dict(
username=user.username,
first_name=user.first_name,
last_name=user.last_name,
last_login=str(user.last_login), # datetime object needs to be converted
email=user.email)
# OAuth is based on Twitter as example.
@login_notrequired
def oauth_login(request):
assert oauth is not None
consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get())
client = oauth.Client(consumer)
resp, content = client.request(OAUTH.REQUEST_TOKEN_URL.get(), "POST", body=urllib.urlencode({
'oauth_callback': 'http://' + request.get_host() + '/login/oauth_authenticated/'
}))
if resp['status'] != '200':
raise Exception(_("Invalid response from OAuth provider: %s") % resp)
request.session['request_token'] = dict(cgi.parse_qsl(content))
url = "%s?oauth_token=%s" % (OAUTH.AUTHENTICATE_URL.get(), request.session['request_token']['oauth_token'])
return HttpResponseRedirect(url)
@login_notrequired
def oauth_authenticated(request):
consumer = oauth.Consumer(OAUTH.CONSUMER_KEY.get(), OAUTH.CONSUMER_SECRET.get())
token = oauth.Token(request.session['request_token']['oauth_token'], request.session['request_token']['oauth_token_secret'])
client = oauth.Client(consumer, token)
resp, content = client.request(OAUTH.ACCESS_TOKEN_URL.get(), "GET")
if resp['status'] != '200':
raise Exception(_("Invalid response from OAuth provider: %s") % resp)
access_token = dict(cgi.parse_qsl(content))
user = authenticate(access_token=access_token)
login(request, user)
redirect_to = request.REQUEST.get('next', '/')
return HttpResponseRedirect(redirect_to)
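# Stand-alone sketch (editor's addition) of the duck-typed backend probing
# used by first_login_ever() and dt_logout() above; these backend classes
# are hypothetical, not Hue's real authentication backends.
class PlainBackend(object):
    pass


class FirstLoginBackend(object):
    def is_first_login_ever(self):
        return True


def probe_first_login(backends):
    # Same pattern as first_login_ever(): only call the hook if the backend defines it.
    return any(hasattr(b, 'is_first_login_ever') and b.is_first_login_ever()
               for b in backends)


print(probe_first_login([PlainBackend()]))                       # False
print(probe_first_login([PlainBackend(), FirstLoginBackend()]))  # True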
| 34.158301
| 126
| 0.722505
| 1,136
| 8,847
| 5.442782
| 0.242958
| 0.019408
| 0.022643
| 0.018114
| 0.19214
| 0.172893
| 0.123403
| 0.110464
| 0.096232
| 0.082161
| 0
| 0.001513
| 0.178026
| 8,847
| 258
| 127
| 34.290698
| 0.848735
| 0.119363
| 0
| 0.222857
| 0
| 0
| 0.11319
| 0.01041
| 0
| 0
| 0
| 0
| 0.005714
| 0
| null | null | 0.017143
| 0.131429
| null | null | 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a5cd9823d91b39775866f431a665d36a045cbd2
| 2,450
|
py
|
Python
|
Code/all-starter-code/search.py
|
diyarkudrat/CS-1.3-Core-Data-Structures
|
7d7d48ad7913cded7b0ea75ced144d0a08989924
|
[
"MIT"
] | null | null | null |
Code/all-starter-code/search.py
|
diyarkudrat/CS-1.3-Core-Data-Structures
|
7d7d48ad7913cded7b0ea75ced144d0a08989924
|
[
"MIT"
] | null | null | null |
Code/all-starter-code/search.py
|
diyarkudrat/CS-1.3-Core-Data-Structures
|
7d7d48ad7913cded7b0ea75ced144d0a08989924
|
[
"MIT"
] | null | null | null |
#!python

"""
ANNOTATE FUNCTIONS WITH TIME AND SPACE COMPLEXITY!!!!!
"""


def linear_search(array, item):
    """return the first index of item in array or None if item is not found"""
    return linear_search_iterative(array, item)
    # return linear_search_recursive(array, item)


def linear_search_iterative(array, item):
    """Time complexity: O(n) because every item may have to be inspected once
    Space complexity: O(1) because only the loop index and current value are stored"""
    # loop over all array values until item is found
    for index, value in enumerate(array):  # O(n)
        if item == value:  # O(1)
            return index  # found O(1)
    return None  # not found O(1)


def linear_search_recursive(array, item, index=0):
    """Time complexity: O(n) because the function recurses once per element
    Space complexity: O(n) for the recursion call stack"""
    if len(array) <= index:
        return None  # ran past the end without finding item
    if array[index] == item:
        return index
    else:
        return linear_search_recursive(array, item, index + 1)


def binary_search(array, item):
    """return the index of item in sorted array or None if item is not found"""
    return binary_search_iterative(array, item)
    # return binary_search_recursive(array, item)


def binary_search_iterative(array, item):
    """Time complexity: O(log n) because the search range is halved on every iteration
    Space complexity: O(1)"""
    left, right = 0, len(array) - 1
    if len(array) == 0:
        return None
    while left <= right:
        middle = left + (right - left) // 2
        if item == array[middle]:
            return middle
        elif item > array[middle]:
            left = middle + 1
        else:
            right = middle - 1
    return None


def binary_search_recursive(array, item, left=None, right=None):
    """Time complexity: O(log n)
    Space complexity: O(log n) for the recursion call stack"""
    if left is None and right is None:
        left, right = 0, len(array) - 1
    if left > right:
        return None
    middle = left + (right - left) // 2
    if array[middle] == item:
        return middle
    elif item > array[middle]:
        return binary_search_recursive(array, item, middle + 1, right)
    else:
        return binary_search_recursive(array, item, left, middle - 1)
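# Quick sanity check for the search functions above (editor's addition,
# not part of the original starter code).
if __name__ == '__main__':
    data = ['a', 'b', 'c', 'd', 'e']
    assert linear_search(data, 'c') == 2
    assert linear_search(data, 'z') is None
    assert binary_search(data, 'a') == 0
    assert binary_search(data, 'e') == 4
    assert binary_search(data, 'z') is None
    print('all search checks passed')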
| 27.222222
| 117
| 0.628571
| 335
| 2,450
| 4.525373
| 0.223881
| 0.077177
| 0.092348
| 0.110818
| 0.48285
| 0.384565
| 0.094987
| 0.043536
| 0.043536
| 0
| 0
| 0.011306
| 0.277959
| 2,450
| 89
| 118
| 27.52809
| 0.845676
| 0.365306
| 0
| 0.439024
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.011236
| 0
| 1
| 0.146341
| false
| 0
| 0
| 0
| 0.487805
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a61c9cfc48e56723e2d98bba70acd01045f443c
| 1,357
|
py
|
Python
|
cv_recommender/account/urls.py
|
hhhameem/CV-Recommender
|
b85d53934f0d888835ab8201be388d7d69f0693d
|
[
"MIT"
] | 1
|
2021-09-14T17:40:17.000Z
|
2021-09-14T17:40:17.000Z
|
cv_recommender/account/urls.py
|
mjohra/Cv-Recommender-Python-Django
|
d231092f7bd989b513210dd6031fb23e28bd5dfe
|
[
"MIT"
] | 1
|
2021-03-31T17:45:15.000Z
|
2021-03-31T17:45:15.000Z
|
cv_recommender/account/urls.py
|
mjohra/Cv-Recommender-Python-Django
|
d231092f7bd989b513210dd6031fb23e28bd5dfe
|
[
"MIT"
] | 1
|
2021-03-31T16:58:50.000Z
|
2021-03-31T16:58:50.000Z
|
from django.urls import path
from django.contrib.auth import views as auth_views
from . import views
urlpatterns = [
path('register/', views.register, name='register'),
path('login/', views.userlogin, name='login'),
path('logout/', views.userlogout, name='logout'),
path('password_change/', auth_views.PasswordChangeView.as_view(),
name='password_change'),
path('password_change/done/', auth_views.PasswordChangeDoneView.as_view(),
name='password_change_done'),
path('password_reset/', auth_views.PasswordResetView.as_view(),
name='password_reset'),
path('password_reset/done/', auth_views.PasswordResetDoneView.as_view(),
name='password_reset_done'),
path('reset/<uidb64>/<token>/', auth_views.PasswordResetConfirmView.as_view(),
name='password_reset_confirm'),
path('reset/done/', auth_views.PasswordResetCompleteView.as_view(),
name='password_reset_complete'),
path('applicantdashboard/', views.applicantdashboard,
name='applicantdashboard'),
path('recruiterdashboard/', views.recruiterdashboard,
name='recruiterdashboard'),
path('applicantdashboard/profile-edit/', views.applicantedit,
name='editapplicantprofile'),
path('recruiterdashboard/profile-edit/', views.recruiteredit,
name='editrecruiterprofile'),
]
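# Hedged usage sketch (editor's addition): once these named routes are
# included in a configured Django project's ROOT_URLCONF, they can be
# resolved by name; the exact prefixes below are assumptions.
from django.urls import reverse

login_url = reverse('login')                     # e.g. '/login/'
reset_done_url = reverse('password_reset_done')  # e.g. '/password_reset/done/'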
| 45.233333
| 82
| 0.709654
| 137
| 1,357
| 6.832117
| 0.284672
| 0.067308
| 0.064103
| 0.115385
| 0.149573
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001729
| 0.147384
| 1,357
| 29
| 83
| 46.793103
| 0.80726
| 0
| 0
| 0
| 0
| 0
| 0.322771
| 0.112749
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.428571
| 0.107143
| 0
| 0.107143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a658f2185402efce42f9a0cf262eb928b7b63f0
| 1,650
|
py
|
Python
|
modules/models.py
|
sbj-ss/github-watcher
|
7d7c4d2a0a6a014b93a2168dc6e508b2b867a414
|
[
"MIT"
] | null | null | null |
modules/models.py
|
sbj-ss/github-watcher
|
7d7c4d2a0a6a014b93a2168dc6e508b2b867a414
|
[
"MIT"
] | null | null | null |
modules/models.py
|
sbj-ss/github-watcher
|
7d7c4d2a0a6a014b93a2168dc6e508b2b867a414
|
[
"MIT"
] | null | null | null |
from dataclasses import asdict, dataclass
from typing import Any, Dict, List, Type


@dataclass(frozen=True)
class StatsBaseModel:
    """Base model for various reports"""

    @classmethod
    def key(cls: Type) -> str:
        name = cls.__name__
        return name[0].lower() + name[1:]

    def to_table(self) -> List[str]:
        raise NotImplementedError

    def to_dict(self) -> Dict[str, Any]:
        return asdict(self)


@dataclass(frozen=True)
class Contributor:
    name: str
    commit_count: int


@dataclass(frozen=True)
class ContributorStats(StatsBaseModel):
    contributors: List[Contributor]

    def to_table(self) -> List[str]:
        return [
            'Most active contributors:',
            '-------------------------',
            'Name' + (' ' * 20) + 'Commits',
        ] + [f'{c.name.ljust(24)}{c.commit_count}' for c in self.contributors]


@dataclass(frozen=True)
class PullRequestStats(StatsBaseModel):
    open_count: int
    closed_count: int
    old_count: int

    def to_table(self) -> List[str]:
        return [
            'Pull requests:',
            '--------------',
            'Open Closed Old',
            f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}'
        ]


@dataclass(frozen=True)
class IssueStats(StatsBaseModel):
    open_count: int
    closed_count: int
    old_count: int

    def to_table(self) -> List[str]:
        return [
            'Issues:',
            '-------',
            'Open Closed Old',
            f'{str(self.open_count).ljust(8)}{str(self.closed_count).ljust(8)}{str(self.old_count).ljust(8)}'
        ]
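# Usage sketch for the report models above (editor's addition; assumes the
# classes are importable from this module, e.g. modules.models).
stats = ContributorStats(contributors=[
    Contributor(name='alice', commit_count=42),
    Contributor(name='bob', commit_count=7),
])
print(stats.key())        # 'contributorStats'
print(stats.to_dict())    # {'contributors': [{'name': 'alice', 'commit_count': 42}, ...]}
for line in stats.to_table():
    print(line)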
| 25
| 109
| 0.577576
| 191
| 1,650
| 4.86911
| 0.303665
| 0.060215
| 0.070968
| 0.129032
| 0.393548
| 0.393548
| 0.370968
| 0.341935
| 0.341935
| 0.341935
| 0
| 0.009844
| 0.261212
| 1,650
| 65
| 110
| 25.384615
| 0.753076
| 0.018182
| 0
| 0.44898
| 0
| 0.040816
| 0.225527
| 0.153036
| 0
| 0
| 0
| 0
| 0
| 1
| 0.122449
| false
| 0
| 0.040816
| 0.081633
| 0.55102
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
6a55c2af9ac7243f141edb694902ca98eb95a939
| 278
|
py
|
Python
|
ReadSymLink.py
|
ohel/pyorbital-gizmod-tweaks
|
4c02783d1c6287df508351467a5c203a11430b07
|
[
"Unlicense"
] | null | null | null |
ReadSymLink.py
|
ohel/pyorbital-gizmod-tweaks
|
4c02783d1c6287df508351467a5c203a11430b07
|
[
"Unlicense"
] | null | null | null |
ReadSymLink.py
|
ohel/pyorbital-gizmod-tweaks
|
4c02783d1c6287df508351467a5c203a11430b07
|
[
"Unlicense"
] | null | null | null |
import os


def readlinkabs(l):
    """
    Return an absolute path for the destination
    of a symlink
    """
    if not (os.path.islink(l)):
        return None
    p = os.readlink(l)
    if os.path.isabs(p):
        return p
    return os.path.join(os.path.dirname(l), p)
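# Usage sketch (editor's addition): resolve a relative symlink created in a
# temporary directory; requires a platform where os.symlink is permitted.
import tempfile

tmp = tempfile.mkdtemp()
target = os.path.join(tmp, 'target.txt')
open(target, 'w').close()
link = os.path.join(tmp, 'link.txt')
os.symlink('target.txt', link)   # relative link target

print(readlinkabs(link))    # absolute path ending in target.txt
print(readlinkabs(target))  # None, because target.txt is not a symlink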
| 18.533333
| 48
| 0.582734
| 43
| 278
| 3.767442
| 0.55814
| 0.148148
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.294964
| 278
| 14
| 49
| 19.857143
| 0.826531
| 0.205036
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.125
| false
| 0
| 0.125
| 0
| 0.625
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
6a57cefd47f3150e0a9d0bbdcd3affcfe90d72c9
| 15,520
|
py
|
Python
|
legtool/tabs/servo_tab.py
|
jpieper/legtool
|
ab3946051bd16817b61d3073ce7be8bd27af90d0
|
[
"Apache-2.0"
] | 10
|
2015-09-23T19:28:06.000Z
|
2021-04-27T02:32:27.000Z
|
legtool/tabs/servo_tab.py
|
jpieper/legtool
|
ab3946051bd16817b61d3073ce7be8bd27af90d0
|
[
"Apache-2.0"
] | null | null | null |
legtool/tabs/servo_tab.py
|
jpieper/legtool
|
ab3946051bd16817b61d3073ce7be8bd27af90d0
|
[
"Apache-2.0"
] | 9
|
2015-10-16T07:26:18.000Z
|
2021-01-13T07:18:35.000Z
|
# Copyright 2014 Josh Pieper, [email protected].
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import traceback  # needed for traceback.print_exc() in monitor_status()
import trollius as asyncio
from trollius import Task, From, Return
import PySide.QtCore as QtCore
import PySide.QtGui as QtGui
from ..servo import selector
from .common import BoolContext
from . import gazebo_config_dialog
def spawn(callback):
def start():
Task(callback())
return start
class ServoTab(object):
def __init__(self, ui, status):
self.ui = ui
self.status = status
self.servo_controls = []
self.monitor_thread = None
self.servo_model = ''
self.servo_name_map = {}
self.ui.statusText.setText('not connected')
self.ui.connectButton.clicked.connect(
spawn(self.handle_connect_clicked))
self.ui.typeCombo.currentIndexChanged.connect(self.handle_type_change)
self.handle_type_change()
self.ui.configureGazeboButton.clicked.connect(
self.handle_configure_gazebo)
servo_layout = QtGui.QVBoxLayout()
servo_layout.setSpacing(0)
servo_layout.setContentsMargins(0, 0, 0, 0)
self.ui.scrollContents.setLayout(servo_layout)
self.ui.servoCountSpin.valueChanged.connect(self.handle_servo_count)
self.handle_servo_count()
self.ui.powerCombo.currentIndexChanged.connect(
spawn(self.handle_power))
self.ui.captureCurrentButton.clicked.connect(
spawn(self.handle_capture_current))
self.update_connected(False)
self.ui.addPoseButton.clicked.connect(self.handle_add_pose)
self.ui.removePoseButton.clicked.connect(self.handle_remove_pose)
self.ui.moveToPoseButton.clicked.connect(
spawn(self.handle_move_to_pose))
self.ui.updatePoseButton.clicked.connect(self.handle_update_pose)
self.ui.poseList.currentItemChanged.connect(
self.handle_poselist_current_changed)
self.controller = None
self.servo_update = BoolContext()
def resizeEvent(self, event):
pass
def poses(self):
result = []
for i in range(self.ui.poseList.count()):
result.append(self.ui.poseList.item(i).text())
return result
def pose(self, name):
for i in range(self.ui.poseList.count()):
if self.ui.poseList.item(i).text() == name:
return self.ui.poseList.item(i).data(QtCore.Qt.UserRole)
return dict([(i, 0.0) for i in range(self.ui.servoCountSpin.value())])
@asyncio.coroutine
def handle_connect_clicked(self):
val = self.ui.typeCombo.currentText().lower()
try:
self.controller = yield From(
selector.select_servo(
val,
serial_port=self.ui.serialPortCombo.currentText(),
model_name=self.servo_model,
servo_name_map=self.servo_name_map))
self.ui.statusText.setText('connected')
self.update_connected(True)
except Exception as e:
self.ui.statusText.setText('error: %s' % str(e))
self.update_connected(False)
def handle_type_change(self):
val = self.ui.typeCombo.currentText().lower()
self.ui.serialPortCombo.setEnabled(val == 'herkulex')
self.ui.configureGazeboButton.setEnabled(val == 'gazebo')
def handle_configure_gazebo(self):
servo_name_map = self.servo_name_map.copy()
for x in range(self.ui.servoCountSpin.value()):
if not x in servo_name_map:
servo_name_map[x] = ''
dialog = gazebo_config_dialog.GazeboConfigDialog(
self.servo_model, servo_name_map)
dialog.setModal(True)
result = dialog.exec_()
if result == QtGui.QDialog.Rejected:
return
self.servo_model = dialog.model_name()
self.servo_name_map = dialog.servo_name_map()
def handle_servo_count(self):
count = self.ui.servoCountSpin.value()
while len(self.servo_controls) > count:
# Remove the last one
last = self.servo_controls[-1]
widget = last['widget']
self.ui.scrollContents.layout().removeWidget(widget)
widget.deleteLater()
self.servo_controls = self.servo_controls[:-1]
while len(self.servo_controls) < count:
# Add a new one.
servo_id = len(self.servo_controls)
label = QtGui.QLabel()
label.setText('ID %d:' % servo_id)
slider = QtGui.QSlider(QtCore.Qt.Horizontal)
slider.setRange(-180, 180)
doublespin = QtGui.QDoubleSpinBox()
doublespin.setRange(-180, 180)
doublespin.setDecimals(1)
save = QtGui.QPushButton()
save.setText("Save")
move = QtGui.QPushButton()
move.setText("Move")
current = QtGui.QLabel()
current.setText('N/A')
current.setMinimumWidth(60)
widget = QtGui.QWidget()
layout = QtGui.QHBoxLayout(widget)
layout.addWidget(label)
layout.addWidget(slider)
layout.addWidget(doublespin)
layout.addWidget(save)
layout.addWidget(move)
layout.addWidget(current)
slider.valueChanged.connect(
functools.partial(self.handle_servo_slider, servo_id))
doublespin.valueChanged.connect(
functools.partial(self.handle_servo_spin, servo_id))
save.clicked.connect(
functools.partial(self.handle_servo_save, servo_id))
move.clicked.connect(
functools.partial(self.handle_servo_move, servo_id))
self.ui.scrollContents.layout().addWidget(widget)
self.servo_controls.append({
'widget': widget,
'label': label,
'slider': slider,
'doublespin': doublespin,
'save': save,
'move': move,
'current': current})
@asyncio.coroutine
def handle_power(self):
text = self.ui.powerCombo.currentText().lower()
value = None
if text == 'free':
value = selector.POWER_FREE
elif text == 'brake':
value = selector.POWER_BRAKE
elif text == 'drive':
value = selector.POWER_ENABLE
else:
raise NotImplementedError()
yield From(self.controller.enable_power(value))
def update_connected(self, value):
self.ui.controlGroup.setEnabled(value)
self.ui.posesGroup.setEnabled(value)
if self.monitor_thread is not None:
self.monitor_thread.cancel()
self.monitor_thread = None
if value:
self.handle_power()
self.monitor_thread = Task(self.monitor_status())
@asyncio.coroutine
def monitor_status(self):
voltages = {}
temperatures = {}
ident = 0
while True:
if (self.controller is not None and
hasattr(self.controller, 'get_voltage')):
try:
ident = (ident + 1) % len(self.servo_controls)
this_voltage = yield From(
self.controller.get_voltage([ident]))
voltages.update(this_voltage)
# Get all temperatures.
this_temp = yield From(
self.controller.get_temperature([ident]))
temperatures.update(this_temp)
def non_None(value):
return [x for x in value if x is not None]
message = "Servo status: "
if len(non_None(voltages.values())):
message += "%.1f/%.1fV" % (
min(non_None(voltages.values())),
max(non_None(voltages.values())))
if len(non_None(temperatures.values())):
message += " %.1f/%.1fC" % (
min(non_None(temperatures.values())),
max(non_None(temperatures.values())))
self.status.showMessage(message, 10000)
except Exception as e:
traceback.print_exc()
print "Error reading servo:", type(e), e
yield From(asyncio.sleep(2.0))
@asyncio.coroutine
def set_single_pose(self, servo_id, value):
yield From(
self.controller.set_single_pose(servo_id, value, pose_time=0.2))
def handle_servo_slider(self, servo_id, event):
if self.servo_update.value:
return
with self.servo_update:
control = self.servo_controls[servo_id]
value = control['slider'].value()
control['doublespin'].setValue(value)
Task(self.set_single_pose(servo_id, value))
def handle_servo_spin(self, servo_id, event):
if self.servo_update.value:
return
with self.servo_update:
control = self.servo_controls[servo_id]
value = control['doublespin'].value()
control['slider'].setSliderPosition(int(value))
Task(self.set_single_pose(servo_id, value))
def handle_servo_save(self, servo_id):
if self.ui.poseList.currentRow() < 0:
return
current_data = self.ui.poseList.currentItem().data(
QtCore.Qt.UserRole)
current_data[servo_id] = (
self.servo_controls[servo_id]['doublespin'].value())
self.ui.poseList.currentItem().setData(
QtCore.Qt.UserRole, current_data)
self.handle_poselist_current_changed(None, None)
def handle_servo_move(self, servo_id):
if self.ui.poseList.currentRow() < 0:
return
data = self.ui.poseList.currentItem().data(QtCore.Qt.UserRole)
self.servo_controls[servo_id]['doublespin'].setValue(data[servo_id])
@asyncio.coroutine
def handle_capture_current(self):
with self.servo_update:
results = yield From(
self.controller.get_pose(range(len(self.servo_controls))))
for ident, angle in results.iteritems():
if angle is None:
continue
control = self.servo_controls[ident]
control['slider'].setSliderPosition(int(angle))
control['doublespin'].setValue(angle)
def add_list_pose(self, name):
self.ui.poseList.addItem(name)
item = self.ui.poseList.item(self.ui.poseList.count() - 1)
item.setFlags(QtCore.Qt.ItemIsEnabled |
QtCore.Qt.ItemIsSelectable |
QtCore.Qt.ItemIsEditable |
QtCore.Qt.ItemIsSelectable)
return item
def get_new_pose_name(self):
poses = set([self.ui.poseList.item(x).text()
for x in range(self.ui.poseList.count())])
count = 0
while True:
name = 'new_pose_%d' % count
if name not in poses:
return name
count += 1
def generate_pose_data(self):
return dict(
[ (i, control['doublespin'].value())
for i, control in enumerate(self.servo_controls) ])
def handle_add_pose(self):
pose_name = self.get_new_pose_name()
item = self.add_list_pose(pose_name)
item.setData(QtCore.Qt.UserRole, self.generate_pose_data())
self.ui.poseList.editItem(item)
def handle_remove_pose(self):
if self.ui.poseList.currentRow() < 0:
return
pose_name = self.ui.poseList.currentItem().text()
del self.poses[pose_name]
self.ui.poseList.takeItem(self.ui.poseList.currentRow())
@asyncio.coroutine
def handle_move_to_pose(self):
if self.ui.poseList.currentRow() < 0:
return
values = self.ui.poseList.currentItem().data(QtCore.Qt.UserRole)
yield From(self.controller.set_pose(values, pose_time=1.0))
with self.servo_update:
for ident, angle_deg in values.iteritems():
control = self.servo_controls[ident]
control['slider'].setSliderPosition(int(angle_deg))
control['doublespin'].setValue(angle_deg)
def handle_update_pose(self):
if self.ui.poseList.currentRow() < 0:
return
self.ui.poseList.currentItem().setData(
QtCore.Qt.UserRole, self.generate_pose_data())
self.handle_poselist_current_changed(None, None)
def handle_poselist_current_changed(self, current, previous):
if self.ui.poseList.currentRow() < 0:
return
data = self.ui.poseList.currentItem().data(QtCore.Qt.UserRole)
for i, control in enumerate(self.servo_controls):
control['current'].setText('%.1f' % data[i])
def read_settings(self, config):
if not config.has_section('servo'):
return
self.ui.typeCombo.setCurrentIndex(config.getint('servo', 'type'))
self.ui.serialPortCombo.setEditText(config.get('servo', 'port'))
self.ui.servoCountSpin.setValue(config.getint('servo', 'count'))
self.servo_model = config.get('servo', 'model')
if config.has_section('servo.names'):
self.servo_name_map = {}
for name, value in config.items('servo.names'):
self.servo_name_map[int(name)] = value
if config.has_section('servo.poses'):
for name, value in config.items('servo.poses'):
this_data = {}
for element in value.split(','):
ident, angle_deg = element.split('=')
this_data[int(ident)] = float(angle_deg)
item = self.add_list_pose(name)
item.setData(QtCore.Qt.UserRole, this_data)
def write_settings(self, config):
config.add_section('servo')
config.add_section('servo.poses')
config.add_section('servo.names')
config.set('servo', 'type', self.ui.typeCombo.currentIndex())
config.set('servo', 'port', self.ui.serialPortCombo.currentText())
config.set('servo', 'count', self.ui.servoCountSpin.value())
config.set('servo', 'model', self.servo_model)
for key, value in self.servo_name_map.iteritems():
config.set('servo.names', str(key), value)
for row in range(self.ui.poseList.count()):
item = self.ui.poseList.item(row)
pose_name = item.text()
values = item.data(QtCore.Qt.UserRole)
config.set(
'servo.poses', pose_name,
','.join(['%d=%.2f' % (ident, angle_deg)
for ident, angle_deg in values.iteritems()]))
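# Editor's sketch: the spawn()/Task wrapper above, restated with modern
# asyncio instead of trollius; purely illustrative, since the original file
# targets Python 2 with trollius and PySide.
import asyncio


def spawn(callback):
    def start():
        asyncio.ensure_future(callback())  # schedule the coroutine on the running loop
    return start


async def handle_clicked():
    await asyncio.sleep(0.1)
    print('button handled')

# e.g. button.clicked.connect(spawn(handle_clicked)), assuming a Qt-integrated
# event loop (such as qasync) is already running.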
| 35.514874
| 78
| 0.593814
| 1,727
| 15,520
| 5.188188
| 0.169658
| 0.042857
| 0.045313
| 0.014286
| 0.333259
| 0.252232
| 0.206027
| 0.152567
| 0.11529
| 0.086496
| 0
| 0.005517
| 0.299227
| 15,520
| 436
| 79
| 35.59633
| 0.818316
| 0.040013
| 0
| 0.168675
| 0
| 0
| 0.032926
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.003012
| 0.024096
| null | null | 0.006024
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a5a09a1f1eb09c5b1fb6c4e179dd1021a0b354e
| 47,088
|
py
|
Python
|
perturbed_images_generation_multiProcess.py
|
gwxie/Synthesize-Distorted-Image-and-Its-Control-Points
|
ed6de3e05a7ee1f3aecf65fcbb87c11d2ede41e7
|
[
"Apache-2.0"
] | 8
|
2022-03-27T18:37:57.000Z
|
2022-03-30T09:17:26.000Z
|
perturbed_images_generation_multiProcess.py
|
gwxie/Synthesize-Distorted-Image-and-Its-Control-Points
|
ed6de3e05a7ee1f3aecf65fcbb87c11d2ede41e7
|
[
"Apache-2.0"
] | null | null | null |
perturbed_images_generation_multiProcess.py
|
gwxie/Synthesize-Distorted-Image-and-Its-Control-Points
|
ed6de3e05a7ee1f3aecf65fcbb87c11d2ede41e7
|
[
"Apache-2.0"
] | 1
|
2022-03-31T02:22:58.000Z
|
2022-03-31T02:22:58.000Z
|
'''
GuoWang xie
set up :2020-1-9
intergrate img and label into one file
-- fiducial1024_v1
'''
import argparse
import sys, os
import pickle
import random
import collections
import json
import numpy as np
import scipy.io as io
import scipy.misc as m
import matplotlib.pyplot as plt
import glob
import math
import time
import threading
import multiprocessing as mp
from multiprocessing import Pool
import re
import cv2
# sys.path.append('/lustre/home/gwxie/hope/project/dewarp/datasets/') # /lustre/home/gwxie/program/project/unwarp/perturbed_imgaes/GAN
import utils
def getDatasets(dir):
return os.listdir(dir)
class perturbed(utils.BasePerturbed):
def __init__(self, path, bg_path, save_path, save_suffix):
self.path = path
self.bg_path = bg_path
self.save_path = save_path
self.save_suffix = save_suffix
def save_img(self, m, n, fold_curve='fold', repeat_time=4, fiducial_points = 16, relativeShift_position='relativeShift_v2'):
origin_img = cv2.imread(self.path, flags=cv2.IMREAD_COLOR)
save_img_shape = [512*2, 480*2] # 320
# reduce_value = np.random.choice([2**4, 2**5, 2**6, 2**7, 2**8], p=[0.01, 0.1, 0.4, 0.39, 0.1])
reduce_value = np.random.choice([2*2, 4*2, 8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.02, 0.18, 0.2, 0.3, 0.1, 0.1, 0.08, 0.02])
# reduce_value = np.random.choice([8*2, 16*2, 24*2, 32*2, 40*2, 48*2], p=[0.01, 0.02, 0.2, 0.4, 0.19, 0.18])
# reduce_value = np.random.choice([16, 24, 32, 40, 48, 64], p=[0.01, 0.1, 0.2, 0.4, 0.2, 0.09])
base_img_shrink = save_img_shape[0] - reduce_value
# enlarge_img_shrink = [1024, 768]
# enlarge_img_shrink = [896, 672] # 420
enlarge_img_shrink = [512*4, 480*4] # 420
# enlarge_img_shrink = [896*2, 768*2] # 420
# enlarge_img_shrink = [896, 768] # 420
# enlarge_img_shrink = [768, 576] # 420
# enlarge_img_shrink = [640, 480] # 420
''''''
im_lr = origin_img.shape[0]
im_ud = origin_img.shape[1]
reduce_value_v2 = np.random.choice([2*2, 4*2, 8*2, 16*2, 24*2, 28*2, 32*2, 48*2], p=[0.02, 0.18, 0.2, 0.2, 0.1, 0.1, 0.1, 0.1])
# reduce_value_v2 = np.random.choice([16, 24, 28, 32, 48, 64], p=[0.01, 0.1, 0.2, 0.3, 0.25, 0.14])
if im_lr > im_ud:
im_ud = min(int(im_ud / im_lr * base_img_shrink), save_img_shape[1] - reduce_value_v2)
im_lr = save_img_shape[0] - reduce_value
else:
base_img_shrink = save_img_shape[1] - reduce_value
im_lr = min(int(im_lr / im_ud * base_img_shrink), save_img_shape[0] - reduce_value_v2)
im_ud = base_img_shrink
if round(im_lr / im_ud, 2) < 0.5 or round(im_ud / im_lr, 2) < 0.5:
repeat_time = min(repeat_time, 8)
edge_padding = 3
im_lr -= im_lr % (fiducial_points-1) - (2*edge_padding) # im_lr % (fiducial_points-1) - 1
im_ud -= im_ud % (fiducial_points-1) - (2*edge_padding) # im_ud % (fiducial_points-1) - 1
im_hight = np.linspace(edge_padding, im_lr - edge_padding, fiducial_points, dtype=np.int64)
im_wide = np.linspace(edge_padding, im_ud - edge_padding, fiducial_points, dtype=np.int64)
# im_lr -= im_lr % (fiducial_points-1) - (1+2*edge_padding) # im_lr % (fiducial_points-1) - 1
# im_ud -= im_ud % (fiducial_points-1) - (1+2*edge_padding) # im_ud % (fiducial_points-1) - 1
# im_hight = np.linspace(edge_padding, im_lr - (1+edge_padding), fiducial_points, dtype=np.int64)
# im_wide = np.linspace(edge_padding, im_ud - (1+edge_padding), fiducial_points, dtype=np.int64)
im_x, im_y = np.meshgrid(im_hight, im_wide)
segment_x = (im_lr) // (fiducial_points-1)
segment_y = (im_ud) // (fiducial_points-1)
# plt.plot(im_x, im_y,
# color='limegreen',
# marker='.',
# linestyle='')
# plt.grid(True)
# plt.show()
self.origin_img = cv2.resize(origin_img, (im_ud, im_lr), interpolation=cv2.INTER_CUBIC)
perturbed_bg_ = getDatasets(self.bg_path)
perturbed_bg_img_ = self.bg_path+random.choice(perturbed_bg_)
perturbed_bg_img = cv2.imread(perturbed_bg_img_, flags=cv2.IMREAD_COLOR)
mesh_shape = self.origin_img.shape[:2]
self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 256, dtype=np.float32)#np.zeros_like(perturbed_bg_img)
# self.synthesis_perturbed_img = np.full((enlarge_img_shrink[0], enlarge_img_shrink[1], 3), 0, dtype=np.int16)#np.zeros_like(perturbed_bg_img)
self.new_shape = self.synthesis_perturbed_img.shape[:2]
perturbed_bg_img = cv2.resize(perturbed_bg_img, (save_img_shape[1], save_img_shape[0]), cv2.INPAINT_TELEA)
origin_pixel_position = np.argwhere(np.zeros(mesh_shape, dtype=np.uint32) == 0).reshape(mesh_shape[0], mesh_shape[1], 2)
pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2)
self.perturbed_xy_ = np.zeros((self.new_shape[0], self.new_shape[1], 2))
# self.perturbed_xy_ = pixel_position.copy().astype(np.float32)
# fiducial_points_grid = origin_pixel_position[im_x, im_y]
self.synthesis_perturbed_label = np.zeros((self.new_shape[0], self.new_shape[1], 2))
x_min, y_min, x_max, y_max = self.adjust_position_v2(0, 0, mesh_shape[0], mesh_shape[1], save_img_shape)
origin_pixel_position += [x_min, y_min]
x_min, y_min, x_max, y_max = self.adjust_position(0, 0, mesh_shape[0], mesh_shape[1])
x_shift = random.randint(-enlarge_img_shrink[0]//16, enlarge_img_shrink[0]//16)
y_shift = random.randint(-enlarge_img_shrink[1]//16, enlarge_img_shrink[1]//16)
x_min += x_shift
x_max += x_shift
y_min += y_shift
y_max += y_shift
'''im_x,y'''
im_x += x_min
im_y += y_min
self.synthesis_perturbed_img[x_min:x_max, y_min:y_max] = self.origin_img
self.synthesis_perturbed_label[x_min:x_max, y_min:y_max] = origin_pixel_position
synthesis_perturbed_img_map = self.synthesis_perturbed_img.copy()
synthesis_perturbed_label_map = self.synthesis_perturbed_label.copy()
foreORbackground_label = np.full((mesh_shape), 1, dtype=np.int16)
foreORbackground_label_map = np.full((self.new_shape), 0, dtype=np.int16)
foreORbackground_label_map[x_min:x_max, y_min:y_max] = foreORbackground_label
# synthesis_perturbed_img_map = self.pad(self.synthesis_perturbed_img.copy(), x_min, y_min, x_max, y_max)
# synthesis_perturbed_label_map = self.pad(synthesis_perturbed_label_map, x_min, y_min, x_max, y_max)
'''*****************************************************************'''
is_normalizationFun_mixture = self.is_perform(0.2, 0.8)
# if not is_normalizationFun_mixture:
normalizationFun_0_1 = False
# normalizationFun_0_1 = self.is_perform(0.5, 0.5)
if fold_curve == 'fold':
fold_curve_random = True
# is_normalizationFun_mixture = False
normalizationFun_0_1 = self.is_perform(0.2, 0.8)
if is_normalizationFun_mixture:
alpha_perturbed = random.randint(80, 120) / 100
else:
if normalizationFun_0_1 and repeat_time < 8:
alpha_perturbed = random.randint(50, 70) / 100
else:
alpha_perturbed = random.randint(70, 130) / 100
else:
fold_curve_random = self.is_perform(0.1, 0.9) # False # self.is_perform(0.01, 0.99)
alpha_perturbed = random.randint(80, 160) / 100
# is_normalizationFun_mixture = False # self.is_perform(0.01, 0.99)
synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256)
# synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 0, dtype=np.int16)
synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label)
alpha_perturbed_change = self.is_perform(0.5, 0.5)
p_pp_choice = self.is_perform(0.8, 0.2) if fold_curve == 'fold' else self.is_perform(0.1, 0.9)
for repeat_i in range(repeat_time):
if alpha_perturbed_change:
if fold_curve == 'fold':
if is_normalizationFun_mixture:
alpha_perturbed = random.randint(80, 120) / 100
else:
if normalizationFun_0_1 and repeat_time < 8:
alpha_perturbed = random.randint(50, 70) / 100
else:
alpha_perturbed = random.randint(70, 130) / 100
else:
alpha_perturbed = random.randint(80, 160) / 100
''''''
linspace_x = [0, (self.new_shape[0] - im_lr) // 2 - 1,
self.new_shape[0] - (self.new_shape[0] - im_lr) // 2 - 1, self.new_shape[0] - 1]
linspace_y = [0, (self.new_shape[1] - im_ud) // 2 - 1,
self.new_shape[1] - (self.new_shape[1] - im_ud) // 2 - 1, self.new_shape[1] - 1]
linspace_x_seq = [1, 2, 3]
linspace_y_seq = [1, 2, 3]
r_x = random.choice(linspace_x_seq)
r_y = random.choice(linspace_y_seq)
perturbed_p = np.array(
[random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10),
random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10
if ((r_x == 1 or r_x == 3) and (r_y == 1 or r_y == 3)) and p_pp_choice:
linspace_x_seq.remove(r_x)
linspace_y_seq.remove(r_y)
r_x = random.choice(linspace_x_seq)
r_y = random.choice(linspace_y_seq)
perturbed_pp = np.array(
[random.randint(linspace_x[r_x-1] * 10, linspace_x[r_x] * 10),
random.randint(linspace_y[r_y-1] * 10, linspace_y[r_y] * 10)])/10
# perturbed_p, perturbed_pp = np.array(
# [random.randint(0, self.new_shape[0] * 10) / 10,
# random.randint(0, self.new_shape[1] * 10) / 10]) \
# , np.array([random.randint(0, self.new_shape[0] * 10) / 10,
# random.randint(0, self.new_shape[1] * 10) / 10])
# perturbed_p, perturbed_pp = np.array(
# [random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10,
# random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10]) \
# , np.array([random.randint((self.new_shape[0]-im_lr)//2*10, (self.new_shape[0]-(self.new_shape[0]-im_lr)//2) * 10) / 10,
# random.randint((self.new_shape[1]-im_ud)//2*10, (self.new_shape[1]-(self.new_shape[1]-im_ud)//2) * 10) / 10])
''''''
perturbed_vp = perturbed_pp - perturbed_p
perturbed_vp_norm = np.linalg.norm(perturbed_vp)
perturbed_distance_vertex_and_line = np.dot((perturbed_p - pixel_position), perturbed_vp) / perturbed_vp_norm
''''''
# perturbed_v = np.array([random.randint(-3000, 3000) / 100, random.randint(-3000, 3000) / 100])
# perturbed_v = np.array([random.randint(-4000, 4000) / 100, random.randint(-4000, 4000) / 100])
if fold_curve == 'fold' and self.is_perform(0.6, 0.4): # self.is_perform(0.3, 0.7):
# perturbed_v = np.array([random.randint(-9000, 9000) / 100, random.randint(-9000, 9000) / 100])
perturbed_v = np.array([random.randint(-10000, 10000) / 100, random.randint(-10000, 10000) / 100])
# perturbed_v = np.array([random.randint(-11000, 11000) / 100, random.randint(-11000, 11000) / 100])
else:
# perturbed_v = np.array([random.randint(-9000, 9000) / 100, random.randint(-9000, 9000) / 100])
# perturbed_v = np.array([random.randint(-16000, 16000) / 100, random.randint(-16000, 16000) / 100])
perturbed_v = np.array([random.randint(-8000, 8000) / 100, random.randint(-8000, 8000) / 100])
# perturbed_v = np.array([random.randint(-3500, 3500) / 100, random.randint(-3500, 3500) / 100])
# perturbed_v = np.array([random.randint(-600, 600) / 10, random.randint(-600, 600) / 10])
''''''
if fold_curve == 'fold':
if is_normalizationFun_mixture:
if self.is_perform(0.5, 0.5):
perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line))
else:
perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 2))
else:
if normalizationFun_0_1:
perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), 2)
else:
perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line))
else:
if is_normalizationFun_mixture:
if self.is_perform(0.5, 0.5):
perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line))
else:
perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), random.randint(1, 2))
else:
if normalizationFun_0_1:
perturbed_d = self.get_0_1_d(np.abs(perturbed_distance_vertex_and_line), 2)
else:
perturbed_d = np.abs(self.get_normalize(perturbed_distance_vertex_and_line))
''''''
if fold_curve_random:
# omega_perturbed = (alpha_perturbed+0.2) / (perturbed_d + alpha_perturbed)
# omega_perturbed = alpha_perturbed**perturbed_d
omega_perturbed = alpha_perturbed / (perturbed_d + alpha_perturbed)
else:
omega_perturbed = 1 - perturbed_d ** alpha_perturbed
'''shadow'''
if self.is_perform(0.6, 0.4):
synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] = np.minimum(np.maximum(synthesis_perturbed_img_map[x_min:x_max, y_min:y_max] - np.int16(np.round(omega_perturbed[x_min:x_max, y_min:y_max].repeat(3).reshape(x_max-x_min, y_max-y_min, 3) * abs(np.linalg.norm(perturbed_v//2))*np.array([0.4-random.random()*0.1, 0.4-random.random()*0.1, 0.4-random.random()*0.1]))), 0), 255)
''''''
if relativeShift_position in ['position', 'relativeShift_v2']:
self.perturbed_xy_ += np.array([omega_perturbed * perturbed_v[0], omega_perturbed * perturbed_v[1]]).transpose(1, 2, 0)
else:
print('relativeShift_position error')
exit()
'''
flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(
self.new_shape[0] * self.new_shape[1], 2)
vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position)
wts_sum = np.abs(wts).sum(-1)
# flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts)
wts = wts[wts_sum <= 1, :]
vtx = vtx[wts_sum <= 1, :]
synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1,
:] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts)
synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1,
:] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts)
foreORbackground_label = np.zeros(self.new_shape)
foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts)
foreORbackground_label[foreORbackground_label < 0.99] = 0
foreORbackground_label[foreORbackground_label >= 0.99] = 1
# synthesis_perturbed_img = np.around(synthesis_perturbed_img).astype(np.uint8)
synthesis_perturbed_label[:, :, 0] *= foreORbackground_label
synthesis_perturbed_label[:, :, 1] *= foreORbackground_label
synthesis_perturbed_img[:, :, 0] *= foreORbackground_label
synthesis_perturbed_img[:, :, 1] *= foreORbackground_label
synthesis_perturbed_img[:, :, 2] *= foreORbackground_label
self.synthesis_perturbed_img = synthesis_perturbed_img
self.synthesis_perturbed_label = synthesis_perturbed_label
'''
'''perspective'''
perspective_shreshold = random.randint(26, 36)*10 # 280
x_min_per, y_min_per, x_max_per, y_max_per = self.adjust_position(perspective_shreshold, perspective_shreshold, self.new_shape[0]-perspective_shreshold, self.new_shape[1]-perspective_shreshold)
pts1 = np.float32([[x_min_per, y_min_per], [x_max_per, y_min_per], [x_min_per, y_max_per], [x_max_per, y_max_per]])
e_1_ = x_max_per - x_min_per
e_2_ = y_max_per - y_min_per
e_3_ = e_2_
e_4_ = e_1_
perspective_shreshold_h = e_1_*0.02
perspective_shreshold_w = e_2_*0.02
a_min_, a_max_ = 70, 110
# if self.is_perform(1, 0):
if fold_curve == 'curve' and self.is_perform(0.5, 0.5):
if self.is_perform(0.5, 0.5):
while True:
pts2 = np.around(
np.float32([[x_min_per - (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold],
[x_max_per - (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold],
[x_min_per + (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold],
[x_max_per + (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold]])) # right
e_1 = np.linalg.norm(pts2[0]-pts2[1])
e_2 = np.linalg.norm(pts2[0]-pts2[2])
e_3 = np.linalg.norm(pts2[1]-pts2[3])
e_4 = np.linalg.norm(pts2[2]-pts2[3])
if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \
e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \
abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w:
a0_, a1_, a2_, a3_ = self.get_angle_4(pts2)
if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_):
break
else:
while True:
pts2 = np.around(
np.float32([[x_min_per + (random.random()) * perspective_shreshold, y_min_per - (random.random()) * perspective_shreshold],
[x_max_per + (random.random()) * perspective_shreshold, y_min_per + (random.random()) * perspective_shreshold],
[x_min_per - (random.random()) * perspective_shreshold, y_max_per - (random.random()) * perspective_shreshold],
[x_max_per - (random.random()) * perspective_shreshold, y_max_per + (random.random()) * perspective_shreshold]]))
e_1 = np.linalg.norm(pts2[0]-pts2[1])
e_2 = np.linalg.norm(pts2[0]-pts2[2])
e_3 = np.linalg.norm(pts2[1]-pts2[3])
e_4 = np.linalg.norm(pts2[2]-pts2[3])
if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \
e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \
abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w:
a0_, a1_, a2_, a3_ = self.get_angle_4(pts2)
if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_):
break
else:
while True:
pts2 = np.around(np.float32([[x_min_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold],
[x_max_per+(random.random()-0.5)*perspective_shreshold, y_min_per+(random.random()-0.5)*perspective_shreshold],
[x_min_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold],
[x_max_per+(random.random()-0.5)*perspective_shreshold, y_max_per+(random.random()-0.5)*perspective_shreshold]]))
e_1 = np.linalg.norm(pts2[0]-pts2[1])
e_2 = np.linalg.norm(pts2[0]-pts2[2])
e_3 = np.linalg.norm(pts2[1]-pts2[3])
e_4 = np.linalg.norm(pts2[2]-pts2[3])
if e_1_+perspective_shreshold_h > e_1 and e_2_+perspective_shreshold_w > e_2 and e_3_+perspective_shreshold_w > e_3 and e_4_+perspective_shreshold_h > e_4 and \
e_1_ - perspective_shreshold_h < e_1 and e_2_ - perspective_shreshold_w < e_2 and e_3_ - perspective_shreshold_w < e_3 and e_4_ - perspective_shreshold_h < e_4 and \
abs(e_1-e_4) < perspective_shreshold_h and abs(e_2-e_3) < perspective_shreshold_w:
a0_, a1_, a2_, a3_ = self.get_angle_4(pts2)
if (a0_ > a_min_ and a0_ < a_max_) or (a1_ > a_min_ and a1_ < a_max_) or (a2_ > a_min_ and a2_ < a_max_) or (a3_ > a_min_ and a3_ < a_max_):
break
M = cv2.getPerspectiveTransform(pts1, pts2)
one = np.ones((self.new_shape[0], self.new_shape[1], 1), dtype=np.int16)
matr = np.dstack((pixel_position, one))
new = np.dot(M, matr.reshape(-1, 3).T).T.reshape(self.new_shape[0], self.new_shape[1], 3)
x = new[:, :, 0]/new[:, :, 2]
y = new[:, :, 1]/new[:, :, 2]
perturbed_xy_ = np.dstack((x, y))
# perturbed_xy_round_int = np.around(cv2.bilateralFilter(perturbed_xy_round_int, 9, 75, 75))
# perturbed_xy_round_int = np.around(cv2.blur(perturbed_xy_, (17, 17)))
# perturbed_xy_round_int = cv2.blur(perturbed_xy_round_int, (17, 17))
# perturbed_xy_round_int = cv2.GaussianBlur(perturbed_xy_round_int, (7, 7), 0)
perturbed_xy_ = perturbed_xy_-np.min(perturbed_xy_.T.reshape(2, -1), 1)
# perturbed_xy_round_int = np.around(perturbed_xy_round_int-np.min(perturbed_xy_round_int.T.reshape(2, -1), 1)).astype(np.int16)
self.perturbed_xy_ += perturbed_xy_
'''perspective end'''
'''to img'''
flat_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(
self.new_shape[0] * self.new_shape[1], 2)
# self.perturbed_xy_ = cv2.blur(self.perturbed_xy_, (7, 7))
self.perturbed_xy_ = cv2.GaussianBlur(self.perturbed_xy_, (7, 7), 0)
'''get fiducial points'''
fiducial_points_coordinate = self.perturbed_xy_[im_x, im_y]
vtx, wts = self.interp_weights(self.perturbed_xy_.reshape(self.new_shape[0] * self.new_shape[1], 2), flat_position)
wts_sum = np.abs(wts).sum(-1)
# flat_img.reshape(flat_shape[0] * flat_shape[1], 3)[:] = interpolate(pixel, vtx, wts)
wts = wts[wts_sum <= 1, :]
vtx = vtx[wts_sum <= 1, :]
synthesis_perturbed_img.reshape(self.new_shape[0] * self.new_shape[1], 3)[wts_sum <= 1,
:] = self.interpolate(synthesis_perturbed_img_map.reshape(self.new_shape[0] * self.new_shape[1], 3), vtx, wts)
synthesis_perturbed_label.reshape(self.new_shape[0] * self.new_shape[1], 2)[wts_sum <= 1,
:] = self.interpolate(synthesis_perturbed_label_map.reshape(self.new_shape[0] * self.new_shape[1], 2), vtx, wts)
foreORbackground_label = np.zeros(self.new_shape)
foreORbackground_label.reshape(self.new_shape[0] * self.new_shape[1], 1)[wts_sum <= 1, :] = self.interpolate(foreORbackground_label_map.reshape(self.new_shape[0] * self.new_shape[1], 1), vtx, wts)
foreORbackground_label[foreORbackground_label < 0.99] = 0
foreORbackground_label[foreORbackground_label >= 0.99] = 1
self.synthesis_perturbed_img = synthesis_perturbed_img
self.synthesis_perturbed_label = synthesis_perturbed_label
self.foreORbackground_label = foreORbackground_label
'''draw fiducial points
stepSize = 0
fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy()
for l in fiducial_points_coordinate.astype(np.int64).reshape(-1,2):
cv2.circle(fiducial_points_synthesis_perturbed_img, (l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1)
cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_large.jpg', fiducial_points_synthesis_perturbed_img)
'''
'''clip'''
perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1]
for x in range(self.new_shape[0] // 2, perturbed_x_max):
if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x:
perturbed_x_max = x
break
for x in range(self.new_shape[0] // 2, perturbed_x_min, -1):
if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0:
perturbed_x_min = x
break
for y in range(self.new_shape[1] // 2, perturbed_y_max):
if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y:
perturbed_y_max = y
break
for y in range(self.new_shape[1] // 2, perturbed_y_min, -1):
if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0:
perturbed_y_min = y
break
if perturbed_x_min == 0 or perturbed_x_max == self.new_shape[0] or perturbed_y_min == self.new_shape[1] or perturbed_y_max == self.new_shape[1]:
raise Exception('clip error')
if perturbed_x_max - perturbed_x_min < im_lr//2 or perturbed_y_max - perturbed_y_min < im_ud//2:
raise Exception('clip error')
perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n)
is_shrink = False
if perturbed_x_max - perturbed_x_min > save_img_shape[0] or perturbed_y_max - perturbed_y_min > save_img_shape[1]:
is_shrink = True
synthesis_perturbed_img = cv2.resize(self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR)
synthesis_perturbed_label = cv2.resize(self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR)
foreORbackground_label = cv2.resize(self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max].copy(), (im_ud, im_lr), interpolation=cv2.INTER_LINEAR)
foreORbackground_label[foreORbackground_label < 0.99] = 0
foreORbackground_label[foreORbackground_label >= 0.99] = 1
'''shrink fiducial points'''
center_x_l, center_y_l = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2
fiducial_points_coordinate_copy = fiducial_points_coordinate.copy()
shrink_x = im_lr/(perturbed_x_max - perturbed_x_min)
shrink_y = im_ud/(perturbed_y_max - perturbed_y_min)
fiducial_points_coordinate *= [shrink_x, shrink_y]
center_x_l *= shrink_x
center_y_l *= shrink_y
# fiducial_points_coordinate[1:, 1:] *= [shrink_x, shrink_y]
# fiducial_points_coordinate[1:, :1, 0] *= shrink_x
# fiducial_points_coordinate[:1, 1:, 1] *= shrink_y
# perturbed_x_min_copy, perturbed_y_min_copy, perturbed_x_max_copy, perturbed_y_max_copy = perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max
perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape)
self.synthesis_perturbed_img = np.full_like(self.synthesis_perturbed_img, 256)
self.synthesis_perturbed_label = np.zeros_like(self.synthesis_perturbed_label)
self.foreORbackground_label = np.zeros_like(self.foreORbackground_label)
self.synthesis_perturbed_img[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_img
self.synthesis_perturbed_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_label
self.foreORbackground_label[perturbed_x_min:perturbed_x_max, perturbed_y_min:perturbed_y_max] = foreORbackground_label
center_x, center_y = perturbed_x_min + (perturbed_x_max - perturbed_x_min) // 2, perturbed_y_min + (perturbed_y_max - perturbed_y_min) // 2
if is_shrink:
fiducial_points_coordinate += [center_x-center_x_l, center_y-center_y_l]
'''draw fiducial points
stepSize = 0
fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_img.copy()
for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2):
cv2.circle(fiducial_points_synthesis_perturbed_img,
(l[1] + math.ceil(stepSize / 2), l[0] + math.ceil(stepSize / 2)), 5, (0, 0, 255), -1)
cv2.imwrite('/lustre/home/gwxie/program/project/unwarp/unwarp_perturbed/TPS/img/cv_TPS_small.jpg',fiducial_points_synthesis_perturbed_img)
'''
self.new_shape = save_img_shape
self.synthesis_perturbed_img = self.synthesis_perturbed_img[
center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2,
center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2,
:].copy()
self.synthesis_perturbed_label = self.synthesis_perturbed_label[
center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2,
center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2,
:].copy()
self.foreORbackground_label = self.foreORbackground_label[
center_x - self.new_shape[0] // 2:center_x + self.new_shape[0] // 2,
center_y - self.new_shape[1] // 2:center_y + self.new_shape[1] // 2].copy()
perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0)
perturbed_x_min = perturbed_x_ // 2
perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1)
perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0)
perturbed_y_min = perturbed_y_ // 2
perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1)
'''clip
perturbed_x_min, perturbed_y_min, perturbed_x_max, perturbed_y_max = -1, -1, self.new_shape[0], self.new_shape[1]
for x in range(self.new_shape[0] // 2, perturbed_x_max):
if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and perturbed_x_max - 1 > x:
perturbed_x_max = x
break
for x in range(self.new_shape[0] // 2, perturbed_x_min, -1):
if np.sum(self.synthesis_perturbed_img[x, :]) == 768 * self.new_shape[1] and x > 0:
perturbed_x_min = x
break
for y in range(self.new_shape[1] // 2, perturbed_y_max):
if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and perturbed_y_max - 1 > y:
perturbed_y_max = y
break
for y in range(self.new_shape[1] // 2, perturbed_y_min, -1):
if np.sum(self.synthesis_perturbed_img[:, y]) == 768 * self.new_shape[0] and y > 0:
perturbed_y_min = y
break
center_x, center_y = perturbed_x_min+(perturbed_x_max - perturbed_x_min)//2, perturbed_y_min+(perturbed_y_max - perturbed_y_min)//2
perfix_ = self.save_suffix+'_'+str(m)+'_'+str(n)
self.new_shape = save_img_shape
perturbed_x_ = max(self.new_shape[0] - (perturbed_x_max - perturbed_x_min), 0)
perturbed_x_min = perturbed_x_ // 2
perturbed_x_max = self.new_shape[0] - perturbed_x_ // 2 if perturbed_x_%2 == 0 else self.new_shape[0] - (perturbed_x_ // 2 + 1)
perturbed_y_ = max(self.new_shape[1] - (perturbed_y_max - perturbed_y_min), 0)
perturbed_y_min = perturbed_y_ // 2
perturbed_y_max = self.new_shape[1] - perturbed_y_ // 2 if perturbed_y_%2 == 0 else self.new_shape[1] - (perturbed_y_ // 2 + 1)
self.synthesis_perturbed_img = self.synthesis_perturbed_img[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy()
self.synthesis_perturbed_label = self.synthesis_perturbed_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2, :].copy()
self.foreORbackground_label = self.foreORbackground_label[center_x-self.new_shape[0]//2:center_x+self.new_shape[0]//2, center_y-self.new_shape[1]//2:center_y+self.new_shape[1]//2].copy()
'''
'''save'''
pixel_position = np.argwhere(np.zeros(self.new_shape, dtype=np.uint32) == 0).reshape(self.new_shape[0], self.new_shape[1], 2)
if relativeShift_position == 'relativeShift_v2':
self.synthesis_perturbed_label -= pixel_position
fiducial_points_coordinate -= [center_x - self.new_shape[0] // 2, center_y - self.new_shape[1] // 2]
self.synthesis_perturbed_label[:, :, 0] *= self.foreORbackground_label
self.synthesis_perturbed_label[:, :, 1] *= self.foreORbackground_label
self.synthesis_perturbed_img[:, :, 0] *= self.foreORbackground_label
self.synthesis_perturbed_img[:, :, 1] *= self.foreORbackground_label
self.synthesis_perturbed_img[:, :, 2] *= self.foreORbackground_label
'''
synthesis_perturbed_img_filter = self.synthesis_perturbed_img.copy()
synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0)
# if self.is_perform(0.9, 0.1) or repeat_time > 5:
# # if self.is_perform(0.1, 0.9) and repeat_time > 9:
# # synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (7, 7), 0)
# # else:
# synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0)
# else:
# synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0)
self.synthesis_perturbed_img[self.foreORbackground_label == 1] = synthesis_perturbed_img_filter[self.foreORbackground_label == 1]
'''
'''
perturbed_bg_img = perturbed_bg_img.astype(np.float32)
perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label
perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label
perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label
self.synthesis_perturbed_img += perturbed_bg_img
HSV
perturbed_bg_img = perturbed_bg_img.astype(np.float32)
if self.is_perform(0.1, 0.9):
if self.is_perform(0.2, 0.8):
synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy()
synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_RGB2HSV)
H_, S_, V_ = (random.random()-0.2)*20, (random.random()-0.2)/8, (random.random()-0.2)*20
synthesis_perturbed_img_clip_HSV[:, :, 0], synthesis_perturbed_img_clip_HSV[:, :, 1], synthesis_perturbed_img_clip_HSV[:, :, 2] = synthesis_perturbed_img_clip_HSV[:, :, 0]-H_, synthesis_perturbed_img_clip_HSV[:, :, 1]-S_, synthesis_perturbed_img_clip_HSV[:, :, 2]-V_
synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_HSV2RGB)
perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label
perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label
perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label
synthesis_perturbed_img_clip_HSV += perturbed_bg_img
self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV
else:
perturbed_bg_img_HSV = perturbed_bg_img
perturbed_bg_img_HSV = cv2.cvtColor(perturbed_bg_img_HSV, cv2.COLOR_RGB2HSV)
H_, S_, V_ = (random.random()-0.5)*20, (random.random()-0.5)/8, (random.random()-0.2)*20
perturbed_bg_img_HSV[:, :, 0], perturbed_bg_img_HSV[:, :, 1], perturbed_bg_img_HSV[:, :, 2] = perturbed_bg_img_HSV[:, :, 0]-H_, perturbed_bg_img_HSV[:, :, 1]-S_, perturbed_bg_img_HSV[:, :, 2]-V_
perturbed_bg_img_HSV = cv2.cvtColor(perturbed_bg_img_HSV, cv2.COLOR_HSV2RGB)
perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label
perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label
perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label
self.synthesis_perturbed_img += perturbed_bg_img_HSV
# self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771]
else:
synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy()
perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label
perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label
perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label
synthesis_perturbed_img_clip_HSV += perturbed_bg_img
# synthesis_perturbed_img_clip_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img[np.sum(self.synthesis_perturbed_img, 2) == 771]
synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_RGB2HSV)
H_, S_, V_ = (random.random()-0.5)*20, (random.random()-0.5)/10, (random.random()-0.4)*20
synthesis_perturbed_img_clip_HSV[:, :, 0], synthesis_perturbed_img_clip_HSV[:, :, 1], synthesis_perturbed_img_clip_HSV[:, :, 2] = synthesis_perturbed_img_clip_HSV[:, :, 0]-H_, synthesis_perturbed_img_clip_HSV[:, :, 1]-S_, synthesis_perturbed_img_clip_HSV[:, :, 2]-V_
synthesis_perturbed_img_clip_HSV = cv2.cvtColor(synthesis_perturbed_img_clip_HSV, cv2.COLOR_HSV2RGB)
self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV
'''
'''HSV_v2'''
perturbed_bg_img = perturbed_bg_img.astype(np.float32)
# if self.is_perform(1, 0):
# if self.is_perform(1, 0):
if self.is_perform(0.1, 0.9):
if self.is_perform(0.2, 0.8):
synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy()
synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV)
perturbed_bg_img[:, :, 0] *= 1-self.foreORbackground_label
perturbed_bg_img[:, :, 1] *= 1-self.foreORbackground_label
perturbed_bg_img[:, :, 2] *= 1-self.foreORbackground_label
synthesis_perturbed_img_clip_HSV += perturbed_bg_img
self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV
else:
perturbed_bg_img_HSV = perturbed_bg_img
perturbed_bg_img_HSV = self.HSV_v1(perturbed_bg_img_HSV)
perturbed_bg_img_HSV[:, :, 0] *= 1-self.foreORbackground_label
perturbed_bg_img_HSV[:, :, 1] *= 1-self.foreORbackground_label
perturbed_bg_img_HSV[:, :, 2] *= 1-self.foreORbackground_label
self.synthesis_perturbed_img += perturbed_bg_img_HSV
# self.synthesis_perturbed_img[np.sum(self.synthesis_perturbed_img, 2) == 771] = perturbed_bg_img_HSV[np.sum(self.synthesis_perturbed_img, 2) == 771]
else:
synthesis_perturbed_img_clip_HSV = self.synthesis_perturbed_img.copy()
perturbed_bg_img[:, :, 0] *= 1 - self.foreORbackground_label
perturbed_bg_img[:, :, 1] *= 1 - self.foreORbackground_label
perturbed_bg_img[:, :, 2] *= 1 - self.foreORbackground_label
synthesis_perturbed_img_clip_HSV += perturbed_bg_img
synthesis_perturbed_img_clip_HSV = self.HSV_v1(synthesis_perturbed_img_clip_HSV)
self.synthesis_perturbed_img = synthesis_perturbed_img_clip_HSV
''''''
# cv2.imwrite(self.save_path+'clip/'+perfix_+'_'+fold_curve+str(perturbed_time)+'-'+str(repeat_time)+'.png', synthesis_perturbed_img_clip)
self.synthesis_perturbed_img[self.synthesis_perturbed_img < 0] = 0
self.synthesis_perturbed_img[self.synthesis_perturbed_img > 255] = 255
self.synthesis_perturbed_img = np.around(self.synthesis_perturbed_img).astype(np.uint8)
label = np.zeros_like(self.synthesis_perturbed_img, dtype=np.float32)
label[:, :, :2] = self.synthesis_perturbed_label
label[:, :, 2] = self.foreORbackground_label
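# label layout: channels 0-1 hold the per-pixel backward-mapping offsets (pixel_position was subtracted above under relativeShift_v2), channel 2 holds the foreground mask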
# grey = np.around(self.synthesis_perturbed_img[:, :, 0] * 0.2989 + self.synthesis_perturbed_img[:, :, 1] * 0.5870 + self.synthesis_perturbed_img[:, :, 2] * 0.1140).astype(np.int16)
# synthesis_perturbed_grey = np.concatenate((grey.reshape(self.new_shape[0], self.new_shape[1], 1), label), axis=2)
synthesis_perturbed_color = np.concatenate((self.synthesis_perturbed_img, label), axis=2)
self.synthesis_perturbed_color = np.zeros_like(synthesis_perturbed_color, dtype=np.float32)
# self.synthesis_perturbed_grey = np.zeros_like(synthesis_perturbed_grey, dtype=np.float32)
reduce_value_x = int(round(min((random.random() / 2) * (self.new_shape[0] - (perturbed_x_max - perturbed_x_min)), min(reduce_value, reduce_value_v2))))
reduce_value_y = int(round(min((random.random() / 2) * (self.new_shape[1] - (perturbed_y_max - perturbed_y_min)), min(reduce_value, reduce_value_v2))))
perturbed_x_min = max(perturbed_x_min - reduce_value_x, 0)
perturbed_x_max = min(perturbed_x_max + reduce_value_x, self.new_shape[0])
perturbed_y_min = max(perturbed_y_min - reduce_value_y, 0)
perturbed_y_max = min(perturbed_y_max + reduce_value_y, self.new_shape[1])
if im_lr >= im_ud:
self.synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_color[:, perturbed_y_min:perturbed_y_max, :]
# self.synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :] = synthesis_perturbed_grey[:, perturbed_y_min:perturbed_y_max, :]
else:
self.synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_color[perturbed_x_min:perturbed_x_max, :, :]
# self.synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :] = synthesis_perturbed_grey[perturbed_x_min:perturbed_x_max, :, :]
'''blur'''
if self.is_perform(0.1, 0.9):
synthesis_perturbed_img_filter = self.synthesis_perturbed_color[:, :, :3].copy()
if self.is_perform(0.1, 0.9):
synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (5, 5), 0)
else:
synthesis_perturbed_img_filter = cv2.GaussianBlur(synthesis_perturbed_img_filter, (3, 3), 0)
if self.is_perform(0.5, 0.5):
self.synthesis_perturbed_color[:, :, :3][self.synthesis_perturbed_color[:, :, 5] == 1] = synthesis_perturbed_img_filter[self.synthesis_perturbed_color[:, :, 5] == 1]
else:
self.synthesis_perturbed_color[:, :, :3] = synthesis_perturbed_img_filter
fiducial_points_coordinate = fiducial_points_coordinate[:, :, ::-1]
'''draw fiducial points'''
stepSize = 0
fiducial_points_synthesis_perturbed_img = self.synthesis_perturbed_color[:, :, :3].copy()
for l in fiducial_points_coordinate.astype(np.int64).reshape(-1, 2):
cv2.circle(fiducial_points_synthesis_perturbed_img, (l[0] + math.ceil(stepSize / 2), l[1] + math.ceil(stepSize / 2)), 2, (0, 0, 255), -1)
cv2.imwrite(self.save_path + 'fiducial_points/' + perfix_ + '_' + fold_curve + '.png', fiducial_points_synthesis_perturbed_img)
cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3])
'''forward-begin'''
self.forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32)
forward_mapping = np.full((save_img_shape[0], save_img_shape[1], 2), 0, dtype=np.float32)
forward_position = (self.synthesis_perturbed_color[:, :, 3:5] + pixel_position)[self.synthesis_perturbed_color[:, :, 5] != 0, :]
flat_position = np.argwhere(np.zeros(save_img_shape, dtype=np.uint32) == 0)
vtx, wts = self.interp_weights(forward_position, flat_position)
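# vtx/wts are presumably simplex vertex indices and barycentric weights from a triangulation over forward_position (interp_weights is defined earlier in this class); rows whose absolute weights sum to more than 1 lie outside the triangulation and are discarded below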
wts_sum = np.abs(wts).sum(-1)
wts = wts[wts_sum <= 1, :]
vtx = vtx[wts_sum <= 1, :]
flat_position_forward = flat_position.reshape(save_img_shape[0], save_img_shape[1], 2)[self.synthesis_perturbed_color[:, :, 5] != 0, :]
forward_mapping.reshape(save_img_shape[0] * save_img_shape[1], 2)[wts_sum <= 1, :] = self.interpolate(flat_position_forward, vtx, wts)
forward_mapping = forward_mapping.reshape(save_img_shape[0], save_img_shape[1], 2)
mapping_x_min_, mapping_y_min_, mapping_x_max_, mapping_y_max_ = self.adjust_position_v2(0, 0, im_lr, im_ud, self.new_shape)
threshold_zoom_out = 2
mapping_x_min = mapping_x_min_ + threshold_zoom_out
mapping_y_min = mapping_y_min_ + threshold_zoom_out
mapping_x_max = mapping_x_max_ - threshold_zoom_out
mapping_y_max = mapping_y_max_ - threshold_zoom_out
self.forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max] = forward_mapping[mapping_x_min:mapping_x_max, mapping_y_min:mapping_y_max]
self.scan_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32)
self.scan_img[mapping_x_min_:mapping_x_max_, mapping_y_min_:mapping_y_max_] = self.origin_img
self.origin_img = self.scan_img
# flat_img = np.full((save_img_shape[0], save_img_shape[1], 3), 0, dtype=np.float32)
# cv2.remap(self.synthesis_perturbed_color[:, :, :3], self.forward_mapping[:, :, 1], self.forward_mapping[:, :, 0], cv2.INTER_LINEAR, flat_img)
# cv2.imwrite(self.save_path + 'outputs/1.jpg', flat_img)
'''forward-end'''
synthesis_perturbed_data = {
'fiducial_points': fiducial_points_coordinate,
'segment': np.array((segment_x, segment_y))
}
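# only the fiducial points and the segment spacing are pickled below; the rendered image itself is written separately as a png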
cv2.imwrite(self.save_path + 'png/' + perfix_ + '_' + fold_curve + '.png', self.synthesis_perturbed_color[:, :, :3])
with open(self.save_path+'color/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f:
pickle_perturbed_data = pickle.dumps(synthesis_perturbed_data)
f.write(pickle_perturbed_data)
# with open(self.save_path+'grey/'+perfix_+'_'+fold_curve+'.gw', 'wb') as f:
# pickle_perturbed_data = pickle.dumps(self.synthesis_perturbed_grey)
# f.write(pickle_perturbed_data)
# cv2.imwrite(self.save_path+'grey_im/'+perfix_+'_'+fold_curve+'.png', self.synthesis_perturbed_color[:, :, :1])
# cv2.imwrite(self.save_path + 'scan/' + self.save_suffix + '_' + str(m) + '.png', self.origin_img)
train_t = time.time() - begin_train
mm, ss = divmod(train_t, 60)
hh, mm = divmod(mm, 60)
print(str(m)+'_'+str(n)+'_'+fold_curve+' '+str(repeat_time)+" Time : %02d:%02d:%02d\n" % (hh, mm, ss))
def multiThread(m, n, img_path_, bg_path_, save_path, save_suffix):
saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix)
saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix)
repeat_time = min(max(round(np.random.normal(10, 3)), 5), 16)
fold = threading.Thread(target=saveFold.save_img, args=(m, n, 'fold', repeat_time, 'relativeShift_v2'), name='fold')
curve = threading.Thread(target=saveCurve.save_img, args=(m, n, 'curve', repeat_time, 'relativeShift_v2'), name='curve')
fold.start()
curve.start()
curve.join()
fold.join()
def xgw(args):
path = args.path
bg_path = args.bg_path
if args.output_path is None:
save_path = '/lustre/home/gwxie/data/unwarp_new/train/general1024/general1024_v1/'
else:
save_path = args.output_path
# if not os.path.exists(save_path + 'grey/'):
# os.makedirs(save_path + 'grey/')
if not os.path.exists(save_path + 'color/'):
os.makedirs(save_path + 'color/')
if not os.path.exists(save_path + 'fiducial_points/'):
os.makedirs(save_path + 'fiducial_points/')
if not os.path.exists(save_path + 'png/'):
os.makedirs(save_path + 'png/')
if not os.path.exists(save_path + 'scan/'):
os.makedirs(save_path + 'scan/')
if not os.path.exists(save_path + 'outputs/'):
os.makedirs(save_path + 'outputs/')
save_suffix = str.split(args.path, '/')[-2]
all_img_path = getDatasets(path)
all_bgImg_path = getDatasets(bg_path)
global begin_train
begin_train = time.time()
fiducial_points = 61 # 31
process_pool = Pool(2)
for m, img_path in enumerate(all_img_path):
for n in range(args.sys_num):
img_path_ = path+img_path
bg_path_ = bg_path+random.choice(all_bgImg_path)+'/'
for m_n in range(10):
try:
saveFold = perturbed(img_path_, bg_path_, save_path, save_suffix)
saveCurve = perturbed(img_path_, bg_path_, save_path, save_suffix)
repeat_time = min(max(round(np.random.normal(12, 4)), 1), 18)
# repeat_time = min(max(round(np.random.normal(8, 4)), 1), 12) # random.randint(1, 2) # min(max(round(np.random.normal(8, 4)), 1), 12)
process_pool.apply_async(func=saveFold.save_img, args=(m, n, 'fold', repeat_time, fiducial_points, 'relativeShift_v2'))
repeat_time = min(max(round(np.random.normal(8, 4)), 1), 13)
# repeat_time = min(max(round(np.random.normal(6, 4)), 1), 10)
process_pool.apply_async(func=saveCurve.save_img, args=(m, n, 'curve', repeat_time, fiducial_points, 'relativeShift_v2'))
except BaseException as err:
print(err)
continue
break
# print('end')
process_pool.close()
process_pool.join()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description='Hyperparams')
parser.add_argument('--path',
default='./scan/new/', type=str,
help='the path of origin img.')
parser.add_argument('--bg_path',
default='./background/', type=str,
help='the path of bg img.')
parser.add_argument('--output_path',
default='./output/', type=str,
help='the path of the output dir.')
# parser.set_defaults(output_path='test')
parser.add_argument('--count_from', '-p', default=0, type=int,
metavar='N', help='print frequency (default: 10)') # print frequency
parser.add_argument('--repeat_T', default=0, type=int)
parser.add_argument('--sys_num', default=6, type=int)
args = parser.parse_args()
xgw(args)
| 53.692132
| 380
| 0.720417
| 7,535
| 47,088
| 4.138421
| 0.052157
| 0.120065
| 0.057339
| 0.028766
| 0.79088
| 0.720938
| 0.670558
| 0.626014
| 0.595773
| 0.560562
| 0
| 0.048875
| 0.13445
| 47,088
| 876
| 381
| 53.753425
| 0.716221
| 0.144156
| 0
| 0.301205
| 0
| 0
| 0.022177
| 0.002927
| 0
| 0
| 0
| 0
| 0
| 1
| 0.01004
| false
| 0
| 0.038153
| 0.002008
| 0.052209
| 0.008032
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a5ce615b33cd197b365d6e3673610f15fbcf59b
| 12,289
|
py
|
Python
|
assignment1/cs231n/classifiers/neural_net.py
|
zeevikal/CS231n-spring2018
|
50691a947b877047099e7a1fe99a3fdea4a4fcf8
|
[
"MIT"
] | null | null | null |
assignment1/cs231n/classifiers/neural_net.py
|
zeevikal/CS231n-spring2018
|
50691a947b877047099e7a1fe99a3fdea4a4fcf8
|
[
"MIT"
] | 3
|
2019-12-09T06:04:00.000Z
|
2019-12-09T06:05:23.000Z
|
assignment1/cs231n/classifiers/neural_net.py
|
zeevikal/CS231n-spring2018
|
50691a947b877047099e7a1fe99a3fdea4a4fcf8
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
import numpy as np
import matplotlib.pyplot as plt
class TwoLayerNet(object):
"""
A two-layer fully-connected neural network. The net has an input dimension
of N, a hidden layer dimension of H, and performs classification over C
classes.
We train the network with a softmax loss function and L2 regularization on
the weight matrices. The network uses a ReLU nonlinearity after the first
fully connected layer.
In other words, the network has the following architecture:
input - fully connected layer - ReLU - fully connected layer - softmax
The outputs of the second fully-connected layer are the scores for each
class.
"""
def __init__(self, input_size, hidden_size, output_size, std=1e-4):
"""
Initialize the model. Weights are initialized to small random values
and biases are initialized to zero. Weights and biases are stored in
the variable self.params, which is a dictionary with the following keys
W1: First layer weights; has shape (D, H)
b1: First layer biases; has shape (H,)
W2: Second layer weights; has shape (H, C)
b2: Second layer biases; has shape (C,)
Inputs:
- input_size: The dimension D of the input data.
- hidden_size: The number of neurons H in the hidden layer.
- output_size: The number of classes C.
"""
self.params = {}
self.params['W1'] = std * np.random.randn(input_size, hidden_size)
self.params['b1'] = np.zeros(hidden_size)
self.params['W2'] = std * np.random.randn(hidden_size, output_size)
self.params['b2'] = np.zeros(output_size)
def loss(self, X, y=None, reg=0.0):
"""
Compute the loss and gradients for a two layer fully connected neural
network.
Inputs:
- X: Input data of shape (N, D). Each X[i] is a training sample.
- y: Vector of training labels. y[i] is the label for X[i], and each
y[i] is an integer in the range 0 <= y[i] < C. This parameter is
optional; if it is not passed then we only return scores, and if it
is passed then we instead return the loss and gradients.
- reg: Regularization strength.
Returns:
If y is None, return a matrix scores of shape (N, C) where scores[i, c]
is the score for class c on input X[i].
If y is not None, instead return a tuple of:
- loss: Loss (data loss and regularization loss) for this batch of
training samples.
- grads: Dictionary mapping parameter names to gradients of those
parameters with respect to the loss function; has the same keys as
self.params.
"""
# Unpack variables from the params dictionary
W1, b1 = self.params['W1'], self.params['b1']
W2, b2 = self.params['W2'], self.params['b2']
N, D = X.shape
# Compute the forward pass
scores = None
#######################################################################
# TODO: Perform the forward pass, computing the class scores for the #
# input. Store the result in the scores variable, which should be an #
# array of shape (N, C). #
#######################################################################
scores1 = X.dot(W1) + b1 # FC1
X2 = np.maximum(0, scores1) # ReLU FC1
scores = X2.dot(W2) + b2 # FC2
#######################################################################
# END OF YOUR CODE #
#######################################################################
# If the targets are not given then jump out, we're done
if y is None:
return scores
scores -= np.max(scores, axis=1, keepdims=True) # subtract each row's max for numerical stability
scores_exp = np.exp(scores)
probs = scores_exp / np.sum(scores_exp, axis=1, keepdims=True)
# Compute the loss
loss = None
#######################################################################
# TODO: Finish the forward pass, and compute the loss. This should #
# include both the data loss and L2 regularization for W1 and W2. #
# Store the result in the variable loss, which should be a scalar. Use#
# the Softmax classifier loss. #
#######################################################################
correct_probs = -np.log(probs[np.arange(N), y])
# L_i = -log(e^correct_score/sum(e^scores))) = -log(correct_probs)
loss = np.sum(correct_probs)
loss /= N
# L2 regularization WRT W1 and W2
loss += reg * (np.sum(W1 * W1) + np.sum(W2 * W2))
#######################################################################
# END OF YOUR CODE #
#######################################################################
# Backward pass: compute gradients
grads = {}
#############################################################################
# TODO: Compute the backward pass, computing the derivatives of the weights #
# and biases. Store the results in the grads dictionary. For example, #
# grads['W1'] should store the gradient on W1, and be a matrix of same size #
#############################################################################
# gradient of loss_i WRT scores_k
# dL_i/ds_k = probs_k-1(y_i == k)
# i.e. the gradient w.r.t. a score is the predicted probability for non-target
# classes, and the probability minus 1 for the target class
d_scores = probs.copy()
d_scores[np.arange(N), y] -= 1
d_scores /= N
# W2 were multiplied with X2, by chain rule and multiplication
# derivative, WRT W2 we need to multiply downstream derivative by X2
d_W2 = X2.T.dot(d_scores)
# b2 was added, so its local derivative is 1; by the chain rule we just sum the
# downstream gradient d_scores over the batch
d_b2 = np.sum(d_scores, axis=0)
# W1 is upstream of X2, so we continue this way
d_X2 = d_scores.dot(W2.T)
# ReLU derivative is 1 for > 0, else 0
d_scores1 = d_X2 * (scores1 > 0)
d_W1 = X.T.dot(d_scores1)
# b1 gradient
d_b1 = d_scores1.sum(axis=0)
# regularization gradient (reg*W2^2)
d_W2 += reg * 2 * W2
d_W1 += reg * 2 * W1
grads['W1'] = d_W1
grads['b1'] = d_b1
grads['W2'] = d_W2
grads['b2'] = d_b2
#######################################################################
# END OF YOUR CODE #
#######################################################################
return loss, grads
def train(self, X, y, X_val, y_val,
learning_rate=1e-3, learning_rate_decay=0.95,
reg=5e-6, num_iters=100,
batch_size=200, verbose=False):
"""
Train this neural network using stochastic gradient descent.
Inputs:
- X: A numpy array of shape (N, D) giving training data.
- y: A numpy array of shape (N,) giving training labels; y[i] = c means
that X[i] has label c, where 0 <= c < C.
- X_val: A numpy array of shape (N_val, D) giving validation data.
- y_val: A numpy array of shape (N_val,) giving validation labels.
- learning_rate: Scalar giving learning rate for optimization.
- learning_rate_decay: Scalar giving factor used to decay the learning
rate after each epoch.
- reg: Scalar giving regularization strength.
- num_iters: Number of steps to take when optimizing.
- batch_size: Number of training examples to use per step.
- verbose: boolean; if true print progress during optimization.
"""
num_train = X.shape[0]
iterations_per_epoch = max(num_train / batch_size, 1)
# Use SGD to optimize the parameters in self.model
loss_history = []
train_acc_history = []
val_acc_history = []
for it in range(num_iters):
X_batch = None
y_batch = None
###################################################################
# TODO: Create a random minibatch of training data and labels, #
# storing them in X_batch and y_batch respectively. #
###################################################################
# random indexes to sample training data/labels
sample_idx = np.random.choice(num_train, batch_size, replace=True)
X_batch = X[sample_idx]
y_batch = y[sample_idx]
###################################################################
# END OF YOUR CODE #
###################################################################
# Compute loss and gradients using the current minibatch
loss, grads = self.loss(X_batch, y=y_batch, reg=reg)
loss_history.append(loss)
###################################################################
# TODO: Use the gradients in the grads dictionary to update the #
# parameters of the network (stored in the dictionary self.params)#
# using stochastic gradient descent. You'll need to use the #
# gradients stored in the grads dictionary defined above. #
###################################################################
# For each weight in network parameters, update it with the
# corresponding calculated gradient
for key in self.params:
self.params[key] -= learning_rate * grads[key]
###################################################################
# END OF YOUR CODE #
###################################################################
if verbose and it % 100 == 0:
print('iteration %d / %d: loss %f' % (it, num_iters, loss))
# Every epoch, check train and val accuracy and decay learning rate
if it % iterations_per_epoch == 0:
# Check accuracy
train_acc = (self.predict(X_batch) == y_batch).mean()
val_acc = (self.predict(X_val) == y_val).mean()
train_acc_history.append(train_acc)
val_acc_history.append(val_acc)
# Decay learning rate
learning_rate *= learning_rate_decay
return {
'loss_history': loss_history,
'train_acc_history': train_acc_history,
'val_acc_history': val_acc_history,
}
def predict(self, X):
"""
Use the trained weights of this two-layer network to predict labels for
data points. For each data point we predict scores for each of the C
classes, and assign each data point to the class with the highest score
Inputs:
- X: A numpy array of shape (N, D) giving N D-dimensional data points
to classify.
Returns:
- y_pred: A numpy array of shape (N,) giving predicted labels for each
of the elements of X. For all i, y_pred[i] = c means that X[i] is
predicted to have class c, where 0 <= c < C.
"""
y_pred = None
#######################################################################
# TODO: Implement this function; it should be VERY simple! #
#######################################################################
y_pred = np.argmax(self.loss(X), axis=1)
#######################################################################
# END OF YOUR CODE #
#######################################################################
return y_pred
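# --- minimal usage sketch (not part of the original assignment; the sizes and
# hyperparameters below are illustrative assumptions) ---
if __name__ == "__main__":
    np.random.seed(0)
    toy_net = TwoLayerNet(input_size=4, hidden_size=10, output_size=3, std=1e-1)
    X_toy = 10.0 * np.random.randn(5, 4)   # 5 samples, 4 features each
    y_toy = np.array([0, 1, 2, 2, 1])      # labels in the range [0, C)
    history = toy_net.train(X_toy, y_toy, X_toy, y_toy,
                            learning_rate=1e-1, reg=5e-6,
                            num_iters=100, verbose=False)
    print('final training loss:', history['loss_history'][-1])
    print('predictions:', toy_net.predict(X_toy))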
| 45.854478
| 85
| 0.487509
| 1,421
| 12,289
| 4.123153
| 0.214638
| 0.023895
| 0.010923
| 0.013313
| 0.084144
| 0.051715
| 0.044035
| 0.019799
| 0.011265
| 0.011265
| 0
| 0.013716
| 0.311824
| 12,289
| 268
| 86
| 45.854478
| 0.679082
| 0.480104
| 0
| 0
| 0
| 0
| 0.02309
| 0
| 0
| 0
| 0
| 0.011194
| 0
| 1
| 0.049383
| false
| 0
| 0.037037
| 0
| 0.148148
| 0.024691
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a60c251c96da7b05351011b63ba88125eca7fb7
| 9,790
|
py
|
Python
|
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
sdk/python/pulumi_azure_native/storage/storage_account_static_website.py
|
sebtelko/pulumi-azure-native
|
711ec021b5c73da05611c56c8a35adb0ce3244e4
|
[
"Apache-2.0"
] | null | null | null |
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from .. import _utilities
__all__ = ['StorageAccountStaticWebsiteArgs', 'StorageAccountStaticWebsite']
@pulumi.input_type
class StorageAccountStaticWebsiteArgs:
def __init__(__self__, *,
account_name: pulumi.Input[str],
resource_group_name: pulumi.Input[str],
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None):
"""
The set of arguments for constructing a StorageAccountStaticWebsite resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
:param pulumi.Input[str] error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
:param pulumi.Input[str] index_document: The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
"""
pulumi.set(__self__, "account_name", account_name)
pulumi.set(__self__, "resource_group_name", resource_group_name)
if error404_document is not None:
pulumi.set(__self__, "error404_document", error404_document)
if index_document is not None:
pulumi.set(__self__, "index_document", index_document)
@property
@pulumi.getter(name="accountName")
def account_name(self) -> pulumi.Input[str]:
"""
The name of the storage account within the specified resource group.
"""
return pulumi.get(self, "account_name")
@account_name.setter
def account_name(self, value: pulumi.Input[str]):
pulumi.set(self, "account_name", value)
@property
@pulumi.getter(name="resourceGroupName")
def resource_group_name(self) -> pulumi.Input[str]:
"""
The name of the resource group within the user's subscription. The name is case insensitive.
"""
return pulumi.get(self, "resource_group_name")
@resource_group_name.setter
def resource_group_name(self, value: pulumi.Input[str]):
pulumi.set(self, "resource_group_name", value)
@property
@pulumi.getter(name="error404Document")
def error404_document(self) -> Optional[pulumi.Input[str]]:
"""
The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
"""
return pulumi.get(self, "error404_document")
@error404_document.setter
def error404_document(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "error404_document", value)
@property
@pulumi.getter(name="indexDocument")
def index_document(self) -> Optional[pulumi.Input[str]]:
"""
The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
"""
return pulumi.get(self, "index_document")
@index_document.setter
def index_document(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "index_document", value)
class StorageAccountStaticWebsite(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Enables the static website feature of a storage account.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] account_name: The name of the storage account within the specified resource group.
:param pulumi.Input[str] error404_document: The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
:param pulumi.Input[str] index_document: The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
:param pulumi.Input[str] resource_group_name: The name of the resource group within the user's subscription. The name is case insensitive.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: StorageAccountStaticWebsiteArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Enables the static website feature of a storage account.
:param str resource_name: The name of the resource.
:param StorageAccountStaticWebsiteArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(StorageAccountStaticWebsiteArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
account_name: Optional[pulumi.Input[str]] = None,
error404_document: Optional[pulumi.Input[str]] = None,
index_document: Optional[pulumi.Input[str]] = None,
resource_group_name: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = StorageAccountStaticWebsiteArgs.__new__(StorageAccountStaticWebsiteArgs)
if account_name is None and not opts.urn:
raise TypeError("Missing required property 'account_name'")
__props__.__dict__["account_name"] = account_name
__props__.__dict__["error404_document"] = error404_document
__props__.__dict__["index_document"] = index_document
if resource_group_name is None and not opts.urn:
raise TypeError("Missing required property 'resource_group_name'")
__props__.__dict__["resource_group_name"] = resource_group_name
__props__.__dict__["container_name"] = None
super(StorageAccountStaticWebsite, __self__).__init__(
'azure-native:storage:StorageAccountStaticWebsite',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'StorageAccountStaticWebsite':
"""
Get an existing StorageAccountStaticWebsite resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = StorageAccountStaticWebsiteArgs.__new__(StorageAccountStaticWebsiteArgs)
__props__.__dict__["container_name"] = None
__props__.__dict__["error404_document"] = None
__props__.__dict__["index_document"] = None
return StorageAccountStaticWebsite(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="containerName")
def container_name(self) -> pulumi.Output[str]:
"""
The name of the container to upload blobs to.
"""
return pulumi.get(self, "container_name")
@property
@pulumi.getter(name="error404Document")
def error404_document(self) -> pulumi.Output[Optional[str]]:
"""
The absolute path to a custom webpage that should be used when a request is made which does not correspond to an existing file.
"""
return pulumi.get(self, "error404_document")
@property
@pulumi.getter(name="indexDocument")
def index_document(self) -> pulumi.Output[Optional[str]]:
"""
The webpage that Azure Storage serves for requests to the root of a website or any sub-folder. For example, 'index.html'. The value is case-sensitive.
"""
return pulumi.get(self, "index_document")
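# --- minimal usage sketch (illustrative; the resource name and the surrounding
# storage_account / resource_group objects below are assumptions, not part of
# this generated module) ---
#
#     import pulumi
#     import pulumi_azure_native as azure_native
#
#     website = azure_native.storage.StorageAccountStaticWebsite(
#         "staticWebsite",
#         account_name=storage_account.name,
#         resource_group_name=resource_group.name,
#         index_document="index.html",
#         error404_document="404.html")
#     pulumi.export("web_container", website.container_name)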
| 48.226601
| 199
| 0.674157
| 1,146
| 9,790
| 5.49651
| 0.140489
| 0.054136
| 0.066677
| 0.048897
| 0.653755
| 0.575806
| 0.548976
| 0.502461
| 0.480235
| 0.422448
| 0
| 0.008201
| 0.240245
| 9,790
| 202
| 200
| 48.465347
| 0.838666
| 0.309806
| 0
| 0.330645
| 1
| 0
| 0.130107
| 0.024199
| 0
| 0
| 0
| 0
| 0
| 1
| 0.137097
| false
| 0.008065
| 0.040323
| 0
| 0.258065
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a61c6ef3ad58f9b8003931de1870b0f5ad404c7
| 1,247
|
py
|
Python
|
python/example_code/s3/s3-python-example-get-bucket-policy.py
|
onehitcombo/aws-doc-sdk-examples
|
03e2e0c5dee75c5decbbb99e849c51417521fd82
|
[
"Apache-2.0"
] | 3
|
2021-01-19T20:23:17.000Z
|
2021-01-19T21:38:59.000Z
|
python/example_code/s3/s3-python-example-get-bucket-policy.py
|
onehitcombo/aws-doc-sdk-examples
|
03e2e0c5dee75c5decbbb99e849c51417521fd82
|
[
"Apache-2.0"
] | null | null | null |
python/example_code/s3/s3-python-example-get-bucket-policy.py
|
onehitcombo/aws-doc-sdk-examples
|
03e2e0c5dee75c5decbbb99e849c51417521fd82
|
[
"Apache-2.0"
] | 2
|
2019-12-27T13:58:00.000Z
|
2020-05-21T18:35:40.000Z
|
# Copyright 2010-2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
#
# This file is licensed under the Apache License, Version 2.0 (the "License").
# You may not use this file except in compliance with the License. A copy of the
# License is located at
#
# http://aws.amazon.com/apache2.0/
#
# This file is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS
# OF ANY KIND, either express or implied. See the License for the specific
# language governing permissions and limitations under the License.
import boto3
# Create an S3 client
s3 = boto3.client('s3')
# Call to S3 to retrieve the policy for the given bucket
result = s3.get_bucket_policy(Bucket='my-bucket')
print(result)
# snippet-comment:[These are tags for the AWS doc team's sample catalog. Do not remove.]
# snippet-sourcedescription:[s3-python-example-get-bucket-policy.py demonstrates how to list the Amazon S3 Buckets in your account.]
# snippet-keyword:[Python]
# snippet-keyword:[AWS SDK for Python (Boto3)]
# snippet-keyword:[Code Sample]
# snippet-keyword:[Amazon S3]
# snippet-service:[s3]
# snippet-sourcetype:[full-example]
# snippet-sourcedate:[2018-06-25]
# snippet-sourceauthor:[jschwarzwalder (AWS)]
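# A hedged variation (not part of the original sample): get_bucket_policy raises a
# botocore ClientError when the bucket has no policy attached or does not exist,
# so callers often wrap the call. 'my-bucket' remains a placeholder name.
#
#     from botocore.exceptions import ClientError
#     try:
#         policy = s3.get_bucket_policy(Bucket='my-bucket')
#         print(policy['Policy'])
#     except ClientError as err:
#         print('Could not read bucket policy:', err)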
| 35.628571
| 133
| 0.735365
| 186
| 1,247
| 4.919355
| 0.586022
| 0.054645
| 0.021858
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.03074
| 0.165196
| 1,247
| 34
| 134
| 36.676471
| 0.848223
| 0.847634
| 0
| 0
| 0
| 0
| 0.083969
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.25
| 0
| 0.25
| 0.25
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a630004921c5a5ff2ec4e4b2d0a96b0bf000baa
| 897
|
py
|
Python
|
data_io/util/value_blob_erosion.py
|
Rekrau/PyGreentea
|
457d7dc5be12b15c3c7663ceaf6d74301de56e43
|
[
"BSD-2-Clause"
] | null | null | null |
data_io/util/value_blob_erosion.py
|
Rekrau/PyGreentea
|
457d7dc5be12b15c3c7663ceaf6d74301de56e43
|
[
"BSD-2-Clause"
] | 4
|
2016-04-22T15:39:21.000Z
|
2016-11-15T21:23:58.000Z
|
data_io/util/value_blob_erosion.py
|
Rekrau/PyGreentea
|
457d7dc5be12b15c3c7663ceaf6d74301de56e43
|
[
"BSD-2-Clause"
] | 4
|
2017-05-12T00:17:55.000Z
|
2019-07-01T19:23:32.000Z
|
import numpy as np
from scipy import ndimage
def erode_value_blobs(array, steps=1, values_to_ignore=tuple(), new_value=0):
unique_values = list(np.unique(array))
all_entries_to_keep = np.zeros(shape=array.shape, dtype=bool)  # np.bool is removed in recent NumPy
for unique_value in unique_values:
entries_of_this_value = array == unique_value
if unique_value in values_to_ignore:
all_entries_to_keep = np.logical_or(entries_of_this_value, all_entries_to_keep)
else:
eroded_unique_indicator = ndimage.binary_erosion(entries_of_this_value, iterations=steps)
all_entries_to_keep = np.logical_or(eroded_unique_indicator, all_entries_to_keep)
result = array * all_entries_to_keep
if new_value != 0:
eroded_entries = np.logical_not(all_entries_to_keep)
new_values = new_value * eroded_entries
result += new_values
return result
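# --- minimal usage sketch (illustrative) ---
# Erode two labelled blobs by one step; label 2 is protected via values_to_ignore
# and voxels removed by the erosion are rewritten as -1.
if __name__ == '__main__':
    labels = np.array([[1, 1, 1, 0],
                       [1, 1, 1, 0],
                       [1, 1, 1, 0],
                       [0, 0, 2, 2]])
    print(erode_value_blobs(labels, steps=1, values_to_ignore=(2,), new_value=-1))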
| 42.714286
| 101
| 0.733556
| 132
| 897
| 4.560606
| 0.340909
| 0.116279
| 0.139535
| 0.186047
| 0.162791
| 0.089701
| 0.089701
| 0
| 0
| 0
| 0
| 0.004172
| 0.198439
| 897
| 20
| 102
| 44.85
| 0.833102
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.055556
| false
| 0
| 0.111111
| 0
| 0.222222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a6b124cb7b2cd1d6d09ae5b84d5b49e63612508
| 679
|
py
|
Python
|
test_f_login_andy.py
|
KotoLLC/peacenik-tests
|
760f7799ab2b9312fe0cce373890195151c48fce
|
[
"Apache-2.0"
] | null | null | null |
test_f_login_andy.py
|
KotoLLC/peacenik-tests
|
760f7799ab2b9312fe0cce373890195151c48fce
|
[
"Apache-2.0"
] | null | null | null |
test_f_login_andy.py
|
KotoLLC/peacenik-tests
|
760f7799ab2b9312fe0cce373890195151c48fce
|
[
"Apache-2.0"
] | null | null | null |
from helpers import *
def test_f_login_andy():
url = "http://central.orbits.local/rpc.AuthService/Login"
raw_payload = {"name": "andy","password": "12345"}
payload = json.dumps(raw_payload)
headers = {'Content-Type': 'application/json'}
# convert dict to json by json.dumps() for body data.
response = requests.request("POST", url, headers=headers, data=payload)
save_cookies(response.cookies,"cookies.txt")
# Validate response headers and body contents, e.g. status code.
assert response.status_code == 200
# print full request and response
pretty_print_request(response.request)
pretty_print_response(response)
| 35.736842
| 75
| 0.696613
| 86
| 679
| 5.372093
| 0.604651
| 0.04329
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.014467
| 0.185567
| 679
| 19
| 76
| 35.736842
| 0.820976
| 0.216495
| 0
| 0
| 0
| 0
| 0.213611
| 0
| 0
| 0
| 0
| 0
| 0.090909
| 1
| 0.090909
| false
| 0.090909
| 0.090909
| 0
| 0.181818
| 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a6dcc4d9c3e1b2437b6c8b26173ce12b1dfa929
| 7,761
|
py
|
Python
|
week2/Assignment2Answer.py
|
RayshineRen/Introduction_to_Data_Science_in_Python
|
b19aa781a8f8d0e25853c4e86dadd4c9bebbcd71
|
[
"MIT"
] | 1
|
2020-09-22T15:06:02.000Z
|
2020-09-22T15:06:02.000Z
|
week2/Assignment2Answer.py
|
RayshineRen/Introduction_to_Data_Science_in_Python
|
b19aa781a8f8d0e25853c4e86dadd4c9bebbcd71
|
[
"MIT"
] | 1
|
2020-11-03T14:11:02.000Z
|
2020-11-03T14:24:50.000Z
|
week2/Assignment2Answer.py
|
RayshineRen/Introduction_to_Data_Science_in_Python
|
b19aa781a8f8d0e25853c4e86dadd4c9bebbcd71
|
[
"MIT"
] | 2
|
2020-09-22T05:27:09.000Z
|
2020-11-05T10:39:49.000Z
|
# -*- coding: utf-8 -*-
"""
Created on Fri Sep 18 21:56:15 2020
@author: Ray
@email: [email protected]
@wechat: RayTing0305
"""
'''
Question 1
Write a function called proportion_of_education which returns the proportion of children in the dataset who had a mother with the education levels equal to less than high school (<12), high school (12), more than high school but not a college graduate (>12) and college degree.
This function should return a dictionary in the form of (use the correct numbers, do not round numbers):
{"less than high school":0.2,
"high school":0.4,
"more than high school but not college":0.2,
"college":0.2}
'''
import scipy.stats as stats
import numpy as np
import pandas as pd
df = pd.read_csv("./assets/NISPUF17.csv")
def proportion_of_education():
# your code goes here
# YOUR CODE HERE
df_edu = df.EDUC1
edu_list = [1, 2, 3, 4]
zero_df = pd.DataFrame(np.zeros((df_edu.shape[0], len(edu_list))), columns=edu_list)
for edu in edu_list:
zero_df.loc[df_edu == edu, edu] = 1  # avoid chained assignment
#zero_df
sum_ret = zero_df.sum(axis=0)
name_l = ["less than high school", "high school", "more than high school but not college", "college"]
rat = sum_ret.values/sum(sum_ret.values)
dic = dict()
for i in range(4):
dic[name_l[i]] = rat[i]
return dic
raise NotImplementedError()
assert type(proportion_of_education())==type({}), "You must return a dictionary."
assert len(proportion_of_education()) == 4, "You have not returned a dictionary with four items in it."
assert "less than high school" in proportion_of_education().keys(), "You have not returned a dictionary with the correct keys."
assert "high school" in proportion_of_education().keys(), "You have not returned a dictionary with the correct keys."
assert "more than high school but not college" in proportion_of_education().keys(), "You have not returned a dictionary with the correct keys."
assert "college" in proportion_of_education().keys(), "You have not returned a dictionary with the correct"
'''
Question 2
Let's explore the relationship between being fed breastmilk as a child and getting a seasonal influenza vaccine from a healthcare provider. Return a tuple of the average number of influenza vaccines for those children we know received breastmilk as a child and those who know did not.
This function should return a tuple in the form (use the correct numbers:
(2.5, 0.1)
'''
def average_influenza_doses():
# YOUR CODE HERE
# split by whether the child was breastfed (CBF_01)
fed_breastmilk = list(df.groupby(by='CBF_01'))
be_fed_breastmilk = fed_breastmilk[0][1]
not_fed_breastmilk = fed_breastmilk[1][1]
# number of influenza doses for breastfed children
be_fed_breastmilk_influenza = be_fed_breastmilk.P_NUMFLU
num_be_fed_breastmilk_influenza = be_fed_breastmilk_influenza.dropna().mean()
# number of influenza doses for children who were not breastfed
not_be_fed_breastmilk_influenza = not_fed_breastmilk.P_NUMFLU
num_not_be_fed_breastmilk_influenza = not_be_fed_breastmilk_influenza.dropna().mean()
return num_be_fed_breastmilk_influenza, num_not_be_fed_breastmilk_influenza
raise NotImplementedError()
assert len(average_influenza_doses())==2, "Return two values in a tuple, the first for yes and the second for no."
'''
Question 3
It would be interesting to see if there is any evidence of a link between vaccine effectiveness and sex of the child. Calculate the ratio of the number of children who contracted chickenpox but were vaccinated against it (at least one varicella dose) versus those who were vaccinated but did not contract chicken pox. Return results by sex.
This function should return a dictionary in the form of (use the correct numbers):
{"male":0.2,
"female":0.4}
Note: To aid in verification, the chickenpox_by_sex()['female'] value the autograder is looking for starts with the digits 0.0077.
'''
def chickenpox_by_sex():
# YOUR CODE HERE
# whether the child contracted varicella (chickenpox)
cpox = df.HAD_CPOX
#cpox.value_counts()
cpox_group = list(df.groupby(by='HAD_CPOX'))
have_cpox = cpox_group[0][1]
not_have_cpox = cpox_group[1][1]
# split each group by sex
have_cpox_group = list(have_cpox.groupby(by='SEX'))
not_have_cpox_group = list(not_have_cpox.groupby(by='SEX'))
have_cpox_boy = have_cpox_group[0][1]
have_cpox_girl = have_cpox_group[1][1]
not_have_cpox_boy = not_have_cpox_group[0][1]
not_have_cpox_girl = not_have_cpox_group[1][1]
# vaccinated (at least one varicella dose) and contracted chickenpox
#have_cpox_boy_injected = have_cpox_boy[(have_cpox_boy['P_NUMMMR']>0) | (have_cpox_boy['P_NUMVRC']>0)]
have_cpox_boy_injected = have_cpox_boy[(have_cpox_boy['P_NUMVRC']>0)]
num_have_cpox_boy_injected = have_cpox_boy_injected.count()['SEQNUMC']
have_cpox_girl_injected = have_cpox_girl[(have_cpox_girl['P_NUMVRC']>0)]
num_have_cpox_girl_injected = have_cpox_girl_injected.count()['SEQNUMC']
# vaccinated but did not contract chickenpox
not_have_cpox_boy_injected = not_have_cpox_boy[(not_have_cpox_boy['P_NUMVRC']>0)]
num_not_have_cpox_boy_injected = not_have_cpox_boy_injected.count()['SEQNUMC']
not_have_cpox_girl_injected = not_have_cpox_girl[(not_have_cpox_girl['P_NUMVRC']>0)]
num_not_have_cpox_girl_injected = not_have_cpox_girl_injected.count()['SEQNUMC']
# compute the ratios by sex
ratio_boy = num_have_cpox_boy_injected / num_not_have_cpox_boy_injected
ratio_girl = num_have_cpox_girl_injected / num_not_have_cpox_girl_injected
dic = {}
dic['male'] = ratio_boy
dic['female'] = ratio_girl
return dic
raise NotImplementedError()
assert len(chickenpox_by_sex())==2, "Return a dictionary with two items, the first for males and the second for females."
'''
Question 4
A correlation is a statistical relationship between two variables. If we wanted to know if vaccines work, we might look at the correlation between the use of the vaccine and whether it results in prevention of the infection or disease [1]. In this question, you are to see if there is a correlation between having had the chicken pox and the number of chickenpox vaccine doses given (varicella).
Some notes on interpreting the answer. The had_chickenpox_column is either 1 (for yes) or 2 (for no), and the num_chickenpox_vaccine_column is the number of doses a child has been given of the varicella vaccine. A positive correlation (e.g., corr > 0) means that an increase in had_chickenpox_column (which means more no’s) would also increase the values of num_chickenpox_vaccine_column (which means more doses of vaccine). If there is a negative correlation (e.g., corr < 0), it indicates that having had chickenpox is related to an increase in the number of vaccine doses.
Also, pval is the probability that we observe a correlation between had_chickenpox_column and num_chickenpox_vaccine_column which is greater than or equal to a particular value purely by chance. A small pval means that the observed correlation is highly unlikely to occur by chance. In this case, pval should be very small (will end in e-18 indicating a very small number).
[1] This isn’t really the full picture, since we are not looking at when the dose was given. It’s possible that children had chickenpox and then their parents went to get them the vaccine. Does this dataset have the data we would need to investigate the timing of the dose?
'''
def corr_chickenpox():
cpox = df[(df.P_NUMVRC).notnull()]
have_cpox = cpox[(cpox.HAD_CPOX==1) | (cpox.HAD_CPOX==2)]
df1=pd.DataFrame({"had_chickenpox_column":have_cpox.HAD_CPOX,
"num_chickenpox_vaccine_column":have_cpox.P_NUMVRC})
corr, pval=stats.pearsonr(df1["had_chickenpox_column"],df1["num_chickenpox_vaccine_column"])
return corr
raise NotImplementedError()
| 53.895833
| 576
| 0.74024
| 1,237
| 7,761
| 4.43169
| 0.23848
| 0.065669
| 0.038125
| 0.031193
| 0.356804
| 0.270522
| 0.212149
| 0.12915
| 0.12915
| 0.102153
| 0
| 0.017069
| 0.177168
| 7,761
| 143
| 577
| 54.272727
| 0.841372
| 0.048963
| 0
| 0.085714
| 0
| 0
| 0.203759
| 0.029921
| 0
| 0
| 0
| 0.020979
| 0.114286
| 1
| 0.057143
| false
| 0
| 0.042857
| 0
| 0.157143
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a6f28bb63a4999e5f2dcb27c1de7d562bafcd05
| 1,664
|
py
|
Python
|
Experimente/Experiment ID 8/run-cifar10-v7.py
|
MichaelSchwabe/conv-ebnas-abgabe
|
f463d7bbd9b514597e19d25007913f7994cbbf7c
|
[
"MIT"
] | 6
|
2021-11-03T07:20:48.000Z
|
2021-11-10T08:20:44.000Z
|
Experimente/Experiment ID 8/run-cifar10-v7.py
|
MichaelSchwabe/conv-ebnas-abgabe
|
f463d7bbd9b514597e19d25007913f7994cbbf7c
|
[
"MIT"
] | 1
|
2021-11-02T21:10:51.000Z
|
2021-11-02T21:11:05.000Z
|
Experimente/Experiment ID 8/run-cifar10-v7.py
|
MichaelSchwabe/conv-ebnas-abgabe
|
f463d7bbd9b514597e19d25007913f7994cbbf7c
|
[
"MIT"
] | null | null | null |
from __future__ import print_function
from keras.datasets import mnist
from keras.datasets import cifar10
from keras.utils.np_utils import to_categorical
import numpy as np
from keras import backend as K
from evolution import Evolution
from genome_handler import GenomeHandler
import tensorflow as tf
#import mlflow.keras
#import mlflow
#import mlflow.tensorflow
#mlflow.tensorflow.autolog()
#mlflow.keras.autolog()
print("Num GPUs Available: ", len(tf.config.list_physical_devices('GPU')))
K.set_image_data_format("channels_last")
#(x_train, y_train), (x_test, y_test) = mnist.load_data()
(x_train, y_train), (x_test, y_test) = cifar10.load_data()
x_train = x_train.reshape(x_train.shape[0], x_train.shape[1], x_train.shape[2],x_train.shape[3]).astype('float32') / 255
x_test = x_test.reshape(x_test.shape[0], x_test.shape[1], x_test.shape[2], x_test.shape[3]).astype('float32') / 255
# one-hot encode the class labels (n classes)
y_train = to_categorical(y_train)
y_test = to_categorical(y_test)
#y_train.shape
dataset = ((x_train, y_train), (x_test, y_test))
genome_handler = GenomeHandler(max_conv_layers=4,
max_dense_layers=2, # includes final dense layer
max_filters=512,
max_dense_nodes=1024,
input_shape=x_train.shape[1:],
n_classes=10)
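# the GenomeHandler arguments above are presumably upper bounds on the CNN architecture search space (conv/dense layer counts, filters, dense nodes) that the evolutionary run below explores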
evo = Evolution(genome_handler, data_path="log/evo_cifar10_gen40_pop10_e20.csv")
model = evo.run(dataset=dataset,
num_generations=40,
pop_size=10,
epochs=20,metric='acc')
#epochs=10,metric='loss')
print(model.summary())
| 37.818182
| 120
| 0.676683
| 239
| 1,664
| 4.435146
| 0.380753
| 0.056604
| 0.051887
| 0.033962
| 0.103774
| 0.062264
| 0.062264
| 0.062264
| 0
| 0
| 0
| 0.038139
| 0.212139
| 1,664
| 44
| 121
| 37.818182
| 0.770404
| 0.141226
| 0
| 0
| 0
| 0
| 0.061972
| 0.024648
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.310345
| 0
| 0.310345
| 0.103448
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
6a725ee4987cc406e04ed4e04ead31dbd1e9b6ea
| 1,088
|
py
|
Python
|
To-D0-App-main/base/views.py
|
shagun-agrawal/To-Do-App
|
083081690fe9d291f13c0452a695a092b7544ab2
|
[
"MIT"
] | 1
|
2021-04-08T14:12:38.000Z
|
2021-04-08T14:12:38.000Z
|
To-D0-App-main/base/views.py
|
shagun-agrawal/To-Do-App
|
083081690fe9d291f13c0452a695a092b7544ab2
|
[
"MIT"
] | null | null | null |
To-D0-App-main/base/views.py
|
shagun-agrawal/To-Do-App
|
083081690fe9d291f13c0452a695a092b7544ab2
|
[
"MIT"
] | null | null | null |
from django.shortcuts import render
from django.views.generic.list import ListView
from django.views.generic.detail import DetailView
from django.views.generic.edit import CreateView, UpdateView, DeleteView
from django.urls import reverse_lazy
from django.contrib.auth.views import LoginView
from .models import Task
# Create your views here.
class CustomLoginView(LoginView):
template_name='base/login.html'
fields = '__all__'
redirect_authenticated_user = True
def get_success_url(self):
return reverse_lazy('tasks')
class TaskList(ListView):
model = Task
context_object_name = 'tasks'
class TaskDetail(DetailView):
model = Task
context_object_name = 'task'
class TaskCreate(CreateView):
model = Task
fields = '__all__'
success_url = reverse_lazy('tasks')
class TaskUpdate(UpdateView):
model = Task
fields = '__all__'
success_url = reverse_lazy('tasks')
class TaskDelete(DeleteView):
model = Task
context_object_name='Task'
success_url = reverse_lazy('tasks')
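# --- hedged usage sketch (not in the original file) ---
# Hypothetical URL wiring for the class-based views above; the urls.py module
# and route names other than 'tasks' are assumptions, not taken from the repo.
from django.urls import path
from .views import (CustomLoginView, TaskList, TaskDetail,
                    TaskCreate, TaskUpdate, TaskDelete)

urlpatterns = [
    path('login/', CustomLoginView.as_view(), name='login'),
    path('', TaskList.as_view(), name='tasks'),
    path('task/<int:pk>/', TaskDetail.as_view(), name='task'),
    path('task-create/', TaskCreate.as_view(), name='task-create'),
    path('task-update/<int:pk>/', TaskUpdate.as_view(), name='task-update'),
    path('task-delete/<int:pk>/', TaskDelete.as_view(), name='task-delete'),
]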
| 24.727273
| 73
| 0.714154
| 128
| 1,088
| 5.828125
| 0.429688
| 0.080429
| 0.085791
| 0.088472
| 0.281501
| 0.211796
| 0.131367
| 0.131367
| 0.131367
| 0.131367
| 0
| 0
| 0.204963
| 1,088
| 43
| 74
| 25.302326
| 0.862428
| 0.02114
| 0
| 0.322581
| 0
| 0
| 0.067713
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.032258
| false
| 0
| 0.225806
| 0.032258
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
6a75c6bcf2a235fe76f46e51c4cc31283811626a
| 2,534
|
py
|
Python
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 42
|
2021-08-17T02:27:59.000Z
|
2022-03-26T16:00:57.000Z
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 1
|
2021-09-25T11:15:20.000Z
|
2021-09-27T04:18:25.000Z
|
simulation/dataset_G_1q_X_Z_N1.py
|
eperrier/QDataSet
|
383b38b9b4166848f72fac0153800525e66b477b
|
[
"MIT"
] | 6
|
2021-08-17T02:28:04.000Z
|
2022-03-22T07:11:48.000Z
|
##############################################
"""
This module generates a dataset.
"""
##############################################
# preamble
import numpy as np
from utilites import Pauli_operators, simulate, CheckNoise
################################################
# meta parameters
name = "G_1q_X_Z_N1"
################################################
# quantum parameters
dim = 2 # dimension of the system
Omega = 12 # qubit energy gap
static_operators = [0.5*Pauli_operators[3]*Omega] # drift Hamiltonian
dynamic_operators = [0.5*Pauli_operators[1]] # control Hamiltonian
noise_operators = [0.5*Pauli_operators[3]] # noise Hamiltonian
initial_states = [
np.array([[0.5,0.5],[0.5,0.5]]), np.array([[0.5,-0.5],[-0.5,0.5]]),
np.array([[0.5,-0.5j],[0.5j,0.5]]),np.array([[0.5,0.5j],[-0.5j,0.5]]),
np.array([[1,0],[0,0]]), np.array([[0,0],[0,1]])
] # initial states of the qubit
measurement_operators = Pauli_operators[1:] # measurement operators
##################################################
# simulation parameters
T = 1 # Evolution time
M = 1024 # Number of time steps
num_ex = 10000 # Number of examples
batch_size = 50 # batch size for TF
##################################################
# noise parameters
K = 2000 # Number of realizations
noise_profile = [1] # Noise type
###################################################
# control parameters
pulse_shape = "Gaussian" # Control pulse shape
num_pulses = 5 # Number of pulses per sequence
####################################################
# Generate the dataset
sim_parameters = dict( [(k,eval(k)) for k in ["name", "dim", "Omega", "static_operators", "dynamic_operators", "noise_operators", "measurement_operators", "initial_states", "T", "M", "num_ex", "batch_size", "K", "noise_profile", "pulse_shape", "num_pulses"] ])
CheckNoise(sim_parameters)
simulate(sim_parameters)
####################################################
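# --- hedged aside (not in the original file) ---
# The eval()-based comprehension above only gathers the module-level variables
# into a dict keyed by their names; this explicit equivalent, shown for
# clarity only, avoids eval() entirely:
sim_parameters_explicit = {
    "name": name, "dim": dim, "Omega": Omega,
    "static_operators": static_operators, "dynamic_operators": dynamic_operators,
    "noise_operators": noise_operators, "measurement_operators": measurement_operators,
    "initial_states": initial_states, "T": T, "M": M, "num_ex": num_ex,
    "batch_size": batch_size, "K": K, "noise_profile": noise_profile,
    "pulse_shape": pulse_shape, "num_pulses": num_pulses,
}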
| 56.311111
| 261
| 0.404893
| 228
| 2,534
| 4.359649
| 0.372807
| 0.030181
| 0.024145
| 0.024145
| 0.148893
| 0.123742
| 0.071429
| 0.071429
| 0.071429
| 0.071429
| 0
| 0.042815
| 0.327151
| 2,534
| 45
| 262
| 56.311111
| 0.540176
| 0.190608
| 0
| 0
| 1
| 0
| 0.108795
| 0.013681
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.08
| 0
| 0.08
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a7641f27315b4a34aa454452b185ab3ffeddc05
| 505
|
py
|
Python
|
user_service/user_service/api.py
|
Ziang-Lu/Flask-Blog
|
8daf901a0ea0e079ad24a61fd7f16f1298514d4c
|
[
"MIT"
] | null | null | null |
user_service/user_service/api.py
|
Ziang-Lu/Flask-Blog
|
8daf901a0ea0e079ad24a61fd7f16f1298514d4c
|
[
"MIT"
] | 2
|
2020-06-09T08:40:42.000Z
|
2021-04-30T21:20:35.000Z
|
user_service/user_service/api.py
|
Ziang-Lu/Flask-Blog
|
8daf901a0ea0e079ad24a61fd7f16f1298514d4c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
API definition module.
"""
from flask import Blueprint
from flask_restful import Api
from .resources.user import UserAuth, UserItem, UserList, UserFollow
# Create an API-related blueprint
api_bp = Blueprint(name='api', import_name=__name__)
api = Api(api_bp)
api.add_resource(UserList, '/users')
api.add_resource(UserItem, '/users/<int:id>')
api.add_resource(UserAuth, '/user-auth')
api.add_resource(
UserFollow, '/user-follow/<int:follower_id>/<followed_username>'
)
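# --- hedged usage sketch (not in the original file) ---
# Hypothetical app factory showing how the blueprint above could be registered;
# the factory name and the '/api' prefix are assumptions.
from flask import Flask

def create_app() -> Flask:
    app = Flask(__name__)
    app.register_blueprint(api_bp, url_prefix='/api')  # e.g. GET /api/users
    return app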
| 22.954545
| 68
| 0.740594
| 69
| 505
| 5.217391
| 0.463768
| 0.066667
| 0.155556
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.002227
| 0.110891
| 505
| 21
| 69
| 24.047619
| 0.799555
| 0.152475
| 0
| 0
| 0
| 0
| 0.200477
| 0.119332
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.363636
| 0
| 0.363636
| 0.181818
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
6a77df2fb34c60a66cb0710a264af376f888be93
| 2,112
|
py
|
Python
|
advanced/itertools_funcs.py
|
ariannasg/python3-essential-training
|
9b52645f5ccb57d2bda5d5f4a3053681a026450a
|
[
"MIT"
] | 1
|
2020-06-02T08:37:41.000Z
|
2020-06-02T08:37:41.000Z
|
advanced/itertools_funcs.py
|
ariannasg/python3-training
|
9b52645f5ccb57d2bda5d5f4a3053681a026450a
|
[
"MIT"
] | null | null | null |
advanced/itertools_funcs.py
|
ariannasg/python3-training
|
9b52645f5ccb57d2bda5d5f4a3053681a026450a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
import itertools
# itertools is a module that's not technically a set of built-in functions but
# it is part of the standard library that comes with python.
# it's useful for creating and using iterators.
def main():
    print('some infinite iterators')
    # cycle iterator can be used to cycle over a collection over and over
    seq1 = ["Joe", "John", "Mike"]
    cycle1 = itertools.cycle(seq1)
    print(next(cycle1))
    print(next(cycle1))
    print(next(cycle1))
    print(next(cycle1))
    print(next(cycle1))

    # use count to create a simple counter
    count1 = itertools.count(100, 3)
    print(next(count1))
    print(next(count1))
    print(next(count1))

    print('some non-infinite iterators')
    values = [10, 5, 20, 30, 40, 50, 40, 30]
    # accumulate creates an iterator that accumulates/aggregates values
    print(list(itertools.accumulate(values)))  # this defaults to addition
    print(list(itertools.accumulate(values, max)))
    print(list(itertools.accumulate(values, min)))

    # use chain to connect sequences together
    x = itertools.chain('ABCD', '1234')
    print(list(x))

    # dropwhile and takewhile will return values until
    # a certain condition is met that stops them. they are similar to the
    # filter built-in function.
    # dropwhile will drop the values from the sequence as long as the
    # condition of the function is true and then returns the rest of values
    print(list(itertools.dropwhile(is_less_than_forty, values)))
    # takewhile will keep the values from the sequence as long as the
    # condition of the function is true and then stops giving data
    print(list(itertools.takewhile(is_less_than_forty, values)))

def is_less_than_forty(x):
    return x < 40

if __name__ == "__main__":
    main()
# CONSOLE OUTPUT:
# some infinite iterators
# Joe
# John
# Mike
# Joe
# John
# 100
# 103
# 106
# some non-infinite iterators
# [10, 15, 35, 65, 105, 155, 195, 225]
# [10, 10, 20, 30, 40, 50, 50, 50]
# [10, 5, 5, 5, 5, 5, 5, 5]
# ['A', 'B', 'C', 'D', '1', '2', '3', '4']
# [40, 50, 40, 30]
# [10, 5, 20, 30]
| 29.333333
| 78
| 0.673295
| 321
| 2,112
| 4.376947
| 0.420561
| 0.051246
| 0.053381
| 0.05694
| 0.296085
| 0.193594
| 0.188612
| 0.153025
| 0.153025
| 0.153025
| 0
| 0.066547
| 0.210227
| 2,112
| 71
| 79
| 29.746479
| 0.775779
| 0.529356
| 0
| 0.296296
| 0
| 0
| 0.080125
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.074074
| false
| 0
| 0.037037
| 0.037037
| 0.148148
| 0.592593
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
|
0
| 1
|
6a782fcc9b346f1edc133e8b8d12314c1cc0a5ff
| 421
|
py
|
Python
|
aula 05/model/Pessoa.py
|
Azenha/AlgProg2
|
062b5caac24435717074a18a7499f80130489a46
|
[
"MIT"
] | null | null | null |
aula 05/model/Pessoa.py
|
Azenha/AlgProg2
|
062b5caac24435717074a18a7499f80130489a46
|
[
"MIT"
] | null | null | null |
aula 05/model/Pessoa.py
|
Azenha/AlgProg2
|
062b5caac24435717074a18a7499f80130489a46
|
[
"MIT"
] | null | null | null |
class Pessoa:
    def __init__(self, codigo, nome, endereco, telefone):
        self.__codigo = int(codigo)
        self.nome = str(nome)
        self._endereco = str(endereco)
        self.__telefone = str(telefone)

    def imprimeNome(self):
        # "You can call this person {nome}."
        print(f"Você pode chamar essa pessoa de {self.nome}.")

    def __imprimeTelefone(self):
        # "You can call this person at the number {telefone}."
        print(f"Você pode ligar para esta pessoa no número {self.__telefone}.")
| 35.083333
| 79
| 0.650831
| 53
| 421
| 4.924528
| 0.471698
| 0.076628
| 0.076628
| 0.10728
| 0.137931
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.239905
| 421
| 12
| 79
| 35.083333
| 0.815625
| 0
| 0
| 0
| 0
| 0
| 0.248815
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.3
| false
| 0
| 0
| 0
| 0.4
| 0.2
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a78c857a857449cf31704c6af0759d610215a2d
| 25,852
|
py
|
Python
|
pypyrus_logbook/logger.py
|
t3eHawk/pypyrus_logbook
|
bd647a1c355b07e8df28c0d7298fcfe68cd9572e
|
[
"MIT"
] | null | null | null |
pypyrus_logbook/logger.py
|
t3eHawk/pypyrus_logbook
|
bd647a1c355b07e8df28c0d7298fcfe68cd9572e
|
[
"MIT"
] | null | null | null |
pypyrus_logbook/logger.py
|
t3eHawk/pypyrus_logbook
|
bd647a1c355b07e8df28c0d7298fcfe68cd9572e
|
[
"MIT"
] | 2
|
2019-02-06T08:05:43.000Z
|
2019-02-06T08:06:35.000Z
|
import atexit
import datetime as dt
import os
import platform
import pypyrus_logbook as logbook
import sys
import time
import traceback
from .conf import all_loggers
from .formatter import Formatter
from .header import Header
from .output import Root
from .record import Record
from .sysinfo import Sysinfo
class Logger():
"""This class represents a single logger.
Logger by itself is a complex set of methods, items and commands that
together provide functionality for advanced logging to different outputs:
console, file, email, database table, HTML document - and using information
from different inputs: user messages, traceback, frames, user parameters,
execution arguments and system descriptors.
Each logger must have a unique name which will help to identify it.
The main application logger will have the same name as the Python script file.
It can be accessed by native logbook methods or by calling the `getlogger()`
method with no name.
Parameters
----------
name : str, optional
The argument is used to define the `name` attribute.
app : str, optional
The argument is used to set the `app` attribute.
desc : str, optional
The argument is used to set the `desc` attribute.
version : str, optional
The argument is used to set the `version` attribute.
status : bool, optional
The argument is used to open or close output `root`.
console : bool, optional
The argument is used to open or close output `console`.
file : bool, optional
The argument is used to open or close output `file`.
email : bool, optional
The argument is used to open or close output `email`.
html : bool, optional
The argument is used to open or close output `html`.
table : bool, optional
The argument is used to open or close output `table`.
directory : str, optional
The argument is used to set logging file folder.
filename : str, optional
The argument is used to set logging file name.
extension : str, optional
The argument is used to set logging file extension.
smtp : dict, optional
The argument is used to configure SMTP connection.
db : dict, optional
The argument is used to configure DB connection.
format : str, optional
The argument is used to set record template.
info : bool, optional
The argument is used to filter info records. The default is True.
debug : bool, optional
The argument is used to filter debug records. The default is False.
warning : bool, optional
The argument is used to filter warning records. The default is True.
error : bool, optional
The argument is used to filter error records. The default is True.
critical : bool, optional
The argument is used to filter critical records. The default is True.
alarming : bool, optional
The argument is used to enable or disable alarming mechanism. The
default is True.
control : bool, optional
The argument is used to enable or disable execution break in case
on error. The default is True.
maxsize : int or bool, optional
The argument is used to define maximum size of output file. Must be
presented as number of bytes. The default is 10 Mb.
maxdays : int or bool, optional
The argument is used to define maximum number of days that will be
logged to same file. The default is 1 which means that new output file
will be opened at each 00:00:00.
maxlevel : int or bool, optional
The argument is used to define the break error level (WARNING = 0,
ERROR = 1, CRITICAL = 2). Anything higher than the break level will
interrupt application execution. The default is 1.
maxerrors : int or bool, optional
The argument is used to define the maximum number of errors. The default
is False which means it is disabled.
Attributes
----------
name : str
Name of the logger.
app : str
Name of the application that we are logging.
desc : str
Description of the application that we are logging.
version : str
Version of the application that we are logging.
start_date : datetime.datetime
Date when logging was started.
rectypes : dict
All available record types. Keys are used in `Logger` write methods as
`rectype` argument. Values are used in formatting. So if you wish to
modify the `rectype` form then edit the appropriate one here. If you wish
to use your own record types then just add them to this dictionary. By
default we provide the following record types:
+---------+---------+
| Key | Value |
+=========+=========+
|none |NONE |
+---------+---------+
|info |INFO |
+---------+---------+
|debug |DEBUG |
+---------+---------+
|warning |WARNING |
+---------+---------+
|error |ERROR |
+---------+---------+
|critical |CRITICAL |
+---------+---------+
messages : dict
Messages that are printed with some `Logger` methods like `ok()`,
`success()`, `fail()`. If you wish to modify the text of these messages
just edit the value of the appropriate item.
with_errors : int
The flag shows that the logger caught errors in the application during its
execution.
count_errors : int
Number of errors that the logger caught in the application during its
execution.
filters : dict
Record type filters. To filter a record type just set the corresponding
item value to False.
root : pypyrus_logbook.output.Root
The output `Root` object.
console : pypyrus_logbook.output.Console
The output `Console` object. Shortcut for `Logger.root.console`.
file : pypyrus_logbook.output.File
The output file. Shortcut for `Logger.output.file`.
email : pypyrus_logbook.output.Email
The output email. Shortcut for `Logger.output.email`.
html: pypyrus_logbook.output.HTML
The output HTML document. Shortcut for `Logger.output.html`.
table: pypyrus_logbook.output.Table
The output table. Shortcut for `Logger.output.table`.
formatter : pypyrus_logbook.formatter.Formatter
Logger formatter which sets all formatting configuration like
record template, error message template, line length etc.
sysinfo : pypyrus_logbook.sysinfo.Sysinfo
Special input object which parses different inputs including system
specifications, flag arguments, execution parameters, user parameters
and environment variables and transforms all of that to `Dataset`
object. Through the `Dataset` object data can be easily accessed by
get item operation or by point like `sysinfo.desc['hostname']` or
`sysinfo.desc.hostname`.
header : pypyrus_logbook.header.Header
The header that can be printed to the writable output.
"""
def __init__(self, name=None, app=None, desc=None, version=None,
status=True, console=True, file=True, email=False, html=False,
table=False, directory=None, filename=None, extension=None,
smtp=None, db=None, format=None, info=True, debug=False,
warning=True, error=True, critical=True, alarming=True,
control=True, maxsize=(1024*1024*10), maxdays=1, maxlevel=2,
maxerrors=False):
# Unique name of the logger.
self._name = name
# Attributes describing the application.
self.app = None
self.desc = None
self.version = None
# Some logger important attributes
self._start_date = dt.datetime.now()
self.rectypes = {'none': 'NONE', 'info': 'INFO', 'debug': 'DEBUG',
'warning': 'WARNING', 'error': 'ERROR',
'critical': 'CRITICAL'}
self.messages = {'ok': 'OK', 'success': 'SUCCESS', 'fail': 'FAIL'}
self._with_error = False
self._count_errors = 0
# Complete the initial configuration.
self.configure(app=app, desc=desc, version=version, status=status,
console=console, file=file, email=email, html=html,
table=table, directory=directory, filename=filename,
extension=extension, smtp=smtp, db=db, format=format,
info=info, debug=debug, warning=warning, error=error,
critical=critical, alarming=alarming, control=control,
maxsize=maxsize, maxdays=maxdays, maxlevel=maxlevel,
maxerrors=maxerrors)
# Output shortcuts.
self.console = self.root.console
self.file = self.root.file
self.email = self.root.email
self.html = self.root.html
self.table = self.root.table
# Set exit function.
atexit.register(self._exit)
# Add the created logger to the special all_loggers dictionary.
all_loggers[self._name] = self
pass
def __str__(self):
return f'<Logger object "{self._name}">'
__repr__ = __str__
@property
def name(self):
"""Unique logger name."""
return self._name
@property
def start_date(self):
"""Logging start date."""
return self._start_date
@property
def with_error(self):
"""Flag that shows was an error or not."""
return self._with_error
@property
def count_errors(self):
"""The number of occured errors."""
return self._count_errors
def configure(self, app=None, desc=None, version=None, status=None,
console=None, file=None, email=None, html=None, table=None,
directory=None, filename=None, extension=None, smtp=None,
db=None, format=None, info=None, debug=None, warning=None,
error=None, critical=None, alarming=None, control=None,
maxsize=None, maxdays=None, maxlevel=None, maxerrors=None):
"""Main method to configure the logger and all its attributes.
This is the only right way to customize the logger. Parameters are the
same as for creation.
Parameters
----------
app : str, optional
The argument is used to set the `app` attribute.
desc : str, optional
The argument is used to set the `desc` attribute.
version : str, optional
The argument is used to set the `version` attribute.
status : bool, optional
The argument is used to open or close output `root`.
console : bool, optional
The argument is used to open or close output `console`.
file : bool, optional
The argument is used to open or close output `file`.
email : bool, optional
The argument is used to open or close output `email`.
html : bool, optional
The argument is used to open or close output `html`.
table : bool, optional
The argument is used to open or close output `table`.
directory : str, optional
The argument is used to set logging file folder.
filename : str, optional
The argument is used to set logging file name.
extension : str, optional
The argument is used to set logging file extension.
smtp : dict, optional
The argument is used to configure SMTP connection.
db : dict, optional
The argument is used to configure DB connection.
format : str, optional
The argument is used to set record template.
info : bool, optional
The argument is used to filter info records.
debug : bool, optional
The argument is used to filter debug records.
warning : bool, optional
The argument is used to filter warning records.
error : bool, optional
The argument is used to filter error records.
critical : bool, optional
The argument is used to filter critical records.
alarming : bool, optional
The argument is used to enable or disable alarming mechanism.
control : bool, optional
The argument is used to enable or disable execution break in case
on error.
maxsize : int or bool, optional
The argument is used to define maximum size of output file.
maxdays : int or bool, optional
The argument is used to define maximum number of days that will be
logged to same file.
maxlevel : int or bool, optional
The argument is used to define the break error level.
maxerrors : int or bool, optional
The argument is used to define the maximum number of errors.
"""
if isinstance(app, str) is True: self.app = app
if isinstance(desc, str) is True: self.desc = desc
if isinstance(version, (str, int, float)) is True:
self.version = version
# Build the output root if it does not exist. Otherwise modify the
# existing output if it is requested.
if hasattr(self, 'root') is False:
self.root = Root(self, console=console, file=file, email=email,
html=html, table=table, status=status,
directory=directory, filename=filename,
extension=extension, smtp=smtp, db=db)
else:
for key, value in {'console': console, 'file': file,
'email': email, 'html': html,
'table': table}.items():
if value is True:
getattr(self.root, key).open()
if key == 'file':
getattr(self.root, key).new()
elif value is False:
getattr(self.root, key).close()
# Customize output file path.
path = {}
if directory is not None: path['dir'] = directory
if filename is not None: path['name'] = filename
if extension is not None: path['ext'] = extension
if len(path) > 0:
self.root.file.configure(**path)
# Customize SMTP server.
if isinstance(smtp, dict) is True:
self.root.email.configure(**smtp)
# Customize database connection.
if isinstance(db, dict) is True:
self.root.table.configure(**db)
# Create the formatter in case it does not exist yet, or just customize it.
# The format parameter can be either a string or a dictionary.
# When it is a string it must describe the record format.
# When it is a dictionary it can contain any parameter of the formatter
# that must be customized.
if isinstance(format, str) is True:
format = {'record': format}
if hasattr(self, 'formatter') is False:
format = {} if isinstance(format, dict) is False else format
self.formatter = Formatter(**format)
elif isinstance(format, dict) is True:
self.formatter.configure(**format)
# Create or customize record type filters.
if hasattr(self, 'filters') is False:
self.filters = {}
for key, value in {'info': info, 'debug': debug, 'error': error,
'warning': warning, 'critical': critical}.items():
if isinstance(value, bool) is True:
self.filters[key] = value
# Customize limits and parameters of execution behaviour.
if isinstance(maxsize, (int, float, bool)) is True:
self._maxsize = maxsize
if isinstance(maxdays, (int, float, bool)) is True:
self._maxdays = maxdays
self.__calculate_restart_date()
if isinstance(maxlevel, (int, float, bool)) is True:
self._maxlevel = maxlevel
if isinstance(maxerrors, (int, float, bool)) is True:
self._maxerrors = maxerrors
if isinstance(alarming, bool) is True:
self._alarming = alarming
if isinstance(control, bool) is True:
self._control = control
# Initialize the sysinfo instance if it does not exist yet.
if hasattr(self, 'sysinfo') is False:
self.sysinfo = Sysinfo(self)
# Initialize the header instance if it does not exist yet.
if hasattr(self, 'header') is False:
self.header = Header(self)
pass
def write(self, record):
"""Direct write to the output.
Parameters
----------
record : Record
The argument is used to send it to the output `root`.
"""
self.__check_file_stats()
self.root.write(record)
pass
def record(self, rectype, message, error=False, **kwargs):
"""Basic method to write records.
Parameters
----------
rectype : str
By default method creates the record with the type NONE.
That can be changed but depends on available record types.
All registered record types are stored in the instance attribute
rectypes. If you wish to use your own record type or change the
presentation of an existing one then edit this dictionary.
message : str
The message that must be written.
error : bool, optional
If record is error then set that parameter to `True`.
**kwargs
The keyword arguments used for additional forms (variables) for
record and message formatting.
"""
if self.filters.get(rectype, True) is True:
record = Record(self, rectype, message, error=error, **kwargs)
self.write(record)
pass
def info(self, message, **kwargs):
"""Send INFO record to output."""
rectype = 'info'
self.record(rectype, message, **kwargs)
pass
def debug(self, message, **kwargs):
"""Send DEBUG record to the output."""
rectype = 'debug'
self.record(rectype, message, **kwargs)
pass
def error(self, message=None, rectype='error', format=None, alarming=False,
level=1, **kwargs):
"""Send ERROR record to the output.
If exception in current traceback exists then method will format the
exception according to `formatter.error` string presentation. If
`formatter.error` is set to `False` the exception will be just printed
in original Python style.
Also method will send an alarm if alarming attribute is `True`, email
output is enabled and the SMTP server is configured.
If one of the limit triggers worked then application will be aborted.
Parameters
----------
message : str, optional
The message that must be written instead of exception.
rectype : str, optional
The type of error according to `rectypes` dictionary.
format : str, optional
The format of the error message.
alarming : bool
The argument is used to enable or disable the alarming mechanism
for this certain call.
level : int
The argument is used to describe the error level.
**kwargs
The keyword arguments used for additional forms (variables) for
record and message formatting.
"""
self._with_error = True
self._count_errors += 1
format = self.formatter.error if format is None else format
# Parse the error.
err_type, err_value, err_tb = sys.exc_info()
if message is None and err_type is not None:
if isinstance(format, str) is True:
err_name = err_type.__name__
err_value = err_value
for tb in traceback.walk_tb(err_tb):
f_code = tb[0].f_code
err_file = os.path.abspath(f_code.co_filename)
err_line = tb[1]
err_obj = f_code.co_name
self.record(rectype, message, error=True,
err_name=err_name, err_value=err_value,
err_file=err_file, err_line=err_line,
err_obj=err_obj, **kwargs)
elif format is False:
exception = traceback.format_exception(err_type, err_value,
err_tb)
message = '\n'
message += ''.join(exception)
self.record(rectype, message, **kwargs)
else:
message = message or ''
self.record(rectype, message, **kwargs)
# Break execution in case of critical error if permitted.
# The alarm will be generated at exit if it is configured.
if self._control is True:
if level >= self._maxlevel:
sys.exit()
if self._maxerrors is not False:
if self._count_errors > self._maxerrors:
sys.exit()
# Send alarm if execution was not aborted but alarm is needed.
if alarming is True:
self.root.email.alarm()
pass
def warning(self, message=None, **kwargs):
"""Send WARNING error record to the output."""
self.error(message, rectype='warning', level=0, **kwargs)
pass
def critical(self, message=None, **kwargs):
"""Send CRITICAL error record to the output."""
self.error(message, rectype='critical', level=2, **kwargs)
pass
def head(self):
"""Send header to the output."""
string = self.header.create()
self.write(string)
pass
def subhead(self, string):
"""Send subheader as upper-case text between two border lines to the
output.
Parameters
----------
string : str
The text that will be presented as subheader.
"""
bound = f'{self.formatter.div*self.formatter.length}\n'
string = f'{bound}\t{string}\n{bound}'.upper()
self.write(string)
pass
def line(self, message):
"""Send raw text with the new line to the output.
Parameters
----------
message : str
The message that must be written.
"""
self.write(f'{message}\n')
pass
def bound(self, div=None, length=None):
"""Write horizontal border in the output. Useful when need to separate
different blocks of information.
Parameters
----------
div : str, optional
Symbol that is used to build the bound.
length : int, optional
Length of the bound.
"""
div = div if div is not None else self.formatter.div
length = length if length is not None else self.formatter.length
border = div * length
self.write(border + '\n')
pass
def blank(self, number=1):
"""Write blank lines in the output.
Parameters
----------
number : int, optional
The number of the blank lines that must be written.
"""
string = '\n'*number
self.write(string)
pass
def ok(self, **kwargs):
"""Print INFO message with OK."""
rectype = 'info'
message = self.messages['ok']
self.record(rectype, message, **kwargs)
pass
def success(self, **kwargs):
"""Print INFO message with SUCCESS."""
rectype = 'info'
message = self.messages['success']
self.record(rectype, message, **kwargs)
pass
def fail(self, **kwargs):
"""Print INFO message with FAIL."""
rectype = 'info'
message = self.messages['fail']
self.record(rectype, message, **kwargs)
pass
def restart(self):
"""Restart logging. Will open new file."""
self._start_date = dt.datetime.now()
self.__calculate_restart_date()
if self.root.file.status is True:
self.root.file.new()
if self.header.used is True:
self.head()
pass
def send(self, *args, **kwargs):
"""Send email message. Note that SMTP server connection must be
configured.
"""
self.root.email.send(*args, **kwargs)
pass
def set(self, **kwargs):
"""Update values in table. Note that DB connection must be
configured.
"""
self.root.table.write(**kwargs)
pass
def _exit(self):
# Inform about the error.
if self._alarming is True and self._with_error is True:
self.root.email.alarm()
pass
def __calculate_restart_date(self):
"""Calculate the date when logger must be restarted according to
maxdays parameter.
"""
self.__restart_date = (self._start_date
+ dt.timedelta(days=self._maxdays))
pass
def __check_file_stats(self):
"""Check the output file statistics to catch when current file must be
closed and new one must be opened.
"""
if self.root.file.status is True:
if self._maxsize is not False:
if self.root.file.size is not None:
if self.root.file.size > self._maxsize:
self.restart()
return
if self._maxdays is not False:
if self.__restart_date.day == dt.datetime.now().day:
self.restart()
return
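# --- hedged usage sketch (not in the original file) ---
# A minimal way the Logger above could be driven, based only on the signatures
# and defaults shown in this module; the chosen output settings are assumptions.
if __name__ == '__main__':
    log = Logger(name='demo', app='demo-app', console=True, file=False)
    log.head()                       # print the header block
    log.info('processing started')
    try:
        1 / 0
    except ZeroDivisionError:
        log.error()                  # formats the currently active exception
    log.success()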
| 39.348554
| 79
| 0.592952
| 3,127
| 25,852
| 4.856092
| 0.1244
| 0.041291
| 0.047942
| 0.062693
| 0.394929
| 0.349687
| 0.314587
| 0.276984
| 0.265789
| 0.259862
| 0
| 0.001947
| 0.324501
| 25,852
| 656
| 80
| 39.408537
| 0.867606
| 0.492999
| 0
| 0.224806
| 0
| 0
| 0.032819
| 0.006209
| 0
| 0
| 0
| 0
| 0
| 1
| 0.108527
| false
| 0.085271
| 0.054264
| 0.003876
| 0.197674
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a79e21ee2f5d7ad67e69bd27f9206807683db56
| 488
|
py
|
Python
|
darling_ansible/python_venv/lib/python3.7/site-packages/oci/object_storage/transfer/constants.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/object_storage/transfer/constants.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | null | null | null |
darling_ansible/python_venv/lib/python3.7/site-packages/oci/object_storage/transfer/constants.py
|
revnav/sandbox
|
f9c8422233d093b76821686b6c249417502cf61d
|
[
"Apache-2.0"
] | 1
|
2020-06-25T03:12:58.000Z
|
2020-06-25T03:12:58.000Z
|
# coding: utf-8
# Copyright (c) 2016, 2020, Oracle and/or its affiliates. All rights reserved.
# This software is dual-licensed to you under the Universal Permissive License (UPL) 1.0 as shown at https://oss.oracle.com/licenses/upl or Apache License 2.0 as shown at http://www.apache.org/licenses/LICENSE-2.0. You may choose either license.
MEBIBYTE = 1024 * 1024
STREAMING_DEFAULT_PART_SIZE = 10 * MEBIBYTE
DEFAULT_PART_SIZE = 128 * MEBIBYTE
OBJECT_USE_MULTIPART_SIZE = 128 * MEBIBYTE
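# --- hedged aside (not in the original file) ---
# Illustration only: how many multipart chunks a hypothetical 1 GiB upload
# would need at the default part size defined above.
object_size = 1024 * MEBIBYTE                 # 1 GiB, placeholder value
parts = -(-object_size // DEFAULT_PART_SIZE)  # ceiling division -> 8 parts
print(parts)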
| 54.222222
| 245
| 0.772541
| 80
| 488
| 4.6125
| 0.7
| 0.01626
| 0.04336
| 0.054201
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.07381
| 0.139344
| 488
| 8
| 246
| 61
| 0.804762
| 0.686475
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a7ebe45370c220d4cb3303c8715bdc2a5f264ae
| 7,074
|
py
|
Python
|
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
python/sdk/client/api/log_api.py
|
ashwinath/merlin
|
087a7fa6fb21e4c771d64418bd58873175226ca1
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
"""
Merlin
API Guide for accessing Merlin's model management, deployment, and serving functionalities # noqa: E501
OpenAPI spec version: 0.7.0
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import re # noqa: F401
# python 2 and python 3 compatibility library
import six
from client.api_client import ApiClient
class LogApi(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
def __init__(self, api_client=None):
if api_client is None:
api_client = ApiClient()
self.api_client = api_client
def logs_get(self, name, pod_name, namespace, cluster, **kwargs): # noqa: E501
"""Retrieve log from a container # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.logs_get(name, pod_name, namespace, cluster, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str pod_name: (required)
:param str namespace: (required)
:param str cluster: (required)
:param str follow:
:param str limit_bytes:
:param str pretty:
:param str previous:
:param str since_seconds:
:param str since_time:
:param str tail_lines:
:param str timestamps:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('async_req'):
return self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs) # noqa: E501
else:
(data) = self.logs_get_with_http_info(name, pod_name, namespace, cluster, **kwargs) # noqa: E501
return data
def logs_get_with_http_info(self, name, pod_name, namespace, cluster, **kwargs): # noqa: E501
"""Retrieve log from a container # noqa: E501
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please pass async_req=True
>>> thread = api.logs_get_with_http_info(name, pod_name, namespace, cluster, async_req=True)
>>> result = thread.get()
:param async_req bool
:param str name: (required)
:param str pod_name: (required)
:param str namespace: (required)
:param str cluster: (required)
:param str follow:
:param str limit_bytes:
:param str pretty:
:param str previous:
:param str since_seconds:
:param str since_time:
:param str tail_lines:
:param str timestamps:
:return: None
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['name', 'pod_name', 'namespace', 'cluster', 'follow', 'limit_bytes', 'pretty', 'previous', 'since_seconds', 'since_time', 'tail_lines', 'timestamps'] # noqa: E501
all_params.append('async_req')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in six.iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method logs_get" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'name' is set
if ('name' not in params or
params['name'] is None):
raise ValueError("Missing the required parameter `name` when calling `logs_get`") # noqa: E501
# verify the required parameter 'pod_name' is set
if ('pod_name' not in params or
params['pod_name'] is None):
raise ValueError("Missing the required parameter `pod_name` when calling `logs_get`") # noqa: E501
# verify the required parameter 'namespace' is set
if ('namespace' not in params or
params['namespace'] is None):
raise ValueError("Missing the required parameter `namespace` when calling `logs_get`") # noqa: E501
# verify the required parameter 'cluster' is set
if ('cluster' not in params or
params['cluster'] is None):
raise ValueError("Missing the required parameter `cluster` when calling `logs_get`") # noqa: E501
collection_formats = {}
path_params = {}
query_params = []
if 'name' in params:
query_params.append(('name', params['name'])) # noqa: E501
if 'pod_name' in params:
query_params.append(('pod_name', params['pod_name'])) # noqa: E501
if 'namespace' in params:
query_params.append(('namespace', params['namespace'])) # noqa: E501
if 'cluster' in params:
query_params.append(('cluster', params['cluster'])) # noqa: E501
if 'follow' in params:
query_params.append(('follow', params['follow'])) # noqa: E501
if 'limit_bytes' in params:
query_params.append(('limit_bytes', params['limit_bytes'])) # noqa: E501
if 'pretty' in params:
query_params.append(('pretty', params['pretty'])) # noqa: E501
if 'previous' in params:
query_params.append(('previous', params['previous'])) # noqa: E501
if 'since_seconds' in params:
query_params.append(('since_seconds', params['since_seconds'])) # noqa: E501
if 'since_time' in params:
query_params.append(('since_time', params['since_time'])) # noqa: E501
if 'tail_lines' in params:
query_params.append(('tail_lines', params['tail_lines'])) # noqa: E501
if 'timestamps' in params:
query_params.append(('timestamps', params['timestamps'])) # noqa: E501
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# Authentication setting
auth_settings = ['Bearer'] # noqa: E501
return self.api_client.call_api(
'/logs', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type=None, # noqa: E501
auth_settings=auth_settings,
async_req=params.get('async_req'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
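# --- hedged usage sketch (not in the original file) ---
# Hypothetical call against the generated client above; the model, pod,
# namespace and cluster values are placeholders, not taken from the repo.
if __name__ == "__main__":
    api = LogApi(ApiClient())
    api.logs_get(name="my-model", pod_name="my-model-abc123",
                 namespace="models", cluster="default", tail_lines="100")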
| 39.3
| 185
| 0.607577
| 837
| 7,074
| 4.946237
| 0.193548
| 0.050242
| 0.057488
| 0.055072
| 0.525604
| 0.454589
| 0.408696
| 0.390338
| 0.36715
| 0.342029
| 0
| 0.017327
| 0.290218
| 7,074
| 179
| 186
| 39.519553
| 0.80721
| 0.326124
| 0
| 0
| 1
| 0
| 0.221276
| 0.015197
| 0
| 0
| 0
| 0
| 0
| 1
| 0.033333
| false
| 0
| 0.044444
| 0
| 0.122222
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a7f701b1440f625bfec8817f0a39a899231c69f
| 105,704
|
py
|
Python
|
tencentcloud/dbbrain/v20210527/models.py
|
lleiyyang/tencentcloud-sdk-python
|
e6e6a4ce89286673b2322ae92d3c2fbf8665aa0b
|
[
"Apache-2.0"
] | 465
|
2018-04-27T09:54:59.000Z
|
2022-03-29T02:18:01.000Z
|
tencentcloud/dbbrain/v20210527/models.py
|
lleiyyang/tencentcloud-sdk-python
|
e6e6a4ce89286673b2322ae92d3c2fbf8665aa0b
|
[
"Apache-2.0"
] | 91
|
2018-04-27T09:48:11.000Z
|
2022-03-12T08:04:04.000Z
|
tencentcloud/dbbrain/v20210527/models.py
|
lleiyyang/tencentcloud-sdk-python
|
e6e6a4ce89286673b2322ae92d3c2fbf8665aa0b
|
[
"Apache-2.0"
] | 232
|
2018-05-02T08:02:46.000Z
|
2022-03-30T08:02:48.000Z
|
# -*- coding: utf8 -*-
# Copyright (c) 2017-2021 THL A29 Limited, a Tencent company. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import warnings
from tencentcloud.common.abstract_model import AbstractModel
class AddUserContactRequest(AbstractModel):
"""AddUserContact请求参数结构体
"""
def __init__(self):
r"""
:param Name: 联系人姓名,由中英文、数字、空格、!@#$%^&*()_+-=()组成,不能以下划线开头,长度在20以内。
:type Name: str
:param ContactInfo: 邮箱地址,支持大小写字母、数字、下划线及@字符, 不能以下划线开头,邮箱地址不可重复。
:type ContactInfo: str
:param Product: 服务产品类型,固定值:"mysql"。
:type Product: str
"""
self.Name = None
self.ContactInfo = None
self.Product = None
def _deserialize(self, params):
self.Name = params.get("Name")
self.ContactInfo = params.get("ContactInfo")
self.Product = params.get("Product")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class AddUserContactResponse(AbstractModel):
"""AddUserContact返回参数结构体
"""
def __init__(self):
r"""
:param Id: 添加成功的联系人id。
:type Id: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Id = None
self.RequestId = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.RequestId = params.get("RequestId")
class ContactItem(AbstractModel):
"""联系人contact描述。
"""
def __init__(self):
r"""
:param Id: 联系人id。
:type Id: int
:param Name: 联系人姓名。
:type Name: str
:param Mail: 联系人绑定的邮箱。
:type Mail: str
"""
self.Id = None
self.Name = None
self.Mail = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.Name = params.get("Name")
self.Mail = params.get("Mail")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CreateDBDiagReportTaskRequest(AbstractModel):
"""CreateDBDiagReportTask请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param StartTime: 开始时间,如“2020-11-08T14:00:00+08:00”。
:type StartTime: str
:param EndTime: 结束时间,如“2020-11-09T14:00:00+08:00”。
:type EndTime: str
:param SendMailFlag: 是否发送邮件: 0 - 否,1 - 是。
:type SendMailFlag: int
:param ContactPerson: 接收邮件的联系人ID数组。
:type ContactPerson: list of int
:param ContactGroup: 接收邮件的联系组ID数组。
:type ContactGroup: list of int
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认值为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.StartTime = None
self.EndTime = None
self.SendMailFlag = None
self.ContactPerson = None
self.ContactGroup = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.SendMailFlag = params.get("SendMailFlag")
self.ContactPerson = params.get("ContactPerson")
self.ContactGroup = params.get("ContactGroup")
self.Product = params.get("Product")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CreateDBDiagReportTaskResponse(AbstractModel):
"""CreateDBDiagReportTask返回参数结构体
"""
def __init__(self):
r"""
:param AsyncRequestId: 异步任务的请求 ID,可使用此 ID 查询异步任务的执行结果。
注意:此字段可能返回 null,表示取不到有效值。
:type AsyncRequestId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AsyncRequestId = None
self.RequestId = None
def _deserialize(self, params):
self.AsyncRequestId = params.get("AsyncRequestId")
self.RequestId = params.get("RequestId")
class CreateDBDiagReportUrlRequest(AbstractModel):
"""CreateDBDiagReportUrl请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param AsyncRequestId: 健康报告相应的任务ID,可通过DescribeDBDiagReportTasks查询。
:type AsyncRequestId: int
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL;"cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.AsyncRequestId = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.AsyncRequestId = params.get("AsyncRequestId")
self.Product = params.get("Product")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CreateDBDiagReportUrlResponse(AbstractModel):
"""CreateDBDiagReportUrl返回参数结构体
"""
def __init__(self):
r"""
:param ReportUrl: 健康报告浏览地址。
:type ReportUrl: str
:param ExpireTime: 健康报告浏览地址到期时间戳(秒)。
:type ExpireTime: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.ReportUrl = None
self.ExpireTime = None
self.RequestId = None
def _deserialize(self, params):
self.ReportUrl = params.get("ReportUrl")
self.ExpireTime = params.get("ExpireTime")
self.RequestId = params.get("RequestId")
class CreateMailProfileRequest(AbstractModel):
"""CreateMailProfile请求参数结构体
"""
def __init__(self):
r"""
:param ProfileInfo: 邮件配置内容。
:type ProfileInfo: :class:`tencentcloud.dbbrain.v20210527.models.ProfileInfo`
:param ProfileLevel: 配置级别,支持值包括:"User" - 用户级别,"Instance" - 实例级别,其中数据库巡检邮件配置为用户级别,定期生成邮件配置为实例级别。
:type ProfileLevel: str
:param ProfileName: 配置名称,需要保持唯一性,数据库巡检邮件配置名称自拟;定期生成邮件配置命名格式:"scheduler_" + {instanceId},如"schduler_cdb-test"。
:type ProfileName: str
:param ProfileType: 配置类型,支持值包括:"dbScan_mail_configuration" - 数据库巡检邮件配置,"scheduler_mail_configuration" - 定期生成邮件配置。
:type ProfileType: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL。
:type Product: str
:param BindInstanceIds: 配置绑定的实例ID,当配置级别为"Instance"时需要传入且只能为一个实例;当配置级别为“User”时,此参数不填。
:type BindInstanceIds: list of str
"""
self.ProfileInfo = None
self.ProfileLevel = None
self.ProfileName = None
self.ProfileType = None
self.Product = None
self.BindInstanceIds = None
def _deserialize(self, params):
if params.get("ProfileInfo") is not None:
self.ProfileInfo = ProfileInfo()
self.ProfileInfo._deserialize(params.get("ProfileInfo"))
self.ProfileLevel = params.get("ProfileLevel")
self.ProfileName = params.get("ProfileName")
self.ProfileType = params.get("ProfileType")
self.Product = params.get("Product")
self.BindInstanceIds = params.get("BindInstanceIds")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CreateMailProfileResponse(AbstractModel):
"""CreateMailProfile返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class CreateSchedulerMailProfileRequest(AbstractModel):
"""CreateSchedulerMailProfile请求参数结构体
"""
def __init__(self):
r"""
:param WeekConfiguration: 取值范围1-7,分别代表周一至周日。
:type WeekConfiguration: list of int
:param ProfileInfo: 邮件配置内容。
:type ProfileInfo: :class:`tencentcloud.dbbrain.v20210527.models.ProfileInfo`
:param ProfileName: 配置名称,需要保持唯一性,定期生成邮件配置命名格式:"scheduler_" + {instanceId},如"schduler_cdb-test"。
:type ProfileName: str
:param BindInstanceId: 配置订阅的实例ID。
:type BindInstanceId: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
"""
self.WeekConfiguration = None
self.ProfileInfo = None
self.ProfileName = None
self.BindInstanceId = None
self.Product = None
def _deserialize(self, params):
self.WeekConfiguration = params.get("WeekConfiguration")
if params.get("ProfileInfo") is not None:
self.ProfileInfo = ProfileInfo()
self.ProfileInfo._deserialize(params.get("ProfileInfo"))
self.ProfileName = params.get("ProfileName")
self.BindInstanceId = params.get("BindInstanceId")
self.Product = params.get("Product")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CreateSchedulerMailProfileResponse(AbstractModel):
"""CreateSchedulerMailProfile返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class CreateSecurityAuditLogExportTaskRequest(AbstractModel):
"""CreateSecurityAuditLogExportTask请求参数结构体
"""
def __init__(self):
r"""
:param SecAuditGroupId: 安全审计组Id。
:type SecAuditGroupId: str
:param StartTime: 导出日志开始时间,例如2020-12-28 00:00:00。
:type StartTime: str
:param EndTime: 导出日志结束时间,例如2020-12-28 01:00:00。
:type EndTime: str
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL。
:type Product: str
:param DangerLevels: 日志风险等级列表,支持值包括:0 无风险;1 低风险;2 中风险;3 高风险。
:type DangerLevels: list of int
"""
self.SecAuditGroupId = None
self.StartTime = None
self.EndTime = None
self.Product = None
self.DangerLevels = None
def _deserialize(self, params):
self.SecAuditGroupId = params.get("SecAuditGroupId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Product = params.get("Product")
self.DangerLevels = params.get("DangerLevels")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class CreateSecurityAuditLogExportTaskResponse(AbstractModel):
"""CreateSecurityAuditLogExportTask返回参数结构体
"""
def __init__(self):
r"""
:param AsyncRequestId: 日志导出任务Id。
:type AsyncRequestId: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.AsyncRequestId = None
self.RequestId = None
def _deserialize(self, params):
self.AsyncRequestId = params.get("AsyncRequestId")
self.RequestId = params.get("RequestId")
class DeleteSecurityAuditLogExportTasksRequest(AbstractModel):
"""DeleteSecurityAuditLogExportTasks请求参数结构体
"""
def __init__(self):
r"""
:param SecAuditGroupId: 安全审计组Id。
:type SecAuditGroupId: str
:param AsyncRequestIds: 日志导出任务Id列表,接口会忽略不存在或已删除的任务Id。
:type AsyncRequestIds: list of int non-negative
:param Product: 服务产品类型,支持值: "mysql" - 云数据库 MySQL。
:type Product: str
"""
self.SecAuditGroupId = None
self.AsyncRequestIds = None
self.Product = None
def _deserialize(self, params):
self.SecAuditGroupId = params.get("SecAuditGroupId")
self.AsyncRequestIds = params.get("AsyncRequestIds")
self.Product = params.get("Product")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DeleteSecurityAuditLogExportTasksResponse(AbstractModel):
"""DeleteSecurityAuditLogExportTasks返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class DescribeAllUserContactRequest(AbstractModel):
"""DescribeAllUserContact请求参数结构体
"""
def __init__(self):
r"""
:param Product: 服务产品类型,固定值:mysql。
:type Product: str
:param Names: 联系人名数组,支持模糊搜索。
:type Names: list of str
"""
self.Product = None
self.Names = None
def _deserialize(self, params):
self.Product = params.get("Product")
self.Names = params.get("Names")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DescribeAllUserContactResponse(AbstractModel):
"""DescribeAllUserContact返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 联系人的总数量。
:type TotalCount: int
:param Contacts: 联系人的信息。
注意:此字段可能返回 null,表示取不到有效值。
:type Contacts: list of ContactItem
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Contacts = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Contacts") is not None:
self.Contacts = []
for item in params.get("Contacts"):
obj = ContactItem()
obj._deserialize(item)
self.Contacts.append(obj)
self.RequestId = params.get("RequestId")
class DescribeAllUserGroupRequest(AbstractModel):
"""DescribeAllUserGroup请求参数结构体
"""
def __init__(self):
r"""
:param Product: 服务产品类型,固定值:mysql。
:type Product: str
:param Names: 联系组名称数组,支持模糊搜索。
:type Names: list of str
"""
self.Product = None
self.Names = None
def _deserialize(self, params):
self.Product = params.get("Product")
self.Names = params.get("Names")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DescribeAllUserGroupResponse(AbstractModel):
"""DescribeAllUserGroup返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 组总数。
:type TotalCount: int
:param Groups: 组信息。
注意:此字段可能返回 null,表示取不到有效值。
:type Groups: list of GroupItem
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Groups = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Groups") is not None:
self.Groups = []
for item in params.get("Groups"):
obj = GroupItem()
obj._deserialize(item)
self.Groups.append(obj)
self.RequestId = params.get("RequestId")
class DescribeDBDiagEventRequest(AbstractModel):
"""DescribeDBDiagEvent请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param EventId: 事件 ID 。通过“获取实例诊断历史DescribeDBDiagHistory”获取。
:type EventId: int
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.EventId = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.EventId = params.get("EventId")
self.Product = params.get("Product")
memeber_set = set(params.keys())
for name, value in vars(self).items():
if name in memeber_set:
memeber_set.remove(name)
if len(memeber_set) > 0:
warnings.warn("%s fileds are useless." % ",".join(memeber_set))
class DescribeDBDiagEventResponse(AbstractModel):
"""DescribeDBDiagEvent返回参数结构体
"""
def __init__(self):
r"""
:param DiagItem: 诊断项。
:type DiagItem: str
:param DiagType: 诊断类型。
:type DiagType: str
:param EventId: 事件 ID 。
:type EventId: int
:param Explanation: 诊断事件详情,若无附加解释信息则输出为空。
:type Explanation: str
:param Outline: 诊断概要。
:type Outline: str
:param Problem: 诊断出的问题。
:type Problem: str
:param Severity: 严重程度。严重程度分为5级,按影响程度从高至低分别为:1:致命,2:严重,3:告警,4:提示,5:健康。
:type Severity: int
:param StartTime: 开始时间
:type StartTime: str
:param Suggestions: 诊断建议,若无建议则输出为空。
:type Suggestions: str
:param Metric: 保留字段。
注意:此字段可能返回 null,表示取不到有效值。
:type Metric: str
:param EndTime: 结束时间。
:type EndTime: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.DiagItem = None
self.DiagType = None
self.EventId = None
self.Explanation = None
self.Outline = None
self.Problem = None
self.Severity = None
self.StartTime = None
self.Suggestions = None
self.Metric = None
self.EndTime = None
self.RequestId = None
def _deserialize(self, params):
self.DiagItem = params.get("DiagItem")
self.DiagType = params.get("DiagType")
self.EventId = params.get("EventId")
self.Explanation = params.get("Explanation")
self.Outline = params.get("Outline")
self.Problem = params.get("Problem")
self.Severity = params.get("Severity")
self.StartTime = params.get("StartTime")
self.Suggestions = params.get("Suggestions")
self.Metric = params.get("Metric")
self.EndTime = params.get("EndTime")
self.RequestId = params.get("RequestId")
class DescribeDBDiagHistoryRequest(AbstractModel):
"""DescribeDBDiagHistory请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param StartTime: 开始时间,如“2019-09-10 12:13:14”。
:type StartTime: str
:param EndTime: 结束时间,如“2019-09-11 12:13:14”,结束时间与开始时间的间隔最大可为2天。
:type EndTime: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.StartTime = None
self.EndTime = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBDiagHistoryResponse(AbstractModel):
"""DescribeDBDiagHistory返回参数结构体
"""
def __init__(self):
r"""
:param Events: 事件描述。
:type Events: list of DiagHistoryEventItem
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Events = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Events") is not None:
self.Events = []
for item in params.get("Events"):
obj = DiagHistoryEventItem()
obj._deserialize(item)
self.Events.append(obj)
self.RequestId = params.get("RequestId")
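
# Deserialization sketch (illustrative comment only): feeding a plain dict,
# shaped like the API's JSON "Response" payload, into the model above. The
# field values are made-up examples; normally the SDK client builds the
# response object for you.
#
#   payload = {
#       "Events": [{"DiagType": "OUTLINE", "EventId": 1, "Severity": 4,
#                   "StartTime": "2019-09-10 12:13:14",
#                   "EndTime": "2019-09-10 12:20:00"}],
#       "RequestId": "example-request-id",
#   }
#   resp = DescribeDBDiagHistoryResponse()
#   resp._deserialize(payload)
#   print(resp.Events[0].Severity)    # -> 4
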
class DescribeDBDiagReportTasksRequest(AbstractModel):
"""DescribeDBDiagReportTasks请求参数结构体
"""
def __init__(self):
r"""
:param StartTime: 第一个任务的开始时间,用于范围查询,时间格式如:2019-09-10 12:13:14。
:type StartTime: str
:param EndTime: 最后一个任务的开始时间,用于范围查询,时间格式如:2019-09-10 12:13:14。
:type EndTime: str
:param InstanceIds: 实例ID数组,用于筛选指定实例的任务列表。
:type InstanceIds: list of str
:param Sources: 任务的触发来源,支持的取值包括:"DAILY_INSPECTION" - 实例巡检;"SCHEDULED" - 定时生成;"MANUAL" - 手动触发。
:type Sources: list of str
:param HealthLevels: 报告的健康等级,支持的取值包括:"HEALTH" - 健康;"SUB_HEALTH" - 亚健康;"RISK" - 危险;"HIGH_RISK" - 高危。
:type HealthLevels: str
:param TaskStatuses: 任务的状态,支持的取值包括:"created" - 新建;"chosen" - 待执行; "running" - 执行中;"failed" - 失败;"finished" - 已完成。
:type TaskStatuses: str
:param Offset: 偏移量,默认0。
:type Offset: int
:param Limit: 返回数量,默认20,最大值为100。
:type Limit: int
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL;"cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
"""
self.StartTime = None
self.EndTime = None
self.InstanceIds = None
self.Sources = None
self.HealthLevels = None
self.TaskStatuses = None
self.Offset = None
self.Limit = None
self.Product = None
def _deserialize(self, params):
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.InstanceIds = params.get("InstanceIds")
self.Sources = params.get("Sources")
self.HealthLevels = params.get("HealthLevels")
self.TaskStatuses = params.get("TaskStatuses")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBDiagReportTasksResponse(AbstractModel):
"""DescribeDBDiagReportTasks返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 任务总数目。
:type TotalCount: int
:param Tasks: 任务列表。
:type Tasks: list of HealthReportTask
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Tasks = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Tasks") is not None:
self.Tasks = []
for item in params.get("Tasks"):
obj = HealthReportTask()
obj._deserialize(item)
self.Tasks.append(obj)
self.RequestId = params.get("RequestId")
class DescribeDBSpaceStatusRequest(AbstractModel):
"""DescribeDBSpaceStatus请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param RangeDays: 时间段天数,截止日期为当日,默认为7天。
:type RangeDays: int
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.RangeDays = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.RangeDays = params.get("RangeDays")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDBSpaceStatusResponse(AbstractModel):
"""DescribeDBSpaceStatus返回参数结构体
"""
def __init__(self):
r"""
:param Growth: 磁盘增长量(MB)。
:type Growth: int
:param Remain: 磁盘剩余(MB)。
:type Remain: int
:param Total: 磁盘总量(MB)。
:type Total: int
:param AvailableDays: 预计可用天数。
:type AvailableDays: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Growth = None
self.Remain = None
self.Total = None
self.AvailableDays = None
self.RequestId = None
def _deserialize(self, params):
self.Growth = params.get("Growth")
self.Remain = params.get("Remain")
self.Total = params.get("Total")
self.AvailableDays = params.get("AvailableDays")
self.RequestId = params.get("RequestId")
class DescribeDiagDBInstancesRequest(AbstractModel):
"""DescribeDiagDBInstances请求参数结构体
"""
def __init__(self):
r"""
:param IsSupported: 是否是DBbrain支持的实例,固定传 true。
:type IsSupported: bool
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
:param Offset: 分页参数,偏移量。
:type Offset: int
:param Limit: 分页参数,分页值,最大值为100。
:type Limit: int
:param InstanceNames: 根据实例名称条件查询。
:type InstanceNames: list of str
:param InstanceIds: 根据实例ID条件查询。
:type InstanceIds: list of str
:param Regions: 根据地域条件查询。
:type Regions: list of str
"""
self.IsSupported = None
self.Product = None
self.Offset = None
self.Limit = None
self.InstanceNames = None
self.InstanceIds = None
self.Regions = None
def _deserialize(self, params):
self.IsSupported = params.get("IsSupported")
self.Product = params.get("Product")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
self.InstanceNames = params.get("InstanceNames")
self.InstanceIds = params.get("InstanceIds")
self.Regions = params.get("Regions")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeDiagDBInstancesResponse(AbstractModel):
"""DescribeDiagDBInstances返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 实例总数。
:type TotalCount: int
:param DbScanStatus: 全实例巡检状态:0:开启全实例巡检;1:未开启全实例巡检。
:type DbScanStatus: int
:param Items: 实例相关信息。
:type Items: list of InstanceInfo
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.DbScanStatus = None
self.Items = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
self.DbScanStatus = params.get("DbScanStatus")
if params.get("Items") is not None:
self.Items = []
for item in params.get("Items"):
obj = InstanceInfo()
obj._deserialize(item)
self.Items.append(obj)
self.RequestId = params.get("RequestId")
class DescribeHealthScoreRequest(AbstractModel):
"""DescribeHealthScore请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 需要获取健康得分的实例ID。
:type InstanceId: str
:param Time: 获取健康得分的时间,时间格式如:2019-09-10 12:13:14。
:type Time: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.Time = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Time = params.get("Time")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeHealthScoreResponse(AbstractModel):
"""DescribeHealthScore返回参数结构体
"""
def __init__(self):
r"""
:param Data: 健康得分以及异常扣分项。
:type Data: :class:`tencentcloud.dbbrain.v20210527.models.HealthScoreInfo`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Data = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Data") is not None:
self.Data = HealthScoreInfo()
self.Data._deserialize(params.get("Data"))
self.RequestId = params.get("RequestId")
class DescribeMailProfileRequest(AbstractModel):
"""DescribeMailProfile请求参数结构体
"""
def __init__(self):
r"""
:param ProfileType: 配置类型,支持值包括:"dbScan_mail_configuration" - 数据库巡检邮件配置,"scheduler_mail_configuration" - 定期生成邮件配置。
:type ProfileType: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
:param Offset: 分页偏移量。
:type Offset: int
:param Limit: 分页单位,最大支持50。
:type Limit: int
:param ProfileName: 根据邮件配置名称查询,定期发送的邮件配置名称遵循:"scheduler_"+{instanceId}的规则。
:type ProfileName: str
"""
self.ProfileType = None
self.Product = None
self.Offset = None
self.Limit = None
self.ProfileName = None
def _deserialize(self, params):
self.ProfileType = params.get("ProfileType")
self.Product = params.get("Product")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
self.ProfileName = params.get("ProfileName")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeMailProfileResponse(AbstractModel):
"""DescribeMailProfile返回参数结构体
"""
def __init__(self):
r"""
:param ProfileList: 邮件配置详情。
注意:此字段可能返回 null,表示取不到有效值。
:type ProfileList: list of UserProfile
:param TotalCount: 邮件模版总数。
注意:此字段可能返回 null,表示取不到有效值。
:type TotalCount: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.ProfileList = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("ProfileList") is not None:
self.ProfileList = []
for item in params.get("ProfileList"):
obj = UserProfile()
obj._deserialize(item)
self.ProfileList.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeMySqlProcessListRequest(AbstractModel):
"""DescribeMySqlProcessList请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param ID: 线程的ID,用于筛选线程列表。
:type ID: int
:param User: 线程的操作账号名,用于筛选线程列表。
:type User: str
:param Host: 线程的操作主机地址,用于筛选线程列表。
:type Host: str
:param DB: 线程的操作数据库,用于筛选线程列表。
:type DB: str
:param State: 线程的操作状态,用于筛选线程列表。
:type State: str
:param Command: 线程的执行类型,用于筛选线程列表。
:type Command: str
:param Time: 线程的操作时长最小值,单位秒,用于筛选操作时长大于该值的线程列表。
:type Time: int
:param Info: 线程的操作语句,用于筛选线程列表。
:type Info: str
:param Limit: 返回数量,默认20。
:type Limit: int
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL;"cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.ID = None
self.User = None
self.Host = None
self.DB = None
self.State = None
self.Command = None
self.Time = None
self.Info = None
self.Limit = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.ID = params.get("ID")
self.User = params.get("User")
self.Host = params.get("Host")
self.DB = params.get("DB")
self.State = params.get("State")
self.Command = params.get("Command")
self.Time = params.get("Time")
self.Info = params.get("Info")
self.Limit = params.get("Limit")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeMySqlProcessListResponse(AbstractModel):
"""DescribeMySqlProcessList返回参数结构体
"""
def __init__(self):
r"""
:param ProcessList: 实时线程列表。
:type ProcessList: list of MySqlProcess
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.ProcessList = None
self.RequestId = None
def _deserialize(self, params):
if params.get("ProcessList") is not None:
self.ProcessList = []
for item in params.get("ProcessList"):
obj = MySqlProcess()
obj._deserialize(item)
self.ProcessList.append(obj)
self.RequestId = params.get("RequestId")
class DescribeSecurityAuditLogDownloadUrlsRequest(AbstractModel):
"""DescribeSecurityAuditLogDownloadUrls请求参数结构体
"""
def __init__(self):
r"""
:param SecAuditGroupId: 安全审计组Id。
:type SecAuditGroupId: str
:param AsyncRequestId: 异步任务Id。
:type AsyncRequestId: int
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL。
:type Product: str
"""
self.SecAuditGroupId = None
self.AsyncRequestId = None
self.Product = None
def _deserialize(self, params):
self.SecAuditGroupId = params.get("SecAuditGroupId")
self.AsyncRequestId = params.get("AsyncRequestId")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeSecurityAuditLogDownloadUrlsResponse(AbstractModel):
"""DescribeSecurityAuditLogDownloadUrls返回参数结构体
"""
def __init__(self):
r"""
:param Urls: 导出结果的COS链接列表。当结果集很大时,可能会切分为多个url下载。
:type Urls: list of str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Urls = None
self.RequestId = None
def _deserialize(self, params):
self.Urls = params.get("Urls")
self.RequestId = params.get("RequestId")
class DescribeSecurityAuditLogExportTasksRequest(AbstractModel):
"""DescribeSecurityAuditLogExportTasks请求参数结构体
"""
def __init__(self):
r"""
:param SecAuditGroupId: 安全审计组Id。
:type SecAuditGroupId: str
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL。
:type Product: str
:param AsyncRequestIds: 日志导出任务Id列表。
:type AsyncRequestIds: list of int non-negative
:param Offset: 偏移量,默认0。
:type Offset: int
:param Limit: 返回数量,默认20,最大值为100。
:type Limit: int
"""
self.SecAuditGroupId = None
self.Product = None
self.AsyncRequestIds = None
self.Offset = None
self.Limit = None
def _deserialize(self, params):
self.SecAuditGroupId = params.get("SecAuditGroupId")
self.Product = params.get("Product")
self.AsyncRequestIds = params.get("AsyncRequestIds")
self.Offset = params.get("Offset")
self.Limit = params.get("Limit")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeSecurityAuditLogExportTasksResponse(AbstractModel):
"""DescribeSecurityAuditLogExportTasks返回参数结构体
"""
def __init__(self):
r"""
:param Tasks: 安全审计日志导出任务列表。
:type Tasks: list of SecLogExportTaskInfo
:param TotalCount: 安全审计日志导出任务总数。
:type TotalCount: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Tasks = None
self.TotalCount = None
self.RequestId = None
def _deserialize(self, params):
if params.get("Tasks") is not None:
self.Tasks = []
for item in params.get("Tasks"):
obj = SecLogExportTaskInfo()
obj._deserialize(item)
self.Tasks.append(obj)
self.TotalCount = params.get("TotalCount")
self.RequestId = params.get("RequestId")
class DescribeSlowLogTimeSeriesStatsRequest(AbstractModel):
"""DescribeSlowLogTimeSeriesStats请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param StartTime: 开始时间,如“2019-09-10 12:13:14”。
:type StartTime: str
:param EndTime: 结束时间,如“2019-09-10 12:13:14”,结束时间与开始时间的间隔最大可为7天。
:type EndTime: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.StartTime = None
self.EndTime = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeSlowLogTimeSeriesStatsResponse(AbstractModel):
"""DescribeSlowLogTimeSeriesStats返回参数结构体
"""
def __init__(self):
r"""
:param Period: 柱间单位时间间隔,单位为秒。
:type Period: int
:param TimeSeries: 单位时间间隔内慢日志数量统计。
:type TimeSeries: list of TimeSlice
:param SeriesData: 单位时间间隔内的实例 cpu 利用率监控数据。
:type SeriesData: :class:`tencentcloud.dbbrain.v20210527.models.MonitorMetricSeriesData`
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Period = None
self.TimeSeries = None
self.SeriesData = None
self.RequestId = None
def _deserialize(self, params):
self.Period = params.get("Period")
if params.get("TimeSeries") is not None:
self.TimeSeries = []
for item in params.get("TimeSeries"):
obj = TimeSlice()
obj._deserialize(item)
self.TimeSeries.append(obj)
if params.get("SeriesData") is not None:
self.SeriesData = MonitorMetricSeriesData()
self.SeriesData._deserialize(params.get("SeriesData"))
self.RequestId = params.get("RequestId")
class DescribeSlowLogTopSqlsRequest(AbstractModel):
"""DescribeSlowLogTopSqls请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param StartTime: 开始时间,如“2019-09-10 12:13:14”。
:type StartTime: str
:param EndTime: 截止时间,如“2019-09-11 10:13:14”,截止时间与开始时间的间隔小于7天。
:type EndTime: str
:param SortBy: 排序键,目前支持 QueryTime,ExecTimes,RowsSent,LockTime以及RowsExamined 等排序键,默认为QueryTime。
:type SortBy: str
:param OrderBy: 排序方式,支持ASC(升序)以及DESC(降序),默认为DESC。
:type OrderBy: str
:param Limit: 返回数量,默认为20,最大值为100。
:type Limit: int
:param Offset: 偏移量,默认为0。
:type Offset: int
:param SchemaList: 数据库名称数组。
:type SchemaList: list of SchemaItem
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.StartTime = None
self.EndTime = None
self.SortBy = None
self.OrderBy = None
self.Limit = None
self.Offset = None
self.SchemaList = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.SortBy = params.get("SortBy")
self.OrderBy = params.get("OrderBy")
self.Limit = params.get("Limit")
self.Offset = params.get("Offset")
if params.get("SchemaList") is not None:
self.SchemaList = []
for item in params.get("SchemaList"):
obj = SchemaItem()
obj._deserialize(item)
self.SchemaList.append(obj)
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
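
# Request-building sketch (illustrative comment only): SchemaList filters by
# database and takes SchemaItem objects (defined later in this module), while
# SortBy/OrderBy pick the ranking column and direction. The instance ID and
# time range below are placeholders.
#
#   req = DescribeSlowLogTopSqlsRequest()
#   req.InstanceId = "cdb-xxxxxxxx"
#   req.StartTime = "2019-09-10 12:13:14"
#   req.EndTime = "2019-09-11 10:13:14"
#   req.SortBy = "QueryTime"
#   req.OrderBy = "DESC"
#   schema = SchemaItem()
#   schema.Schema = "test_db"
#   req.SchemaList = [schema]
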
class DescribeSlowLogTopSqlsResponse(AbstractModel):
"""DescribeSlowLogTopSqls返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 符合条件的记录总数。
:type TotalCount: int
:param Rows: 慢日志 top sql 列表
:type Rows: list of SlowLogTopSqlItem
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Rows = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Rows") is not None:
self.Rows = []
for item in params.get("Rows"):
obj = SlowLogTopSqlItem()
obj._deserialize(item)
self.Rows.append(obj)
self.RequestId = params.get("RequestId")
class DescribeSlowLogUserHostStatsRequest(AbstractModel):
"""DescribeSlowLogUserHostStats请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param StartTime: 查询范围的开始时间,时间格式如:2019-09-10 12:13:14。
:type StartTime: str
:param EndTime: 查询范围的结束时间,时间格式如:2019-09-10 12:13:14。
:type EndTime: str
:param Product: 服务产品类型,支持值:"mysql" - 云数据库 MySQL;"cynosdb" - 云数据库 TDSQL-C for MySQL,默认为"mysql"。
:type Product: str
        :param Md5: SQL模板的MD5值。
:type Md5: str
"""
self.InstanceId = None
self.StartTime = None
self.EndTime = None
self.Product = None
self.Md5 = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Product = params.get("Product")
self.Md5 = params.get("Md5")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeSlowLogUserHostStatsResponse(AbstractModel):
"""DescribeSlowLogUserHostStats返回参数结构体
"""
def __init__(self):
r"""
:param TotalCount: 来源地址数目。
:type TotalCount: int
:param Items: 各来源地址的慢日志占比详情列表。
:type Items: list of SlowLogHost
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TotalCount = None
self.Items = None
self.RequestId = None
def _deserialize(self, params):
self.TotalCount = params.get("TotalCount")
if params.get("Items") is not None:
self.Items = []
for item in params.get("Items"):
obj = SlowLogHost()
obj._deserialize(item)
self.Items.append(obj)
self.RequestId = params.get("RequestId")
class DescribeTopSpaceSchemaTimeSeriesRequest(AbstractModel):
"""DescribeTopSpaceSchemaTimeSeries请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param Limit: 返回的Top库数量,最大值为100,默认为20。
:type Limit: int
:param SortBy: 筛选Top库所用的排序字段,可选字段包含DataLength、IndexLength、TotalLength、DataFree、FragRatio、TableRows、PhysicalFileSize(仅云数据库 MySQL实例支持),云数据库 MySQL实例默认为 PhysicalFileSize,其他产品实例默认为TotalLength。
:type SortBy: str
:param StartDate: 开始日期,如“2021-01-01”,最早为当日的前第29天,默认为截止日期的前第6天。
:type StartDate: str
:param EndDate: 截止日期,如“2021-01-01”,最早为当日的前第29天,默认为当日。
:type EndDate: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.Limit = None
self.SortBy = None
self.StartDate = None
self.EndDate = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Limit = params.get("Limit")
self.SortBy = params.get("SortBy")
self.StartDate = params.get("StartDate")
self.EndDate = params.get("EndDate")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeTopSpaceSchemaTimeSeriesResponse(AbstractModel):
"""DescribeTopSpaceSchemaTimeSeries返回参数结构体
"""
def __init__(self):
r"""
:param TopSpaceSchemaTimeSeries: 返回的Top库空间统计信息的时序数据列表。
:type TopSpaceSchemaTimeSeries: list of SchemaSpaceTimeSeries
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TopSpaceSchemaTimeSeries = None
self.RequestId = None
def _deserialize(self, params):
if params.get("TopSpaceSchemaTimeSeries") is not None:
self.TopSpaceSchemaTimeSeries = []
for item in params.get("TopSpaceSchemaTimeSeries"):
obj = SchemaSpaceTimeSeries()
obj._deserialize(item)
self.TopSpaceSchemaTimeSeries.append(obj)
self.RequestId = params.get("RequestId")
class DescribeTopSpaceSchemasRequest(AbstractModel):
"""DescribeTopSpaceSchemas请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param Limit: 返回的Top库数量,最大值为100,默认为20。
:type Limit: int
:param SortBy: 筛选Top库所用的排序字段,可选字段包含DataLength、IndexLength、TotalLength、DataFree、FragRatio、TableRows、PhysicalFileSize(仅云数据库 MySQL实例支持),云数据库 MySQL实例默认为 PhysicalFileSize,其他产品实例默认为TotalLength。
:type SortBy: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.Limit = None
self.SortBy = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Limit = params.get("Limit")
self.SortBy = params.get("SortBy")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeTopSpaceSchemasResponse(AbstractModel):
"""DescribeTopSpaceSchemas返回参数结构体
"""
def __init__(self):
r"""
:param TopSpaceSchemas: 返回的Top库空间统计信息列表。
:type TopSpaceSchemas: list of SchemaSpaceData
:param Timestamp: 采集库空间数据的时间戳(秒)。
:type Timestamp: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TopSpaceSchemas = None
self.Timestamp = None
self.RequestId = None
def _deserialize(self, params):
if params.get("TopSpaceSchemas") is not None:
self.TopSpaceSchemas = []
for item in params.get("TopSpaceSchemas"):
obj = SchemaSpaceData()
obj._deserialize(item)
self.TopSpaceSchemas.append(obj)
self.Timestamp = params.get("Timestamp")
self.RequestId = params.get("RequestId")
class DescribeTopSpaceTableTimeSeriesRequest(AbstractModel):
"""DescribeTopSpaceTableTimeSeries请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param Limit: 返回的Top表数量,最大值为100,默认为20。
:type Limit: int
:param SortBy: 筛选Top表所用的排序字段,可选字段包含DataLength、IndexLength、TotalLength、DataFree、FragRatio、TableRows、PhysicalFileSize,默认为 PhysicalFileSize。
:type SortBy: str
:param StartDate: 开始日期,如“2021-01-01”,最早为当日的前第29天,默认为截止日期的前第6天。
:type StartDate: str
:param EndDate: 截止日期,如“2021-01-01”,最早为当日的前第29天,默认为当日。
:type EndDate: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.Limit = None
self.SortBy = None
self.StartDate = None
self.EndDate = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Limit = params.get("Limit")
self.SortBy = params.get("SortBy")
self.StartDate = params.get("StartDate")
self.EndDate = params.get("EndDate")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeTopSpaceTableTimeSeriesResponse(AbstractModel):
"""DescribeTopSpaceTableTimeSeries返回参数结构体
"""
def __init__(self):
r"""
:param TopSpaceTableTimeSeries: 返回的Top表空间统计信息的时序数据列表。
:type TopSpaceTableTimeSeries: list of TableSpaceTimeSeries
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TopSpaceTableTimeSeries = None
self.RequestId = None
def _deserialize(self, params):
if params.get("TopSpaceTableTimeSeries") is not None:
self.TopSpaceTableTimeSeries = []
for item in params.get("TopSpaceTableTimeSeries"):
obj = TableSpaceTimeSeries()
obj._deserialize(item)
self.TopSpaceTableTimeSeries.append(obj)
self.RequestId = params.get("RequestId")
class DescribeTopSpaceTablesRequest(AbstractModel):
"""DescribeTopSpaceTables请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param Limit: 返回的Top表数量,最大值为100,默认为20。
:type Limit: int
:param SortBy: 筛选Top表所用的排序字段,可选字段包含DataLength、IndexLength、TotalLength、DataFree、FragRatio、TableRows、PhysicalFileSize(仅云数据库 MySQL实例支持),云数据库 MySQL实例默认为 PhysicalFileSize,其他产品实例默认为TotalLength。
:type SortBy: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.Limit = None
self.SortBy = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Limit = params.get("Limit")
self.SortBy = params.get("SortBy")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeTopSpaceTablesResponse(AbstractModel):
"""DescribeTopSpaceTables返回参数结构体
"""
def __init__(self):
r"""
:param TopSpaceTables: 返回的Top表空间统计信息列表。
:type TopSpaceTables: list of TableSpaceData
:param Timestamp: 采集表空间数据的时间戳(秒)。
:type Timestamp: int
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.TopSpaceTables = None
self.Timestamp = None
self.RequestId = None
def _deserialize(self, params):
if params.get("TopSpaceTables") is not None:
self.TopSpaceTables = []
for item in params.get("TopSpaceTables"):
obj = TableSpaceData()
obj._deserialize(item)
self.TopSpaceTables.append(obj)
self.Timestamp = params.get("Timestamp")
self.RequestId = params.get("RequestId")
class DescribeUserSqlAdviceRequest(AbstractModel):
"""DescribeUserSqlAdvice请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param SqlText: SQL语句。
:type SqlText: str
:param Schema: 库名。
:type Schema: str
"""
self.InstanceId = None
self.SqlText = None
self.Schema = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.SqlText = params.get("SqlText")
self.Schema = params.get("Schema")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class DescribeUserSqlAdviceResponse(AbstractModel):
"""DescribeUserSqlAdvice返回参数结构体
"""
def __init__(self):
r"""
:param Advices: SQL优化建议,可解析为JSON数组,无需优化时输出为空。
:type Advices: str
:param Comments: SQL优化建议备注,可解析为String数组,无需优化时输出为空。
:type Comments: str
:param SqlText: SQL语句。
:type SqlText: str
:param Schema: 库名。
:type Schema: str
:param Tables: 相关表的DDL信息,可解析为JSON数组。
:type Tables: str
:param SqlPlan: SQL执行计划,可解析为JSON,无需优化时输出为空。
:type SqlPlan: str
:param Cost: SQL优化后的成本节约详情,可解析为JSON,无需优化时输出为空。
:type Cost: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Advices = None
self.Comments = None
self.SqlText = None
self.Schema = None
self.Tables = None
self.SqlPlan = None
self.Cost = None
self.RequestId = None
def _deserialize(self, params):
self.Advices = params.get("Advices")
self.Comments = params.get("Comments")
self.SqlText = params.get("SqlText")
self.Schema = params.get("Schema")
self.Tables = params.get("Tables")
self.SqlPlan = params.get("SqlPlan")
self.Cost = params.get("Cost")
self.RequestId = params.get("RequestId")
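
# Post-processing sketch (illustrative comment only, assuming "resp" is a
# populated DescribeUserSqlAdviceResponse): Advices, Tables, SqlPlan and Cost
# are JSON-encoded strings, so callers typically json.loads() them; an empty
# string means no optimization is needed and can simply be skipped.
#
#   import json
#   advices = json.loads(resp.Advices) if resp.Advices else []
#   cost = json.loads(resp.Cost) if resp.Cost else {}
#   for advice in advices:
#       print(advice)
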
class DiagHistoryEventItem(AbstractModel):
"""实例诊断历史事件
"""
def __init__(self):
r"""
:param DiagType: 诊断类型。
:type DiagType: str
:param EndTime: 结束时间。
:type EndTime: str
:param StartTime: 开始时间。
:type StartTime: str
:param EventId: 事件唯一ID 。
:type EventId: int
:param Severity: 严重程度。严重程度分为5级,按影响程度从高至低分别为:1:致命,2:严重,3:告警,4:提示,5:健康。
:type Severity: int
:param Outline: 诊断概要。
:type Outline: str
:param DiagItem: 诊断项说明。
:type DiagItem: str
:param InstanceId: 实例 ID 。
:type InstanceId: str
:param Metric: 保留字段。
注意:此字段可能返回 null,表示取不到有效值。
:type Metric: str
:param Region: 地域。
:type Region: str
"""
self.DiagType = None
self.EndTime = None
self.StartTime = None
self.EventId = None
self.Severity = None
self.Outline = None
self.DiagItem = None
self.InstanceId = None
self.Metric = None
self.Region = None
def _deserialize(self, params):
self.DiagType = params.get("DiagType")
self.EndTime = params.get("EndTime")
self.StartTime = params.get("StartTime")
self.EventId = params.get("EventId")
self.Severity = params.get("Severity")
self.Outline = params.get("Outline")
self.DiagItem = params.get("DiagItem")
self.InstanceId = params.get("InstanceId")
self.Metric = params.get("Metric")
self.Region = params.get("Region")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class EventInfo(AbstractModel):
"""异常事件信息。
"""
def __init__(self):
r"""
:param EventId: 事件 ID 。
:type EventId: int
:param DiagType: 诊断类型。
:type DiagType: str
:param StartTime: 开始时间。
:type StartTime: str
:param EndTime: 结束时间。
:type EndTime: str
:param Outline: 概要。
:type Outline: str
:param Severity: 严重程度。严重程度分为5级,按影响程度从高至低分别为:1:致命,2:严重,3:告警,4:提示,5:健康。
:type Severity: int
:param ScoreLost: 扣分。
:type ScoreLost: int
:param Metric: 保留字段。
:type Metric: str
:param Count: 告警数目。
:type Count: int
"""
self.EventId = None
self.DiagType = None
self.StartTime = None
self.EndTime = None
self.Outline = None
self.Severity = None
self.ScoreLost = None
self.Metric = None
self.Count = None
def _deserialize(self, params):
self.EventId = params.get("EventId")
self.DiagType = params.get("DiagType")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.Outline = params.get("Outline")
self.Severity = params.get("Severity")
self.ScoreLost = params.get("ScoreLost")
self.Metric = params.get("Metric")
self.Count = params.get("Count")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class GroupItem(AbstractModel):
"""描述组信息。
"""
def __init__(self):
r"""
:param Id: 组id。
:type Id: int
:param Name: 组名称。
:type Name: str
:param MemberCount: 组成员数量。
:type MemberCount: int
"""
self.Id = None
self.Name = None
self.MemberCount = None
def _deserialize(self, params):
self.Id = params.get("Id")
self.Name = params.get("Name")
self.MemberCount = params.get("MemberCount")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class HealthReportTask(AbstractModel):
"""健康报告任务详情。
"""
def __init__(self):
r"""
:param AsyncRequestId: 异步任务请求 ID。
:type AsyncRequestId: int
:param Source: 任务的触发来源,支持的取值包括:"DAILY_INSPECTION" - 实例巡检;"SCHEDULED" - 定时生成;"MANUAL" - 手动触发。
:type Source: str
:param Progress: 任务完成进度,单位%。
:type Progress: int
:param CreateTime: 任务创建时间。
:type CreateTime: str
:param StartTime: 任务开始执行时间。
:type StartTime: str
:param EndTime: 任务完成执行时间。
:type EndTime: str
:param InstanceInfo: 任务所属实例的基础信息。
:type InstanceInfo: :class:`tencentcloud.dbbrain.v20210527.models.InstanceBasicInfo`
:param HealthStatus: 健康报告中的健康信息。
:type HealthStatus: :class:`tencentcloud.dbbrain.v20210527.models.HealthStatus`
"""
self.AsyncRequestId = None
self.Source = None
self.Progress = None
self.CreateTime = None
self.StartTime = None
self.EndTime = None
self.InstanceInfo = None
self.HealthStatus = None
def _deserialize(self, params):
self.AsyncRequestId = params.get("AsyncRequestId")
self.Source = params.get("Source")
self.Progress = params.get("Progress")
self.CreateTime = params.get("CreateTime")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
if params.get("InstanceInfo") is not None:
self.InstanceInfo = InstanceBasicInfo()
self.InstanceInfo._deserialize(params.get("InstanceInfo"))
if params.get("HealthStatus") is not None:
self.HealthStatus = HealthStatus()
self.HealthStatus._deserialize(params.get("HealthStatus"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class HealthScoreInfo(AbstractModel):
"""获取健康得分返回的详情。
"""
def __init__(self):
r"""
:param IssueTypes: 异常详情。
:type IssueTypes: list of IssueTypeInfo
:param EventsTotalCount: 异常事件总数。
:type EventsTotalCount: int
:param HealthScore: 健康得分。
:type HealthScore: int
:param HealthLevel: 健康等级, 如:"HEALTH", "SUB_HEALTH", "RISK", "HIGH_RISK"。
:type HealthLevel: str
"""
self.IssueTypes = None
self.EventsTotalCount = None
self.HealthScore = None
self.HealthLevel = None
def _deserialize(self, params):
if params.get("IssueTypes") is not None:
self.IssueTypes = []
for item in params.get("IssueTypes"):
obj = IssueTypeInfo()
obj._deserialize(item)
self.IssueTypes.append(obj)
self.EventsTotalCount = params.get("EventsTotalCount")
self.HealthScore = params.get("HealthScore")
self.HealthLevel = params.get("HealthLevel")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class HealthStatus(AbstractModel):
"""实例健康详情。
"""
def __init__(self):
r"""
:param HealthScore: 健康分数,满分100。
:type HealthScore: int
:param HealthLevel: 健康等级,取值包括:"HEALTH" - 健康;"SUB_HEALTH" - 亚健康;"RISK"- 危险;"HIGH_RISK" - 高危。
:type HealthLevel: str
:param ScoreLost: 总扣分分数。
:type ScoreLost: int
:param ScoreDetails: 扣分详情。
注意:此字段可能返回 null,表示取不到有效值。
:type ScoreDetails: list of ScoreDetail
"""
self.HealthScore = None
self.HealthLevel = None
self.ScoreLost = None
self.ScoreDetails = None
def _deserialize(self, params):
self.HealthScore = params.get("HealthScore")
self.HealthLevel = params.get("HealthLevel")
self.ScoreLost = params.get("ScoreLost")
if params.get("ScoreDetails") is not None:
self.ScoreDetails = []
for item in params.get("ScoreDetails"):
obj = ScoreDetail()
obj._deserialize(item)
self.ScoreDetails.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InstanceBasicInfo(AbstractModel):
"""实例基础信息。
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param InstanceName: 实例名称。
:type InstanceName: str
:param Vip: 实例内网IP。
:type Vip: str
:param Vport: 实例内网Port。
:type Vport: int
:param Product: 实例产品。
:type Product: str
:param EngineVersion: 实例引擎版本。
:type EngineVersion: str
"""
self.InstanceId = None
self.InstanceName = None
self.Vip = None
self.Vport = None
self.Product = None
self.EngineVersion = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.InstanceName = params.get("InstanceName")
self.Vip = params.get("Vip")
self.Vport = params.get("Vport")
self.Product = params.get("Product")
self.EngineVersion = params.get("EngineVersion")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InstanceConfs(AbstractModel):
"""实例配置。
"""
def __init__(self):
r"""
:param DailyInspection: 数据库巡检开关, Yes/No。
:type DailyInspection: str
:param OverviewDisplay: 实例概览开关,Yes/No。
:type OverviewDisplay: str
"""
self.DailyInspection = None
self.OverviewDisplay = None
def _deserialize(self, params):
self.DailyInspection = params.get("DailyInspection")
self.OverviewDisplay = params.get("OverviewDisplay")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class InstanceInfo(AbstractModel):
"""查询实例列表,返回实例的相关信息的对象。
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param InstanceName: 实例名称。
:type InstanceName: str
:param Region: 实例所属地域。
:type Region: str
:param HealthScore: 健康得分。
:type HealthScore: int
:param Product: 所属产品。
:type Product: str
:param EventCount: 异常事件数量。
:type EventCount: int
        :param InstanceType: 实例类型:1:MASTER;2:DR;3:RO;4:SDR。
:type InstanceType: int
:param Cpu: 核心数。
:type Cpu: int
:param Memory: 内存,单位MB。
:type Memory: int
:param Volume: 硬盘存储,单位GB。
:type Volume: int
:param EngineVersion: 数据库版本。
:type EngineVersion: str
:param Vip: 内网地址。
:type Vip: str
:param Vport: 内网端口。
:type Vport: int
:param Source: 接入来源。
:type Source: str
:param GroupId: 分组ID。
:type GroupId: str
:param GroupName: 分组组名。
:type GroupName: str
:param Status: 实例状态:0:发货中;1:运行正常;4:销毁中;5:隔离中。
:type Status: int
:param UniqSubnetId: 子网统一ID。
:type UniqSubnetId: str
:param DeployMode: cdb类型。
:type DeployMode: str
:param InitFlag: cdb实例初始化标志:0:未初始化;1:已初始化。
:type InitFlag: int
:param TaskStatus: 任务状态。
:type TaskStatus: int
:param UniqVpcId: 私有网络统一ID。
:type UniqVpcId: str
:param InstanceConf: 实例巡检/概览的状态。
:type InstanceConf: :class:`tencentcloud.dbbrain.v20210527.models.InstanceConfs`
:param DeadlineTime: 资源到期时间。
:type DeadlineTime: str
:param IsSupported: 是否是DBbrain支持的实例。
:type IsSupported: bool
:param SecAuditStatus: 实例安全审计日志开启状态:ON: 安全审计开启;OFF: 未开启安全审计。
:type SecAuditStatus: str
:param AuditPolicyStatus: 实例审计日志开启状态,ALL_AUDIT: 开启全审计;RULE_AUDIT: 开启规则审计;UNBOUND: 未开启审计。
:type AuditPolicyStatus: str
:param AuditRunningStatus: 实例审计日志运行状态:normal: 运行中; paused: 欠费暂停。
:type AuditRunningStatus: str
"""
self.InstanceId = None
self.InstanceName = None
self.Region = None
self.HealthScore = None
self.Product = None
self.EventCount = None
self.InstanceType = None
self.Cpu = None
self.Memory = None
self.Volume = None
self.EngineVersion = None
self.Vip = None
self.Vport = None
self.Source = None
self.GroupId = None
self.GroupName = None
self.Status = None
self.UniqSubnetId = None
self.DeployMode = None
self.InitFlag = None
self.TaskStatus = None
self.UniqVpcId = None
self.InstanceConf = None
self.DeadlineTime = None
self.IsSupported = None
self.SecAuditStatus = None
self.AuditPolicyStatus = None
self.AuditRunningStatus = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.InstanceName = params.get("InstanceName")
self.Region = params.get("Region")
self.HealthScore = params.get("HealthScore")
self.Product = params.get("Product")
self.EventCount = params.get("EventCount")
self.InstanceType = params.get("InstanceType")
self.Cpu = params.get("Cpu")
self.Memory = params.get("Memory")
self.Volume = params.get("Volume")
self.EngineVersion = params.get("EngineVersion")
self.Vip = params.get("Vip")
self.Vport = params.get("Vport")
self.Source = params.get("Source")
self.GroupId = params.get("GroupId")
self.GroupName = params.get("GroupName")
self.Status = params.get("Status")
self.UniqSubnetId = params.get("UniqSubnetId")
self.DeployMode = params.get("DeployMode")
self.InitFlag = params.get("InitFlag")
self.TaskStatus = params.get("TaskStatus")
self.UniqVpcId = params.get("UniqVpcId")
if params.get("InstanceConf") is not None:
self.InstanceConf = InstanceConfs()
self.InstanceConf._deserialize(params.get("InstanceConf"))
self.DeadlineTime = params.get("DeadlineTime")
self.IsSupported = params.get("IsSupported")
self.SecAuditStatus = params.get("SecAuditStatus")
self.AuditPolicyStatus = params.get("AuditPolicyStatus")
self.AuditRunningStatus = params.get("AuditRunningStatus")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class IssueTypeInfo(AbstractModel):
"""指标信息。
"""
def __init__(self):
r"""
        :param IssueType: 指标分类:AVAILABILITY:可用性,MAINTAINABILITY:可维护性,PERFORMANCE:性能,RELIABILITY:可靠性。
:type IssueType: str
:param Events: 异常事件。
:type Events: list of EventInfo
:param TotalCount: 异常事件总数。
:type TotalCount: int
"""
self.IssueType = None
self.Events = None
self.TotalCount = None
def _deserialize(self, params):
self.IssueType = params.get("IssueType")
if params.get("Events") is not None:
self.Events = []
for item in params.get("Events"):
obj = EventInfo()
obj._deserialize(item)
self.Events.append(obj)
self.TotalCount = params.get("TotalCount")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class KillMySqlThreadsRequest(AbstractModel):
"""KillMySqlThreads请求参数结构体
"""
def __init__(self):
r"""
:param InstanceId: 实例ID。
:type InstanceId: str
:param Stage: kill会话任务的阶段,取值包括:"Prepare"-准备阶段,"Commit"-提交阶段。
:type Stage: str
:param Threads: 需要kill的sql会话ID列表,此参数用于Prepare阶段。
:type Threads: list of int
:param SqlExecId: 执行ID,此参数用于Commit阶段。
:type SqlExecId: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL,默认为"mysql"。
:type Product: str
"""
self.InstanceId = None
self.Stage = None
self.Threads = None
self.SqlExecId = None
self.Product = None
def _deserialize(self, params):
self.InstanceId = params.get("InstanceId")
self.Stage = params.get("Stage")
self.Threads = params.get("Threads")
self.SqlExecId = params.get("SqlExecId")
self.Product = params.get("Product")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
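
# Two-stage kill sketch (illustrative comment only), following the Stage
# parameter documented above: a "Prepare" call returns a SqlExecId, which a
# second "Commit" call uses to actually kill the prepared threads. "client" is
# an assumed DbbrainClient and the IDs are placeholders.
#
#   prepare = KillMySqlThreadsRequest()
#   prepare.InstanceId = "cdb-xxxxxxxx"
#   prepare.Stage = "Prepare"
#   prepare.Threads = [1001, 1002]
#   prep_resp = client.KillMySqlThreads(prepare)
#
#   commit = KillMySqlThreadsRequest()
#   commit.InstanceId = "cdb-xxxxxxxx"
#   commit.Stage = "Commit"
#   commit.SqlExecId = prep_resp.SqlExecId
#   client.KillMySqlThreads(commit)
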
class KillMySqlThreadsResponse(AbstractModel):
"""KillMySqlThreads返回参数结构体
"""
def __init__(self):
r"""
:param Threads: kill完成的sql会话ID列表。
:type Threads: list of int
:param SqlExecId: 执行ID, Prepare阶段的任务输出,用于Commit阶段中指定执行kill操作的会话ID。
注意:此字段可能返回 null,表示取不到有效值。
:type SqlExecId: str
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.Threads = None
self.SqlExecId = None
self.RequestId = None
def _deserialize(self, params):
self.Threads = params.get("Threads")
self.SqlExecId = params.get("SqlExecId")
self.RequestId = params.get("RequestId")
class MailConfiguration(AbstractModel):
"""邮件发送配置
"""
def __init__(self):
r"""
:param SendMail: 是否开启邮件发送: 0, 否; 1, 是。
:type SendMail: int
:param Region: 地域配置, 如["ap-guangzhou", "ap-shanghai"]。巡检的邮件发送模版,配置需要发送巡检邮件的地域;订阅的邮件发送模版,配置当前订阅实例的所属地域。
:type Region: list of str
:param HealthStatus: 发送指定的健康等级的报告, 如["HEALTH", "SUB_HEALTH", "RISK", "HIGH_RISK"]。
:type HealthStatus: list of str
:param ContactPerson: 联系人id, 联系人/联系组不能都为空。
:type ContactPerson: list of int
:param ContactGroup: 联系组id, 联系人/联系组不能都为空。
:type ContactGroup: list of int
"""
self.SendMail = None
self.Region = None
self.HealthStatus = None
self.ContactPerson = None
self.ContactGroup = None
def _deserialize(self, params):
self.SendMail = params.get("SendMail")
self.Region = params.get("Region")
self.HealthStatus = params.get("HealthStatus")
self.ContactPerson = params.get("ContactPerson")
self.ContactGroup = params.get("ContactGroup")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ModifyDiagDBInstanceConfRequest(AbstractModel):
"""ModifyDiagDBInstanceConf请求参数结构体
"""
def __init__(self):
r"""
:param InstanceConfs: 实例配置,包括巡检、概览开关等。
:type InstanceConfs: :class:`tencentcloud.dbbrain.v20210527.models.InstanceConfs`
:param Regions: 生效实例地域,取值为"All",代表全地域。
:type Regions: str
:param Product: 服务产品类型,支持值包括: "mysql" - 云数据库 MySQL, "cynosdb" - 云数据库 CynosDB for MySQL。
:type Product: str
:param InstanceIds: 指定更改巡检状态的实例ID。
:type InstanceIds: list of str
"""
self.InstanceConfs = None
self.Regions = None
self.Product = None
self.InstanceIds = None
def _deserialize(self, params):
if params.get("InstanceConfs") is not None:
self.InstanceConfs = InstanceConfs()
self.InstanceConfs._deserialize(params.get("InstanceConfs"))
self.Regions = params.get("Regions")
self.Product = params.get("Product")
self.InstanceIds = params.get("InstanceIds")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
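
# Configuration sketch (illustrative comment only): enabling daily inspection
# for specific instances via the InstanceConfs model defined earlier in this
# module. The instance IDs are placeholders and "client" is an assumed
# DbbrainClient.
#
#   confs = InstanceConfs()
#   confs.DailyInspection = "Yes"
#   req = ModifyDiagDBInstanceConfRequest()
#   req.InstanceConfs = confs
#   req.Regions = "All"
#   req.Product = "mysql"
#   req.InstanceIds = ["cdb-xxxxxxxx", "cdb-yyyyyyyy"]
#   client.ModifyDiagDBInstanceConf(req)
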
class ModifyDiagDBInstanceConfResponse(AbstractModel):
"""ModifyDiagDBInstanceConf返回参数结构体
"""
def __init__(self):
r"""
:param RequestId: 唯一请求 ID,每次请求都会返回。定位问题时需要提供该次请求的 RequestId。
:type RequestId: str
"""
self.RequestId = None
def _deserialize(self, params):
self.RequestId = params.get("RequestId")
class MonitorFloatMetric(AbstractModel):
"""监控数据(浮点型)
"""
def __init__(self):
r"""
:param Metric: 指标名称。
:type Metric: str
:param Unit: 指标单位。
:type Unit: str
:param Values: 指标值。
注意:此字段可能返回 null,表示取不到有效值。
:type Values: list of float
"""
self.Metric = None
self.Unit = None
self.Values = None
def _deserialize(self, params):
self.Metric = params.get("Metric")
self.Unit = params.get("Unit")
self.Values = params.get("Values")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MonitorFloatMetricSeriesData(AbstractModel):
"""单位时间间隔内的监控指标数据(浮点型)
"""
def __init__(self):
r"""
:param Series: 监控指标。
:type Series: list of MonitorFloatMetric
:param Timestamp: 监控指标对应的时间戳。
:type Timestamp: list of int
"""
self.Series = None
self.Timestamp = None
def _deserialize(self, params):
if params.get("Series") is not None:
self.Series = []
for item in params.get("Series"):
obj = MonitorFloatMetric()
obj._deserialize(item)
self.Series.append(obj)
self.Timestamp = params.get("Timestamp")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MonitorMetric(AbstractModel):
"""监控数据
"""
def __init__(self):
r"""
:param Metric: 指标名称。
:type Metric: str
:param Unit: 指标单位。
:type Unit: str
:param Values: 指标值。
注意:此字段可能返回 null,表示取不到有效值。
:type Values: list of float
"""
self.Metric = None
self.Unit = None
self.Values = None
def _deserialize(self, params):
self.Metric = params.get("Metric")
self.Unit = params.get("Unit")
self.Values = params.get("Values")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class MonitorMetricSeriesData(AbstractModel):
"""单位时间间隔内的监控指标数据
"""
def __init__(self):
r"""
:param Series: 监控指标。
:type Series: list of MonitorMetric
:param Timestamp: 监控指标对应的时间戳。
:type Timestamp: list of int
"""
self.Series = None
self.Timestamp = None
def _deserialize(self, params):
if params.get("Series") is not None:
self.Series = []
for item in params.get("Series"):
obj = MonitorMetric()
obj._deserialize(item)
self.Series.append(obj)
self.Timestamp = params.get("Timestamp")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
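
# Reading sketch (illustrative comment only): Series holds one MonitorMetric
# per metric and Timestamp holds the shared time axis, so individual points are
# recovered by zipping each metric's Values against Timestamp. "series_data" is
# assumed to be a populated MonitorMetricSeriesData.
#
#   for metric in series_data.Series or []:
#       for ts, value in zip(series_data.Timestamp or [], metric.Values or []):
#           print(metric.Metric, ts, value, metric.Unit)
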
class MySqlProcess(AbstractModel):
"""关系型数据库线程
"""
def __init__(self):
r"""
:param ID: 线程ID。
:type ID: str
:param User: 线程的操作账号名。
:type User: str
:param Host: 线程的操作主机地址。
:type Host: str
:param DB: 线程的操作数据库。
:type DB: str
:param State: 线程的操作状态。
:type State: str
:param Command: 线程的执行类型。
:type Command: str
:param Time: 线程的操作时长,单位秒。
:type Time: str
:param Info: 线程的操作语句。
:type Info: str
"""
self.ID = None
self.User = None
self.Host = None
self.DB = None
self.State = None
self.Command = None
self.Time = None
self.Info = None
def _deserialize(self, params):
self.ID = params.get("ID")
self.User = params.get("User")
self.Host = params.get("Host")
self.DB = params.get("DB")
self.State = params.get("State")
self.Command = params.get("Command")
self.Time = params.get("Time")
self.Info = params.get("Info")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ProfileInfo(AbstractModel):
"""用户配置的信息
"""
def __init__(self):
r"""
:param Language: 语言, 如"zh"。
:type Language: str
:param MailConfiguration: 邮件模板的内容。
:type MailConfiguration: :class:`tencentcloud.dbbrain.v20210527.models.MailConfiguration`
"""
self.Language = None
self.MailConfiguration = None
def _deserialize(self, params):
self.Language = params.get("Language")
if params.get("MailConfiguration") is not None:
self.MailConfiguration = MailConfiguration()
self.MailConfiguration._deserialize(params.get("MailConfiguration"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SchemaItem(AbstractModel):
"""SchemaItem数组
"""
def __init__(self):
r"""
:param Schema: 数据库名称
:type Schema: str
"""
self.Schema = None
def _deserialize(self, params):
self.Schema = params.get("Schema")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SchemaSpaceData(AbstractModel):
"""库空间统计数据。
"""
def __init__(self):
r"""
:param TableSchema: 库名。
:type TableSchema: str
:param DataLength: 数据空间(MB)。
:type DataLength: float
:param IndexLength: 索引空间(MB)。
:type IndexLength: float
:param DataFree: 碎片空间(MB)。
:type DataFree: float
:param TotalLength: 总使用空间(MB)。
:type TotalLength: float
:param FragRatio: 碎片率(%)。
:type FragRatio: float
:param TableRows: 行数。
:type TableRows: int
:param PhysicalFileSize: 库中所有表对应的独立物理文件大小加和(MB)。
注意:此字段可能返回 null,表示取不到有效值。
:type PhysicalFileSize: float
"""
self.TableSchema = None
self.DataLength = None
self.IndexLength = None
self.DataFree = None
self.TotalLength = None
self.FragRatio = None
self.TableRows = None
self.PhysicalFileSize = None
def _deserialize(self, params):
self.TableSchema = params.get("TableSchema")
self.DataLength = params.get("DataLength")
self.IndexLength = params.get("IndexLength")
self.DataFree = params.get("DataFree")
self.TotalLength = params.get("TotalLength")
self.FragRatio = params.get("FragRatio")
self.TableRows = params.get("TableRows")
self.PhysicalFileSize = params.get("PhysicalFileSize")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SchemaSpaceTimeSeries(AbstractModel):
"""库空间时序数据
"""
def __init__(self):
r"""
:param TableSchema: 库名
:type TableSchema: str
:param SeriesData: 单位时间间隔内的空间指标数据。
:type SeriesData: :class:`tencentcloud.dbbrain.v20210527.models.MonitorMetricSeriesData`
"""
self.TableSchema = None
self.SeriesData = None
def _deserialize(self, params):
self.TableSchema = params.get("TableSchema")
if params.get("SeriesData") is not None:
self.SeriesData = MonitorMetricSeriesData()
self.SeriesData._deserialize(params.get("SeriesData"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ScoreDetail(AbstractModel):
"""扣分详情。
"""
def __init__(self):
r"""
:param IssueType: 扣分项分类,取值包括:可用性、可维护性、性能及可靠性。
:type IssueType: str
:param ScoreLost: 扣分总分。
:type ScoreLost: int
:param ScoreLostMax: 扣分总分上限。
:type ScoreLostMax: int
:param Items: 扣分项列表。
注意:此字段可能返回 null,表示取不到有效值。
:type Items: list of ScoreItem
"""
self.IssueType = None
self.ScoreLost = None
self.ScoreLostMax = None
self.Items = None
def _deserialize(self, params):
self.IssueType = params.get("IssueType")
self.ScoreLost = params.get("ScoreLost")
self.ScoreLostMax = params.get("ScoreLostMax")
if params.get("Items") is not None:
self.Items = []
for item in params.get("Items"):
obj = ScoreItem()
obj._deserialize(item)
self.Items.append(obj)
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class ScoreItem(AbstractModel):
"""诊断扣分项。
"""
def __init__(self):
r"""
:param DiagItem: 异常诊断项名称。
:type DiagItem: str
:param IssueType: 诊断项分类,取值包括:可用性、可维护性、性能及可靠性。
:type IssueType: str
:param TopSeverity: 健康等级,取值包括:信息、提示、告警、严重、致命。
:type TopSeverity: str
:param Count: 该异常诊断项出现次数。
:type Count: int
:param ScoreLost: 扣分分数。
:type ScoreLost: int
"""
self.DiagItem = None
self.IssueType = None
self.TopSeverity = None
self.Count = None
self.ScoreLost = None
def _deserialize(self, params):
self.DiagItem = params.get("DiagItem")
self.IssueType = params.get("IssueType")
self.TopSeverity = params.get("TopSeverity")
self.Count = params.get("Count")
self.ScoreLost = params.get("ScoreLost")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SecLogExportTaskInfo(AbstractModel):
"""安全审计日志导出任务信息
"""
def __init__(self):
r"""
:param AsyncRequestId: 异步任务Id。
:type AsyncRequestId: int
:param StartTime: 任务开始时间。
注意:此字段可能返回 null,表示取不到有效值。
:type StartTime: str
:param EndTime: 任务结束时间。
注意:此字段可能返回 null,表示取不到有效值。
:type EndTime: str
:param CreateTime: 任务创建时间。
:type CreateTime: str
:param Status: 任务状态。
:type Status: str
:param Progress: 任务执行进度。
:type Progress: int
:param LogStartTime: 导出日志开始时间。
注意:此字段可能返回 null,表示取不到有效值。
:type LogStartTime: str
:param LogEndTime: 导出日志结束时间。
注意:此字段可能返回 null,表示取不到有效值。
:type LogEndTime: str
:param TotalSize: 日志文件总大小,单位KB。
注意:此字段可能返回 null,表示取不到有效值。
:type TotalSize: int
:param DangerLevels: 风险等级列表。0 无风险;1 低风险;2 中风险;3 高风险。
注意:此字段可能返回 null,表示取不到有效值。
:type DangerLevels: list of int non-negative
"""
self.AsyncRequestId = None
self.StartTime = None
self.EndTime = None
self.CreateTime = None
self.Status = None
self.Progress = None
self.LogStartTime = None
self.LogEndTime = None
self.TotalSize = None
self.DangerLevels = None
def _deserialize(self, params):
self.AsyncRequestId = params.get("AsyncRequestId")
self.StartTime = params.get("StartTime")
self.EndTime = params.get("EndTime")
self.CreateTime = params.get("CreateTime")
self.Status = params.get("Status")
self.Progress = params.get("Progress")
self.LogStartTime = params.get("LogStartTime")
self.LogEndTime = params.get("LogEndTime")
self.TotalSize = params.get("TotalSize")
self.DangerLevels = params.get("DangerLevels")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SlowLogHost(AbstractModel):
"""慢日志来源地址详情。
"""
def __init__(self):
r"""
:param UserHost: 来源地址。
:type UserHost: str
:param Ratio: 该来源地址的慢日志数目占总数目的比例,单位%。
:type Ratio: float
:param Count: 该来源地址的慢日志数目。
:type Count: int
"""
self.UserHost = None
self.Ratio = None
self.Count = None
def _deserialize(self, params):
self.UserHost = params.get("UserHost")
self.Ratio = params.get("Ratio")
self.Count = params.get("Count")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class SlowLogTopSqlItem(AbstractModel):
"""慢日志TopSql
"""
def __init__(self):
r"""
:param LockTime: sql总锁等待时间,单位秒
:type LockTime: float
:param LockTimeMax: 最大锁等待时间,单位秒
:type LockTimeMax: float
:param LockTimeMin: 最小锁等待时间,单位秒
:type LockTimeMin: float
:param RowsExamined: 总扫描行数
:type RowsExamined: int
:param RowsExaminedMax: 最大扫描行数
:type RowsExaminedMax: int
:param RowsExaminedMin: 最小扫描行数
:type RowsExaminedMin: int
:param QueryTime: 总耗时,单位秒
:type QueryTime: float
:param QueryTimeMax: 最大执行时间,单位秒
:type QueryTimeMax: float
:param QueryTimeMin: 最小执行时间,单位秒
:type QueryTimeMin: float
:param RowsSent: 总返回行数
:type RowsSent: int
:param RowsSentMax: 最大返回行数
:type RowsSentMax: int
:param RowsSentMin: 最小返回行数
:type RowsSentMin: int
:param ExecTimes: 执行次数
:type ExecTimes: int
:param SqlTemplate: sql模板
:type SqlTemplate: str
:param SqlText: 带参数SQL(随机)
:type SqlText: str
:param Schema: 数据库名
:type Schema: str
:param QueryTimeRatio: 总耗时占比,单位%
:type QueryTimeRatio: float
:param LockTimeRatio: sql总锁等待时间占比,单位%
:type LockTimeRatio: float
:param RowsExaminedRatio: 总扫描行数占比,单位%
:type RowsExaminedRatio: float
:param RowsSentRatio: 总返回行数占比,单位%
:type RowsSentRatio: float
:param QueryTimeAvg: 平均执行时间,单位秒
:type QueryTimeAvg: float
:param RowsSentAvg: 平均返回行数
:type RowsSentAvg: float
:param LockTimeAvg: 平均锁等待时间,单位秒
:type LockTimeAvg: float
:param RowsExaminedAvg: 平均扫描行数
:type RowsExaminedAvg: float
:param Md5: SOL模板的MD5值
:type Md5: str
"""
self.LockTime = None
self.LockTimeMax = None
self.LockTimeMin = None
self.RowsExamined = None
self.RowsExaminedMax = None
self.RowsExaminedMin = None
self.QueryTime = None
self.QueryTimeMax = None
self.QueryTimeMin = None
self.RowsSent = None
self.RowsSentMax = None
self.RowsSentMin = None
self.ExecTimes = None
self.SqlTemplate = None
self.SqlText = None
self.Schema = None
self.QueryTimeRatio = None
self.LockTimeRatio = None
self.RowsExaminedRatio = None
self.RowsSentRatio = None
self.QueryTimeAvg = None
self.RowsSentAvg = None
self.LockTimeAvg = None
self.RowsExaminedAvg = None
self.Md5 = None
def _deserialize(self, params):
self.LockTime = params.get("LockTime")
self.LockTimeMax = params.get("LockTimeMax")
self.LockTimeMin = params.get("LockTimeMin")
self.RowsExamined = params.get("RowsExamined")
self.RowsExaminedMax = params.get("RowsExaminedMax")
self.RowsExaminedMin = params.get("RowsExaminedMin")
self.QueryTime = params.get("QueryTime")
self.QueryTimeMax = params.get("QueryTimeMax")
self.QueryTimeMin = params.get("QueryTimeMin")
self.RowsSent = params.get("RowsSent")
self.RowsSentMax = params.get("RowsSentMax")
self.RowsSentMin = params.get("RowsSentMin")
self.ExecTimes = params.get("ExecTimes")
self.SqlTemplate = params.get("SqlTemplate")
self.SqlText = params.get("SqlText")
self.Schema = params.get("Schema")
self.QueryTimeRatio = params.get("QueryTimeRatio")
self.LockTimeRatio = params.get("LockTimeRatio")
self.RowsExaminedRatio = params.get("RowsExaminedRatio")
self.RowsSentRatio = params.get("RowsSentRatio")
self.QueryTimeAvg = params.get("QueryTimeAvg")
self.RowsSentAvg = params.get("RowsSentAvg")
self.LockTimeAvg = params.get("LockTimeAvg")
self.RowsExaminedAvg = params.get("RowsExaminedAvg")
self.Md5 = params.get("Md5")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class TableSpaceData(AbstractModel):
"""库表空间统计数据。
"""
def __init__(self):
r"""
:param TableName: 表名。
:type TableName: str
:param TableSchema: 库名。
:type TableSchema: str
:param Engine: 库表的存储引擎。
:type Engine: str
:param DataLength: 数据空间(MB)。
:type DataLength: float
:param IndexLength: 索引空间(MB)。
:type IndexLength: float
:param DataFree: 碎片空间(MB)。
:type DataFree: float
:param TotalLength: 总使用空间(MB)。
:type TotalLength: float
:param FragRatio: 碎片率(%)。
:type FragRatio: float
:param TableRows: 行数。
:type TableRows: int
:param PhysicalFileSize: 表对应的独立物理文件大小(MB)。
:type PhysicalFileSize: float
"""
self.TableName = None
self.TableSchema = None
self.Engine = None
self.DataLength = None
self.IndexLength = None
self.DataFree = None
self.TotalLength = None
self.FragRatio = None
self.TableRows = None
self.PhysicalFileSize = None
def _deserialize(self, params):
self.TableName = params.get("TableName")
self.TableSchema = params.get("TableSchema")
self.Engine = params.get("Engine")
self.DataLength = params.get("DataLength")
self.IndexLength = params.get("IndexLength")
self.DataFree = params.get("DataFree")
self.TotalLength = params.get("TotalLength")
self.FragRatio = params.get("FragRatio")
self.TableRows = params.get("TableRows")
self.PhysicalFileSize = params.get("PhysicalFileSize")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class TableSpaceTimeSeries(AbstractModel):
"""库表空间时序数据
"""
def __init__(self):
r"""
:param TableName: 表名。
:type TableName: str
:param TableSchema: 库名。
:type TableSchema: str
:param Engine: 库表的存储引擎。
:type Engine: str
:param SeriesData: 单位时间间隔内的空间指标数据。
:type SeriesData: :class:`tencentcloud.dbbrain.v20210527.models.MonitorFloatMetricSeriesData`
"""
self.TableName = None
self.TableSchema = None
self.Engine = None
self.SeriesData = None
def _deserialize(self, params):
self.TableName = params.get("TableName")
self.TableSchema = params.get("TableSchema")
self.Engine = params.get("Engine")
if params.get("SeriesData") is not None:
self.SeriesData = MonitorFloatMetricSeriesData()
self.SeriesData._deserialize(params.get("SeriesData"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class TimeSlice(AbstractModel):
"""单位时间间隔内的慢日志统计
"""
def __init__(self):
r"""
:param Count: 总数
:type Count: int
:param Timestamp: 统计开始时间
:type Timestamp: int
"""
self.Count = None
self.Timestamp = None
def _deserialize(self, params):
self.Count = params.get("Count")
self.Timestamp = params.get("Timestamp")
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
class UserProfile(AbstractModel):
"""用户配置的相关信息,包括邮件配置。
"""
def __init__(self):
r"""
:param ProfileId: 配置的id。
注意:此字段可能返回 null,表示取不到有效值。
:type ProfileId: str
:param ProfileType: 配置类型,支持值包括:"dbScan_mail_configuration" - 数据库巡检邮件配置,"scheduler_mail_configuration" - 定期生成邮件配置。
注意:此字段可能返回 null,表示取不到有效值。
:type ProfileType: str
:param ProfileLevel: 配置级别,支持值包括:"User" - 用户级别,"Instance" - 实例级别,其中数据库巡检邮件配置为用户级别,定期生成邮件配置为实例级别。
注意:此字段可能返回 null,表示取不到有效值。
:type ProfileLevel: str
:param ProfileName: 配置名称。
注意:此字段可能返回 null,表示取不到有效值。
:type ProfileName: str
:param ProfileInfo: 配置详情。
:type ProfileInfo: :class:`tencentcloud.dbbrain.v20210527.models.ProfileInfo`
"""
self.ProfileId = None
self.ProfileType = None
self.ProfileLevel = None
self.ProfileName = None
self.ProfileInfo = None
def _deserialize(self, params):
self.ProfileId = params.get("ProfileId")
self.ProfileType = params.get("ProfileType")
self.ProfileLevel = params.get("ProfileLevel")
self.ProfileName = params.get("ProfileName")
if params.get("ProfileInfo") is not None:
self.ProfileInfo = ProfileInfo()
self.ProfileInfo._deserialize(params.get("ProfileInfo"))
        member_set = set(params.keys())
        for name, value in vars(self).items():
            if name in member_set:
                member_set.remove(name)
        if len(member_set) > 0:
            warnings.warn("%s fields are useless." % ",".join(member_set))
| 31.310427
| 195
| 0.597149
| 10,779
| 105,704
| 5.781705
| 0.088505
| 0.064264
| 0.015532
| 0.016945
| 0.658077
| 0.609153
| 0.559009
| 0.541391
| 0.510069
| 0.499318
| 0
| 0.007231
| 0.293518
| 105,704
| 3,376
| 196
| 31.310427
| 0.827299
| 0.307793
| 0
| 0.727163
| 0
| 0
| 0.080074
| 0.001422
| 0
| 0
| 0
| 0
| 0
| 1
| 0.105769
| false
| 0
| 0.001202
| 0
| 0.159856
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a836399736ccfbfdcec602215566bd6e9ae598c
| 2,201
|
py
|
Python
|
melisa/utils/snowflake.py
|
MelisaDev/melisa
|
53fee10d8c1bf4dd716bc90096c16f096e11bfbf
|
[
"MIT"
] | 5
|
2022-03-11T19:51:28.000Z
|
2022-03-13T16:28:58.000Z
|
melisa/utils/snowflake.py
|
jungledev1/melisa
|
835e4b644e50b5038599ecbd1bfa510a0d3200e9
|
[
"MIT"
] | 2
|
2022-03-19T18:09:39.000Z
|
2022-03-23T12:18:49.000Z
|
melisa/utils/snowflake.py
|
jungledev1/melisa
|
835e4b644e50b5038599ecbd1bfa510a0d3200e9
|
[
"MIT"
] | 1
|
2022-03-23T07:30:04.000Z
|
2022-03-23T07:30:04.000Z
|
# Copyright MelisaDev 2022 - Present
# Full MIT License can be found in `LICENSE.txt` at the project root.
from __future__ import annotations
class Snowflake(int):
"""
Discord utilizes Twitter's snowflake format for uniquely identifiable descriptors (IDs).
These IDs are guaranteed to be unique across all of Discord,
except in some unique scenarios in which child objects share their parent's ID.
Because Snowflake IDs are up to 64 bits in size (e.g. a uint64),
they are always returned as strings in the HTTP API
to prevent integer overflows in some languages.
See Gateway ETF/JSON for more information regarding Gateway encoding.
Read more here: https://discord.com/developers/docs/reference#snowflakes
"""
_MAX_VALUE: int = 9223372036854775807
_MIN_VALUE: int = 0
def __init__(self, _):
super().__init__()
if self < self._MIN_VALUE:
raise ValueError("snowflake value should be greater than or equal to 0.")
if self > self._MAX_VALUE:
raise ValueError(
"snowflake value should be less than or equal to 9223372036854775807."
)
@classmethod
def __factory__(cls, string: str) -> Snowflake:
return cls.from_string(string)
@classmethod
def from_string(cls, string: str):
"""Initialize a new Snowflake from a string.
Parameters
----------
string: :class:`str`
The snowflake as a string.
"""
return Snowflake(int(string))
@property
def timestamp(self) -> int:
"""
Milliseconds since Discord Epoch, the first second of 2015 or 1420070400000.
"""
return self >> 22
@property
def worker_id(self) -> int:
"""Internal worker ID"""
return (self >> 17) % 16
@property
def process_id(self) -> int:
"""Internal process ID"""
return (self >> 12) % 16
@property
def increment(self) -> int:
"""For every ID that is generated on that process, this number is incremented"""
return self % 2048
@property
def unix(self) -> int:
return self.timestamp + 1420070400000
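# Illustrative sketch (not part of the library): it decomposes a snowflake using
# the properties defined above. The literal ID below is only an example value.
if __name__ == "__main__":
    flake = Snowflake.from_string("175928847299117063")
    print(flake.timestamp)   # milliseconds since the Discord epoch
    print(flake.worker_id)   # internal worker ID
    print(flake.process_id)  # internal process ID
    print(flake.increment)   # per-process increment
    print(flake.unix)        # Unix time in milliseconds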
| 30.150685
| 92
| 0.63562
| 272
| 2,201
| 5.036765
| 0.518382
| 0.040146
| 0.014599
| 0.042336
| 0.061314
| 0.061314
| 0.061314
| 0
| 0
| 0
| 0
| 0.058081
| 0.280327
| 2,201
| 72
| 93
| 30.569444
| 0.806818
| 0.431168
| 0
| 0.212121
| 0
| 0
| 0.107747
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.242424
| false
| 0
| 0.030303
| 0.060606
| 0.575758
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a89b2893b587e6d66f6aa207ca89999bce84710
| 846
|
py
|
Python
|
utils/config.py
|
jtr109/Alpha2kindle
|
a411d05cafa9036a732eeb75fa13f68963f254e3
|
[
"MIT"
] | null | null | null |
utils/config.py
|
jtr109/Alpha2kindle
|
a411d05cafa9036a732eeb75fa13f68963f254e3
|
[
"MIT"
] | null | null | null |
utils/config.py
|
jtr109/Alpha2kindle
|
a411d05cafa9036a732eeb75fa13f68963f254e3
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
import os
class BaseConf(object):
HEADERS = {
"User-Agent": "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_11_6) "
"AppleWebKit/537.36 (KHTML, like Gecko) "
"Chrome/55.0.2883.95 "
"Safari/537.36",
"Accept": "text/html,application/xhtml+xml,application/xml;"
"q=0.9,image/webp,*/*;"
"q=0.8",
"Accept-Encoding": "gzip, deflate, sdch, br",
"Accept-Language": "zh-CN,zh;q=0.8,en;q=0.6,zh-TW;q=0.4",
"Cache-Control": "max-age=0",
}
class TestConf(BaseConf):
REDIS_URL = "redis://:{password}@{hostname}:{port}/{db_number}".format(
password=os.environ.get("REDIS_PWD"),
hostname='127.0.0.1',
port=6379,
db_number=0
)
CURCONF = TestConf
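# Illustrative only (not part of the original module): downstream code is assumed
# to import CURCONF and read its settings, e.g. when creating an HTTP session or
# a Redis client.
if __name__ == "__main__":
    print(CURCONF.REDIS_URL)              # redis://:<REDIS_PWD>@127.0.0.1:6379/0
    print(CURCONF.HEADERS["User-Agent"])  # headers are inherited from BaseConf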
| 27.290323
| 75
| 0.51773
| 110
| 846
| 3.927273
| 0.663636
| 0.023148
| 0.013889
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.082631
| 0.299054
| 846
| 30
| 76
| 28.2
| 0.645868
| 0.024823
| 0
| 0
| 0
| 0.045455
| 0.470231
| 0.185905
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.090909
| 0.045455
| 0
| 0.227273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6a8c916961dcdf5b4bdd11f085941afc268401f1
| 771
|
py
|
Python
|
inventory/admin.py
|
shakyasaijal/businessAnalytics
|
9312bae79709387c6eadd50f87f6be85bd52c396
|
[
"BSD-3-Clause"
] | null | null | null |
inventory/admin.py
|
shakyasaijal/businessAnalytics
|
9312bae79709387c6eadd50f87f6be85bd52c396
|
[
"BSD-3-Clause"
] | 8
|
2021-03-30T13:03:11.000Z
|
2022-03-12T00:20:13.000Z
|
inventory/admin.py
|
shakyasaijal/businessAnalytics
|
9312bae79709387c6eadd50f87f6be85bd52c396
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib import admin
from . import models
class SupplierAdmin(admin.ModelAdmin):
list_display = ('supplier_name', 'contact', )
search_fields = ['supplier_name', 'contact', ]
admin.site.register(models.Suppliers, SupplierAdmin)
class InventoryUserAdmin(admin.ModelAdmin):
list_display = ('employee_name', 'user_type')
search_fields = ['employee_name', 'user_type']
list_filter = ("user_type",)
admin.site.register(models.InventoryUser, InventoryUserAdmin)
class ProductsAdmin(admin.ModelAdmin):
list_display = ('name', 'quantity', 'cost_price', 'selling_price',)
search_fields = ['name', 'quantity', 'cost_price', 'selling_price',]
list_filter = ("branch", "supplier",)
admin.site.register(models.Product, ProductsAdmin)
| 32.125
| 72
| 0.731518
| 84
| 771
| 6.488095
| 0.380952
| 0.082569
| 0.104587
| 0.143119
| 0.121101
| 0.121101
| 0
| 0
| 0
| 0
| 0
| 0
| 0.124514
| 771
| 23
| 73
| 33.521739
| 0.807407
| 0
| 0
| 0
| 0
| 0
| 0.229572
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.125
| 0
| 0.8125
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
6a8e7fcaf4ca3d67de4aab013987d7db788188b5
| 252
|
py
|
Python
|
pyqtgraph/examples/template.py
|
secantsquared/pyqtgraph
|
3ef7f5b91639543e43bcd66a84290fb9bc18fc5c
|
[
"MIT"
] | null | null | null |
pyqtgraph/examples/template.py
|
secantsquared/pyqtgraph
|
3ef7f5b91639543e43bcd66a84290fb9bc18fc5c
|
[
"MIT"
] | null | null | null |
pyqtgraph/examples/template.py
|
secantsquared/pyqtgraph
|
3ef7f5b91639543e43bcd66a84290fb9bc18fc5c
|
[
"MIT"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Description of example
"""
import pyqtgraph as pg
from pyqtgraph.Qt import QtCore, QtGui, mkQApp
import numpy as np
app = mkQApp()
# win.setWindowTitle('pyqtgraph example: ____')
if __name__ == '__main__':
pg.exec()
| 15.75
| 47
| 0.68254
| 32
| 252
| 5
| 0.75
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.004808
| 0.174603
| 252
| 15
| 48
| 16.8
| 0.764423
| 0.361111
| 0
| 0
| 0
| 0
| 0.052632
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.5
| 0
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
6a8f3e25920be24fb569cc55eff90ae879efa647
| 73,328
|
py
|
Python
|
ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
|
cas-packone/ambari-chs
|
68033fbd4b810b6642853f2ad9128cbbd4e0cb7b
|
[
"Apache-2.0"
] | 3
|
2019-06-20T11:49:36.000Z
|
2020-12-11T10:44:29.000Z
|
ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
|
cas-packone/ambari-chs
|
68033fbd4b810b6642853f2ad9128cbbd4e0cb7b
|
[
"Apache-2.0"
] | null | null | null |
ambari-server/src/test/python/stacks/2.0.6/common/test_stack_advisor.py
|
cas-packone/ambari-chs
|
68033fbd4b810b6642853f2ad9128cbbd4e0cb7b
|
[
"Apache-2.0"
] | 1
|
2019-03-20T08:36:17.000Z
|
2019-03-20T08:36:17.000Z
|
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import socket
from unittest import TestCase
from mock.mock import patch, MagicMock
class TestHDP206StackAdvisor(TestCase):
def setUp(self):
import imp
import os
testDirectory = os.path.dirname(os.path.abspath(__file__))
stackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/stack_advisor.py')
hdp206StackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/HDP/2.0.6/services/stack_advisor.py')
hdp206StackAdvisorClassName = 'HDP206StackAdvisor'
with open(stackAdvisorPath, 'rb') as fp:
stack_advisor = imp.load_module( 'stack_advisor', fp, stackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE) )
with open(hdp206StackAdvisorPath, 'rb') as fp:
self.stack_advisor_impl = imp.load_module('stack_advisor_impl', fp, hdp206StackAdvisorPath, ('.py', 'rb', imp.PY_SOURCE))
clazz = getattr(self.stack_advisor_impl, hdp206StackAdvisorClassName)
self.stackAdvisor = clazz()
self.maxDiff = None
# substitute method in the instance
self.get_system_min_uid_real = self.stackAdvisor.get_system_min_uid
self.stackAdvisor.get_system_min_uid = self.get_system_min_uid_magic
@patch('__builtin__.open')
@patch('os.path.exists')
def get_system_min_uid_magic(self, exists_mock, open_mock):
class MagicFile(object):
def read(self):
return """
#test line UID_MIN 200
UID_MIN 500
"""
def __exit__(self, exc_type, exc_val, exc_tb):
pass
def __enter__(self):
return self
exists_mock.return_value = True
open_mock.return_value = MagicFile()
return self.get_system_min_uid_real()
def test_recommendationCardinalityALL(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [{"name": "GANGLIA_MONITOR", "cardinality": "ALL", "category": "SLAVE", "is_master": False}]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.recommendComponentLayout(services, hosts)
expectedComponentsHostsMap = {
"GANGLIA_MONITOR": ["host1", "host2"]
}
self.assertHostLayout(expectedComponentsHostsMap, result)
def test_recommendOnAllHosts(self):
""" Recommend on all hosts for cardinality ALL even if the component has been installed in the cluster before """
servicesInfo = [
{
"name": "GANGLIA",
"components": [{"name": "GANGLIA_MONITOR", "cardinality": "ALL", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]}]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.recommendComponentLayout(services, hosts)
expectedComponentsHostsMap = {
"GANGLIA_MONITOR": ["host1", "host2"]
}
self.assertHostLayout(expectedComponentsHostsMap, result)
def test_recommendationIsNotPreferableOnAmbariServer(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [{"name": "GANGLIA_SERVER", "cardinality": "ALL", "category": "MASTER", "is_master": True}]
}
]
services = self.prepareServices(servicesInfo)
localhost = socket.getfqdn()
hosts = self.prepareHosts([localhost, "host2"])
result = self.stackAdvisor.recommendComponentLayout(services, hosts)
expectedComponentsHostsMap = {
"GANGLIA_SERVER": ["host2"]
}
self.assertHostLayout(expectedComponentsHostsMap, result)
def test_validationNamenodeAndSecondaryNamenode2Hosts_noMessagesForSameHost(self):
servicesInfo = [
{
"name": "HDFS",
"components": [
{"name": "NAMENODE", "cardinality": "1-2", "category": "MASTER", "is_master": True, "hostnames": ["host1"]},
{"name": "SECONDARY_NAMENODE", "cardinality": "1", "category": "MASTER", "is_master": True, "hostnames": ["host1"]}]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Host is not used", "level": "ERROR", "host": "host2"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinalityALL(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "display_name": "Ganglia Monitor", "cardinality": "ALL", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]},
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "1-2", "category": "MASTER", "is_master": True, "hostnames": ["host2", "host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Ganglia Monitor component should be installed on all hosts in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinalityExactAmount(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "display_name": "Ganglia Monitor", "cardinality": "2", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]},
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "2", "category": "MASTER", "is_master": True, "hostnames": ["host2", "host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Exactly 2 Ganglia Monitor components should be installed in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinalityAtLeast(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "display_name": "Ganglia Monitor", "cardinality": "1+", "category": "SLAVE", "is_master": False, "hostnames": ["host1"]},
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "3+", "category": "MASTER", "is_master": True, "hostnames": ["host2", "host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "At least 3 Ganglia Server components should be installed in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationWarnMessagesIfLessThanDefault(self):
servicesInfo = [
{
"name": "YARN",
"components": []
}
]
services = self.prepareServices(servicesInfo)
services["configurations"] = {"yarn-site":{"properties":{"yarn.nodemanager.resource.memory-mb": "0",
"yarn.scheduler.minimum-allocation-mb": "str"}}}
hosts = self.prepareHosts([])
result = self.stackAdvisor.validateConfigurations(services, hosts)
expectedItems = [
{"message": "Value is less than the recommended default of 512", "level": "WARN"},
{'message': 'Value should be set for yarn.nodemanager.linux-container-executor.group', 'level': 'ERROR'},
{"message": "Value should be integer", "level": "ERROR"},
{"message": "Value should be set", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationYARNServicecheckQueueName(self):
servicesInfo = [
{
"name": "YARN",
"components": []
}
]
services = self.prepareServices(servicesInfo)
services["configurations"] = {"yarn-env":{"properties":{"service_check.queue.name": "default"}},
"capacity-scheduler":{"properties":{"capacity-scheduler": "yarn.scheduler.capacity.root.queues=ndfqueue\n"}}}
hosts = self.prepareHosts([])
result = self.stackAdvisor.validateConfigurations(services, hosts)
expectedItems = [
{'message': 'Queue is not exist, or not corresponds to existing YARN leaf queue', 'level': 'ERROR'}
]
self.assertValidationResult(expectedItems, result)
services["configurations"]["yarn-env"]["properties"]["service_check.queue.name"] = "ndfqueue"
expectedItems = []
result = self.stackAdvisor.validateConfigurations(services, hosts)
self.assertValidationResult(expectedItems, result)
def test_validationMinMax(self):
configurations = {
"mapred-site": {
"properties": {
"mapreduce.task.io.sort.mb": "4096",
"some_float_value": "0.5",
"no_min_or_max_attribute_property": "STRING_VALUE"
}
}
}
recommendedDefaults = {
"mapred-site": {
"properties": {
"mapreduce.task.io.sort.mb": "2047",
"some_float_value": "0.8",
"no_min_or_max_attribute_property": "STRING_VALUE"
},
"property_attributes": {
'mapreduce.task.io.sort.mb': {'maximum': '2047'},
'some_float_value': {'minimum': '0.8'}
}
}
}
items = []
self.stackAdvisor.validateMinMax(items, recommendedDefaults, configurations)
expectedItems = [
{
'message': 'Value is greater than the recommended maximum of 2047 ',
'level': 'WARN',
'config-type': 'mapred-site',
'config-name': 'mapreduce.task.io.sort.mb',
'type': 'configuration'
},
{
'message': 'Value is less than the recommended minimum of 0.8 ',
'level': 'WARN',
'config-type': 'mapred-site',
'config-name': 'some_float_value',
'type': 'configuration'
}
]
self.assertEquals(expectedItems, items)
def test_validationHostIsNotUsedForNonValuableComponent(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_MONITOR", "cardinality": "ALL", "category": "SLAVE", "is_master": False, "hostnames": ["host1", "host2"]},
{"name": "GANGLIA_SERVER", "cardinality": "1", "category": "MASTER", "is_master": True, "hostnames": ["host2"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Host is not used", "host": "host1", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationCardinality01TwoHostsAssigned(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_SERVER", "display_name": "Ganglia Server", "cardinality": "0-1", "category": "MASTER", "is_master": True, "hostnames": ["host1", "host2"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Between 0 and 1 Ganglia Server components should be installed in cluster.", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_validationHostIsNotUsed(self):
servicesInfo = [
{
"name": "GANGLIA",
"components": [
{"name": "GANGLIA_SERVER", "cardinality": "1", "category": "MASTER", "is_master": True, "hostnames": ["host1"]}
]
}
]
services = self.prepareServices(servicesInfo)
hosts = self.prepareHosts(["host1", "host2"])
result = self.stackAdvisor.validateComponentLayout(services, hosts)
expectedItems = [
{"message": "Host is not used", "host": "host2", "level": "ERROR"}
]
self.assertValidationResult(expectedItems, result)
def test_getConfigurationClusterSummary_withHBaseAnd6gbRam(self):
servicesList = ["HBASE"]
components = []
hosts = {
"items" : [
{
"Hosts" : {
"cpu_count" : 8,
"total_mem" : 6291456,
"disk_info" : [
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"}
]
}
}
]
}
expected = {
"hBaseInstalled": True,
"components": components,
"cpu": 8,
"disk": 8,
"ram": 6,
"reservedRam": 2,
"hbaseRam": 1,
"minContainerSize": 512,
"totalAvailableRam": 3072,
"containers": 6,
"ramPerContainer": 512,
"mapMemory": 512,
"reduceMemory": 512,
"amMemory": 512,
"referenceHost": hosts["items"][0]["Hosts"]
}
# Test - Cluster data with 1 host
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, None)
self.assertEquals(result, expected)
# Test - Cluster data with 2 hosts - pick minimum memory
servicesList.append("YARN")
    services = {"services":
[{"StackServices":
{"service_name" : "YARN",
"service_version" : "2.6.0.2.2"
},
"components":[
{
"StackServiceComponents":{
"advertise_version":"true",
"cardinality":"1+",
"component_category":"SLAVE",
"component_name":"NODEMANAGER",
"custom_commands":[
],
"display_name":"NodeManager",
"is_client":"false",
"is_master":"false",
"service_name":"YARN",
"stack_name":"HDP",
"stack_version":"2.2",
"hostnames":[
"host1",
"host2"
]
},
"dependencies":[
]
}
],
}],
"configurations": {}
}
hosts["items"][0]["Hosts"]["host_name"] = "host1"
hosts["items"].append({
"Hosts": {
"cpu_count" : 4,
"total_mem" : 500000,
"host_name" : "host2",
"disk_info" : [
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"}
]
}
})
expected["referenceHost"] = hosts["items"][1]["Hosts"]
expected["referenceNodeManagerHost"] = hosts["items"][1]["Hosts"]
expected["amMemory"] = 170.66666666666666
expected["containers"] = 3.0
expected["cpu"] = 4
expected["totalAvailableRam"] = 512
expected["mapMemory"] = 170
expected["minContainerSize"] = 256
expected["reduceMemory"] = 170.66666666666666
expected["ram"] = 0
expected["ramPerContainer"] = 170.66666666666666
expected["reservedRam"] = 1
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, services)
self.assertEquals(result, expected)
def test_getConfigurationClusterSummary_withHBaseAnd48gbRam(self):
servicesList = ["HBASE"]
components = []
hosts = {
"items" : [
{
"Hosts" : {
"cpu_count" : 6,
"total_mem" : 50331648,
"disk_info" : [
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"}
]
}
}
]
}
expected = {
"hBaseInstalled": True,
"components": components,
"cpu": 6,
"disk": 6,
"ram": 48,
"reservedRam": 6,
"hbaseRam": 8,
"minContainerSize": 2048,
"totalAvailableRam": 34816,
"containers": 11,
"ramPerContainer": 3072,
"mapMemory": 3072,
"reduceMemory": 3072,
"amMemory": 3072,
"referenceHost": hosts["items"][0]["Hosts"]
}
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, None)
self.assertEquals(result, expected)
def test_recommendStormConfigurations(self):
# no AMS
configurations = {}
services = {
"services": [
],
"configurations": configurations
}
expected = {
"storm-site": {
"properties": {
}
},
}
self.stackAdvisor.recommendStormConfigurations(configurations, None, services, None)
self.assertEquals(configurations, expected)
# with AMS
configurations = {}
services = {
"services": [
{
"StackServices": {
"service_name": "AMBARI_METRICS"
}
}
],
"configurations": configurations
}
expected = {
"storm-site": {
"properties": {
"metrics.reporter.register": "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter"
}
},
}
self.stackAdvisor.recommendStormConfigurations(configurations, None, services, None)
self.assertEquals(configurations, expected)
def test_recommendYARNConfigurations(self):
configurations = {}
services = {"configurations": configurations, "services": []}
clusterData = {
"containers" : 5,
"ramPerContainer": 256
}
expected = {
"yarn-env": {
"properties": {
"min_user_id": "500",
'service_check.queue.name': 'default'
}
},
"yarn-site": {
"properties": {
"yarn.nodemanager.linux-container-executor.group": "hadoop",
"yarn.nodemanager.resource.memory-mb": "1280",
"yarn.scheduler.minimum-allocation-mb": "256",
"yarn.scheduler.maximum-allocation-mb": "1280"
}
}
}
self.stackAdvisor.recommendYARNConfigurations(configurations, clusterData, services, None)
self.assertEquals(configurations, expected)
def test_recommendMapReduce2Configurations_mapMemoryLessThan2560(self):
configurations = {}
clusterData = {
"mapMemory": 567,
"reduceMemory": 345.6666666666666,
"amMemory": 123.54
}
expected = {
"mapred-site": {
"properties": {
'mapreduce.job.queuename': 'default',
"yarn.app.mapreduce.am.resource.mb": "123",
"yarn.app.mapreduce.am.command-opts": "-Xmx99m",
"mapreduce.map.memory.mb": "567",
"mapreduce.reduce.memory.mb": "345",
"mapreduce.map.java.opts": "-Xmx454m",
"mapreduce.reduce.java.opts": "-Xmx277m",
"mapreduce.task.io.sort.mb": "227"
}
}
}
self.stackAdvisor.recommendMapReduce2Configurations(configurations, clusterData, None, None)
self.assertEquals(configurations, expected)
def test_getConfigurationClusterSummary_noHostsWithoutHBase(self):
servicesList = []
components = []
hosts = {
"items" : []
}
result = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, None)
expected = {
"hBaseInstalled": False,
"components": components,
"cpu": 0,
"disk": 0,
"ram": 0,
"reservedRam": 1,
"hbaseRam": 1,
"minContainerSize": 256,
"totalAvailableRam": 512,
"containers": 3,
"ramPerContainer": 170.66666666666666,
"mapMemory": 170,
"reduceMemory": 170.66666666666666,
"amMemory": 170.66666666666666
}
self.assertEquals(result, expected)
def prepareHosts(self, hostsNames):
hosts = { "items": [] }
for hostName in hostsNames:
nextHost = {"Hosts":{"host_name" : hostName}}
hosts["items"].append(nextHost)
return hosts
def prepareServices(self, servicesInfo):
services = { "Versions" : { "stack_name" : "HDP", "stack_version" : "2.0.6" } }
services["services"] = []
for serviceInfo in servicesInfo:
nextService = {"StackServices":{"service_name" : serviceInfo["name"]}}
nextService["components"] = []
for component in serviceInfo["components"]:
nextComponent = {
"StackServiceComponents": {
"component_name": component["name"],
"cardinality": component["cardinality"],
"component_category": component["category"],
"is_master": component["is_master"]
}
}
try:
nextComponent["StackServiceComponents"]["hostnames"] = component["hostnames"]
except KeyError:
nextComponent["StackServiceComponents"]["hostnames"] = []
try:
nextComponent["StackServiceComponents"]["display_name"] = component["display_name"]
except KeyError:
nextComponent["StackServiceComponents"]["display_name"] = component["name"]
nextService["components"].append(nextComponent)
services["services"].append(nextService)
return services
def assertHostLayout(self, componentsHostsMap, recommendation):
blueprintMapping = recommendation["recommendations"]["blueprint"]["host_groups"]
bindings = recommendation["recommendations"]["blueprint_cluster_binding"]["host_groups"]
actualComponentHostsMap = {}
for hostGroup in blueprintMapping:
hostGroupName = hostGroup["name"]
hostsInfos = [binding["hosts"] for binding in bindings if binding["name"] == hostGroupName][0]
hosts = [info["fqdn"] for info in hostsInfos]
for component in hostGroup["components"]:
componentName = component["name"]
try:
actualComponentHostsMap[componentName]
except KeyError, err:
actualComponentHostsMap[componentName] = []
for host in hosts:
if host not in actualComponentHostsMap[componentName]:
actualComponentHostsMap[componentName].append(host)
for componentName in componentsHostsMap.keys():
expectedHosts = componentsHostsMap[componentName]
actualHosts = actualComponentHostsMap[componentName]
self.checkEqual(expectedHosts, actualHosts)
def checkEqual(self, l1, l2):
if not len(l1) == len(l2) or not sorted(l1) == sorted(l2):
raise AssertionError("list1={0}, list2={1}".format(l1, l2))
def assertValidationResult(self, expectedItems, result):
actualItems = []
for item in result["items"]:
next = {"message": item["message"], "level": item["level"]}
try:
next["host"] = item["host"]
except KeyError, err:
pass
actualItems.append(next)
self.checkEqual(expectedItems, actualItems)
def test_recommendHbaseConfigurations(self):
servicesList = ["HBASE"]
configurations = {}
components = []
host_item = {
"Hosts" : {
"cpu_count" : 6,
"total_mem" : 50331648,
"disk_info" : [
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"},
{"mountpoint" : "/"},
{"mountpoint" : "/dev/shm"},
{"mountpoint" : "/vagrant"}
]
}
}
hosts = {
"items" : [host_item for i in range(1, 300)]
}
services = {
"services" : [
],
"configurations": {
"hbase-site": {
"properties": {
"hbase.superuser": "hbase"
}
},
"hbase-env": {
"properties": {
"hbase_user": "hbase123"
}
}
}
}
expected = {
'hbase-site': {
'properties': {
'hbase.superuser': 'hbase123'
}
},
"hbase-env": {
"properties": {
"hbase_master_heapsize": "4096",
"hbase_regionserver_heapsize": "8192",
}
}
}
clusterData = self.stackAdvisor.getConfigurationClusterSummary(servicesList, hosts, components, None)
self.assertEquals(clusterData['hbaseRam'], 8)
self.stackAdvisor.recommendHbaseConfigurations(configurations, clusterData, services, hosts)
self.assertEquals(configurations, expected)
def test_recommendRangerConfigurations(self):
clusterData = {}
    # Recommend for a non-existent DB_FLAVOR with HTTP enabled, HDP-2.3
services = {
"Versions" : {
"stack_version" : "2.3",
},
"services": [
{
"StackServices": {
"service_name": "RANGER",
"service_version": "0.5.0"
},
"components": [
{
"StackServiceComponents": {
"component_name": "RANGER_ADMIN",
"hostnames": ["host1"]
}
}
]
},
{
"StackServices": {
"service_name": "HDFS"
},
"components": [
{
"StackServiceComponents": {
"component_name": "NAMENODE",
"hostnames": ["host1"]
}
}
]
}
],
"configurations": {
"admin-properties": {
"properties": {
"DB_FLAVOR": "NOT_EXISTING",
}
},
"ranger-admin-site": {
"properties": {
"ranger.service.http.port": "7777",
"ranger.service.http.enabled": "true",
}
}
}
}
expected = {
"admin-properties": {
"properties": {
"policymgr_external_url": "http://host1:7777"
}
}
}
recommendedConfigurations = {}
self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
self.assertEquals(recommendedConfigurations, expected, "Test for not existing DB_FLAVOR and http enabled, HDP-2.3")
# Recommend for DB_FLAVOR POSTGRES and https enabled, HDP-2.3
configurations = {
"admin-properties": {
"properties": {
"DB_FLAVOR": "POSTGRES",
}
},
"ranger-admin-site": {
"properties": {
"ranger.service.https.port": "7777",
"ranger.service.http.enabled": "false",
}
}
}
services['configurations'] = configurations
expected = {
"admin-properties": {
"properties": {
"policymgr_external_url": "https://host1:7777"
}
}
}
recommendedConfigurations = {}
self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
self.assertEquals(recommendedConfigurations, expected, "Test for DB_FLAVOR POSTGRES and https enabled, HDP-2.3")
# Recommend for DB_FLAVOR ORACLE and https enabled, HDP-2.2
configurations = {
"admin-properties": {
"properties": {
"DB_FLAVOR": "ORACLE",
}
},
"ranger-site": {
"properties": {
"http.enabled": "false",
"https.service.port": "8888",
}
}
}
services['configurations'] = configurations
expected = {
"admin-properties": {
"properties": {
"policymgr_external_url": "https://host1:8888"
}
},
"ranger-env": {"properties": {}}
}
recommendedConfigurations = {}
services['services'][0]['StackServices']['service_version'] = "0.4.0"
self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
self.assertEquals(recommendedConfigurations, expected, "Test for DB_FLAVOR ORACLE and https enabled, HDP-2.2")
# Test Recommend LDAP values
services["ambari-server-properties"] = {
"ambari.ldap.isConfigured" : "true",
"authentication.ldap.bindAnonymously" : "false",
"authentication.ldap.baseDn" : "dc=apache,dc=org",
"authentication.ldap.groupNamingAttr" : "cn",
"authentication.ldap.primaryUrl" : "c6403.ambari.apache.org:636",
"authentication.ldap.userObjectClass" : "posixAccount",
"authentication.ldap.secondaryUrl" : "c6403.ambari.apache.org:636",
"authentication.ldap.usernameAttribute" : "uid",
"authentication.ldap.dnAttribute" : "dn",
"authentication.ldap.useSSL" : "true",
"authentication.ldap.managerPassword" : "/etc/ambari-server/conf/ldap-password.dat",
"authentication.ldap.groupMembershipAttr" : "memberUid",
"authentication.ldap.groupObjectClass" : "posixGroup",
"authentication.ldap.managerDn" : "uid=hdfs,ou=people,ou=dev,dc=apache,dc=org"
}
services["configurations"] = {}
expected = {
'admin-properties': {
'properties': {
'policymgr_external_url': 'http://host1:6080',
}
},
'ranger-env': {'properties': {}},
'usersync-properties': {
'properties': {
'SYNC_LDAP_URL': 'ldaps://c6403.ambari.apache.org:636',
'SYNC_LDAP_BIND_DN': 'uid=hdfs,ou=people,ou=dev,dc=apache,dc=org',
'SYNC_LDAP_USER_OBJECT_CLASS': 'posixAccount',
'SYNC_LDAP_USER_NAME_ATTRIBUTE': 'uid'
}
}
}
recommendedConfigurations = {}
self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
self.assertEquals(recommendedConfigurations, expected, "Test Recommend LDAP values")
# Test Ranger Audit properties
del services["ambari-server-properties"]
services["configurations"] = {
"core-site": {
"properties": {
"fs.defaultFS": "hdfs://host1:8080",
}
},
"ranger-env": {
"properties": {
"xasecure.audit.destination.db": "true",
"xasecure.audit.destination.hdfs":"false",
"xasecure.audit.destination.hdfs.dir":"hdfs://localhost:8020/ranger/audit/%app-type%/%time:yyyyMMdd%"
}
},
"ranger-hdfs-plugin-properties": {
"properties": {}
}
}
expected = {
'admin-properties': {
'properties': {
'policymgr_external_url': 'http://host1:6080'
}
},
'ranger-hdfs-plugin-properties': {
'properties': {
'XAAUDIT.HDFS.IS_ENABLED': 'false',
'XAAUDIT.HDFS.DESTINATION_DIRECTORY': 'hdfs://host1:8080/ranger/audit/%app-type%/%time:yyyyMMdd%',
'XAAUDIT.DB.IS_ENABLED': 'true'
}
},
'ranger-env': {
'properties': {
'xasecure.audit.destination.hdfs.dir': 'hdfs://host1:8080/ranger/audit/%app-type%/%time:yyyyMMdd%'
}
}
}
recommendedConfigurations = {}
self.stackAdvisor.recommendRangerConfigurations(recommendedConfigurations, clusterData, services, None)
self.assertEquals(recommendedConfigurations, expected, "Test Ranger Audit properties")
def test_recommendHDFSConfigurations(self):
configurations = {
"hadoop-env": {
"properties": {
"hdfs_user": "hdfs",
"proxyuser_group": "users"
}
},
"hive-env": {
"properties": {
"webhcat_user": "webhcat",
"hive_user": "hive"
}
},
"oozie-env": {
"properties": {
"oozie_user": "oozie"
}
},
"falcon-env": {
"properties": {
"falcon_user": "falcon"
}
}
}
hosts = {
"items": [
{
"href": "/api/v1/hosts/host1",
"Hosts": {
"cpu_count": 1,
"host_name": "c6401.ambari.apache.org",
"os_arch": "x86_64",
"os_type": "centos6",
"ph_cpu_count": 1,
"public_host_name": "c6401.ambari.apache.org",
"rack_info": "/default-rack",
"total_mem": 2097152,
"disk_info": [{
"size": '8',
"mountpoint": "/"
}]
}
},
{
"href": "/api/v1/hosts/host2",
"Hosts": {
"cpu_count": 1,
"host_name": "c6402.ambari.apache.org",
"os_arch": "x86_64",
"os_type": "centos6",
"ph_cpu_count": 1,
"public_host_name": "c6402.ambari.apache.org",
"rack_info": "/default-rack",
"total_mem": 1048576,
"disk_info": [{
"size": '8',
"mountpoint": "/"
}]
}
},
]}
services = {
"services": [
{
"StackServices": {
"service_name": "HDFS"
}, "components": []
},
{
"StackServices": {
"service_name": "FALCON"
}, "components": []
},
{
"StackServices": {
"service_name": "HIVE"
}, "components": [{
"href": "/api/v1/stacks/HDP/versions/2.0.6/services/HIVE/components/HIVE_SERVER",
"StackServiceComponents": {
"advertise_version": "true",
"cardinality": "1",
"component_category": "MASTER",
"component_name": "HIVE_SERVER",
"custom_commands": [],
"display_name": "Hive Server",
"is_client": "false",
"is_master": "true",
"service_name": "HIVE",
"stack_name": "HDP",
"stack_version": "2.0.6",
"hostnames": ["c6401.ambari.apache.org","c6402.ambari.apache.org"]
}},
{
"href": "/api/v1/stacks/HDP/versions/2.0.6/services/HIVE/components/WEBHCAT_SERVER",
"StackServiceComponents": {
"advertise_version": "true",
"cardinality": "1",
"component_category": "MASTER",
"component_name": "WEBHCAT_SERVER",
"custom_commands": [],
"display_name": "WebHCat Server",
"is_client": "false",
"is_master": "true",
"service_name": "HIVE",
"stack_name": "HDP",
"stack_version": "2.0.6",
"hostnames": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]
}}]
},
{
"StackServices": {
"service_name": "OOZIE"
}, "components": [{
"href": "/api/v1/stacks/HDP/versions/2.0.6/services/HIVE/components/OOZIE_SERVER",
"StackServiceComponents": {
"advertise_version": "true",
"cardinality": "1",
"component_category": "MASTER",
"component_name": "OOZIE_SERVER",
"custom_commands": [],
"display_name": "Oozie Server",
"is_client": "false",
"is_master": "true",
"service_name": "HIVE",
"stack_name": "HDP",
"stack_version": "2.0.6",
"hostnames": ["c6401.ambari.apache.org", "c6402.ambari.apache.org"]
}, }]
}],
"configurations": configurations,
"ambari-server-properties": {"ambari-server.user":"ambari_user"}
}
clusterData = {
"totalAvailableRam": 2048
}
ambariHostName = socket.getfqdn()
expected = {'oozie-env':
{'properties':
{'oozie_user': 'oozie'}},
'core-site':
{'properties':
{'hadoop.proxyuser.ambari_user.groups': '*',
'hadoop.proxyuser.ambari_user.hosts': ambariHostName,
'hadoop.proxyuser.oozie.groups': '*',
'hadoop.proxyuser.hive.groups': '*',
'hadoop.proxyuser.webhcat.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
'hadoop.proxyuser.falcon.hosts': '*',
'hadoop.proxyuser.webhcat.groups': '*',
'hadoop.proxyuser.hdfs.groups': '*',
'hadoop.proxyuser.hdfs.hosts': '*',
'hadoop.proxyuser.hive.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
'hadoop.proxyuser.oozie.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
'hadoop.proxyuser.falcon.groups': '*'}},
'falcon-env':
{'properties':
{'falcon_user': 'falcon'}},
'hdfs-site':
{'properties':
{'dfs.datanode.data.dir': '/hadoop/hdfs/data',
'dfs.datanode.du.reserved': '1024'}},
'hive-env':
{'properties':
{'hive_user': 'hive',
'webhcat_user': 'webhcat'}},
'hadoop-env':
{'properties':
{'hdfs_user': 'hdfs',
'namenode_heapsize': '1024',
'proxyuser_group': 'users',
'namenode_opt_maxnewsize': '256',
'namenode_opt_newsize': '256'}}}
self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
self.assertEquals(configurations, expected)
configurations["hadoop-env"]["properties"]['hdfs_user'] = "hdfs1"
changedConfigurations = [{"type":"hadoop-env",
"name":"hdfs_user",
"old_value":"hdfs"}]
services["changed-configurations"] = changedConfigurations
services['configurations'] = configurations
expected = {'oozie-env':
{'properties':
{'oozie_user': 'oozie'}},
'core-site': {'properties':
{'hadoop.proxyuser.ambari_user.groups': '*',
'hadoop.proxyuser.ambari_user.hosts': ambariHostName,
'hadoop.proxyuser.oozie.groups': '*',
'hadoop.proxyuser.hive.groups': '*',
'hadoop.proxyuser.hdfs1.groups': '*',
'hadoop.proxyuser.hdfs1.hosts': '*',
'hadoop.proxyuser.webhcat.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
'hadoop.proxyuser.falcon.hosts': '*',
'hadoop.proxyuser.webhcat.groups': '*',
'hadoop.proxyuser.hdfs.groups': '*',
'hadoop.proxyuser.hdfs.hosts': '*',
'hadoop.proxyuser.hive.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
'hadoop.proxyuser.oozie.hosts': 'c6401.ambari.apache.org,c6402.ambari.apache.org',
'hadoop.proxyuser.falcon.groups': '*'},
'property_attributes':
{'hadoop.proxyuser.hdfs.groups': {'delete': 'true'},
'hadoop.proxyuser.hdfs.hosts': {'delete': 'true'}}},
'falcon-env':
{'properties':
{'falcon_user': 'falcon'}},
'hive-env':
{'properties':
{'hive_user': 'hive',
'webhcat_user': 'webhcat'}},
'hdfs-site':
{'properties':
{'dfs.datanode.data.dir': '/hadoop/hdfs/data',
'dfs.datanode.du.reserved': '1024'}},
'hadoop-env':
{'properties':
{'hdfs_user': 'hdfs1',
'namenode_heapsize': '1024',
'proxyuser_group': 'users',
'namenode_opt_maxnewsize': '256',
'namenode_opt_newsize': '256'}}}
self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
self.assertEquals(configurations, expected)
    # Verify dfs.namenode.rpc-address is recommended to be deleted when NameNode HA is enabled
configurations["hdfs-site"]["properties"]['dfs.internal.nameservices'] = "mycluster"
configurations["hdfs-site"]["properties"]['dfs.ha.namenodes.mycluster'] = "nn1,nn2"
services['configurations'] = configurations
expected["hdfs-site"] = {
'properties': {
'dfs.datanode.data.dir': '/hadoop/hdfs/data',
'dfs.datanode.du.reserved': '1024',
'dfs.internal.nameservices': 'mycluster',
'dfs.ha.namenodes.mycluster': 'nn1,nn2'
},
'property_attributes': {
'dfs.namenode.rpc-address': {
'delete': 'true'
}
}
}
self.stackAdvisor.recommendHDFSConfigurations(configurations, clusterData, services, hosts)
self.assertEquals(configurations, expected)
def test_getHostNamesWithComponent(self):
services = {
"services": [
{
"StackServices": {
"service_name": "SERVICE"
},
"components": [
{
"StackServiceComponents": {
"component_name": "COMPONENT",
"hostnames": ["host1","host2","host3"]
}
}
]
}
],
"configurations": {}
}
result = self.stackAdvisor.getHostNamesWithComponent("SERVICE","COMPONENT", services)
expected = ["host1","host2","host3"]
self.assertEquals(result, expected)
def test_getZKHostPortString(self):
configurations = {
"zoo.cfg": {
"properties": {
'clientPort': "2183"
}
}
}
services = {
"services": [
{
"StackServices": {
"service_name": "ZOOKEEPER"
},
"components": [
{
"StackServiceComponents": {
"component_name": "ZOOKEEPER_SERVER",
"hostnames": ["zk.host1","zk.host2","zk.host3"]
}
}, {
"StackServiceComponents": {
"component_name": "ZOOKEEPER_CLIENT",
"hostnames": ["host1"]
}
}
]
}
],
"configurations": configurations
}
result = self.stackAdvisor.getZKHostPortString(services)
expected = "zk.host1:2183,zk.host2:2183,zk.host3:2183"
self.assertEquals(result, expected)
def test_validateHDFSConfigurations(self):
configurations = {}
services = ''
hosts = ''
#Default configuration
recommendedDefaults = {'dfs.datanode.du.reserved': '1024'}
properties = {'dfs.datanode.du.reserved': '1024'}
res = self.stackAdvisor.validateHDFSConfigurations(properties,
recommendedDefaults, configurations, services, hosts)
self.assertFalse(res)
    # Value is less than expected
recommendedDefaults = {'dfs.datanode.du.reserved': '1024'}
properties = {'dfs.datanode.du.reserved': '512'}
res = self.stackAdvisor.validateHDFSConfigurations(properties,
recommendedDefaults, configurations, services, hosts)
self.assertTrue(res)
    # Value is bigger than expected
recommendedDefaults = {'dfs.datanode.du.reserved': '1024'}
properties = {'dfs.datanode.du.reserved': '2048'}
res = self.stackAdvisor.validateHDFSConfigurations(properties,
recommendedDefaults, configurations, services, hosts)
self.assertFalse(res)
def test_validateHDFSConfigurationsEnv(self):
configurations = {}
# 1) ok: namenode_heapsize > recommended
recommendedDefaults = {'namenode_heapsize': '1024',
'namenode_opt_newsize' : '256',
'namenode_opt_maxnewsize' : '256'}
properties = {'namenode_heapsize': '2048',
'namenode_opt_newsize' : '300',
'namenode_opt_maxnewsize' : '300'}
res_expected = []
res = self.stackAdvisor.validateHDFSConfigurationsEnv(properties, recommendedDefaults, configurations, '', '')
self.assertEquals(res, res_expected)
# 2) fail: namenode_heapsize, namenode_opt_maxnewsize < recommended
properties['namenode_heapsize'] = '1022'
properties['namenode_opt_maxnewsize'] = '255'
res_expected = [{'config-type': 'hadoop-env',
'message': 'Value is less than the recommended default of 1024',
'type': 'configuration',
'config-name': 'namenode_heapsize',
'level': 'WARN'},
{'config-name': 'namenode_opt_maxnewsize',
'config-type': 'hadoop-env',
'level': 'WARN',
'message': 'Value is less than the recommended default of 256',
'type': 'configuration'}]
res = self.stackAdvisor.validateHDFSConfigurationsEnv(properties, recommendedDefaults, configurations, '', '')
self.assertEquals(res, res_expected)
def test_validateAmsHbaseSiteConfigurations(self):
configurations = {
"hdfs-site": {
"properties": {
'dfs.datanode.data.dir': "/hadoop/data"
}
},
"core-site": {
"properties": {
"fs.defaultFS": "hdfs://c6401.ambari.apache.org:8020"
}
},
"ams-site": {
"properties": {
"timeline.metrics.service.operation.mode": "embedded"
}
}
}
recommendedDefaults = {
'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
'hbase.cluster.distributed': 'false'
}
properties = {
'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
'hbase.tmp.dir' : '/var/lib/ambari-metrics-collector/hbase',
'hbase.cluster.distributed': 'false'
}
host = {
"href" : "/api/v1/hosts/host1",
"Hosts" : {
"cpu_count" : 1,
"host_name" : "host1",
"os_arch" : "x86_64",
"os_type" : "centos6",
"ph_cpu_count" : 1,
"public_host_name" : "host1",
"rack_info" : "/default-rack",
"total_mem" : 2097152,
"disk_info": [
{
"available": str(15<<30), # 15 GB
"type": "ext4",
"mountpoint": "/"
}
]
}
}
hosts = {
"items" : [
host
]
}
services = {
"services": [
{
"StackServices": {
"service_name": "AMBARI_METRICS"
},
"components": [
{
"StackServiceComponents": {
"component_name": "METRICS_COLLECTOR",
"hostnames": ["host1"]
}
}, {
"StackServiceComponents": {
"component_name": "METRICS_MONITOR",
"hostnames": ["host1"]
}
}
]
},
{
"StackServices": {
"service_name": "HDFS"
},
"components": [
{
"StackServiceComponents": {
"component_name": "DATANODE",
"hostnames": ["host1"]
}
}
]
}
],
"configurations": configurations
}
# only 1 partition, enough disk space, no warnings
res = self.stackAdvisor.validateAmsHbaseSiteConfigurations(properties, recommendedDefaults, configurations, services, hosts)
expected = []
self.assertEquals(res, expected)
    # 1 partition, not enough disk space
host['Hosts']['disk_info'] = [
{
"available" : '1',
"type" : "ext4",
"mountpoint" : "/"
}
]
res = self.stackAdvisor.validateAmsHbaseSiteConfigurations(properties, recommendedDefaults, configurations, services, hosts)
expected = [
{'config-name': 'hbase.rootdir',
'config-type': 'ams-hbase-site',
'level': 'WARN',
'message': 'Ambari Metrics disk space requirements not met. '
'\nRecommended disk space for partition / is 10G',
'type': 'configuration'
}
]
self.assertEquals(res, expected)
# 2 partitions
host['Hosts']['disk_info'] = [
{
"available": str(15<<30), # 15 GB
"type" : "ext4",
"mountpoint" : "/grid/0"
},
{
"available" : str(15<<30), # 15 GB
"type" : "ext4",
"mountpoint" : "/"
}
]
recommendedDefaults = {
'hbase.rootdir': 'file:///grid/0/var/lib/ambari-metrics-collector/hbase',
'hbase.tmp.dir': '/var/lib/ambari-metrics-collector/hbase',
'hbase.cluster.distributed': 'false'
}
properties = {
'hbase.rootdir': 'file:///grid/0/var/lib/ambari-metrics-collector/hbase',
'hbase.tmp.dir' : '/var/lib/ambari-metrics-collector/hbase',
'hbase.cluster.distributed': 'false'
}
res = self.stackAdvisor.validateAmsHbaseSiteConfigurations(properties, recommendedDefaults, configurations, services, hosts)
expected = []
self.assertEquals(res, expected)
# dfs.dir & hbase.rootdir crosscheck + root partition + hbase.rootdir == hbase.tmp.dir warnings
properties = {
'hbase.rootdir': 'file:///var/lib/ambari-metrics-collector/hbase',
'hbase.tmp.dir' : '/var/lib/ambari-metrics-collector/hbase',
'hbase.cluster.distributed': 'false'
}
res = self.stackAdvisor.validateAmsHbaseSiteConfigurations(properties, recommendedDefaults, configurations, services, hosts)
expected = [
{
'config-name': 'hbase.rootdir',
'config-type': 'ams-hbase-site',
'level': 'WARN',
'message': 'It is not recommended to use root partition for hbase.rootdir',
'type': 'configuration'
},
{
'config-name': 'hbase.tmp.dir',
'config-type': 'ams-hbase-site',
'level': 'WARN',
'message': 'Consider not using / partition for storing metrics temporary data. '
'/ partition is already used as hbase.rootdir to store metrics data',
'type': 'configuration'
},
{
'config-name': 'hbase.rootdir',
'config-type': 'ams-hbase-site',
'level': 'WARN',
'message': 'Consider not using / partition for storing metrics data. '
'/ is already used by datanode to store HDFS data',
'type': 'configuration'
}
]
self.assertEquals(res, expected)
# incorrect hbase.rootdir in distributed mode
properties = {
'hbase.rootdir': 'file:///grid/0/var/lib/ambari-metrics-collector/hbase',
'hbase.tmp.dir' : '/var/lib/ambari-metrics-collector/hbase',
'hbase.cluster.distributed': 'false'
}
configurations['ams-site']['properties']['timeline.metrics.service.operation.mode'] = 'distributed'
res = self.stackAdvisor.validateAmsHbaseSiteConfigurations(properties, recommendedDefaults, configurations, services, hosts)
expected = [
{
'config-name': 'hbase.rootdir',
'config-type': 'ams-hbase-site',
'level': 'WARN',
'message': 'In distributed mode hbase.rootdir should point to HDFS.',
'type': 'configuration'
},
{
'config-name': 'hbase.cluster.distributed',
'config-type': 'ams-hbase-site',
'level': 'ERROR',
'message': 'hbase.cluster.distributed property should be set to true for distributed mode',
'type': 'configuration'
}
]
self.assertEquals(res, expected)
def test_validateStormSiteConfigurations(self):
configurations = {
"storm-site": {
"properties": {
'metrics.reporter.register': "org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter"
}
}
}
recommendedDefaults = {
'metrics.reporter.register': 'org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter',
}
properties = {
'metrics.reporter.register': 'org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter',
}
services = {
"services": [
{
"StackServices": {
"service_name": "AMBARI_METRICS"
}
}
],
"configurations": configurations
}
# positive
res = self.stackAdvisor.validateStormConfigurations(properties, recommendedDefaults, configurations, services, None)
expected = []
self.assertEquals(res, expected)
properties['metrics.reporter.register'] = ''
res = self.stackAdvisor.validateStormConfigurations(properties, recommendedDefaults, configurations, services, None)
expected = [
{'config-name': 'metrics.reporter.register',
'config-type': 'storm-site',
'level': 'WARN',
'message': 'Should be set to org.apache.hadoop.metrics2.sink.storm.StormTimelineMetricsReporter '
'to report the metrics to Ambari Metrics service.',
'type': 'configuration'
}
]
self.assertEquals(res, expected)
def test_getHostsWithComponent(self):
services = {"services":
[{"StackServices":
{"service_name" : "HDFS",
"service_version" : "2.6.0.2.2"
},
"components":[
{
"href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/DATANODE",
"StackServiceComponents":{
"advertise_version":"true",
"cardinality":"1+",
"component_category":"SLAVE",
"component_name":"DATANODE",
"custom_commands":[
],
"display_name":"DataNode",
"is_client":"false",
"is_master":"false",
"service_name":"HDFS",
"stack_name":"HDP",
"stack_version":"2.2",
"hostnames":[
"host1",
"host2"
]
},
"dependencies":[
]
},
{
"href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/JOURNALNODE",
"StackServiceComponents":{
"advertise_version":"true",
"cardinality":"0+",
"component_category":"SLAVE",
"component_name":"JOURNALNODE",
"custom_commands":[
],
"display_name":"JournalNode",
"is_client":"false",
"is_master":"false",
"service_name":"HDFS",
"stack_name":"HDP",
"stack_version":"2.2",
"hostnames":[
"host1"
]
},
"dependencies":[
{
"href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/JOURNALNODE/dependencies/HDFS_CLIENT",
"Dependencies":{
"component_name":"HDFS_CLIENT",
"dependent_component_name":"JOURNALNODE",
"dependent_service_name":"HDFS",
"stack_name":"HDP",
"stack_version":"2.2"
}
}
]
},
{
"href":"/api/v1/stacks/HDP/versions/2.2/services/HDFS/components/NAMENODE",
"StackServiceComponents":{
"advertise_version":"true",
"cardinality":"1-2",
"component_category":"MASTER",
"component_name":"NAMENODE",
"custom_commands":[
"DECOMMISSION",
"REBALANCEHDFS"
],
"display_name":"NameNode",
"is_client":"false",
"is_master":"true",
"service_name":"HDFS",
"stack_name":"HDP",
"stack_version":"2.2",
"hostnames":[
"host2"
]
},
"dependencies":[
]
},
],
}],
"configurations": {}
}
hosts = {
"items" : [
{
"href" : "/api/v1/hosts/host1",
"Hosts" : {
"cpu_count" : 1,
"host_name" : "host1",
"os_arch" : "x86_64",
"os_type" : "centos6",
"ph_cpu_count" : 1,
"public_host_name" : "host1",
"rack_info" : "/default-rack",
"total_mem" : 2097152
}
},
{
"href" : "/api/v1/hosts/host2",
"Hosts" : {
"cpu_count" : 1,
"host_name" : "host2",
"os_arch" : "x86_64",
"os_type" : "centos6",
"ph_cpu_count" : 1,
"public_host_name" : "host2",
"rack_info" : "/default-rack",
"total_mem" : 1048576
}
},
]
}
datanodes = self.stackAdvisor.getHostsWithComponent("HDFS", "DATANODE", services, hosts)
self.assertEquals(len(datanodes), 2)
self.assertEquals(datanodes, hosts["items"])
datanode = self.stackAdvisor.getHostWithComponent("HDFS", "DATANODE", services, hosts)
self.assertEquals(datanode, hosts["items"][0])
namenodes = self.stackAdvisor.getHostsWithComponent("HDFS", "NAMENODE", services, hosts)
self.assertEquals(len(namenodes), 1)
# [host2]
self.assertEquals(namenodes, [hosts["items"][1]])
namenode = self.stackAdvisor.getHostWithComponent("HDFS", "NAMENODE", services, hosts)
# host2
self.assertEquals(namenode, hosts["items"][1])
# not installed
nodemanager = self.stackAdvisor.getHostWithComponent("YARN", "NODEMANAGER", services, hosts)
self.assertEquals(nodemanager, None)
# unknown component
unknown_component = self.stackAdvisor.getHostWithComponent("YARN", "UNKNOWN", services, hosts)
self.assertEquals(unknown_component, None)
# unknown service
unknown_component = self.stackAdvisor.getHostWithComponent("UNKNOWN", "NODEMANAGER", services, hosts)
self.assertEquals(unknown_component, None)
def test_mergeValidators(self):
childValidators = {
"HDFS": {"hdfs-site": "validateHDFSConfigurations2.3"},
"HIVE": {"hiveserver2-site": "validateHiveServer2Configurations2.3"},
"HBASE": {"hbase-site": "validateHBASEConfigurations2.3",
"newconf": "new2.3"},
"NEWSERVICE" : {"newserviceconf": "abc2.3"}
}
parentValidators = {
"HDFS": {"hdfs-site": "validateHDFSConfigurations2.2",
"hadoop-env": "validateHDFSConfigurationsEnv2.2"},
"YARN": {"yarn-env": "validateYARNEnvConfigurations2.2"},
"HIVE": {"hiveserver2-site": "validateHiveServer2Configurations2.2",
"hive-site": "validateHiveConfigurations2.2",
"hive-env": "validateHiveConfigurationsEnv2.2"},
"HBASE": {"hbase-site": "validateHBASEConfigurations2.2",
"hbase-env": "validateHBASEEnvConfigurations2.2"},
"MAPREDUCE2": {"mapred-site": "validateMapReduce2Configurations2.2"},
"TEZ": {"tez-site": "validateTezConfigurations2.2"}
}
expected = {
"HDFS": {"hdfs-site": "validateHDFSConfigurations2.3",
"hadoop-env": "validateHDFSConfigurationsEnv2.2"},
"YARN": {"yarn-env": "validateYARNEnvConfigurations2.2"},
"HIVE": {"hiveserver2-site": "validateHiveServer2Configurations2.3",
"hive-site": "validateHiveConfigurations2.2",
"hive-env": "validateHiveConfigurationsEnv2.2"},
"HBASE": {"hbase-site": "validateHBASEConfigurations2.3",
"hbase-env": "validateHBASEEnvConfigurations2.2",
"newconf": "new2.3"},
"MAPREDUCE2": {"mapred-site": "validateMapReduce2Configurations2.2"},
"TEZ": {"tez-site": "validateTezConfigurations2.2"},
"NEWSERVICE" : {"newserviceconf": "abc2.3"}
}
self.stackAdvisor.mergeValidators(parentValidators, childValidators)
self.assertEquals(expected, parentValidators)
def test_getProperMountPoint(self):
hostInfo = None
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
hostInfo = {"some_key": []}
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
hostInfo["disk_info"] = []
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# root mountpoint with low space available
hostInfo["disk_info"].append(
{
"available" : "1",
"type" : "ext4",
"mountpoint" : "/"
}
)
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# tmpfs with more space available
hostInfo["disk_info"].append(
{
"available" : "2",
"type" : "tmpfs",
"mountpoint" : "/dev/shm"
}
)
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# /boot with more space available
hostInfo["disk_info"].append(
{
"available" : "3",
"type" : "tmpfs",
"mountpoint" : "/boot/grub"
}
)
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# /mnt/external_hdd with more space available
hostInfo["disk_info"].append(
{
"available" : "4",
"type" : "tmpfs",
"mountpoint" : "/mnt/external_hdd"
}
)
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# virtualbox fs with more space available
hostInfo["disk_info"].append(
{
"available" : "5",
"type" : "vboxsf",
"mountpoint" : "/vagrant"
}
)
self.assertEquals(["/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# proper mountpoint with more space available
hostInfo["disk_info"].append(
{
"available" : "6",
"type" : "ext4",
"mountpoint" : "/grid/0"
}
)
self.assertEquals(["/grid/0", "/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
# proper mountpoint with more space available
hostInfo["disk_info"].append(
{
"available" : "7",
"type" : "ext4",
"mountpoint" : "/grid/1"
}
)
self.assertEquals(["/grid/1", "/grid/0", "/"], self.stackAdvisor.getPreferredMountPoints(hostInfo))
def test_validateNonRootFs(self):
hostInfo = {"disk_info": [
{
"available" : "2",
"type" : "ext4",
"mountpoint" : "/"
}
]}
properties = {"property1": "file:///var/dir"}
recommendedDefaults = {"property1": "file:///var/dir"}
# only / mountpoint - no warning
self.assertTrue(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo) == None)
# More preferable /grid/0 mountpoint - warning
hostInfo["disk_info"].append(
{
"available" : "3",
"type" : "ext4",
"mountpoint" : "/grid/0"
}
)
recommendedDefaults = {"property1": "file:///grid/0/var/dir"}
warn = self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo)
self.assertTrue(warn != None)
self.assertEquals({'message': 'It is not recommended to use root partition for property1', 'level': 'WARN'}, warn)
# /var mountpoint set by the user, which is non-root but not preferable - no warning
hostInfo["disk_info"].append(
{
"available" : "1",
"type" : "ext4",
"mountpoint" : "/var"
}
)
self.assertTrue(self.stackAdvisor.validatorNotRootFs(properties, recommendedDefaults, 'property1', hostInfo) == None)
def test_validatorEnoughDiskSpace(self):
requiredDiskSpace = 1048576
errorMsg = "Ambari Metrics disk space requirements not met. \n" \
"Recommended disk space for partition / is 1G"
# local FS, enough space
hostInfo = {"disk_info": [
{
"available" : "1048578",
"type" : "ext4",
"mountpoint" : "/"
}
]}
properties = {"property1": "file:///var/dir"}
self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, requiredDiskSpace) == None)
# local FS, not enough space
hostInfo = {"disk_info": [
{
"available" : "1",
"type" : "ext4",
"mountpoint" : "/"
}
]}
warn = self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, requiredDiskSpace)
self.assertTrue(warn != None)
self.assertEquals({'message': errorMsg, 'level': 'WARN'}, warn)
# non-local FS, HDFS
properties = {"property1": "hdfs://h1"}
self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, requiredDiskSpace) == None)
# non-local FS, WASB
properties = {"property1": "wasb://h1"}
self.assertTrue(self.stackAdvisor.validatorEnoughDiskSpace(properties, 'property1', hostInfo, requiredDiskSpace) == None)
def test_round_to_n(self):
self.assertEquals(self.stack_advisor_impl.round_to_n(0), 0)
self.assertEquals(self.stack_advisor_impl.round_to_n(1000), 1024)
self.assertEquals(self.stack_advisor_impl.round_to_n(2000), 2048)
self.assertEquals(self.stack_advisor_impl.round_to_n(4097), 4096)
def test_getMountPointForDir(self):
self.assertEquals(self.stack_advisor_impl.getMountPointForDir("/var/log", ["/"]), "/")
self.assertEquals(self.stack_advisor_impl.getMountPointForDir("/var/log", ["/var", "/"]), "/var")
self.assertEquals(self.stack_advisor_impl.getMountPointForDir("file:///var/log", ["/var", "/"]), "/var")
self.assertEquals(self.stack_advisor_impl.getMountPointForDir("hdfs:///hdfs_path", ["/var", "/"]), None)
self.assertEquals(self.stack_advisor_impl.getMountPointForDir("relative/path", ["/var", "/"]), None)
def test_getValidatorEqualsToRecommendedItem(self):
properties = {"property1": "value1"}
recommendedDefaults = {"property1": "value1"}
self.assertEquals(self.stackAdvisor.validatorEqualsToRecommendedItem(properties, recommendedDefaults, "property1"), None)
properties = {"property1": "value1"}
recommendedDefaults = {"property1": "value2"}
expected = {'message': 'It is recommended to set value value2 for property property1', 'level': 'WARN'}
self.assertEquals(self.stackAdvisor.validatorEqualsToRecommendedItem(properties, recommendedDefaults, "property1"), expected)
properties = {}
recommendedDefaults = {"property1": "value2"}
expected = {'level': 'ERROR', 'message': 'Value should be set for property1'}
self.assertEquals(self.stackAdvisor.validatorEqualsToRecommendedItem(properties, recommendedDefaults, "property1"), expected)
properties = {"property1": "value1"}
recommendedDefaults = {}
expected = {'level': 'ERROR', 'message': 'Value should be recommended for property1'}
self.assertEquals(self.stackAdvisor.validatorEqualsToRecommendedItem(properties, recommendedDefaults, "property1"), expected)
def test_getServicesSiteProperties(self):
import imp, os
testDirectory = os.path.dirname(os.path.abspath(__file__))
hdp206StackAdvisorPath = os.path.join(testDirectory, '../../../../../main/resources/stacks/HDP/2.0.6/services/stack_advisor.py')
stack_advisor = imp.load_source('stack_advisor', hdp206StackAdvisorPath)
services = {
"services": [
{
"StackServices": {
"service_name": "RANGER"
},
"components": [
{
"StackServiceComponents": {
"component_name": "RANGER_ADMIN",
"hostnames": ["host1"]
}
}
]
},
],
"configurations": {
"admin-properties": {
"properties": {
"DB_FLAVOR": "NOT_EXISTING",
}
},
"ranger-admin-site": {
"properties": {
"ranger.service.http.port": "7777",
"ranger.service.http.enabled": "true",
}
}
}
}
expected = {
"ranger.service.http.port": "7777",
"ranger.service.http.enabled": "true",
}
siteProperties = stack_advisor.getServicesSiteProperties(services, "ranger-admin-site")
self.assertEquals(siteProperties, expected)
def test_createComponentLayoutRecommendations_addService_1freeHost(self):
"""
Test that already installed slaves are not added to any free hosts (hosts that do not yet have any component installed)
as part of the recommendation received during an Add Service operation.
For already installed services, the recommendation for installed components should match the existing layout.
"""
services = {
"services" : [
{
"StackServices" : {
"service_name" : "HDFS"
},
"components" : [ {
"StackServiceComponents" : {
"cardinality" : "1+",
"component_category" : "SLAVE",
"component_name" : "DATANODE",
"hostnames" : [ "c6401.ambari.apache.org" ]
}
} ]
} ]
}
hosts = self.prepareHosts(["c6401.ambari.apache.org", "c6402.ambari.apache.org"])
recommendations = self.stackAdvisor.createComponentLayoutRecommendations(services, hosts)
"""
Recommendation received should be as below:
{
'blueprint': {
'host_groups': [{
'name': 'host-group-1',
'components': []
}, {
'name': 'host-group-2',
'components': [{
'name': 'DATANODE'
}]
}]
},
'blueprint_cluster_binding': {
'host_groups': [{
'hosts': [{
'fqdn': 'c6402.ambari.apache.org'
}],
'name': 'host-group-1'
}, {
'hosts': [{
'fqdn': 'c6401.ambari.apache.org'
}],
'name': 'host-group-2'
}]
}
}
"""
# Assert that the list is empty for host-group-1
self.assertFalse(recommendations['blueprint']['host_groups'][0]['components'])
# Assert that DATANODE is placed on host-group-2
self.assertEquals(recommendations['blueprint']['host_groups'][1]['components'][0]['name'], 'DATANODE')
| 36.13997
| 166
| 0.551836
| 5,880
| 73,328
| 6.787245
| 0.115476
| 0.031672
| 0.011652
| 0.007517
| 0.639555
| 0.586584
| 0.534165
| 0.493473
| 0.45496
| 0.410509
| 0
| 0.023864
| 0.307972
| 73,328
| 2,028
| 167
| 36.157791
| 0.762597
| 0.021179
| 0
| 0.463305
| 0
| 0.003922
| 0.333962
| 0.112355
| 0
| 0
| 0
| 0
| 0.052661
| 0
| null | null | 0.001681
| 0.003361
| null | null | 0.002801
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a8fddf8511ca7d429d8644119f475536d5dae17
| 2,486
|
py
|
Python
|
main.py
|
ThomasDLi/simple-photo-editor
|
f8b3f1025155e2542b93b94c12d607b9b5e45731
|
[
"MIT"
] | 1
|
2021-05-21T19:21:26.000Z
|
2021-05-21T19:21:26.000Z
|
main.py
|
ThomasDLi/simple-photo-editor
|
f8b3f1025155e2542b93b94c12d607b9b5e45731
|
[
"MIT"
] | null | null | null |
main.py
|
ThomasDLi/simple-photo-editor
|
f8b3f1025155e2542b93b94c12d607b9b5e45731
|
[
"MIT"
] | null | null | null |
from PIL import Image, ImageEnhance
user_account_name = "Thomas.Li26"
def main():
mode = input("Specify image editing mode. Type DEEPFRY, STRETCH, BRIGHTNESS, SHARPEN, or INVERT: ")
if mode == "DEEPFRY":
DEEPFRY()
if mode == "STRETCH":
STRETCH()
if mode == "INVERT":
INVERT()
if mode == "BRIGHTNESS":
BRIGHTNESS()
if mode == "SHARPEN":
SHARPEN()
def DEEPFRY():
img = input("Insert the name of an image found in the Downloads folder (for example: Image.png): ")
im = Image.open(r"C:\Users\{}\Downloads\{}".format(user_account_name, img))
enhancer = ImageEnhance.Contrast(im)
factor = float(input("Specify deepfry amount (0-100): "))
im_output = enhancer.enhance(factor)
im_output.save('more-contrast-image.png')
im_output.show()
def STRETCH():
img = input("Insert the name of an image found in the Downloads folder (for example: Image.png): ")
im = Image.open(r"C:\Users\{}\Downloads\{}".format(user_account_name, img))
factor = int(input("Specify width: "))
factor2 = int(input("Specify height: "))
im_output = im.resize((factor,factor2))
im_output.save('more-contrast-image.png')
im_output.show()
def INVERT():
img = input("Insert the name of an image found in the Downloads folder (for example: Image.png): ")
im = Image.open(r"C:\Users\{}\Downloads\{}".format(user_account_name, img))
enhancer = ImageEnhance.Contrast(im)
im_output = enhancer.enhance(-1)
im_output.save('more-contrast-image.png')
im_output.show()
def BRIGHTNESS():
img = input("Insert the name of an image found in the Downloads folder (for example: Image.png): ")
im = Image.open(r"C:\Users\{}\Downloads\{}".format(user_account_name, img))
enhancer = ImageEnhance.Brightness(im)
factor = float(input("Specify brightness amount: "))
im_output = enhancer.enhance(factor)
im_output.save('more-contrast-image.png')
im_output.show()
def SHARPEN():
img = input("Insert the name of an image found in the Downloads folder (for example: Image.png): ")
im = Image.open(r"C:\Users\{}\Downloads\{}".format(user_account_name, img))
enhancer = ImageEnhance.Sharpness(im)
factor = float(input("Specify sharpening amount: "))
im_output = enhancer.enhance(factor)
im_output.save('more-contrast-image.png')
im_output.show()
if __name__ == "__main__":
main()
| 38.84375
| 104
| 0.650442
| 326
| 2,486
| 4.852761
| 0.190184
| 0.075853
| 0.063211
| 0.053729
| 0.710493
| 0.664349
| 0.664349
| 0.664349
| 0.664349
| 0.664349
| 0
| 0.004555
| 0.205149
| 2,486
| 63
| 105
| 39.460317
| 0.796053
| 0
| 0
| 0.454545
| 0
| 0
| 0.37598
| 0.096987
| 0
| 0
| 0
| 0
| 0
| 1
| 0.109091
| false
| 0
| 0.018182
| 0
| 0.127273
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a9907c6e19624e9a00da0b3cff99ba87e746680
| 3,206
|
py
|
Python
|
models2.py
|
Lydia-Tan/MindLife
|
644f1a3834f337d51c99650c3924df99c5200d06
|
[
"MIT"
] | 1
|
2020-01-20T19:49:07.000Z
|
2020-01-20T19:49:07.000Z
|
models2.py
|
lindaweng/Mindlife
|
30be070b39728fb3fe149d4c95e5bce280a3b6a7
|
[
"MIT"
] | null | null | null |
models2.py
|
lindaweng/Mindlife
|
30be070b39728fb3fe149d4c95e5bce280a3b6a7
|
[
"MIT"
] | null | null | null |
import nltk
import re
import sys
from sys import argv
from nltk.sentiment.vader import SentimentIntensityAnalyzer
def ajay(ans):
ajay = SentimentIntensityAnalyzer()
completeScore = 0
questionWeights = [0.05, 0.20, 0.05, 0.05, 0.05, 0.20, 0.05, 0.05, 0.20, 0.10]
print ans
ansList = ans.split("$")
for j in range(10):
print ansList[j]
for i in range(10):
results = []
score = 0
count = 0
# print (count)
for paragraph in ansList:
# Split the paragraph into sentences on '.', '?' or '!'
for l in re.split(r"\.|\?|\!", paragraph):
# print(l)
ss = ajay.polarity_scores(l)
results.append(ss);
# print(ss['compound'])
score += ss['compound']
count += 1
completeScore += (score/count)*questionWeights[i]
#print(completeScore)
if (completeScore >= 0.1):
return "False Alarm! You don't have Depression."
elif (completeScore >= -0.1):
return ("Seasonal affective disorder (SAD). This type of depression " +
"emerges as days get shorter in the fall and winter. The mood "
+ "change may result from alterations in the body's natural daily "
+ "rhythms, in the eyes' sensitivity to light, or in how chemical "
+ "messengers like serotonin and melatonin function. The leading "
+ "treatment is light therapy, which involves daily sessions sitting "
+ "close to an especially intense light source. The usual treatments "
+ "for depression, such as psychotherapy and medication, may also be "
+ "effective.");
elif (completeScore >= -0.4):
return ("Persistent depressive disorder. Formerly called dysthymia, this "
+ "type of depression refers to low mood that has lasted for at least "
+ "two years but may not reach the intensity of major depression. Many "
+ "people with this type of depression type are able to function day to "
+ "but feel low or joyless much of the time. Some depressive symptoms, "
+ "such as appetite and sleep changes, low energy, low self-esteem, or "
+ "hopelessness, are usually part of the picture.")
else:
return ("The classic depression type, major depression is a state where a dark "
+ "mood is all-consuming and one loses interest in activities, even ones "
+ "that are usually pleasurable. Symptoms of this type of depression "
+ "include trouble sleeping, changes in appetite or weight, loss of energy, "
+ "and feeling worthless. Thoughts of death or suicide may occur. It is "
+ "usually treated with psychotherapy and medication. For some people with "
+ "severe depression that isn't alleviated with psychotherapy or antidepressant "
+ "medications, electroconvulsive therapy may be effective.")
| 51.709677
| 98
| 0.585153
| 379
| 3,206
| 4.94723
| 0.490765
| 0.0096
| 0.0128
| 0.042667
| 0.014933
| 0.014933
| 0.014933
| 0.013333
| 0.013333
| 0
| 0
| 0.020648
| 0.335309
| 3,206
| 62
| 99
| 51.709677
| 0.859221
| 0.033063
| 0
| 0
| 0
| 0
| 0.510204
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.092593
| null | null | 0.037037
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a9d299ac035789dcfbdc5b67b56e5ebe19176e2
| 33,321
|
py
|
Python
|
bin/ADFRsuite/CCSBpckgs/mglutil/gui/BasicWidgets/Tk/Dial.py
|
AngelRuizMoreno/Jupyter_Dock_devel
|
6d23bc174d5294d1e9909a0a1f9da0713042339e
|
[
"MIT"
] | null | null | null |
bin/ADFRsuite/CCSBpckgs/mglutil/gui/BasicWidgets/Tk/Dial.py
|
AngelRuizMoreno/Jupyter_Dock_devel
|
6d23bc174d5294d1e9909a0a1f9da0713042339e
|
[
"MIT"
] | null | null | null |
bin/ADFRsuite/CCSBpckgs/mglutil/gui/BasicWidgets/Tk/Dial.py
|
AngelRuizMoreno/Jupyter_Dock_devel
|
6d23bc174d5294d1e9909a0a1f9da0713042339e
|
[
"MIT"
] | 1
|
2021-11-04T21:48:14.000Z
|
2021-11-04T21:48:14.000Z
|
################################################################################
##
## This library is free software; you can redistribute it and/or
## modify it under the terms of the GNU Lesser General Public
## License as published by the Free Software Foundation; either
## version 2.1 of the License, or (at your option) any later version.
##
## This library is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
## Lesser General Public License for more details.
##
## You should have received a copy of the GNU Lesser General Public
## License along with this library; if not, write to the Free Software
## Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA
##
## (C) Copyrights Dr. Michel F. Sanner and TSRI 2016
##
################################################################################
#########################################################################
#
# Date: May 2001 Authors: Michel Sanner, Daniel Stoffler
#
# [email protected]
# [email protected]
#
# Copyright: Michel Sanner, Daniel Stoffler and TSRI
#
#########################################################################
import Tkinter
import math
import types
import sys
import os
from mglutil.util.callback import CallbackManager
from mglutil.util.misc import ensureFontCase
from optionsPanel import OptionsPanel
from KeyboardEntry import KeyboardEntry
class Dial(Tkinter.Frame, KeyboardEntry):
"""This class implements a Dial widget.
The widget has a pointer that can be moved around a circle.
The range corresponding to one full turn can be specified as well as the min
and max values that are allowed. By defaults these are set to None meaning that
there is no min and no max. One turn corresponds to 360 units by default.
A dial can also operate in discrete mode (if self.increment is set to x). In
this mode the values are constrained to be multiples of self.increment.
The Widget has a Callback manager. Callback functions get called at every value
change if self.contiguous is set to 1, else they get called when the mouse
button is released. They always get called with the current value as an
argument.
An optional label can be displayed at the center of the Dial widget.
The size of the dial has to be specified at instantiation. Other parameters
can be set after the widget has been created.
The widget tries to automatically adjust the size of the arrow according to
the size of the dial.
The widget has a configure() method: type, min, max, increment, precision,
showLabel, value, continuous, oneTurn can be set this way.
master, labCfg and size can be passed only to the constructor.
a lock() method is used to disable the various gui components of the
options panel. Usage: <instance>.lock(<component>=<value>)
components see configure(). value is 0 or 1. 1 disables,
0 enables.
Setting values with increment enabled:
if using the method set(), the actual value will 'snap' to the next increment.
i.e., if the value is set to 3, and the increment is set to 2, setting the
value to 6 will actually result in 7 (3,5,7,9,.....)
To still be able to set the value, disregarding the current active increment,
the set method understands the optional keyword force=True, i.e.
dial.set(<value>, force=True)), which will set the value to <value>. The
increment will now be added to this new <value>
"""
def __init__(self, master=None, type='float',
labCfg={'fg':'black','side':'left', 'text':None},
min=None, max=None, increment=.0, precision=2,
showLabel=1, value=0.0, continuous=1, oneTurn=360.,
size=50, callback=None,
lockMin=0, lockBMin=0, lockMax=0, lockBMax=0,
lockIncrement=0, lockBIncrement=0,
lockPrecision=0, lockShowLabel=0, lockValue=0,
lockType=0, lockContinuous=0, lockOneTurn=0, **kw):
Tkinter.Frame.__init__(self, master)
Tkinter.Pack.config(self)
self.callbacks = CallbackManager() # object to manage callback
# functions. They get called with the
# current value as an argument
# initialize various attributes with default values
self.precision = 2 # decimal places
self.min = None # minimum value
self.max = None # maximum value
self.increment = increment # value increment
self.minOld = 0. # used to store old values
self.maxOld = 0.
self.incrementOld = increment
self.size = 50 # defines widget size
self.offsetValue = 0. # used to set increment correctly
self.lab = None # label
self.callback = None # user specified callback
self.opPanel = None # option panel widget
self.oneTurn = 360. # value increment for 1 full turn
self.value = 0.0 # current value of widget
self.oldValue = 0.0 # old value of widget
self.showLabel = 1 # turn on to display label on
self.continuous = 1 # set to 1 to call callbacks at
# each value change, else gets called
# on button release event
self.angle = 0. # angle corresponding to value
self.labCfg = labCfg # Tkinter Label options
self.labelFont = (
ensureFontCase('helvetica'), 14, 'bold') # label font
self.labelColor = 'yellow' # label color
self.canvas = None # the canvas to create the widget in
self.usedArcColor = '#aaaaaa' # filled arc color of used portion
self.unusedArcColor = '#cccccc' # filled arc color of unused portion
self.pyOver180 = math.pi/180.0 # constants used in various places
self.threeSixtyOver1turn = 1
self.piOver1turn = math.pi/360.
self.lockMin = lockMin # lock<X> vars are used in self.lock()
self.lockMax = lockMax # to lock/unlock entries in optionpanel
self.lockIncrement = lockIncrement
self.lockBMin = lockBMin
self.lockBMax = lockBMax
self.lockBIncrement = lockBIncrement
self.lockPrecision = lockPrecision
self.lockShowLabel = lockShowLabel
self.lockValue = lockValue
self.lockType = lockType
self.lockContinuous = lockContinuous
self.lockOneTurn = lockOneTurn
self.setArrow()
# configure with user-defined values
self.setSize(size)
self.setCallback(callback)
self.setContinuous(continuous)
self.setType(type)
self.setPrecision(precision)
self.setOneTurn(oneTurn)
self.setMin(min)
self.setMax(max)
self.setIncrement(increment)
self.setShowLabel(showLabel)
self.setValue(value)
self.setLabel(self.labCfg)
self.createCanvas(master)
canvas = self.canvas
canvas.bind("<ButtonPress-1>", self.mouseDown)
canvas.bind("<ButtonRelease-1>", self.mouseUp)
canvas.bind("<B1-Motion>", self.mouseMove)
canvas.bind("<Button-3>", self.toggleOptPanel)
if os.name == 'nt': #sys.platform == 'win32':
canvas.bind("<MouseWheel>", self.mouseWheel)
else:
canvas.bind("<Button-4>", self.mouseWheel)
canvas.bind("<Button-5>", self.mouseWheel)
KeyboardEntry.__init__(self, (canvas,), self.setFromEntry)
self.opPanel = OptionsPanel(master = self, title="Dial Options")
## if self.callback:
## self.callbacks.AddCallback(self.callback)
def setFromEntry(self, valueString):
try:
self.set(self.type(valueString))
except ValueError:
# fixme we would like to pop this up in a window maybe
import traceback
traceback.print_stack()
traceback.print_exc()
def handleKeyStroke(self, event):
# handle key strokes for numbers only in widget keyboard entry label
key = event.keysym
if key.isdigit() or key=='period' or key=='minus' or key=='plus':
if key == 'period':
key = '.'
elif key == 'minus':
key = '-'
elif key == 'plus':
key = '+'
self.typedValue += key
self.typedValueTK.configure(text=self.typedValue)
else:
KeyboardEntry.handleKeyStroke(self, event)
def setSize(self, size):
"""Set widget size. Size must be of type int and greater than 0"""
assert isinstance(size, types.IntType),\
"Illegal size: expected type %s, got %s"%(type(1), type(size) )
assert size > 0, "Illegal size: must be > 0, got %s"%size
self.size = size
def setCallback(self, cb):
"""Set widget callback. Must be callable function. Callback is called
every time the widget value is set/modified"""
assert cb is None or callable(cb) or type(cb) is types.ListType,\
"Illegal callback: must be either None or callable, or list. Got %s"%cb
if cb is None: return
elif type(cb) is types.ListType:
for func in cb:
assert callable(func), "Illegal callback must be callable. Got %s"%func
self.callbacks.AddCallback(func)
else:
self.callbacks.AddCallback(cb)
self.callback = cb
def toggleOptPanel(self, event=None):
if self.opPanel.flag:
self.opPanel.Dismiss_cb()
else:
if not hasattr(self.opPanel, 'optionsForm'):
self.opPanel.displayPanel(create=1)
else:
self.opPanel.displayPanel(create=0)
def setArrow(self, size=None):
if size is not None:
self.setSize(size)
aS = self.size/40
self.arrowLength = max(3, 3*aS) # arrow head length
self.arrowWidth = max(2, aS) # half the arrow body width
self.arrowBorderwidth = max(1, self.arrowWidth/2) # width of arrow
# shadow lines
self.arrowHeadWidth = 2*self.arrowWidth # width of arrow head base
def mouseDown(self, event):
# remember where the mouse went down
self.lastx = event.x
self.lasty = event.y
def mouseUp(self, event):
# call callbacks if not in continuous mode
if not self.continuous:
self.callbacks.CallCallbacks(self.opPanel.valInput.get())
if self.showLabel == 2:
# no widget labels on mouse release
self.canvas.itemconfigure(self.labelId2, text='')
self.canvas.itemconfigure(self.labelId, text='')
def mouseMove(self, event):
dx = event.x-self.xm
dy = self.ym-event.y
n = math.sqrt(dx*dx+dy*dy)
if n == 0.0: v = [0.0, 0.0]
else: v = [dx/n, dy/n]
# find the cosine of the angle between new hand position and previous
# hand position
ma = v[0]*self.vector[0] + v[1]*self.vector[1]
# assure no rounding errors
if ma > 1.0: ma = 1.0
elif ma < -1.0: ma = -1.0
# compute angle increment compared to current vector
ang = math.acos(ma)
# find the sign of the rotation, sign of z component of vector prod.
oldv = self.vector
normz = oldv[0]*v[1] - oldv[1]*v[0]
if normz>0: ang = -1. * ang
# compute the new value
val = self.value + ang*self.oneTurnOver2pi
self.set(val)
self.lastx = event.x
self.lasty = event.y
def mouseWheel(self, event):
#print "mouseWheel", event, event.num
if os.name == 'nt': #sys.platform == 'win32':
if event.delta > 0:
lEventNum = 4
else:
lEventNum = 5
else:
lEventNum = event.num
if lEventNum == 4:
self.set(self.value+self.oneTurn)
else:
self.set(self.value-self.oneTurn)
def get(self):
return self.type(self.value)
def printLabel(self):
if self.canvas is None:
return
self.canvas.itemconfigure(self.labelId2,
text=self.labelFormat%self.value)#newVal)
self.canvas.itemconfigure(self.labelId,
text=self.labelFormat%self.value)#newVal)
def set(self, val, update=1, force=0):
# if force is set to 1, we call this method regardless of the
# widget configuration. This is for example the case if the dial
# is set to continuous=0, but the value is set in the options panel
# snap to closest increment
if self.increment is not None and self.increment != 0. and not force:
offset = self.offsetValue%self.increment
dval = round(val/self.increment) * self.increment
if val < dval:
dval = dval + offset - self.increment
else:
dval = dval + offset
if self.min is not None and dval < self.min:
dval = self.min
elif self.max is not None and dval > self.max:
dval = self.max
# recompute vector and angle corresponding to val
self.angle = (dval%self.oneTurn)*self.threeSixtyOver1turn
if dval <0.0:
self.angle = self.angle - 360.0
a = self.angle*self.pyOver180
self.vector = [math.sin(a), math.cos(a)]
self.value = dval
self.offsetValue = dval
else:
# 'regular' mode, i.e. no step-wise increment
if self.min is not None and val < self.min: val = self.min
elif self.max is not None and val > self.max: val = self.max
# recompute vector and angle corresponding to val
self.angle = (val%self.oneTurn)*self.threeSixtyOver1turn
if val <0.0: self.angle = self.angle - 360.0
a = self.angle*self.pyOver180
self.vector = [math.sin(a), math.cos(a)]
self.value = val
self.offsetValue = val
#update arrow in display
self.drawArrow()
newVal = self.get()
if self.continuous or force:
if update and self.oldValue != newVal or force:
self.oldValue = newVal
self.callbacks.CallCallbacks(newVal)
if self.showLabel==2:
self.printLabel()
else:
if self.showLabel==2:
self.printLabel()
if self.showLabel==1:
self.printLabel()
if self.opPanel:
self.opPanel.valInput.set(self.labelFormat%newVal)
def drawArrow(self):
if self.canvas is None:
return
# end point
x1 = self.xm + self.vector[0]*self.rad
y1 = self.ym - self.vector[1]*self.rad
# point at arrow head base
xb = self.xm + self.vector[0]*self.radNoArrow
yb = self.ym - self.vector[1]*self.radNoArrow
# vector orthogonal to arrow
n = [-self.vector[1], -self.vector[0]]
pts1 = [ self.xm+n[0]*self.arrowWidth, self.ym+n[1]*self.arrowWidth,
xb+n[0]*self.arrowWidth, yb+n[1]*self.arrowWidth,
xb+n[0]*self.arrowHeadWidth, yb+n[1]*self.arrowHeadWidth,
x1, y1 ]
pts2 = [ x1, y1,
xb-n[0]*self.arrowHeadWidth, yb-n[1]*self.arrowHeadWidth,
xb-n[0]*self.arrowWidth, yb-n[1]*self.arrowWidth,
self.xm-n[0]*self.arrowWidth, self.ym-n[1]*self.arrowWidth ]
canvas = self.canvas
if self.vector[0] > 0.0:
col1 = '#DDDDDD'
col2 = 'black'
else:
col1 = 'black'
col2 = '#DDDDDD'
apply( canvas.coords, (self.arrowPolId,) + tuple(pts1+pts2) )
apply( canvas.coords, (self.arrowPolborder1,) + tuple(pts1) )
canvas.itemconfigure( self.arrowPolborder1, fill=col1 )
apply( canvas.coords, (self.arrowPolborder2,) + tuple(pts2) )
canvas.itemconfigure( self.arrowPolborder2, fill=col2 )
canvas.itemconfigure(self.arcId, extent = 0.0-self.angle)
def createCanvas(self, master):
size = self.size
self.frame = Tkinter.Frame(self, borderwidth=3, relief='sunken')
self.canvas = Tkinter.Canvas(self.frame, width=size+2, height=size+2)
self.xm = self.ym = size/2+2
self.rad = size/2
self.radNoArrow = self.rad-self.arrowLength
self.vector = [0, 1]
x1 = self.xm + self.vector[0]*self.rad
y1 = self.ym + self.vector[1]*self.rad
canvas = self.canvas
self.circleId = canvas.create_oval(2,2,size,size, width=1,
fill=self.unusedArcColor)
self.arcId = canvas.create_arc(2,2,size,size, start=90.,
extent=0, fill=self.usedArcColor)
canvas.create_line(2, self.ym, size+2, self.ym)
canvas.create_line(self.xm, 2, self.ym, size+2)
self.arrowPolId = canvas.create_polygon( 0,0,0,0,0,0,0,0,
0,0,0,0,0,0,0,0,
fill='gray75' )
self.arrowPolborder1 = canvas.create_line( 0,0,0,0,0,0,0,0,
fill='black',
width = self.arrowBorderwidth)
self.arrowPolborder2 = canvas.create_line( 0,0,0,0,0,0,0,0,
fill='white',
width = self.arrowBorderwidth )
r = size/20
off = self.arrowBorderwidth
canvas.create_oval(self.xm-r,self.ym-r-off/2,self.xm+r,self.ym+r-off/2,
fill='#DDDDDD', outline='white')
canvas.create_oval(self.xm-r,self.ym-r+off,self.xm+r,self.ym+r+off,
fill='black', outline='black')
canvas.create_oval(self.xm-r,self.ym-r,self.xm+r,self.ym+r,
fill='gray70', outline='#DDDDDD')
self.labelId2 = canvas.create_text(self.xm+2, self.ym+2,
fill='black',
justify='center', text='',
font = self.labelFont)
self.labelId = canvas.create_text(self.xm, self.ym,
fill=self.labelColor,
justify='center', text='',
font = self.labelFont)
self.drawArrow()
self.opPanel = OptionsPanel(master = self, title="Dial Options")
# pack em up
self.canvas.pack(side=Tkinter.TOP)
self.frame.pack(expand=1, fill='x')
self.toggleWidgetLabel(self.showLabel)
def toggleWidgetLabel(self, val):
if val == 0:
# no widget labels
self.showLabel=0
self.canvas.itemconfigure(self.labelId2,
text='')
self.canvas.itemconfigure(self.labelId,
text='')
if val == 1:
# show always widget labels
self.showLabel=1
self.printLabel()
if val == 2:
# show widget labels only when mouse moves
self.showLabel=2
self.canvas.itemconfigure(self.labelId2,
text='')
self.canvas.itemconfigure(self.labelId,
text='')
def setValue(self, val):
if type(val) == types.StringType:
val = float(val)
assert type(val) in [types.IntType, types.FloatType],\
"Illegal type for value: expected %s or %s, got %s"%(
type(1), type(1.0), type(val) )
# setValue does NOT call a callback!
if self.min is not None and val < self.min: val = self.min
if self.max is not None and val > self.max: val = self.max
self.value = self.type(val)
self.offsetValue=self.value
self.oldValue = self.value
#update arrow in display
self.angle = (self.value%self.oneTurn)*self.threeSixtyOver1turn
if self.value <0.0: self.angle = self.angle - 360.0
a = self.angle*self.pyOver180
self.vector = [math.sin(a), math.cos(a)]
self.drawArrow()
if self.showLabel == 1:
self.printLabel()
if self.opPanel:
self.opPanel.valInput.set(self.labelFormat%self.value)
def setLabel(self, labCfg):
self.labCfg = labCfg
text = labCfg.get('text', None)
if text is None or text=='':
return
d={}
for k, w in self.labCfg.items():
if k == 'side': continue
else: d[k] = w
if not 'side' in self.labCfg.keys():
self.labCfg['side'] = 'left'
if not self.lab:
self.lab = Tkinter.Label(self, d)
self.lab.pack(side=self.labCfg['side'])
self.lab.bind("<Button-3>", self.toggleOptPanel)
else:
self.lab.configure(text)
#####################################################################
# the 'configure' methods:
#####################################################################
def configure(self, **kw):
for key,value in kw.items():
# the 'set' parameter callbacks
if key=='labCfg': self.setLabel(value)
elif key=='type': self.setType(value)
elif key=='min': self.setMin(value)
elif key=='max': self.setMax(value)
elif key=='increment': self.setIncrement(value)
elif key=='precision': self.setPrecision(value)
elif key=='showLabel': self.setShowLabel(value)
elif key=='continuous': self.setContinuous(value)
elif key=='oneTurn': self.setOneTurn(value)
# the 'lock' entries callbacks
elif key=='lockType': self.lockTypeCB(value)
elif key=='lockMin': self.lockMinCB(value)
elif key=='lockBMin': self.lockBMinCB(value)
elif key=='lockMax': self.lockMaxCB(value)
elif key=='lockBMax': self.lockBMaxCB(value)
elif key=='lockIncrement': self.lockIncrementCB(value)
elif key=='lockBIncrement': self.lockBIncrementCB(value)
elif key=='lockPrecision': self.lockPrecisionCB(value)
elif key=='lockShowLabel': self.lockShowLabelCB(value)
elif key=='lockValue': self.lockValueCB(value)
elif key=='lockContinuous': self.lockContinuousCB(value)
elif key=='lockOneTurn': self.lockOneTurnCB(value)
def setType(self, Type):
assert type(Type) in [types.StringType, types.TypeType],\
"Illegal type for datatype. Expected %s or %s, got %s"%(
type('a'), type(type), type(Type) )
if type(Type) == type(""): # type str
assert Type in ('int','float'),\
"Illegal type descriptor. Expected 'int' or 'float', got '%s'"%Type
self.type = eval(Type)
else:
self.type = Type
if self.type == int:
self.labelFormat = "%d"
self.int_value = self.value
else:
self.labelFormat = "%."+str(self.precision)+"f"
if hasattr(self.opPanel, 'optionsForm'):
w = self.opPanel.idf.entryByName['togIntFloat']['widget']
if self.type == int:
w.setvalue('int')
elif self.type == float:
w.setvalue('float')
if self.opPanel:
self.opPanel.updateDisplay()
# and update the printed label
if self.canvas and self.showLabel == 1:
self.printLabel()
def setMin(self, min):
if min is not None:
assert type(min) in [types.IntType, types.FloatType],\
"Illegal type for minimum. Expected type %s or %s, got %s"%(
type(0), type(0.0), type(min) )
if self.max and min > self.max:
min = self.max
self.min = self.type(min)
if self.showLabel == 1:
self.printLabel()
if self.value < self.min:
self.set(self.min)
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.minInput.set(self.labelFormat%self.min)
self.opPanel.toggleMin.set(1)
self.opPanel.min_entry.configure(state='normal', fg='gray0')
self.minOld = self.min
else:
self.min = None
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.toggleMin.set(0)
self.opPanel.min_entry.configure(state='disabled',
fg='gray40')
def setMax(self, max):
if max is not None:
assert type(max) in [types.IntType, types.FloatType],\
"Illegal type for maximum. Expected type %s or %s, got %s"%(
type(0), type(0.0), type(max) )
if self.min and max < self.min:
max = self.min
self.max = self.type(max)
if self.showLabel == 1:
self.printLabel()
if self.value > self.max:
self.set(self.max)
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.maxInput.set(self.labelFormat%self.max)
self.opPanel.toggleMax.set(1)
self.opPanel.max_entry.configure(state='normal', fg='gray0')
self.maxOld = self.max
else:
self.max = None
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.toggleMax.set(0)
self.opPanel.max_entry.configure(state='disabled', fg='gray40')
def setIncrement(self, incr):
if incr is not None:
assert type(incr) in [types.IntType, types.FloatType],\
"Illegal type for increment. Expected type %s or %s, got %s"%(
type(0), type(0.0), type(incr) )
self.increment = self.type(incr)
self.offsetValue = self.value
self.incrementOld = self.increment
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.incrInput.set(self.labelFormat%self.increment)
self.opPanel.toggleIncr.set(1)
self.opPanel.incr_entry.configure(state='normal', fg='gray0')
else:
self.increment = self.type(0)
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.toggleIncr.set(0)
self.opPanel.incrInput.set(self.labelFormat%0)
self.opPanel.incr_entry.configure(state='disabled',
fg='gray40')
def setPrecision(self, val):
assert type(val) in [types.IntType, types.FloatType],\
"Illegal type for precision. Expected type %s or %s, got %s"%(
type(0), type(0.0), type(val) )
val = int(val)
if val > 10:
val = 10
if val < 1:
val = 1
self.precision = val
if self.type == float:
self.labelFormat = "%."+str(self.precision)+"f"
else:
self.labelFormat = "%d"
if hasattr(self.opPanel, 'optionsForm'):
w = self.opPanel.idf.entryByName['selPrec']['widget']
w.setvalue(val)
if self.opPanel:
self.opPanel.updateDisplay()
# and update the printed label
if self.canvas and self.showLabel == 1:
self.printLabel()
def setContinuous(self, cont):
""" cont can be None, 0 or 1 """
assert cont in [None, 0, 1],\
"Illegal value for continuous: expected None, 0 or 1, got %s"%cont
if cont != 1:
cont = None
self.continuous = cont
if hasattr(self.opPanel, 'optionsForm'):
w = self.opPanel.idf.entryByName['togCont']['widget']
if cont:
w.setvalue('on')#i=1
else:
w.setvalue('off')#i=0
if self.opPanel:
self.opPanel.updateDisplay()
def setShowLabel(self, val):
"""Show label can be 0, 1 or 2
0: no label
1: label is always shown
2: show label only when value changes"""
assert val in [0,1,2],\
"Illegal value for showLabel. Expected 0, 1 or 2, got %s"%val
if val != 0 and val != 1 and val != 2:
print "Illegal value. Must be 0, 1 or 2"
return
self.showLabel = val
self.toggleWidgetLabel(val)
if hasattr(self.opPanel, 'optionsForm'):
w = self.opPanel.idf.entryByName['togLabel']['widget']
if self.showLabel == 0:
label = 'never'
elif self.showLabel == 1:
label = 'always'
elif self.showLabel == 2:
label = 'move'
w.setvalue(label)
if self.opPanel:
self.opPanel.updateDisplay()
def setOneTurn(self, oneTurn):
assert type(oneTurn) in [types.IntType, types.FloatType],\
"Illegal type for oneTurn. Expected %s or %s, got %s"%(
type(0), type(0.0), type(oneTurn) )
self.oneTurn = oneTurn
self.threeSixtyOver1turn = 360./oneTurn
self.piOver1turn = math.pi/oneTurn
self.oneTurnOver2pi = oneTurn / (2*math.pi)
if self.opPanel:
self.opPanel.updateDisplay()
#####################################################################
# the 'lock' methods:
#####################################################################
def lockTypeCB(self, mode):
if mode != 0: mode = 1
self.lockType = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockMinCB(self, mode): #min entry field
if mode != 0: mode = 1
self.lockMin = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockBMinCB(self, mode): # min checkbutton
if mode != 0: mode = 1
self.lockBMin = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockMaxCB(self, mode): # max entry field
if mode != 0: mode = 1
self.lockMax = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockBMaxCB(self, mode): # max checkbutton
if mode != 0: mode = 1
self.lockBMax = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockIncrementCB(self, mode): # increment entry field
if mode != 0: mode = 1
self.lockIncrement = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockBIncrementCB(self, mode): # increment checkbutton
if mode != 0: mode = 1
self.lockBIncrement = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockPrecisionCB(self, mode):
if mode != 0: mode = 1
self.lockPrecision = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockShowLabelCB(self, mode):
if mode != 0: mode = 1
self.lockShowLabel = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockValueCB(self, mode):
if mode != 0: mode = 1
self.lockValue = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockContinuousCB(self, mode):
if mode != 0: mode = 1
self.lockContinuous = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
def lockOneTurnCB(self, mode):
if mode != 0: mode = 1
self.lockOneTurn = mode
if hasattr(self.opPanel, 'optionsForm'):
self.opPanel.lockUnlockDisplay()
if __name__ == '__main__':
def foo(val):
print val
d = Dial(size=50)
d.configure(showLabel=1)
d.callbacks.AddCallback(foo)
| 36.376638
| 87
| 0.547643
| 3,868
| 33,321
| 4.706825
| 0.152792
| 0.046523
| 0.004779
| 0.005273
| 0.32341
| 0.297484
| 0.258376
| 0.209162
| 0.176261
| 0.161046
| 0
| 0.018954
| 0.333393
| 33,321
| 915
| 88
| 36.416393
| 0.800693
| 0.101378
| 0
| 0.279534
| 0
| 0
| 0.064124
| 0
| 0
| 0
| 0
| 0.001093
| 0.023295
| 0
| null | null | 0
| 0.016639
| null | null | 0.023295
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6a9d42bd307c1507375c76e403f46b3901bbf76d
| 3,560
|
py
|
Python
|
qt-creator-opensource-src-4.6.1/scripts/checkInstalledFiles.py
|
kevinlq/Qt-Creator-Opensource-Study
|
b8cadff1f33f25a5d4ef33ed93f661b788b1ba0f
|
[
"MIT"
] | 5
|
2018-12-22T14:49:13.000Z
|
2022-01-13T07:21:46.000Z
|
qt-creator-opensource-src-4.6.1/scripts/checkInstalledFiles.py
|
kevinlq/Qt-Creator-Opensource-Study
|
b8cadff1f33f25a5d4ef33ed93f661b788b1ba0f
|
[
"MIT"
] | null | null | null |
qt-creator-opensource-src-4.6.1/scripts/checkInstalledFiles.py
|
kevinlq/Qt-Creator-Opensource-Study
|
b8cadff1f33f25a5d4ef33ed93f661b788b1ba0f
|
[
"MIT"
] | 8
|
2018-07-17T03:55:48.000Z
|
2021-12-22T06:37:53.000Z
|
#!/usr/bin/env python
############################################################################
#
# Copyright (C) 2016 The Qt Company Ltd.
# Contact: https://www.qt.io/licensing/
#
# This file is part of Qt Creator.
#
# Commercial License Usage
# Licensees holding valid commercial Qt licenses may use this file in
# accordance with the commercial license agreement provided with the
# Software or, alternatively, in accordance with the terms contained in
# a written agreement between you and The Qt Company. For licensing terms
# and conditions see https://www.qt.io/terms-conditions. For further
# information use the contact form at https://www.qt.io/contact-us.
#
# GNU General Public License Usage
# Alternatively, this file may be used under the terms of the GNU
# General Public License version 3 as published by the Free Software
# Foundation with exceptions as appearing in the file LICENSE.GPL3-EXCEPT
# included in the packaging of this file. Please review the following
# information to ensure the GNU General Public License requirements will
# be met: https://www.gnu.org/licenses/gpl-3.0.html.
#
############################################################################
import os
import sys
import stat
import difflib
import inspect
import getopt
def referenceFile():
if sys.platform.startswith('linux'):
filename = 'makeinstall.linux'
elif sys.platform.startswith('win'):
filename = 'makeinstall.windows'
elif sys.platform == 'darwin':
filename = 'makeinstall.darwin'
else:
print "Unsupported platform: ", sys.platform
sys.exit(-1)
scriptDir = os.path.dirname(inspect.getfile(inspect.currentframe()))
return os.path.join(scriptDir,'..','tests', 'reference', filename)
def readReferenceFile():
# read file with old diff
f = open(referenceFile(), 'r');
filelist = []
for line in f:
filelist.append(line)
f.close()
return filelist
def generateReference(rootdir):
fileDict = {}
for root, subFolders, files in os.walk(rootdir):
for file in (subFolders + files):
f = os.path.join(root,file)
perm = os.stat(f).st_mode & 0777
if os.path.getsize(f) == 0:
print "'%s' is empty!" % f
fileDict[f[len(rootdir)+1:]] = perm
# generate new list
formattedlist = []
for name, perm in sorted(fileDict.iteritems()):
formattedlist.append("%o %s\n"% (perm, name))
return formattedlist;
def usage():
print "Usage: %s [-g | --generate] <dir>" % os.path.basename(sys.argv[0])
def main():
generateMode = False
try:
opts, args = getopt.gnu_getopt(sys.argv[1:], 'hg', ['help', 'generate'])
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
for o, a in opts:
if o in ('-h', '--help'):
usage()
sys.exit(0)
if o in ('-g', '--generate'):
generateMode = True
if len(args) != 1:
usage()
sys.exit(2)
rootdir = args[0]
if generateMode:
f = open(referenceFile(), 'w')
for item in generateReference(rootdir):
f.write(item)
f.close()
print "Do not forget to commit", referenceFile()
else:
hasDiff = False
for line in difflib.unified_diff(readReferenceFile(), generateReference(rootdir), fromfile=referenceFile(), tofile="generated"):
sys.stdout.write(line)
hasDiff = True
if hasDiff:
sys.exit(1)
if __name__ == "__main__":
main()
| 31.504425
| 136
| 0.608989
| 439
| 3,560
| 4.91344
| 0.416856
| 0.016226
| 0.013908
| 0.01669
| 0.024108
| 0
| 0
| 0
| 0
| 0
| 0
| 0.008453
| 0.235674
| 3,560
| 112
| 137
| 31.785714
| 0.78427
| 0.290169
| 0
| 0.123288
| 0
| 0
| 0.100851
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.082192
| null | null | 0.068493
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6aa62343269180c72e1026d8bfdc9d3fa9196b1e
| 7,448
|
py
|
Python
|
gluon/contrib/pbkdf2_ctypes.py
|
Cwlowe/web2py
|
6ae4c3c274be1026cbc45b0fcd8d1180c74b9070
|
[
"BSD-3-Clause"
] | 9
|
2018-04-19T05:08:30.000Z
|
2021-11-23T07:36:58.000Z
|
gluon/contrib/pbkdf2_ctypes.py
|
mohit3011/Quiz-Mate
|
17988a623abde439aef2b43fc8dc3162b5cae15e
|
[
"BSD-3-Clause"
] | 98
|
2017-11-02T19:00:44.000Z
|
2022-03-22T16:15:39.000Z
|
gluon/contrib/pbkdf2_ctypes.py
|
mohit3011/Quiz-Mate
|
17988a623abde439aef2b43fc8dc3162b5cae15e
|
[
"BSD-3-Clause"
] | 9
|
2017-10-24T21:53:36.000Z
|
2021-11-23T07:36:59.000Z
|
# -*- coding: utf-8 -*-
"""
pbkdf2_ctypes
~~~~~~
Fast pbkdf2.
This module implements pbkdf2 for Python using the crypto library from
OpenSSL or CommonCrypto.
Note: This module is intended as a plugin replacement of pbkdf2.py
by Armin Ronacher.
Git repository:
$ git clone https://github.com/michele-comitini/pbkdf2_ctypes.git
:copyright: Copyright (c) 2013: Michele Comitini <[email protected]>
:license: LGPLv3
"""
import ctypes
import ctypes.util
import hashlib
import platform
import os.path
import binascii
import sys
__all__ = ['pkcs5_pbkdf2_hmac', 'pbkdf2_bin', 'pbkdf2_hex']
__version__ = '0.99.3'
def _commoncrypto_hashlib_to_crypto_map_get(hashfunc):
hashlib_to_crypto_map = {hashlib.sha1: 1,
hashlib.sha224: 2,
hashlib.sha256: 3,
hashlib.sha384: 4,
hashlib.sha512: 5}
crypto_hashfunc = hashlib_to_crypto_map.get(hashfunc)
if crypto_hashfunc is None:
raise ValueError('Unknown digest %s' % hashfunc)
return crypto_hashfunc
def _commoncrypto_pbkdf2(data, salt, iterations, digest, keylen):
"""Common Crypto compatibile wrapper
"""
c_hashfunc = ctypes.c_uint32(_commoncrypto_hashlib_to_crypto_map_get(digest))
c_pass = ctypes.c_char_p(data)
c_passlen = ctypes.c_size_t(len(data))
c_salt = ctypes.c_char_p(salt)
c_saltlen = ctypes.c_size_t(len(salt))
c_iter = ctypes.c_uint(iterations)
c_keylen = ctypes.c_size_t(keylen)
c_buff = ctypes.create_string_buffer(keylen)
crypto.CCKeyDerivationPBKDF.restype = ctypes.c_int
crypto.CCKeyDerivationPBKDF.argtypes = [ctypes.c_uint32,
ctypes.c_char_p,
ctypes.c_size_t,
ctypes.c_char_p,
ctypes.c_size_t,
ctypes.c_uint32,
ctypes.c_uint,
ctypes.c_char_p,
ctypes.c_size_t]
ret = crypto.CCKeyDerivationPBKDF(2, # hardcoded 2-> PBKDF2
c_pass, c_passlen,
c_salt, c_saltlen,
c_hashfunc,
c_iter,
c_buff,
c_keylen)
return (1 - ret, c_buff)
def _openssl_hashlib_to_crypto_map_get(hashfunc):
hashlib_to_crypto_map = {hashlib.md5: crypto.EVP_md5,
hashlib.sha1: crypto.EVP_sha1,
hashlib.sha256: crypto.EVP_sha256,
hashlib.sha224: crypto.EVP_sha224,
hashlib.sha384: crypto.EVP_sha384,
hashlib.sha512: crypto.EVP_sha512}
crypto_hashfunc = hashlib_to_crypto_map.get(hashfunc)
if crypto_hashfunc is None:
raise ValueError('Unknown digest %s' % hashfunc)
crypto_hashfunc.restype = ctypes.c_void_p
return crypto_hashfunc()
def _openssl_pbkdf2(data, salt, iterations, digest, keylen):
"""OpenSSL compatibile wrapper
"""
c_hashfunc = ctypes.c_void_p(_openssl_hashlib_to_crypto_map_get(digest))
c_pass = ctypes.c_char_p(data)
c_passlen = ctypes.c_int(len(data))
c_salt = ctypes.c_char_p(salt)
c_saltlen = ctypes.c_int(len(salt))
c_iter = ctypes.c_int(iterations)
c_keylen = ctypes.c_int(keylen)
c_buff = ctypes.create_string_buffer(keylen)
# PKCS5_PBKDF2_HMAC(const char *pass, int passlen,
# const unsigned char *salt, int saltlen, int iter,
# const EVP_MD *digest,
# int keylen, unsigned char *out);
crypto.PKCS5_PBKDF2_HMAC.argtypes = [ctypes.c_char_p, ctypes.c_int,
ctypes.c_char_p, ctypes.c_int,
ctypes.c_int, ctypes.c_void_p,
ctypes.c_int, ctypes.c_char_p]
crypto.PKCS5_PBKDF2_HMAC.restype = ctypes.c_int
err = crypto.PKCS5_PBKDF2_HMAC(c_pass, c_passlen,
c_salt, c_saltlen,
c_iter,
c_hashfunc,
c_keylen,
c_buff)
return (err, c_buff)
try: # check that we have proper OpenSSL or Common Crypto on the system.
system = platform.system()
if system == 'Windows':
if platform.architecture()[0] == '64bit':
libname = ctypes.util.find_library('libeay64')
if not libname:
raise OSError('Library not found')
crypto = ctypes.CDLL(libname)
else:
libname = ctypes.util.find_library('libeay32')
if not libname:
raise OSError('Library libeay32 not found.')
crypto = ctypes.CDLL(libname)
_pbkdf2_hmac = _openssl_pbkdf2
crypto.PKCS5_PBKDF2_HMAC # test compatibility
elif system == 'Darwin': # think different(TM)! i.e. break things!
if [int(x) for x in platform.mac_ver()[0].split('.')] < [10, 7, 0]:
raise OSError('OS X Version too old %s < 10.7.0' % platform.mac_ver()[0])
libname = ctypes.util.find_library('System')
if not libname:
raise OSError('Library not found')
crypto = ctypes.CDLL(os.path.basename(libname))
_pbkdf2_hmac = _commoncrypto_pbkdf2
else:
libname = ctypes.util.find_library('crypto')
if not libname:
raise OSError('Library crypto not found.')
crypto = ctypes.CDLL(os.path.basename(libname))
_pbkdf2_hmac = _openssl_pbkdf2
crypto.PKCS5_PBKDF2_HMAC # test compatibility
except (OSError, AttributeError):
_, e, _ = sys.exc_info()
raise ImportError('Cannot find a compatible cryptographic library '
'on your system. %s' % e)
def pkcs5_pbkdf2_hmac(data, salt, iterations=1000, keylen=24, hashfunc=None):
if hashfunc is None:
hashfunc = hashlib.sha1
err, c_buff = _pbkdf2_hmac(data, salt, iterations, hashfunc, keylen)
if err == 0:
raise ValueError('wrong parameters')
return c_buff.raw[:keylen]
def pbkdf2_hex(data, salt, iterations=1000, keylen=24, hashfunc=None):
return binascii.hexlify(pkcs5_pbkdf2_hmac(data, salt, iterations, keylen, hashfunc))
def pbkdf2_bin(data, salt, iterations=1000, keylen=24, hashfunc=None):
return pkcs5_pbkdf2_hmac(data, salt, iterations, keylen, hashfunc)
if __name__ == '__main__':
try:
crypto.SSLeay_version.restype = ctypes.c_char_p
print(crypto.SSLeay_version(0))
except:
pass
import platform
if platform.python_version_tuple() < ('3', '0', '0'):
def bytes(*args):
return str(args[0])
for h in [hashlib.sha1, hashlib.sha224, hashlib.sha256,
hashlib.sha384, hashlib.sha512]:
print(binascii.hexlify(pkcs5_pbkdf2_hmac(bytes('secret', 'utf-8') * 11,
bytes('salt', 'utf-8'),
hashfunc=h)))
| 38.194872
| 88
| 0.569683
| 850
| 7,448
| 4.732941
| 0.223529
| 0.0609
| 0.041014
| 0.032811
| 0.49043
| 0.434999
| 0.338553
| 0.338553
| 0.272682
| 0.222719
| 0
| 0.034061
| 0.341702
| 7,448
| 194
| 89
| 38.391753
| 0.786457
| 0.116273
| 0
| 0.309353
| 0
| 0
| 0.054685
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0.057554
| false
| 0.05036
| 0.064748
| 0.021583
| 0.179856
| 0.014388
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
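A minimal usage sketch for the pbkdf2_ctypes module above, assuming it is importable as pbkdf2_ctypes and that a compatible OpenSSL or CommonCrypto library is present on the system; the password and salt values are placeholders, not part of the original file.

from pbkdf2_ctypes import pbkdf2_hex, pbkdf2_bin

# Derive a 24-byte key from a password and salt (illustrative values only).
hex_key = pbkdf2_hex(b'secret', b'salt', iterations=1000, keylen=24)
raw_key = pbkdf2_bin(b'secret', b'salt', iterations=1000, keylen=24)
print(hex_key)        # 48 hex characters (24 bytes)
print(len(raw_key))   # 24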
6aa7fd8436efabe5593a8174e9772f897fb7aec0
| 4,465
|
py
|
Python
|
sympy/polys/tests/test_sqfreetools.py
|
eriknw/sympy
|
b7544e2bb74c011f6098a7e886fd77f41776c2c4
|
[
"BSD-3-Clause"
] | 7
|
2015-01-14T06:55:33.000Z
|
2018-08-11T14:43:52.000Z
|
sympy/polys/tests/test_sqfreetools.py
|
pbeltran/sympy-1
|
94f92b36731c2bebe6de1037c063c2a258a8a399
|
[
"BSD-3-Clause"
] | 1
|
2018-02-19T04:56:04.000Z
|
2018-02-19T04:56:04.000Z
|
sympy/polys/tests/test_sqfreetools.py
|
pbeltran/sympy-1
|
94f92b36731c2bebe6de1037c063c2a258a8a399
|
[
"BSD-3-Clause"
] | 1
|
2016-04-24T14:39:22.000Z
|
2016-04-24T14:39:22.000Z
|
"""Tests for square-free decomposition algorithms and related tools. """
from sympy.polys.rings import ring
from sympy.polys.domains import FF, ZZ, QQ
from sympy.polys.polyclasses import DMP
from sympy.polys.specialpolys import f_polys
from sympy.utilities.pytest import raises
f_0, f_1, f_2, f_3, f_4, f_5, f_6 = f_polys()
def test_dup_sqf():
R, x = ring("x", ZZ)
assert R.dup_sqf_part(0) == 0
assert R.dup_sqf_p(0) is True
assert R.dup_sqf_part(7) == 1
assert R.dup_sqf_p(7) is True
assert R.dup_sqf_part(2*x + 2) == x + 1
assert R.dup_sqf_p(2*x + 2) is True
assert R.dup_sqf_part(x**3 + x + 1) == x**3 + x + 1
assert R.dup_sqf_p(x**3 + x + 1) is True
assert R.dup_sqf_part(-x**3 + x + 1) == x**3 - x - 1
assert R.dup_sqf_p(-x**3 + x + 1) is True
assert R.dup_sqf_part(2*x**3 + 3*x**2) == 2*x**2 + 3*x
assert R.dup_sqf_p(2*x**3 + 3*x**2) is False
assert R.dup_sqf_part(-2*x**3 + 3*x**2) == 2*x**2 - 3*x
assert R.dup_sqf_p(-2*x**3 + 3*x**2) is False
assert R.dup_sqf_list(0) == (0, [])
assert R.dup_sqf_list(1) == (1, [])
assert R.dup_sqf_list(x) == (1, [(x, 1)])
assert R.dup_sqf_list(2*x**2) == (2, [(x, 2)])
assert R.dup_sqf_list(3*x**3) == (3, [(x, 3)])
assert R.dup_sqf_list(-x**5 + x**4 + x - 1) == \
(-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
assert R.dup_sqf_list(x**8 + 6*x**6 + 12*x**4 + 8*x**2) == \
( 1, [(x, 2), (x**2 + 2, 3)])
assert R.dup_sqf_list(2*x**2 + 4*x + 2) == (2, [(x + 1, 2)])
R, x = ring("x", QQ)
assert R.dup_sqf_list(2*x**2 + 4*x + 2) == (2, [(x + 1, 2)])
R, x = ring("x", FF(2))
assert R.dup_sqf_list(x**2 + 1) == (1, [(x + 1, 2)])
R, x = ring("x", FF(3))
assert R.dup_sqf_list(x**10 + 2*x**7 + 2*x**4 + x) == \
(1, [(x, 1),
(x + 1, 3),
(x + 2, 6)])
R1, x = ring("x", ZZ)
R2, y = ring("y", FF(3))
f = x**3 + 1
g = y**3 + 1
assert R1.dup_sqf_part(f) == f
assert R2.dup_sqf_part(g) == y + 1
assert R1.dup_sqf_p(f) is True
assert R2.dup_sqf_p(g) is False
R, x, y = ring("x,y", ZZ)
A = x**4 - 3*x**2 + 6
D = x**6 - 5*x**4 + 5*x**2 + 4
f, g = D, R.dmp_sub(A, R.dmp_mul(R.dmp_diff(D, 1), y))
res = R.dmp_resultant(f, g)
h = (4*y**2 + 1).drop(x)
assert R.drop(x).dup_sqf_list(res) == (45796, [(h, 3)])
R, x = ring("x", ZZ["t"])
assert R.dup_sqf_list_include(DMP([1, 0, 0, 0], ZZ)*x**2) == \
[(DMP([1, 0, 0, 0], ZZ), 1), (DMP([1], ZZ)*x, 2)]
def test_dmp_sqf():
R, x, y = ring("x,y", ZZ)
assert R.dmp_sqf_part(0) == 0
assert R.dmp_sqf_p(0) is True
assert R.dmp_sqf_part(7) == 1
assert R.dmp_sqf_p(7) is True
assert R.dmp_sqf_list(3) == (3, [])
assert R.dmp_sqf_list_include(3) == [(3, 1)]
R, x, y, z = ring("x,y,z", ZZ)
assert R.dmp_sqf_p(f_0) is True
assert R.dmp_sqf_p(f_0**2) is False
assert R.dmp_sqf_p(f_1) is True
assert R.dmp_sqf_p(f_1**2) is False
assert R.dmp_sqf_p(f_2) is True
assert R.dmp_sqf_p(f_2**2) is False
assert R.dmp_sqf_p(f_3) is True
assert R.dmp_sqf_p(f_3**2) is False
assert R.dmp_sqf_p(f_5) is False
assert R.dmp_sqf_p(f_5**2) is False
assert R.dmp_sqf_p(f_4) is True
assert R.dmp_sqf_part(f_4) == -f_4
assert R.dmp_sqf_part(f_5) == x + y - z
R, x, y, z, t = ring("x,y,z,t", ZZ)
assert R.dmp_sqf_p(f_6) is True
assert R.dmp_sqf_part(f_6) == f_6
R, x = ring("x", ZZ)
f = -x**5 + x**4 + x - 1
assert R.dmp_sqf_list(f) == (-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
assert R.dmp_sqf_list_include(f) == [(-x**3 - x**2 - x - 1, 1), (x - 1, 2)]
R, x, y = ring("x,y", ZZ)
f = -x**5 + x**4 + x - 1
assert R.dmp_sqf_list(f) == (-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])
assert R.dmp_sqf_list_include(f) == [(-x**3 - x**2 - x - 1, 1), (x - 1, 2)]
f = -x**2 + 2*x - 1
assert R.dmp_sqf_list_include(f) == [(-1, 1), (x - 1, 2)]
R, x, y = ring("x,y", FF(2))
raises(NotImplementedError, lambda: R.dmp_sqf_list(y**2 + 1))
def test_dup_gff_list():
R, x = ring("x", ZZ)
f = x**5 + 2*x**4 - x**3 - 2*x**2
assert R.dup_gff_list(f) == [(x, 1), (x + 2, 4)]
g = x**9 - 20*x**8 + 166*x**7 - 744*x**6 + 1965*x**5 - 3132*x**4 + 2948*x**3 - 1504*x**2 + 320*x
assert R.dup_gff_list(g) == [(x**2 - 5*x + 4, 1), (x**2 - 5*x + 4, 2), (x, 3)]
raises(ValueError, lambda: R.dup_gff_list(0))
| 29.569536
| 100
| 0.519821
| 975
| 4,465
| 2.212308
| 0.082051
| 0.178489
| 0.12981
| 0.156699
| 0.662031
| 0.596198
| 0.504868
| 0.385257
| 0.29949
| 0.223459
| 0
| 0.088722
| 0.255319
| 4,465
| 150
| 101
| 29.766667
| 0.56
| 0.014558
| 0
| 0.135922
| 0
| 0
| 0.00774
| 0
| 0
| 0
| 0
| 0
| 0.572816
| 1
| 0.029126
| false
| 0
| 0.048544
| 0
| 0.07767
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
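For reference, a short sketch of the square-free decomposition API those tests exercise; it assumes a SymPy version that still exposes sympy.polys.rings.ring together with the dup_sqf_* methods, and the expected outputs are taken directly from the assertions above.

from sympy.polys.rings import ring
from sympy.polys.domains import ZZ

R, x = ring("x", ZZ)
f = -x**5 + x**4 + x - 1
print(R.dup_sqf_p(f))     # False: the factor (x - 1) appears squared
print(R.dup_sqf_list(f))  # (-1, [(x**3 + x**2 + x + 1, 1), (x - 1, 2)])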
6aa848925fe885025486d711e7226e473656a954
| 1,377
|
py
|
Python
|
ezno_convert/enums.py
|
ofersadan85/ezno_convert
|
4c5cf7d41c72698e5486068673f170d968a9de27
|
[
"MIT"
] | 2
|
2021-02-07T21:27:04.000Z
|
2021-03-13T06:47:25.000Z
|
ezno_convert/enums.py
|
ofersadan85/ezno_convert
|
4c5cf7d41c72698e5486068673f170d968a9de27
|
[
"MIT"
] | 1
|
2021-02-10T05:45:00.000Z
|
2021-02-10T05:45:00.000Z
|
ezno_convert/enums.py
|
ofersadan85/ezno_convert
|
4c5cf7d41c72698e5486068673f170d968a9de27
|
[
"MIT"
] | null | null | null |
import enum
from typing import Union
@enum.unique
class PPT(enum.Enum):
# Source: https://docs.microsoft.com/en-us/office/vba/api/powerpoint.ppsaveasfiletype
AnimatedGIF = 40
BMP = 19
Default = 11
EMF = 23
External = 64000
GIF = 16
JPG = 17
META = 15
MP4 = 39
OpenPresentation = 35
PDF = 32
PNG = 18
Presentation = 1
RTF = 6
SHOW = 7
Template = 5
TIF = 21
WMV = 37
XPS = 33
app = 'Powerpoint.Application'
extensions = ('.ppt', '.pptx')
@enum.unique
class WORD(enum.Enum):
# Source: https://docs.microsoft.com/en-us/office/vba/api/word.wdsaveformat
DosText = 4
DosTextLineBreaks = 5
FilteredHTML = 10
FlatXML = 19
OpenDocumentText = 23
HTML = 8
RTF = 6
Template = 1
Text = 2
TextLineBreaks = 3
UnicodeText = 7
WebArchive = 9
XML = 11
Document97 = 0
DocumentDefault = 16
PDF = 17
XPS = 18
app = 'Word.Application'
extensions = ('.doc', '.docx')
@enum.unique
class XL(enum.Enum):
# Source: https://docs.microsoft.com/en-us/office/vba/api/excel.xlfixedformattype
# TODO: Implement "SaveAs" methods, see: https://docs.microsoft.com/en-us/office/vba/api/excel.workbook.saveas
PDF = 0
XPS = 1
app = 'Excel.Application'
extensions = ('.xls', '.xlsx')
enum_types = Union[PPT, WORD, XL]
| 20.863636
| 114
| 0.611474
| 177
| 1,377
| 4.751412
| 0.553672
| 0.042806
| 0.085612
| 0.099881
| 0.237812
| 0.237812
| 0.237812
| 0.237812
| 0.237812
| 0.237812
| 0
| 0.065606
| 0.269426
| 1,377
| 65
| 115
| 21.184615
| 0.770378
| 0.251271
| 0
| 0.09434
| 0
| 0
| 0.079922
| 0.021443
| 0
| 0
| 0
| 0.015385
| 0
| 1
| 0
| false
| 0
| 0.037736
| 0
| 0.924528
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
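A small sketch of how the enum classes above can be consumed; the member names and values come from ezno_convert/enums.py, while the surrounding usage is illustrative.

from ezno_convert.enums import PPT, WORD, enum_types

target: enum_types = PPT.PDF
print(target.name, target.value)   # PDF 32
print(PPT.app.value)               # Powerpoint.Application
print(PPT.extensions.value)        # ('.ppt', '.pptx')
print(WORD.PDF.value)              # 17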
6aa897704d8b8b96376b6c78aa9de27ecec18071
| 378
|
py
|
Python
|
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
app/django_first/news/migrations/0002_movies_year.py
|
vvuri/flask_pipeline
|
d3f283b8a6a6239e56d85e67dbe3edce55bcb980
|
[
"MIT"
] | null | null | null |
# Generated by Django 4.0.1 on 2022-01-19 23:58
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('news', '0001_initial'),
]
operations = [
migrations.AddField(
model_name='movies',
name='year',
field=models.CharField(max_length=4, null=True),
),
]
| 19.894737
| 60
| 0.582011
| 42
| 378
| 5.166667
| 0.833333
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.075188
| 0.296296
| 378
| 18
| 61
| 21
| 0.740602
| 0.119048
| 0
| 0
| 1
| 0
| 0.07855
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.083333
| 0
| 0.333333
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6aaa29259fb6e01655aa91ee60654bb2eceee036
| 1,271
|
py
|
Python
|
gjqyxyxxcxxt/gjqyxyxxcxxt/queue_companies.py
|
AisinoPythonTeam/PythonAiniso
|
983a29962752679d8cc26a2c3cdb0ba8fcfa3f02
|
[
"Apache-2.0"
] | null | null | null |
gjqyxyxxcxxt/gjqyxyxxcxxt/queue_companies.py
|
AisinoPythonTeam/PythonAiniso
|
983a29962752679d8cc26a2c3cdb0ba8fcfa3f02
|
[
"Apache-2.0"
] | null | null | null |
gjqyxyxxcxxt/gjqyxyxxcxxt/queue_companies.py
|
AisinoPythonTeam/PythonAiniso
|
983a29962752679d8cc26a2c3cdb0ba8fcfa3f02
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import pymysql
import sys, os, json, time, pymongo
app_dir = os.path.abspath("../")
sys.path.append(app_dir)
from gjqyxyxxcxxt import settings
from gjqyxyxxcxxt.database.my_redis import QueueRedis
conn = None
def connect_db():
global conn
conn = pymysql.connect(host="172.16.16.15",port=3306,user="root",passwd="A1s1n0@zxyc#3",db="ixinnuo_sjcj",charset="utf8")
return
def get_req_from_db():
global conn
cursor = conn.cursor()
cursor.execute('select id, entname from req where status=0 order by id limit 10')
results = cursor.fetchall()
companies = []
for res in results:
company = {}
company['id'] = res[0]
company['name'] = res[1]
companies.append(company)
return companies
def main():
my_queue = QueueRedis()
result = my_queue.get_queue_length(settings.COMPANIES)
print result
# If the queue already contains data, sleep 3 seconds and exit
if result:
time.sleep(3)
exit()
time.sleep(3)
global conn
connect_db()
source = get_req_from_db()
for id_name in source:
message = json.dumps(id_name)
my_queue.send_to_queue(settings.COMPANIES, message)
conn.close()
print 'Successfully added %s records to the queue!' % len(source)
if __name__ == '__main__':
main()
| 24.921569
| 125
| 0.650669
| 173
| 1,271
| 4.612717
| 0.508671
| 0.037594
| 0.030075
| 0.030075
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.027328
| 0.222659
| 1,271
| 50
| 126
| 25.42
| 0.780364
| 0.028324
| 0
| 0.121951
| 0
| 0
| 0.112916
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0.02439
| 0.097561
| null | null | 0.04878
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
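A brief sketch of the message format that producer pushes onto the settings.COMPANIES Redis queue; the company id and name are hypothetical, and the consumer side is omitted because its API is not part of the file above.

import json

# Shape of one queued message, mirroring get_req_from_db()
company = {'id': 42, 'name': 'Example Trading Co.'}
message = json.dumps(company)
print(message)                      # {"id": 42, "name": "Example Trading Co."}
print(json.loads(message)['name'])  # Example Trading Co.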
6ab5293b9595b159942c1bb0c1e2bfcef5e08aec
| 1,029
|
py
|
Python
|
solutions/PE4.py
|
KerimovEmil/ProjectEuler
|
bc9cb682181c1ac7889ee57c36d32beae7b441a8
|
[
"MIT"
] | 1
|
2022-01-22T19:48:44.000Z
|
2022-01-22T19:48:44.000Z
|
solutions/PE4.py
|
KerimovEmil/ProjectEuler
|
bc9cb682181c1ac7889ee57c36d32beae7b441a8
|
[
"MIT"
] | null | null | null |
solutions/PE4.py
|
KerimovEmil/ProjectEuler
|
bc9cb682181c1ac7889ee57c36d32beae7b441a8
|
[
"MIT"
] | null | null | null |
"""
PROBLEM
A palindromic number reads the same both ways. The largest palindrome made from the product of two 2-digit numbers
is 9009 = 91 × 99.
Find the largest palindrome made from the product of two 3-digit numbers.
ANSWER:
906609
Solve time ~ 0.760 seconds
"""
from itertools import product
import unittest
from util.utils import timeit
class Problem4:
def __init__(self, num_digits):
self.lower = 10 ** (num_digits - 1) - 1
self.upper = 10 ** num_digits - 1
@staticmethod
def is_palindrome(num):
return str(num) == str(num)[::-1]
@timeit
def solve(self):
pds = []
for i, j in product(range(self.lower, self.upper), repeat=2):
if self.is_palindrome(i * j):
pds.append(i * j)
return max(pds)
class Solution4(unittest.TestCase):
def setUp(self):
self.problem = Problem4(3)
def test_solution(self):
self.assertEqual(906609, self.problem.solve())
if __name__ == '__main__':
unittest.main()
| 21.4375
| 114
| 0.640428
| 144
| 1,029
| 4.458333
| 0.493056
| 0.042056
| 0.062305
| 0.074766
| 0.133956
| 0.133956
| 0.133956
| 0.133956
| 0.133956
| 0
| 0
| 0.050781
| 0.253644
| 1,029
| 47
| 115
| 21.893617
| 0.783854
| 0.252673
| 0
| 0
| 0
| 0
| 0.010512
| 0
| 0
| 0
| 0
| 0
| 0.041667
| 1
| 0.208333
| false
| 0
| 0.125
| 0.041667
| 0.5
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
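The same brute-force idea can be sanity-checked on 2-digit factors, which should reproduce the 9009 = 91 x 99 example quoted in the problem statement.

from itertools import product

def is_palindrome(num):
    return str(num) == str(num)[::-1]

# Largest palindrome that is a product of two 2-digit numbers
best = max(i * j for i, j in product(range(10, 100), repeat=2) if is_palindrome(i * j))
print(best)  # 9009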
6ab606d6bade1bb254f8ee2b1905c9d3d07e2051
| 11,447
|
py
|
Python
|
ai_analysis.py
|
kwangilkimkenny/chatbot_seq2seq_flask
|
f2f3bda9311c5f2930aebc8ae4a6497597b190e1
|
[
"MIT"
] | null | null | null |
ai_analysis.py
|
kwangilkimkenny/chatbot_seq2seq_flask
|
f2f3bda9311c5f2930aebc8ae4a6497597b190e1
|
[
"MIT"
] | null | null | null |
ai_analysis.py
|
kwangilkimkenny/chatbot_seq2seq_flask
|
f2f3bda9311c5f2930aebc8ae4a6497597b190e1
|
[
"MIT"
] | null | null | null |
import pandas as pd
import numpy as np
import re
import pickle
# plotting
import seaborn as sns
import matplotlib.pyplot as plt
# Tune learning_rate
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import GridSearchCV
from sklearn.model_selection import StratifiedKFold
# First XGBoost model for MBTI dataset
from numpy import loadtxt
from xgboost import XGBClassifier
from sklearn.model_selection import train_test_split
from sklearn.metrics import accuracy_score
##### Compute list of subject with Type | list of comments
from nltk.stem import PorterStemmer, WordNetLemmatizer
from nltk.corpus import stopwords
from nltk import word_tokenize
import nltk
nltk.download('wordnet')
from sklearn.feature_extraction.text import TfidfTransformer
from sklearn.feature_extraction.text import CountVectorizer
from sklearn.manifold import TSNE
# Convert MBTI type letters into binary indicator values
def get_types(row):
t=row['type']
I = 0; N = 0
T = 0; J = 0
if t[0] == 'I': I = 1
elif t[0] == 'E': I = 0
else: print('I-E incorrect')
if t[1] == 'N': N = 1
elif t[1] == 'S': N = 0
else: print('N-S incorrect')
if t[2] == 'T': T = 1
elif t[2] == 'F': T = 0
else: print('T-F incorrect')
if t[3] == 'J': J = 1
elif t[3] == 'P': J = 0
else: print('J-P incorrect')
return pd.Series( {'IE':I, 'NS':N , 'TF': T, 'JP': J })
# Mapping from each MBTI letter to its binary value
b_Pers = {'I':0, 'E':1, 'N':0, 'S':1, 'F':0, 'T':1, 'J':0, 'P':1}
# Per-axis lookup tables for decoding binary values back into letters
b_Pers_list = [{0:'I', 1:'E'}, {0:'N', 1:'S'}, {0:'F', 1:'T'}, {0:'J', 1:'P'}]
def translate_personality(personality):
# transform mbti to binary vector
return [b_Pers[l] for l in personality]
def translate_back(personality):
# transform binary vector to mbti personality
s = ""
for i, l in enumerate(personality):
s += b_Pers_list[i][l]
return s
# We want to remove these from the posts
unique_type_list = ['INFJ', 'ENTP', 'INTP', 'INTJ', 'ENTJ', 'ENFJ', 'INFP', 'ENFP',
'ISFP', 'ISTP', 'ISFJ', 'ISTJ', 'ESTP', 'ESFP', 'ESTJ', 'ESFJ']
unique_type_list = [x.lower() for x in unique_type_list]
# Lemmatize
stemmer = PorterStemmer()
lemmatiser = WordNetLemmatizer()
# Cache the stop words for speed
cachedStopWords = stopwords.words("english")
def pre_process_data(data, remove_stop_words=True, remove_mbti_profiles=True):
list_personality = []
list_posts = []
len_data = len(data)
i=0
for row in data.iterrows():
i+=1
if (i % 500 == 0 or i == 1 or i == len_data):
print("%s of %s rows" % (i, len_data))
##### Remove and clean comments
posts = row[1].posts
temp = re.sub('http[s]?://(?:[a-zA-Z]|[0-9]|[$-_@.&+]|(?:%[0-9a-fA-F][0-9a-fA-F]))+', ' ', posts)
temp = re.sub("[^a-zA-Z]", " ", temp)
temp = re.sub(' +', ' ', temp).lower()
if remove_stop_words:
temp = " ".join([lemmatiser.lemmatize(w) for w in temp.split(' ') if w not in cachedStopWords])
else:
temp = " ".join([lemmatiser.lemmatize(w) for w in temp.split(' ')])
if remove_mbti_profiles:
for t in unique_type_list:
temp = temp.replace(t,"")
type_labelized = translate_personality(row[1].type)
list_personality.append(type_labelized)
list_posts.append(temp)
list_posts = np.array(list_posts)
list_personality = np.array(list_personality)
return list_posts, list_personality
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject_2020_12_09/essayai/mbti_1.csv')
data = pd.read_csv('./mbti/mbti_1.csv')
# Apply the get_types function to label each row
data = data.join(data.apply (lambda row: get_types (row),axis=1))
# load
with open('./mbti/list_posts.pickle', 'rb') as f:
list_posts = pickle.load(f)
# load
with open('./mbti/list_personality.pickle', 'rb') as f:
list_personality = pickle.load(f)
# # Posts to a matrix of token counts
cntizer = CountVectorizer(analyzer="word",
max_features=1500,
tokenizer=None,
preprocessor=None,
stop_words=None,
max_df=0.7,
min_df=0.1)
# Learn the vocabulary dictionary and return term-document matrix
print("CountVectorizer...")
X_cnt = cntizer.fit_transform(list_posts)
#################################################
# Save/load the fitted X_cnt matrix
import pickle
# save
# with open('./essayai/ai_character/mbti/data_X_cnt.pickle', 'wb') as f:
# pickle.dump(X_cnt, f, pickle.HIGHEST_PROTOCOL)
# load
with open('./mbti/data_X_cnt.pickle', 'rb') as f:
X_cnt = pickle.load(f)
#################################################
# Transform the count matrix to a normalized tf or tf-idf representation
tfizer = TfidfTransformer()
print("Tf-idf...")
# Learn the idf vector (fit) and transform a count matrix to a tf-idf representation
X_tfidf = tfizer.fit_transform(X_cnt).toarray()
# load
with open('./mbti/data.pickle', 'rb') as f:
X_tfidf = pickle.load(f)
def mbti_classify(text):
type_indicators = [ "IE: Introversion (I) / Extroversion (E)", "NS: Intuition (N) – Sensing (S)",
"FT: Feeling (F) - Thinking (T)", "JP: Judging (J) – Perceiving (P)" ]
# Posts in tf-idf representation
X = X_tfidf
my_posts = str(text)
# The type is just a dummy so that the data prep function can be reused
mydata = pd.DataFrame(data={'type': ['INFJ'], 'posts': [my_posts]})
my_posts, dummy = pre_process_data(mydata, remove_stop_words=True)
my_X_cnt = cntizer.transform(my_posts)
my_X_tfidf = tfizer.transform(my_X_cnt).toarray()
# setup parameters for xgboost
param = {}
param['n_estimators'] = 200
param['max_depth'] = 2
param['nthread'] = 8
param['learning_rate'] = 0.2
result = []
# Let's train type indicator individually
for l in range(len(type_indicators)):
print("%s ..." % (type_indicators[l]))
Y = list_personality[:,l]
# split data into train and test sets
seed = 7
test_size = 0.33
X_train, X_test, y_train, y_test = train_test_split(X, Y, test_size=test_size, random_state=seed)
# fit model on training data
model = XGBClassifier(**param)
model.fit(X_train, y_train)
# make predictions for my data
y_pred = model.predict(my_X_tfidf)
result.append(y_pred[0])
# print("* %s prediction: %s" % (type_indicators[l], y_pred))
print("The result is: ", translate_back(result))
# Collect the predicted letters into a list
Result_list = list(translate_back(result))
# Print the matching explanation for each letter of the MBTI result
# read data
# data = pd.read_csv('/Users/jongphilkim/Desktop/Django_WEB/essayfitaiproject/essayai/mbti_exp.csv')
data = pd.read_csv('./mbti/mbti_exp.csv')
# Build a new DataFrame to hold the computed values
df2 = pd.DataFrame(index=range(0,4),columns=['Type', 'Explain'])
# Look up each letter from the list and fill in the corresponding explanation
for i in range(0, len(Result_list)):
type = Result_list[i]
for j in range(0, len(data)):
if type == data.iloc[j,0]:
break
is_mbti = data.iloc[j,2]
df2.iloc[i, [0,1]] = [type, is_mbti]
print(df2)
return df2
# my_posts = """Describe a place or environment where you are perfectly content. What do you do or experience there, and why is it meaningful to you? 644 words out of 650 Gettysburg, a small town in the middle of Pennsylvania, was the sight of the largest, bloodiest battle in the Civil War. Something about these hallowed grounds draws me back every year for a three day camping trip with my family over Labor Day weekend. Every year, once school starts, I count the days until I take that three and half hour drive from Pittsburgh to Gettysburg. Each year, we leave after school ends on Friday and arrive in Gettysburg with just enough daylight to pitch the tents and cook up a quick dinner on the campfire. As more of the extended family arrives, we circle around the campfire and find out what is new with everyone. The following morning, everyone is up by nine and helping to make breakfast which is our best meal of the day while camping. Breakfast will fuel us for the day as we hike the vast battlefields. My Uncle Mark, my twin brother, Andrew, and I like to take charge of the family tour since we have the most passion and knowledge about the battle. I have learned so much from the stories Mark tells us while walking on the tours. Through my own research during these last couple of trips, I did some of the explaining about the events that occurred during the battle 150 years ago. My fondest experience during one trip was when we decided to go off of the main path to find a carving in a rock from a soldier during the battle. Mark had read about the carving in one of his books about Gettysburg, and we were determined to locate it. After almost an hour of scanning rocks in the area, we finally found it with just enough daylight to read what it said. After a long day of exploring the battlefield, we went back to the campsite for some 'civil war' stew. There is nothing special about the stew, just meat, vegetables and gravy, but for whatever reason, it is some of the best stew I have ever eaten. For the rest of the night, we enjoy the company of our extended family. My cousins, my brother and I listen to the stories from Mark and his friends experiences' in the military. After the parents have gone to bed, we stay up talking with each other, inching closer and closer to the fire as it gets colder. Finally, we creep back into our tents, trying to be as quiet as possible to not wake our parents. The next morning we awake red-eyed from the lack of sleep and cook up another fantastic breakfast. Unfortunately, after breakfast we have to pack up and head back to Pittsburgh. It will be another year until I visit Gettysburg again. There is something about that time I spend in Gettysburg that keeps me coming back to visit. For one, it is just a fun, relaxing time I get to spend with my family. This trip also fulfills my love for the outdoors. From sitting by the campfire and falling asleep to the chirp of the crickets, that is my definition of a perfect weekend. Gettysburg is also an interesting place to go for Civil War buffs like me. While walking down the Union line or walking Pickett's Charge, I imagine how the battle would have been played out around me. Every year when I visit Gettysburg, I learn more facts and stories about the battle, soldiers and generally about the Civil War. While I am in Gettysburg, I am perfectly content, passionate about the history and just enjoying the great outdoors with my family. 
This drive to learn goes beyond just my passion for history but applies to all of the math, science and business classes I have taken and clubs I am involved in at school. Every day, I am genuinely excited to learn.
# """
# test = mbti_classify(my_posts)
# print ('check')
# test
# print ('check2')
| 41.625455
| 3,675
| 0.653621
| 1,750
| 11,447
| 4.194286
| 0.326857
| 0.006812
| 0.00327
| 0.007084
| 0.107493
| 0.070845
| 0.06049
| 0.053951
| 0.053951
| 0.053951
| 0
| 0.011976
| 0.241373
| 11,447
| 275
| 3,676
| 41.625455
| 0.833026
| 0.451909
| 0
| 0.042553
| 0
| 0.007092
| 0.111929
| 0.024987
| 0
| 0
| 0
| 0
| 0
| 1
| 0.035461
| false
| 0
| 0.156028
| 0.007092
| 0.22695
| 0.070922
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
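A tiny round-trip check of the MBTI encoding helpers defined in that script; it assumes translate_personality and translate_back are in scope (for example imported from ai_analysis), and the outputs follow from the b_Pers mapping shown above.

vec = translate_personality('INFJ')                    # [0, 0, 0, 0] under b_Pers
print(vec)
print(translate_back(vec))                             # 'INFJ'
print(translate_back(translate_personality('ESTP')))   # 'ESTP'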
6ac1a5f132a19c0dca01d22ddfd3613255dba8b5
| 4,258
|
py
|
Python
|
wce_triage/ops/create_image_runner.py
|
pfrouleau/wce-triage-v2
|
25610cda55f5cb2170e13e121ae1cbaa92ef7626
|
[
"MIT"
] | 3
|
2019-07-25T03:24:23.000Z
|
2021-06-23T14:01:34.000Z
|
wce_triage/ops/create_image_runner.py
|
pfrouleau/wce-triage-v2
|
25610cda55f5cb2170e13e121ae1cbaa92ef7626
|
[
"MIT"
] | 1
|
2019-12-20T16:04:19.000Z
|
2019-12-20T16:04:19.000Z
|
wce_triage/ops/create_image_runner.py
|
pfrouleau/wce-triage-v2
|
25610cda55f5cb2170e13e121ae1cbaa92ef7626
|
[
"MIT"
] | 2
|
2019-07-25T03:24:26.000Z
|
2021-02-14T05:27:11.000Z
|
#!/usr/bin/env python3
#
# Create disk image
#
import re, sys, traceback
from .tasks import task_fetch_partitions, task_refresh_partitions, task_mount, task_remove_persistent_rules, task_remove_logs, task_fsck, task_shrink_partition, task_expand_partition, task_unmount
from .partclone_tasks import task_create_disk_image
from .ops_ui import console_ui
from ..components.disk import create_storage_instance
from .runner import Runner
from ..lib.disk_images import make_disk_image_name
from .json_ui import json_ui
from ..lib.util import init_triage_logger, is_block_device
# "Waiting", "Prepare", "Preflight", "Running", "Success", "Failed"]
my_messages = { "Waiting": "Saving disk is waiting.",
"Prepare": "Savign disk is preparing.",
"Preflight": "Saving disk is preparing.",
"Running": "{step} of {steps}: Running {task}",
"Success": "Saving disk completed successfully.",
"Failed": "Saving disk failed." }
#
class ImageDiskRunner(Runner):
'''Runner for creating disk image. does fsck, shrink partition, create disk
image and resize the file system back to the max.
For now, this is only dealing with the EXT4 linux partition.
'''
# FIXME: To make this a generic clone app, I need to deal with all of the partitions on the disk.
# One step at a time.
def __init__(self, ui, runner_id, disk, destdir, suggestedname=None, partition_id='Linux'):
super().__init__(ui, runner_id)
self.time_estimate = 600
self.disk = disk
self.partition_id = partition_id
self.destdir = destdir
self.imagename = make_disk_image_name(destdir, suggestedname)
pass
def prepare(self):
super().prepare()
# self.tasks.append(task_mount_nfs_destination(self, "Mount the destination volume"))
self.tasks.append(task_fetch_partitions("Fetch partitions", self.disk))
self.tasks.append(task_refresh_partitions("Refresh partition information", self.disk))
self.tasks.append(task_mount("Mount the target disk", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_remove_persistent_rules("Remove persistent rules", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_remove_logs("Remove/Clean Logs", disk=self.disk, partition_id=self.partition_id))
task = task_unmount("Unmount target", disk=self.disk, partition_id=self.partition_id)
task.set_teardown_task()
self.tasks.append(task)
self.tasks.append(task_fsck("fsck partition", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_shrink_partition("Shrink partition to smallest", disk=self.disk, partition_id=self.partition_id))
self.tasks.append(task_create_disk_image("Create disk image", disk=self.disk, partition_id=self.partition_id, imagename=self.imagename))
task = task_expand_partition("Expand the partion back", disk=self.disk, partition_id=self.partition_id)
task.set_teardown_task()
self.tasks.append(task)
pass
pass
if __name__ == "__main__":
tlog = init_triage_logger()
if len(sys.argv) == 1:
print( 'Unloader: devicename part destdir')
sys.exit(0)
# NOTREACHED
pass
devname = sys.argv[1]
if not is_block_device(devname):
print( '%s is not a block device.' % devname)
sys.exit(1)
# NOTREACHED
pass
part = sys.argv[2] # This is a partition id
destdir = sys.argv[3] # Destination directory
disk = create_storage_instance(devname)
# Preflight is for me to see the tasks. http server runs this with json_ui.
do_it = True
if destdir == "preflight":
ui = console_ui()
do_it = False
pass
elif destdir == "testflight":
ui = console_ui()
do_it = True
pass
else:
ui = json_ui(wock_event="saveimage", message_catalog=my_messages)
pass
if re.match(r'\d+', part):
part = int(part)
pass
runner_id = disk.device_name
runner = ImageDiskRunner(ui, runner_id, disk, destdir, partition_id=part)
try:
runner.prepare()
runner.preflight()
runner.explain()
runner.run()
sys.exit(0)
# NOTREACHED
except Exception as exc:
sys.stderr.write(traceback.format_exc(exc) + "\n")
sys.exit(1)
# NOTREACHED
pass
pass
| 35.190083
| 196
| 0.711837
| 593
| 4,258
| 4.903879
| 0.284992
| 0.079436
| 0.067056
| 0.071871
| 0.237276
| 0.18088
| 0.162311
| 0.162311
| 0.149243
| 0.134801
| 0
| 0.003726
| 0.180601
| 4,258
| 120
| 197
| 35.483333
| 0.829751
| 0.155707
| 0
| 0.27381
| 0
| 0
| 0.142457
| 0
| 0
| 0
| 0
| 0.008333
| 0
| 1
| 0.02381
| false
| 0.130952
| 0.107143
| 0
| 0.142857
| 0.02381
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
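A preflight-style sketch of driving ImageDiskRunner from Python, mirroring the "preflight" branch of the __main__ block above; the device path and destination directory are placeholders, and the imports assume the wce_triage package layout shown in the file header.

from wce_triage.ops.create_image_runner import ImageDiskRunner
from wce_triage.ops.ops_ui import console_ui
from wce_triage.components.disk import create_storage_instance

disk = create_storage_instance('/dev/sdX')           # placeholder block device
runner = ImageDiskRunner(console_ui(), disk.device_name, disk,
                         '/var/tmp/images', partition_id='Linux')
runner.prepare()   # queue the fsck/shrink/image/expand tasks
runner.explain()   # print the task list without touching the disk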
6ac3173f834c06ec5469554b76a1d8e391432cee
| 5,171
|
py
|
Python
|
demos/chicken_pasta/chicken_pasta.py
|
icaros-usc/wecook
|
27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc
|
[
"BSD-3-Clause"
] | 15
|
2019-09-15T05:24:19.000Z
|
2021-02-26T20:31:19.000Z
|
demos/chicken_pasta/chicken_pasta.py
|
icaros-usc/wecook
|
27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc
|
[
"BSD-3-Clause"
] | 16
|
2019-10-10T23:27:00.000Z
|
2020-05-14T02:30:56.000Z
|
demos/chicken_pasta/chicken_pasta.py
|
icaros-usc/wecook
|
27bbb6b78a48e04765a87d33cc8a5d3748d2d4cc
|
[
"BSD-3-Clause"
] | 2
|
2020-02-01T16:31:29.000Z
|
2020-04-07T21:00:04.000Z
|
#!/usr/bin/env python3
import rospy
from wecook.msg import ActionMsg, TaskMsg, SceneMsg, ObjectMsg, ContainingMsg, AgentMsg
def talker():
pub = rospy.Publisher('WeCookDispatch', TaskMsg, queue_size=10)
rospy.init_node('wecook_chicken_pasta', anonymous=True)
scene_msg = SceneMsg([ObjectMsg('wall0',
'package://wecook_assets/data/furniture/wall.urdf',
[0.75, 0.05, 0., 0., 0., 0., 1.]),
ObjectMsg('wall1',
'package://wecook_assets/data/furniture/wall.urdf',
[-0.85, 1.45, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('counter0',
'package://wecook_assets/data/furniture/kitchen_counter.urdf',
[0.3, 0., 0., 0., 0., 0., 1.]),
ObjectMsg('counter1',
'package://wecook_assets/data/furniture/kitchen_counter.urdf',
[0., 1.0, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('sink0',
'package://wecook_assets/data/furniture/sink_counter.urdf',
[-1.3, 1.05, 0., 0., 0., 0.707, 0.707]),
ObjectMsg('shelf0',
'package://wecook_assets/data/furniture/bookcase.urdf',
[0.3, -1.05, 0., 0., 0., 0., 1.]),
ObjectMsg('stove0',
'package://wecook_assets/data/objects/stove.urdf',
[-0.35, 0.95, 0.75, 0., 0., 0., 1.]),
ObjectMsg('pot0',
'package://wecook_assets/data/objects/cooking_pot.urdf',
[0.35, 1.1, 0.75, 0., 0., 0., 1.]),
ObjectMsg('skillet0',
'package://wecook_assets/data/objects/skillet.urdf',
[0.3, 0.7, 0.75, 0., 0., -0.707, .707]),
ObjectMsg('cutting_board0',
'package://wecook_assets/data/objects/cutting_board.urdf',
[0.3, -0.3, 0.75, 0., 0., 0., 1.]),
ObjectMsg('knife0',
'package://wecook_assets/data/objects/knife_big.urdf',
[0.215, -0.55, 0.775, 0., 0., 0., 1.]),
ObjectMsg('plate0',
'package://wecook_assets/data/objects/plate.urdf',
[0.3, 0.075, 0.75, 0., 0., 0., 1.]),
ObjectMsg('bowl0',
'package://wecook_assets/data/objects/bowl_green.urdf',
[0.45, 0.375, 0.75, 0., 0., 0., 1.]),
ObjectMsg('bowl1',
'package://wecook_assets/data/objects/bowl_green.urdf',
[0.15, 0.375, 0.75, 0., 0., 0., 1.]),
ObjectMsg('oil0',
'package://wecook_assets/data/objects/olive_oil.urdf',
[0., 1.15, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('salt0',
'package://wecook_assets/data/objects/salt.urdf',
[0., 1.0, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('pepper0',
'package://wecook_assets/data/objects/black_pepper.urdf',
[0., 0.9, 0.75, 0., 0., 0.707, 0.707]),
ObjectMsg('chicken0',
'package://wecook_assets/data/food/chicken.urdf',
[0.3, 0.075, 0.757, 0., 0., 0., 1.]),
ObjectMsg('lime0',
'package://wecook_assets/data/food/lime.urdf',
[0.3, -0.3, 0.757, 0., 0., 0., 1.]),
ObjectMsg('pasta0',
'package://wecook_assets/data/food/pasta.urdf',
[0.45, 0.375, 0.757, 0., 0., 0., 1.])],
[ContainingMsg(['plate0', 'chicken0']),
ContainingMsg(['bowl0', 'pasta0'])])
task_msg = TaskMsg(scene_msg,
[ActionMsg(['p1'], 'cut', ['plate0'], 'knife0', ['lime0'])],
[AgentMsg('p1', 'r', [0., 0., 0.75, 0., 0., 0., 0.])],
"",
"",
"follow",
"RRTConnect",
False)
# sleep 1 second so the publisher can connect before publishing
rospy.sleep(1)
pub.publish(task_msg)
if __name__ == '__main__':
try:
talker()
except rospy.ROSInterruptException:
pass
| 55.010638
| 98
| 0.375169
| 482
| 5,171
| 3.927386
| 0.242739
| 0.058109
| 0.049128
| 0.243001
| 0.58056
| 0.349709
| 0.303222
| 0.231379
| 0.156366
| 0
| 0
| 0.11743
| 0.47631
| 5,171
| 93
| 99
| 55.602151
| 0.58161
| 0.010056
| 0
| 0.097561
| 0
| 0
| 0.243502
| 0.197772
| 0
| 0
| 0
| 0
| 0
| 1
| 0.012195
| false
| 0.012195
| 0.02439
| 0
| 0.036585
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
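A hypothetical listener-side counterpart for the 'WeCookDispatch' topic used above; it assumes the same wecook message definitions are installed and deliberately avoids relying on any TaskMsg field names, since those are defined in the wecook package rather than in this demo file.

import rospy
from wecook.msg import TaskMsg

def on_task(msg):
    # Just acknowledge receipt; field access is omitted on purpose.
    rospy.loginfo('received a TaskMsg on WeCookDispatch')

rospy.init_node('wecook_chicken_pasta_listener', anonymous=True)
rospy.Subscriber('WeCookDispatch', TaskMsg, on_task)
rospy.spin()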
6ac3c0aa131a8fbf4b061367a8fbb2e23790a4c8
| 3,777
|
py
|
Python
|
metricbeat/module/postgresql/test_postgresql.py
|
SHolzhauer/beats
|
39679a536a22e8a0d7534a2475504488909d19fd
|
[
"ECL-2.0",
"Apache-2.0"
] | 4
|
2020-11-17T06:29:30.000Z
|
2021-08-08T11:56:01.000Z
|
metricbeat/module/postgresql/test_postgresql.py
|
SHolzhauer/beats
|
39679a536a22e8a0d7534a2475504488909d19fd
|
[
"ECL-2.0",
"Apache-2.0"
] | 36
|
2021-02-02T14:18:40.000Z
|
2022-03-20T15:07:30.000Z
|
metricbeat/module/postgresql/test_postgresql.py
|
SHolzhauer/beats
|
39679a536a22e8a0d7534a2475504488909d19fd
|
[
"ECL-2.0",
"Apache-2.0"
] | 6
|
2021-03-10T05:38:32.000Z
|
2021-08-16T13:11:19.000Z
|
import metricbeat
import os
import pytest
import sys
import unittest
class Test(metricbeat.BaseTest):
COMPOSE_SERVICES = ['postgresql']
def common_checks(self, output):
# Ensure no errors or warnings exist in the log.
self.assert_no_logged_warnings()
for evt in output:
top_level_fields = metricbeat.COMMON_FIELDS + ["postgresql"]
self.assertCountEqual(self.de_dot(top_level_fields), evt.keys())
self.assert_fields_are_documented(evt)
def get_hosts(self):
username = "postgres"
host = self.compose_host()
dsn = "postgres://{}?sslmode=disable".format(host)
return (
[dsn],
username,
os.getenv("POSTGRESQL_PASSWORD"),
)
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_activity(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["activity"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["activity"]["database"]
assert "oid" in evt["postgresql"]["activity"]["database"]
assert "state" in evt["postgresql"]["activity"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_database(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["database"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "name" in evt["postgresql"]["database"]
assert "oid" in evt["postgresql"]["database"]
assert "blocks" in evt["postgresql"]["database"]
assert "rows" in evt["postgresql"]["database"]
assert "conflicts" in evt["postgresql"]["database"]
assert "deadlocks" in evt["postgresql"]["database"]
@unittest.skipUnless(metricbeat.INTEGRATION_TESTS, "integration test")
@pytest.mark.tag('integration')
def test_bgwriter(self):
"""
PostgreSQL module outputs an event.
"""
hosts, username, password = self.get_hosts()
self.render_config_template(modules=[{
"name": "postgresql",
"metricsets": ["bgwriter"],
"hosts": hosts,
"username": username,
"password": password,
"period": "5s"
}])
proc = self.start_beat()
self.wait_until(lambda: self.output_lines() > 0)
proc.check_kill_and_wait()
output = self.read_output_json()
self.common_checks(output)
for evt in output:
assert "checkpoints" in evt["postgresql"]["bgwriter"]
assert "buffers" in evt["postgresql"]["bgwriter"]
assert "stats_reset" in evt["postgresql"]["bgwriter"]
| 32.843478
| 76
| 0.581943
| 380
| 3,777
| 5.626316
| 0.247368
| 0.028064
| 0.084191
| 0.064546
| 0.697381
| 0.623012
| 0.579046
| 0.579046
| 0.579046
| 0.579046
| 0
| 0.00224
| 0.290707
| 3,777
| 114
| 77
| 33.131579
| 0.795819
| 0.041038
| 0
| 0.556818
| 0
| 0
| 0.182432
| 0.008164
| 0
| 0
| 0
| 0
| 0.170455
| 1
| 0.056818
| false
| 0.079545
| 0.056818
| 0
| 0.147727
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac4ca9b00a8492410dc6166ad36ac8d64fdcffc
| 2,337
|
py
|
Python
|
rabbitmq/tests/common.py
|
jfmyers9/integrations-core
|
8793c784f1d5b2c9541b2dd4214dd91584793ced
|
[
"BSD-3-Clause"
] | 1
|
2021-03-24T13:00:14.000Z
|
2021-03-24T13:00:14.000Z
|
rabbitmq/tests/common.py
|
jfmyers9/integrations-core
|
8793c784f1d5b2c9541b2dd4214dd91584793ced
|
[
"BSD-3-Clause"
] | null | null | null |
rabbitmq/tests/common.py
|
jfmyers9/integrations-core
|
8793c784f1d5b2c9541b2dd4214dd91584793ced
|
[
"BSD-3-Clause"
] | null | null | null |
# (C) Datadog, Inc. 2018-present
# All rights reserved
# Licensed under a 3-clause BSD style license (see LICENSE)
import os
from packaging import version
from datadog_checks.base.utils.common import get_docker_hostname
HERE = os.path.dirname(os.path.abspath(__file__))
ROOT = os.path.dirname(os.path.dirname(HERE))
RABBITMQ_VERSION_RAW = os.environ['RABBITMQ_VERSION']
RABBITMQ_VERSION = version.parse(RABBITMQ_VERSION_RAW)
CHECK_NAME = 'rabbitmq'
HOST = get_docker_hostname()
PORT = 15672
URL = 'http://{}:{}/api/'.format(HOST, PORT)
CONFIG = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
}
CONFIG_NO_NODES = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues': ['test1'],
'tags': ["tag1:1", "tag2"],
'exchanges': ['test1'],
'collect_node_metrics': False,
}
CONFIG_REGEX = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'queues_regexes': [r'test\d+'],
'exchanges_regexes': [r'test\d+'],
}
CONFIG_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['/', 'myvhost'],
}
CONFIG_WITH_FAMILY = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'tag_families': True,
'queues_regexes': [r'(test)\d+'],
'exchanges_regexes': [r'(test)\d+'],
}
CONFIG_DEFAULT_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['/', 'test'],
}
CONFIG_TEST_VHOSTS = {
'rabbitmq_api_url': URL,
'rabbitmq_user': 'guest',
'rabbitmq_pass': 'guest',
'vhosts': ['test', 'test2'],
}
EXCHANGE_MESSAGE_STATS = {
'ack': 1.0,
'ack_details': {'rate': 1.0},
'confirm': 1.0,
'confirm_details': {'rate': 1.0},
'deliver_get': 1.0,
'deliver_get_details': {'rate': 1.0},
'publish': 1.0,
'publish_details': {'rate': 1.0},
'publish_in': 1.0,
'publish_in_details': {'rate': 1.0},
'publish_out': 1.0,
'publish_out_details': {'rate': 1.0},
'return_unroutable': 1.0,
'return_unroutable_details': {'rate': 1.0},
'redeliver': 1.0,
'redeliver_details': {'rate': 1.0},
}
| 23.606061
| 64
| 0.618314
| 291
| 2,337
| 4.694158
| 0.323024
| 0.023426
| 0.070278
| 0.076135
| 0.489751
| 0.418009
| 0.418009
| 0.418009
| 0.418009
| 0.418009
| 0
| 0.027749
| 0.182713
| 2,337
| 98
| 65
| 23.846939
| 0.687435
| 0.046213
| 0
| 0.341772
| 0
| 0
| 0.402247
| 0.011236
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.088608
| 0.037975
| 0
| 0.037975
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
6ac55faf90a367de65f30a569842061f13204e0c
| 2,952
|
py
|
Python
|
module1-introduction-to-sql/query.py
|
jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases
|
8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14
|
[
"MIT"
] | null | null | null |
module1-introduction-to-sql/query.py
|
jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases
|
8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14
|
[
"MIT"
] | null | null | null |
module1-introduction-to-sql/query.py
|
jrslagle/DS-Unit-3-Sprint-2-SQL-and-Databases
|
8a6b3fd14b6a6833ee3a14b2d8a7db3bee494a14
|
[
"MIT"
] | null | null | null |
# Look at the charactercreator_character table
# GET_CHARACTERS = """
# SELECT *
# FROM charactercreator_character;
# """
# How many total Characters are there? (302)
TOTAL_CHARACTERS = """
SELECT COUNT(*) as number_of_characters
FROM charactercreator_character;
"""
# How many of each specific subclass?
# TOTAL_SUBCLASS = """
# SELECT
# (SELECT COUNT(*) FROM charactercreator_necromancer) AS necros,
# (SELECT COUNT(*) FROM charactercreator_mage) AS mages,
# (SELECT COUNT(*) FROM charactercreator_thief) AS thiefs,
# (SELECT COUNT(*) FROM charactercreator_cleric) AS clerics,
# (SELECT COUNT(*) FROM charactercreator_fighter) AS fighters;
# """
CLASS = "SELECT COUNT(*) FROM charactercreator_"
# How many total Items? (174)
TOTAL_ITEMS = """
SELECT COUNT(item_id) as items
FROM armory_item;
"""
# How many of the Items are weapons? (37)
WEAPONS = """
SELECT COUNT(item_ptr_id)
FROM armory_weapon;
"""
# How many of the items are not weapons? (137)
NON_WEAPONS = """
SELECT COUNT(items.name)
FROM armory_item as items
WHERE items.item_id NOT IN(
SELECT armory_weapon.item_ptr_id
FROM armory_weapon);
"""
# How many Items does each character have? (Return first 20 rows)
CHARACTER_ITEMS = """
SELECT character.name as "character_name", COUNT(inventory.id) as "#_of_items"
FROM charactercreator_character AS character, charactercreator_character_inventory AS inventory
WHERE character.character_id = inventory.character_id
GROUP BY character.name
ORDER BY character.name
LIMIT 20;
"""
# How many Weapons does each character have? (Return first 20 rows)
CHARACTER_WEAPONS = """
SELECT character.name as "character_name", COUNT(weapon.item_ptr_id) as "#_of_weapons"
FROM charactercreator_character AS character, charactercreator_character_inventory AS inventory, armory_weapon as weapon
WHERE character.character_id = inventory.character_id AND inventory.item_id = weapon.item_ptr_id
GROUP BY character.name
ORDER BY character.name
LIMIT 20;
"""
# On average, how many Items does each Character have? (3.02)
AVG_CHARACTER_ITEMS = """
SELECT
AVG("#_of_items") as "avg_#_of_items"
FROM
(
SELECT
COUNT(inventory.id) AS "#_of_items"
FROM
charactercreator_character AS character,
charactercreator_character_inventory AS inventory
WHERE
character.character_id = inventory.character_id
GROUP BY character.name
);
"""
# On average, how many Weapons does each character have? (0.67)
AVG_CHARACTER_WEAPONS = """
SELECT
AVG(weapon_count) as avg_weapons_per_char
FROM (
SELECT
character.character_id,
COUNT(DISTINCT weapon.item_ptr_id) as weapon_count
FROM
charactercreator_character AS character
LEFT JOIN charactercreator_character_inventory inventory -- characters may have zero items
ON character.character_id = inventory.character_id
LEFT JOIN armory_weapon weapon -- many items are not weapons, so only retain weapons
ON inventory.item_id = weapon.item_ptr_id
GROUP BY character.character_id
) subq;
"""
| 28.941176
| 120
| 0.774051
| 404
| 2,952
| 5.445545
| 0.195545
| 0.109091
| 0.079545
| 0.084545
| 0.530455
| 0.464091
| 0.430909
| 0.337727
| 0.308636
| 0.265909
| 0
| 0.009827
| 0.138211
| 2,952
| 101
| 121
| 29.227723
| 0.854953
| 0.300474
| 0
| 0.328358
| 0
| 0
| 0.882353
| 0.267647
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
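A short sketch of running one of those query strings with the standard sqlite3 module; the database filename is an assumption from the assignment context, not something defined in query.py itself.

import sqlite3

from query import TOTAL_CHARACTERS, CHARACTER_ITEMS

conn = sqlite3.connect('rpg_db.sqlite3')   # assumed database file
cur = conn.cursor()
print(cur.execute(TOTAL_CHARACTERS).fetchone()[0])    # e.g. 302
for name, count in cur.execute(CHARACTER_ITEMS).fetchall():
    print(name, count)                                # first 20 characters and item counts
conn.close()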
6ac7d878414c23d75e260d1c447ced1efb264340
| 2,420
|
py
|
Python
|
events_page/app.py
|
los-verdes/lv-event-pagenerator
|
88416b626ff2dca6e2d71fa60bff4823954b3131
|
[
"MIT"
] | null | null | null |
events_page/app.py
|
los-verdes/lv-event-pagenerator
|
88416b626ff2dca6e2d71fa60bff4823954b3131
|
[
"MIT"
] | 7
|
2022-01-16T15:36:40.000Z
|
2022-01-25T22:02:12.000Z
|
events_page/app.py
|
los-verdes/lv-event-pagenerator
|
88416b626ff2dca6e2d71fa60bff4823954b3131
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
from zoneinfo import ZoneInfo
import flask
from dateutil.parser import parse
from flask_assets import Bundle, Environment
from logzero import logger, setup_logger
from webassets.filter import get_filter
from config import cfg
from apis import calendar as gcal
setup_logger(name=__name__)
app = flask.Flask(__name__)
libsass = get_filter(
"libsass",
as_output=True,
style="compressed",
)
assets = Environment(app) # create an Environment instance
bundles = { # define nested Bundle
"style": Bundle(
"scss/*.scss",
filters=(libsass),
output="style.css",
)
}
assets.register(bundles)
@app.route("/")
def events():
return flask.render_template(
"index.html",
calendar=gcal.load_calendar(
service=gcal.build_service(),
calendar_id=cfg.calendar_id,
),
)
@app.template_filter()
def parse_tz_datetime(datetime_str):
return parse(datetime_str).replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def replace_tz(datetime_obj):
return datetime_obj.replace(tzinfo=ZoneInfo(app.config["display_timezone"]))
@app.template_filter()
def hex2rgb(hex, alpha=None):
"""Convert a string to all caps."""
if not hex.startswith("#"):
return hex
h = hex.lstrip("#")
try:
rgb = tuple(int(h[i : i + 2], 16) for i in (0, 2, 4)) # noqa
except Exception as err:
logger.exception(f"unable to convert {hex=} to rgb: {err}")
return h
if alpha is None:
return f"rgb({rgb[0]}, {rgb[1]}, {rgb[2]})"
else:
return f"rgba({rgb[0]}, {rgb[1]}, {rgb[2]}, {alpha})"
def get_base_url():
if prefix := cfg.gcs_bucket_prefix:
return f"https://{cfg.hostname}/{prefix}"
return f"https://{cfg.hostname}"
def create_app():
cfg.load()
# TODO: do this default settings thing better?
default_app_config = dict(
display_timezone=cfg.display_timezone,
FREEZER_BASE_URL=get_base_url(),
FREEZER_STATIC_IGNORE=["*.scss", ".webassets-cache/*", ".DS_Store"],
FREEZER_RELATIVE_URLS=False,
FREEZER_REMOVE_EXTRA_FILES=True,
)
logger.info(f"create_app() => {default_app_config=}")
app.config.update(default_app_config)
return app
if __name__ == "__main__":
app = create_app()
app.run(
host="0.0.0.0",
debug=True,
)
| 24.444444
| 87
| 0.648347
| 319
| 2,420
| 4.711599
| 0.413793
| 0.035928
| 0.033932
| 0.03992
| 0.141051
| 0.141051
| 0.086494
| 0.086494
| 0.086494
| 0.086494
| 0
| 0.008952
| 0.215289
| 2,420
| 98
| 88
| 24.693878
| 0.782517
| 0.06281
| 0
| 0.04
| 0
| 0
| 0.150133
| 0.0093
| 0
| 0
| 0
| 0.010204
| 0
| 1
| 0.08
| false
| 0
| 0.106667
| 0.04
| 0.32
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
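The hex2rgb template filter above can be exercised directly; a couple of examples, assuming hex2rgb is in scope (for example imported from events_page.app), with arbitrarily chosen color values.

print(hex2rgb('#3366ff'))              # rgb(51, 102, 255)
print(hex2rgb('#3366ff', alpha=0.5))   # rgba(51, 102, 255, 0.5)
print(hex2rgb('tomato'))               # returned unchanged: no leading '#'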
6ac9be98a456dcdce40e3c4f391cc313ab62f054
| 13,522
|
py
|
Python
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 44
|
2021-04-18T23:00:48.000Z
|
2022-02-14T17:43:15.000Z
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 354
|
2021-04-16T16:48:39.000Z
|
2022-03-31T17:16:39.000Z
|
sdk/python/pulumi_google_native/healthcare/v1beta1/user_data_mapping.py
|
AaronFriel/pulumi-google-native
|
75d1cda425e33d4610348972cd70bddf35f1770d
|
[
"Apache-2.0"
] | 8
|
2021-04-24T17:46:51.000Z
|
2022-01-05T10:40:21.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['UserDataMappingArgs', 'UserDataMapping']
@pulumi.input_type
class UserDataMappingArgs:
def __init__(__self__, *,
consent_store_id: pulumi.Input[str],
data_id: pulumi.Input[str],
dataset_id: pulumi.Input[str],
user_id: pulumi.Input[str],
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]] = None):
"""
The set of arguments for constructing a UserDataMapping resource.
:param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
:param pulumi.Input[str] user_id: User's UUID provided by the client.
:param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
:param pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
"""
pulumi.set(__self__, "consent_store_id", consent_store_id)
pulumi.set(__self__, "data_id", data_id)
pulumi.set(__self__, "dataset_id", dataset_id)
pulumi.set(__self__, "user_id", user_id)
if location is not None:
pulumi.set(__self__, "location", location)
if name is not None:
pulumi.set(__self__, "name", name)
if project is not None:
pulumi.set(__self__, "project", project)
if resource_attributes is not None:
pulumi.set(__self__, "resource_attributes", resource_attributes)
@property
@pulumi.getter(name="consentStoreId")
def consent_store_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "consent_store_id")
@consent_store_id.setter
def consent_store_id(self, value: pulumi.Input[str]):
pulumi.set(self, "consent_store_id", value)
@property
@pulumi.getter(name="dataId")
def data_id(self) -> pulumi.Input[str]:
"""
A unique identifier for the mapped resource.
"""
return pulumi.get(self, "data_id")
@data_id.setter
def data_id(self, value: pulumi.Input[str]):
pulumi.set(self, "data_id", value)
@property
@pulumi.getter(name="datasetId")
def dataset_id(self) -> pulumi.Input[str]:
return pulumi.get(self, "dataset_id")
@dataset_id.setter
def dataset_id(self, value: pulumi.Input[str]):
pulumi.set(self, "dataset_id", value)
@property
@pulumi.getter(name="userId")
def user_id(self) -> pulumi.Input[str]:
"""
User's UUID provided by the client.
"""
return pulumi.get(self, "user_id")
@user_id.setter
def user_id(self, value: pulumi.Input[str]):
pulumi.set(self, "user_id", value)
@property
@pulumi.getter
def location(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "location")
@location.setter
def location(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "location", value)
@property
@pulumi.getter
def name(self) -> Optional[pulumi.Input[str]]:
"""
Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
"""
return pulumi.get(self, "name")
@name.setter
def name(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "name", value)
@property
@pulumi.getter
def project(self) -> Optional[pulumi.Input[str]]:
return pulumi.get(self, "project")
@project.setter
def project(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "project", value)
@property
@pulumi.getter(name="resourceAttributes")
def resource_attributes(self) -> Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]:
"""
Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
"""
return pulumi.get(self, "resource_attributes")
@resource_attributes.setter
def resource_attributes(self, value: Optional[pulumi.Input[Sequence[pulumi.Input['AttributeArgs']]]]):
pulumi.set(self, "resource_attributes", value)
class UserDataMapping(pulumi.CustomResource):
@overload
def __init__(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
consent_store_id: Optional[pulumi.Input[str]] = None,
data_id: Optional[pulumi.Input[str]] = None,
dataset_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
user_id: Optional[pulumi.Input[str]] = None,
__props__=None):
"""
Creates a new User data mapping in the parent consent store.
:param str resource_name: The name of the resource.
:param pulumi.ResourceOptions opts: Options for the resource.
:param pulumi.Input[str] data_id: A unique identifier for the mapped resource.
:param pulumi.Input[str] name: Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
:param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]] resource_attributes: Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
:param pulumi.Input[str] user_id: User's UUID provided by the client.
"""
...
@overload
def __init__(__self__,
resource_name: str,
args: UserDataMappingArgs,
opts: Optional[pulumi.ResourceOptions] = None):
"""
Creates a new User data mapping in the parent consent store.
:param str resource_name: The name of the resource.
:param UserDataMappingArgs args: The arguments to use to populate this resource's properties.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
...
def __init__(__self__, resource_name: str, *args, **kwargs):
resource_args, opts = _utilities.get_resource_args_opts(UserDataMappingArgs, pulumi.ResourceOptions, *args, **kwargs)
if resource_args is not None:
__self__._internal_init(resource_name, opts, **resource_args.__dict__)
else:
__self__._internal_init(resource_name, *args, **kwargs)
def _internal_init(__self__,
resource_name: str,
opts: Optional[pulumi.ResourceOptions] = None,
consent_store_id: Optional[pulumi.Input[str]] = None,
data_id: Optional[pulumi.Input[str]] = None,
dataset_id: Optional[pulumi.Input[str]] = None,
location: Optional[pulumi.Input[str]] = None,
name: Optional[pulumi.Input[str]] = None,
project: Optional[pulumi.Input[str]] = None,
resource_attributes: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['AttributeArgs']]]]] = None,
user_id: Optional[pulumi.Input[str]] = None,
__props__=None):
if opts is None:
opts = pulumi.ResourceOptions()
if not isinstance(opts, pulumi.ResourceOptions):
raise TypeError('Expected resource options to be a ResourceOptions instance')
if opts.version is None:
opts.version = _utilities.get_version()
if opts.id is None:
if __props__ is not None:
raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
__props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)
if consent_store_id is None and not opts.urn:
raise TypeError("Missing required property 'consent_store_id'")
__props__.__dict__["consent_store_id"] = consent_store_id
if data_id is None and not opts.urn:
raise TypeError("Missing required property 'data_id'")
__props__.__dict__["data_id"] = data_id
if dataset_id is None and not opts.urn:
raise TypeError("Missing required property 'dataset_id'")
__props__.__dict__["dataset_id"] = dataset_id
__props__.__dict__["location"] = location
__props__.__dict__["name"] = name
__props__.__dict__["project"] = project
__props__.__dict__["resource_attributes"] = resource_attributes
if user_id is None and not opts.urn:
raise TypeError("Missing required property 'user_id'")
__props__.__dict__["user_id"] = user_id
__props__.__dict__["archive_time"] = None
__props__.__dict__["archived"] = None
super(UserDataMapping, __self__).__init__(
'google-native:healthcare/v1beta1:UserDataMapping',
resource_name,
__props__,
opts)
@staticmethod
def get(resource_name: str,
id: pulumi.Input[str],
opts: Optional[pulumi.ResourceOptions] = None) -> 'UserDataMapping':
"""
Get an existing UserDataMapping resource's state with the given name, id, and optional extra
properties used to qualify the lookup.
:param str resource_name: The unique name of the resulting resource.
:param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
:param pulumi.ResourceOptions opts: Options for the resource.
"""
opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
__props__ = UserDataMappingArgs.__new__(UserDataMappingArgs)
__props__.__dict__["archive_time"] = None
__props__.__dict__["archived"] = None
__props__.__dict__["data_id"] = None
__props__.__dict__["name"] = None
__props__.__dict__["resource_attributes"] = None
__props__.__dict__["user_id"] = None
return UserDataMapping(resource_name, opts=opts, __props__=__props__)
@property
@pulumi.getter(name="archiveTime")
def archive_time(self) -> pulumi.Output[str]:
"""
Indicates the time when this mapping was archived.
"""
return pulumi.get(self, "archive_time")
@property
@pulumi.getter
def archived(self) -> pulumi.Output[bool]:
"""
Indicates whether this mapping is archived.
"""
return pulumi.get(self, "archived")
@property
@pulumi.getter(name="dataId")
def data_id(self) -> pulumi.Output[str]:
"""
A unique identifier for the mapped resource.
"""
return pulumi.get(self, "data_id")
@property
@pulumi.getter
def name(self) -> pulumi.Output[str]:
"""
Resource name of the User data mapping, of the form `projects/{project_id}/locations/{location_id}/datasets/{dataset_id}/consentStores/{consent_store_id}/userDataMappings/{user_data_mapping_id}`.
"""
return pulumi.get(self, "name")
@property
@pulumi.getter(name="resourceAttributes")
def resource_attributes(self) -> pulumi.Output[Sequence['outputs.AttributeResponse']]:
"""
Attributes of the resource. Only explicitly set attributes are displayed here. Attribute definitions with defaults set implicitly apply to these User data mappings. Attributes listed here must be single valued, that is, exactly one value is specified for the field "values" in each Attribute.
"""
return pulumi.get(self, "resource_attributes")
@property
@pulumi.getter(name="userId")
def user_id(self) -> pulumi.Output[str]:
"""
User's UUID provided by the client.
"""
return pulumi.get(self, "user_id")
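# --- Usage sketch (illustrative only, not part of the generated module above) ---
# A minimal example of how this resource might be created from a Pulumi program,
# assuming the published `pulumi_google_native` Python package; the IDs, attribute
# definition name and values below are placeholders, and the AttributeArgs field
# names are an assumption about the input type defined elsewhere in this SDK.
#
# import pulumi_google_native.healthcare.v1beta1 as healthcare
#
# mapping = healthcare.UserDataMapping(
#     "example-mapping",
#     consent_store_id="my-consent-store",
#     dataset_id="my-dataset",
#     location="us-central1",
#     data_id="fhir-resource-123",        # identifier of the mapped resource
#     user_id="user-uuid-from-client",
#     resource_attributes=[healthcare.AttributeArgs(
#         attribute_definition_id="data_identifiable",   # assumed field names
#         values=["identifiable"],
#     )],
# )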
6aca7a5f520c3a19c81c989f925529d891ca4d67 | 661 | py | Python
_doc/sphinxdoc/source/conf.py | Jerome-maker/ensae_teaching_cs | 43ea044361ee60c00c85aea354a7b25c21c0fd07 | ["MIT"] | null | null | null
_doc/sphinxdoc/source/conf.py | Jerome-maker/ensae_teaching_cs | 43ea044361ee60c00c85aea354a7b25c21c0fd07 | ["MIT"] | null | null | null
_doc/sphinxdoc/source/conf.py | Jerome-maker/ensae_teaching_cs | 43ea044361ee60c00c85aea354a7b25c21c0fd07 | ["MIT"] | null | null | null
import sys
import os
import sphinx_rtd_theme
source_path = os.path.normpath(
os.path.join(
os.path.abspath(
os.path.split(__file__)[0])))
try:
from conf_base import *
except ImportError:
sys.path.append(source_path)
from conf_base import *
html_theme = 'sphinx_rtd_theme'
html_theme_path = [sphinx_rtd_theme.get_html_theme_path()]
templates_path = [os.path.join(source_path, 'phdoc_static')]
html_static_path = [os.path.join(source_path, 'phdoc_static')]
if not os.path.exists(templates_path[0]):
raise FileNotFoundError(templates_path[0])
blog_root = "http://www.xavierdupre.fr/app/ensae_teaching_cs/helpsphinx3/"
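# --- Illustrative extension (assumption, not from the original conf.py) ---
# Additional Sphinx settings could be overridden after `from conf_base import *`
# in the same way the theme and static paths are overridden above, for example:
#
# html_theme_options = {"collapse_navigation": False}
# exclude_patterns = ["_build", "Thumbs.db", ".DS_Store"]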
6acb7ed968b97603aa5b744b910e0997b0f3f62d | 561 | py | Python
server/api/migrations/0002_auto_20201011_1053.py | ShahriarDhruvo/WebTech_Assignment2 | 845d198a91b1dcc8ed149362499754167fca419d | ["MIT"] | null | null | null
server/api/migrations/0002_auto_20201011_1053.py | ShahriarDhruvo/WebTech_Assignment2 | 845d198a91b1dcc8ed149362499754167fca419d | ["MIT"] | null | null | null
server/api/migrations/0002_auto_20201011_1053.py | ShahriarDhruvo/WebTech_Assignment2 | 845d198a91b1dcc8ed149362499754167fca419d | ["MIT"] | null | null | null
# Generated by Django 3.1.2 on 2020-10-11 10:53
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('api', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='task',
name='author',
field=models.CharField(default='Anonymous', max_length=100),
),
migrations.AlterField(
model_name='task',
name='deadline',
field=models.DateTimeField(default='2020-10-11 10:53'),
),
]
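# --- Illustrative sketch (hypothetical, not from the source repository) ---
# The model state implied by this migration corresponds roughly to the following
# Task model; the field names and defaults come from the AlterField operations
# above, everything else is assumed.
#
# from django.db import models
#
# class Task(models.Model):
#     author = models.CharField(default='Anonymous', max_length=100)
#     deadline = models.DateTimeField(default='2020-10-11 10:53')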
6acc395ad3bfafbc612c2d532d32bbb5ce80e13f | 4,123 | py | Python
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py | lisy09/flink-ai-extended | 011a5a332f7641f66086653e715d0596eab2e107 | ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] | 1 | 2021-08-06T04:24:36.000Z | 2021-08-06T04:24:36.000Z
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py | sentimentist/flink-ai-extended | 689d000f2db8919fd80e0725a1609918ca4a26f4 | ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] | null | null | null
flink-ai-flow/lib/notification_service/notification_service/mongo_event_storage.py | sentimentist/flink-ai-extended | 689d000f2db8919fd80e0725a1609918ca4a26f4 | ["Apache-2.0", "BSD-2-Clause", "MIT", "ECL-2.0", "BSD-3-Clause"] | 1 | 2021-05-20T02:17:11.000Z | 2021-05-20T02:17:11.000Z
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
#
import time
import socket
from collections.abc import Iterable  # Iterable lives in collections.abc on Python 3.3+ (removed from collections in 3.10)
from typing import Union, Tuple
from mongoengine import connect
from notification_service.event_storage import BaseEventStorage
from notification_service.base_notification import BaseEvent
from notification_service.mongo_notification import MongoEvent
class MongoEventStorage(BaseEventStorage):
def __init__(self, *args, **kwargs):
self.db_conn = self.setup_connection(**kwargs)
self.server_ip = socket.gethostbyname(socket.gethostname())
def setup_connection(self, **kwargs):
db_conf = {
"host": kwargs.get("host"),
"port": kwargs.get("port"),
"db": kwargs.get("db"),
}
username = kwargs.get("username", None)
password = kwargs.get("password", None)
authentication_source = kwargs.get("authentication_source", "admin")
if (username or password) and not (username and password):
raise Exception("Please provide valid username and password")
if username and password:
db_conf.update({
"username": username,
"password": password,
"authentication_source": authentication_source
})
return connect(**db_conf)
def get_latest_version(self, key: str, namespace: str = None):
mongo_events = MongoEvent.get_by_key(key, 0, 1, "-version")
if not mongo_events:
return 0
return mongo_events[0].version
def add_event(self, event: BaseEvent, uuid: str):
kwargs = {
"server_ip": self.server_ip,
"create_time": int(time.time() * 1000),
"event_type": event.event_type,
"key": event.key,
"value": event.value,
"context": event.context,
"namespace": event.namespace,
"sender": event.sender,
"uuid": uuid
}
mongo_event = MongoEvent(**kwargs)
mongo_event.save()
mongo_event.reload()
event.create_time = mongo_event.create_time
event.version = mongo_event.version
return event
def list_events(self,
key: Union[str, Tuple[str]],
version: int = None,
event_type: str = None,
start_time: int = None,
namespace: str = None,
sender: str = None):
key = None if key == "" else key
version = None if version == 0 else version
event_type = None if event_type == "" else event_type
namespace = None if namespace == "" else namespace
sender = None if sender == "" else sender
if isinstance(key, str):
key = (key,)
elif isinstance(key, Iterable):
key = tuple(key)
res = MongoEvent.get_base_events(key, version, event_type, start_time, namespace, sender)
return res
def list_all_events(self, start_time: int):
res = MongoEvent.get_base_events_by_time(start_time)
return res
def list_all_events_from_version(self, start_version: int, end_version: int = None):
res = MongoEvent.get_base_events_by_version(start_version, end_version)
return res
def clean_up(self):
MongoEvent.delete_by_client(self.server_ip)
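# --- Usage sketch (illustrative only; the connection values are placeholders and
# --- the BaseEvent constructor arguments are an assumption about its API) ---
#
# storage = MongoEventStorage(host="localhost", port=27017, db="notification_test")
# latest = storage.get_latest_version(key="model_update")
# event = storage.add_event(BaseEvent(key="model_update", value="v2"), uuid="0001")
# recent = storage.list_all_events(start_time=event.create_time)
# storage.clean_up()   # removes events written from this server's IP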
6acc7db3216417c3207f16b6723988768ff50b66 | 711 | py | Python
src/unicon/plugins/confd/csp/__init__.py | tahigash/unicon.plugins | 1b43a5a61244ea9312387fd855442ace37c65db9 | ["Apache-2.0"] | 1 | 2021-02-25T19:36:56.000Z | 2021-02-25T19:36:56.000Z
src/unicon/plugins/confd/csp/__init__.py | tahigash/unicon.plugins | 1b43a5a61244ea9312387fd855442ace37c65db9 | ["Apache-2.0"] | null | null | null
src/unicon/plugins/confd/csp/__init__.py | tahigash/unicon.plugins | 1b43a5a61244ea9312387fd855442ace37c65db9 | ["Apache-2.0"] | null | null | null
__author__ = "Dave Wapstra <[email protected]>"
from unicon.plugins.confd import ConfdServiceList, ConfdConnection, ConfdConnectionProvider
from .statemachine import CspStateMachine
from .settings import CspSettings
from . import service_implementation as csp_svc
class CspServiceList(ConfdServiceList):
def __init__(self):
super().__init__()
delattr(self, 'cli_style')
self.reload = csp_svc.Reload
class CspSingleRPConnection(ConfdConnection):
os = 'confd'
series = 'csp'
chassis_type = 'single_rp'
state_machine_class = CspStateMachine
connection_provider_class = ConfdConnectionProvider
subcommand_list = CspServiceList
settings = CspSettings()
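# --- Usage sketch (hypothetical; hostname, start command and service call are
# --- assumptions, not taken from the plugin definition above) ---
#
# from unicon import Connection
# dev = Connection(hostname='csp-node',
#                  start=['ssh admin@csp-node'],
#                  os='confd',
#                  series='csp')
# dev.connect()
# dev.reload()       # exposed through CspServiceList via csp_svc.Reload
# dev.disconnect()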
6accba984dd52f022ed6544e1f7ad42db7180437 | 665 | py | Python
setup.py | rrwen/search_google | e647868ba5da2803e787a3c06b32e09452068736 | ["MIT"] | 15 | 2017-08-24T18:44:55.000Z | 2021-02-01T22:07:53.000Z
setup.py | rrwen/search_google | e647868ba5da2803e787a3c06b32e09452068736 | ["MIT"] | 5 | 2017-09-05T12:25:09.000Z | 2021-10-18T06:45:24.000Z
setup.py | rrwen/search_google | e647868ba5da2803e787a3c06b32e09452068736 | ["MIT"] | 1 | 2018-02-20T13:44:44.000Z | 2018-02-20T13:44:44.000Z
# -*- coding: utf-8 -*-
from setuptools import setup
import search_google as package
def readme():
with open('README.rst') as f:
return ''.join(f.readlines()[11:])
setup(
name=package.__name__,
version=package.__version__,
description=package.__description__,
long_description=readme(),
author=package.__author__,
author_email=package.__email__,
license=package.__license__,
url=package.__url__,
download_url=package.__download_url__,
    keywords=package.__keywords__,
entry_points=package.__entry_points__,
packages=package.__packages__,
package_data=package.__package_data__,
install_requires=package.__install_requires__
)
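# --- Illustrative sketch (hypothetical values, not the package's real metadata) ---
# The setup() call above expects search_google/__init__.py to define module-level
# dunder attributes; a minimal shape would be roughly:
#
# __version__ = '1.0.0'
# __description__ = 'Command line tool for Google web and image searches'
# __author__ = 'Author Name'
# __entry_points__ = {'console_scripts': ['search_google = search_google.cli:run']}
# __install_requires__ = ['google-api-python-client']
# # ...plus __email__, __license__, __url__, __download_url__, __keywords__,
# # __packages__ and __package_data__ following the same pattern.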
6ad2141e919181f75e53ccffa43344d1aae6eea7 | 346 | py | Python
main.py | BenG49/sudoku | e4b14655e23d04c161feb16ceb1338537f519bdb | ["MIT"] | null | null | null
main.py | BenG49/sudoku | e4b14655e23d04c161feb16ceb1338537f519bdb | ["MIT"] | null | null | null
main.py | BenG49/sudoku | e4b14655e23d04c161feb16ceb1338537f519bdb | ["MIT"] | null | null | null
from sudoku import Sudoku
def main():
s = Sudoku.parse(
'''
-------------
| |2 | |
| | 6 |4 3|
| | 5| 7 |
-------------
| 7 | 2|8 |
|51 | 4|9 |
| 9| 3| |
-------------
| | 9| |
| 2| | 98|
| 83|1 |2 |
-------------
'''
)
print(s)
print(s.solve())
if __name__ == '__main__':
main()
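# --- Usage sketch (assumes only the Sudoku.parse/solve API used above) ---
# The same flow works for a puzzle stored in a file in the dashed-grid format;
# the path below is a placeholder.
#
# with open('puzzle.txt') as f:
#     puzzle = Sudoku.parse(f.read())
# print(puzzle.solve())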
6ad3007b95e5d17415b05151d343ee3326e45e1d | 2,157 | py | Python
experiment/diabetes/accuracy_info.py | leandro-santiago/bloomwisard | 4c02610c4ef2d2cf8424797c8a815da182ca2383 | ["MIT"] | 2 | 2020-10-25T17:01:10.000Z | 2020-12-04T14:26:26.000Z
experiment/diabetes/accuracy_info.py | leandro-santiago/bloomwisard | 4c02610c4ef2d2cf8424797c8a815da182ca2383 | ["MIT"] | null | null | null
experiment/diabetes/accuracy_info.py | leandro-santiago/bloomwisard | 4c02610c4ef2d2cf8424797c8a815da182ca2383 | ["MIT"] | null | null | null
import numpy as np
import sys
from timeit import default_timer as timer
sys.path.append("../../")
from core import wnn
from encoding import thermometer
from encoding import util
#Load Diabetes data
base_path = "../../dataset/diabetes/"
#2/3 Test
bits_encoding = 20
train_data, train_label, test_data, test_label, data_min, data_max = util.load_3data(base_path)
ths = []
for i in range(len(data_max)):
ths.append(thermometer.Thermometer(data_min[i], data_max[i], bits_encoding))
train_bin = []
test_bin = []
i = 0
for data in train_data:
train_bin.append(np.array([], dtype=bool))
t = 0
for v in data:
binarr = ths[t].binarize(v)
train_bin[i] = np.append(train_bin[i], binarr)
t += 1
i += 1
i = 0
for data in test_data:
test_bin.append(np.array([], dtype=bool))
t = 0
for v in data:
binarr = ths[t].binarize(v)
test_bin[i] = np.append(test_bin[i], binarr)
t += 1
i += 1
#print test_label
#Wisard
num_classes = 2
tuple_list = [2, 4, 8, 14, 16, 18, 20, 22, 24, 26, 28, 30]
acc_list = []
test_length = len(test_label)
entry_size = len(train_bin[0])
#print entry_size
for t in tuple_list:
wisard = wnn.Wisard(entry_size, t, num_classes)
wisard.train(train_bin, train_label)
rank_result = wisard.rank(test_bin)
num_hits = 0
for i in range(test_length):
if rank_result[i] == test_label[i]:
num_hits += 1
acc_list.append(float(num_hits)/float(test_length))
#Bloom Wisard
btuple_list = [2, 4, 8, 14, 16, 18, 20, 22, 24, 26, 28, 30, 32, 40, 56]
bacc_list = []
#capacity = len(train_bin)
capacity = 10
print(capacity)
for t in btuple_list:
bwisard = wnn.BloomWisard(entry_size, t, num_classes, capacity)
bwisard.train(train_bin, train_label)
rank_result = bwisard.rank(test_bin)
num_hits = 0
for i in range(test_length):
if rank_result[i] == test_label[i]:
num_hits += 1
bacc_list.append(float(num_hits)/float(test_length))
print("Tuples=", tuple_list)
print("Wisard Accuracy=", acc_list)
print("Tuples=", btuple_list)
print("BloomWisard Accuracy=", bacc_list)
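# --- Refactoring sketch (assumption: wnn.Wisard and wnn.BloomWisard share the
# --- train/rank interface used above) ---
# The two evaluation loops above could be collapsed into a single helper:
#
# def evaluate(model):
#     """Train on the binarized training set and return test-set accuracy."""
#     model.train(train_bin, train_label)
#     predictions = model.rank(test_bin)
#     hits = sum(1 for i in range(test_length) if predictions[i] == test_label[i])
#     return float(hits) / float(test_length)
#
# acc_list = [evaluate(wnn.Wisard(entry_size, t, num_classes)) for t in tuple_list]
# bacc_list = [evaluate(wnn.BloomWisard(entry_size, t, num_classes, capacity))
#              for t in btuple_list]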
6adc3f2423ac6cf2c778f44e1751ae2e595e05f5 | 74,159 | py | Python
jss_figures_replication_script.py | Cole-vJ/AdvEMDpy | 160cd44b371a2c8aa66961f23062c1d7305dd728 | ["Unlicense"] | null | null | null
jss_figures_replication_script.py | Cole-vJ/AdvEMDpy | 160cd44b371a2c8aa66961f23062c1d7305dd728 | ["Unlicense"] | null | null | null
jss_figures_replication_script.py | Cole-vJ/AdvEMDpy | 160cd44b371a2c8aa66961f23062c1d7305dd728 | ["Unlicense"] | null | null | null
# ________
# /
# \ /
# \ /
# \/
import random
import textwrap
import emd_mean
import AdvEMDpy
import emd_basis
import emd_utils
import numpy as np
import pandas as pd
import cvxpy as cvx
import seaborn as sns
import matplotlib.pyplot as plt
from scipy.integrate import odeint
from scipy.ndimage import gaussian_filter
from emd_utils import time_extension, Utility
from scipy.interpolate import CubicSpline
from emd_hilbert import Hilbert, hilbert_spectrum
from emd_preprocess import Preprocess
from emd_mean import Fluctuation
from AdvEMDpy import EMD
# alternate packages
from PyEMD import EMD as pyemd0215
import emd as emd040
sns.set(style='darkgrid')
pseudo_alg_time = np.linspace(0, 2 * np.pi, 1001)
pseudo_alg_time_series = np.sin(pseudo_alg_time) + np.sin(5 * pseudo_alg_time)
pseudo_utils = Utility(time=pseudo_alg_time, time_series=pseudo_alg_time_series)
# plot 0 - addition
fig = plt.figure(figsize=(9, 4))
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('First Iteration of Sifting Algorithm')
plt.plot(pseudo_alg_time, pseudo_alg_time_series, label=r'$h_{(1,0)}(t)$', zorder=1)
plt.scatter(pseudo_alg_time[pseudo_utils.max_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.max_bool_func_1st_order_fd()],
c='r', label=r'$M(t_i)$', zorder=2)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) + 1, '--', c='r', label=r'$\tilde{h}_{(1,0)}^M(t)$', zorder=4)
plt.scatter(pseudo_alg_time[pseudo_utils.min_bool_func_1st_order_fd()],
pseudo_alg_time_series[pseudo_utils.min_bool_func_1st_order_fd()],
c='c', label=r'$m(t_j)$', zorder=3)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time) - 1, '--', c='c', label=r'$\tilde{h}_{(1,0)}^m(t)$', zorder=5)
plt.plot(pseudo_alg_time, np.sin(pseudo_alg_time), '--', c='purple', label=r'$\tilde{h}_{(1,0)}^{\mu}(t)$', zorder=5)
plt.yticks(ticks=[-2, -1, 0, 1, 2])
plt.xticks(ticks=[0, np.pi, 2 * np.pi],
labels=[r'0', r'$\pi$', r'$2\pi$'])
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.95, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/pseudo_algorithm.png')
plt.show()
knots = np.arange(12)
time = np.linspace(0, 11, 1101)
basis = emd_basis.Basis(time=time, time_series=time)
b_spline_basis = basis.cubic_b_spline(knots)
chsi_basis = basis.chsi_basis(knots)
# plot 1
plt.title('Non-Natural Cubic B-Spline Bases at Boundary')
plt.plot(time[500:], b_spline_basis[2, 500:].T, '--', label=r'$ B_{-3,4}(t) $')
plt.plot(time[500:], b_spline_basis[3, 500:].T, '--', label=r'$ B_{-2,4}(t) $')
plt.plot(time[500:], b_spline_basis[4, 500:].T, '--', label=r'$ B_{-1,4}(t) $')
plt.plot(time[500:], b_spline_basis[5, 500:].T, '--', label=r'$ B_{0,4}(t) $')
plt.plot(time[500:], b_spline_basis[6, 500:].T, '--', label=r'$ B_{1,4}(t) $')
plt.xticks([5, 6], [r'$ \tau_0 $', r'$ \tau_1 $'])
plt.xlim(4.4, 6.6)
plt.plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
plt.legend(loc='upper left')
plt.savefig('jss_figures/boundary_bases.png')
plt.show()
# plot 1a - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
knots_uniform = np.linspace(0, 2 * np.pi, 51)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs = emd.empirical_mode_decomposition(knots=knots_uniform, edge_effect='anti-symmetric', verbose=False)[0]
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Uniform Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, Linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Uniform Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], Linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Uniform Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], Linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots_uniform[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots_uniform)):
axs[i].plot(knots_uniform[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_uniform.png')
plt.show()
# plot 1b - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=1, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Statically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, Linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Statically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], Linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Statically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], Linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots)):
axs[i].plot(knots[j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_1.png')
plt.show()
# plot 1c - addition
knot_demonstrate_time = np.linspace(0, 2 * np.pi, 1001)
knot_demonstrate_time_series = np.sin(knot_demonstrate_time) + np.sin(5 * knot_demonstrate_time)
emd = EMD(time=knot_demonstrate_time, time_series=knot_demonstrate_time_series)
imfs, _, _, _, knots, _, _ = emd.empirical_mode_decomposition(edge_effect='anti-symmetric',
optimise_knots=2, verbose=False)
fig, axs = plt.subplots(3, 1)
fig.subplots_adjust(hspace=0.6)
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Time Series and Dynamically Optimised Knots')
axs[0].plot(knot_demonstrate_time, knot_demonstrate_time_series, Linewidth=2, zorder=100)
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].set_title('IMF 1 and Dynamically Optimised Knots')
axs[1].plot(knot_demonstrate_time, imfs[1, :], Linewidth=2, zorder=100)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[1].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[2].set_title('IMF 2 and Dynamically Optimised Knots')
axs[2].plot(knot_demonstrate_time, imfs[2, :], Linewidth=2, zorder=100)
axs[2].set_yticks(ticks=[-2, 0, 2])
axs[2].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[2].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[0].plot(knots[0][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[0].legend(loc='lower left')
axs[1].plot(knots[1][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
axs[2].plot(knots[2][0] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey', label='Knots')
for i in range(3):
for j in range(1, len(knots[i])):
axs[i].plot(knots[i][j] * np.ones(101), np.linspace(-2, 2, 101), '--', c='grey')
plt.savefig('jss_figures/knot_2.png')
plt.show()
# plot 1d - addition
window = 81
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Filtering Demonstration')
axs[1].set_title('Zoomed Region')
preprocess_time = pseudo_alg_time.copy()
np.random.seed(1)
random.seed(1)
preprocess_time_series = pseudo_alg_time_series + np.random.normal(0, 0.1, len(preprocess_time))
for i in random.sample(range(1000), 500):
preprocess_time_series[i] += np.random.normal(0, 1)
preprocess = Preprocess(time=preprocess_time, time_series=preprocess_time_series)
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[0].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[0].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[0].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[0].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple', label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.mean_filter(window_width=window)[1], label=textwrap.fill('Mean filter', 12))
axs[1].plot(preprocess_time, preprocess.median_filter(window_width=window)[1], label=textwrap.fill('Median filter', 13))
axs[1].plot(preprocess_time, preprocess.winsorize(window_width=window, a=0.8)[1], label=textwrap.fill('Winsorize filter', 12))
axs[1].plot(preprocess_time, preprocess.winsorize_interpolate(window_width=window, a=0.8)[1],
            label=textwrap.fill('Winsorize interpolation filter', 14))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.90)[1], c='grey',
label=textwrap.fill('Quantile window', 12))
axs[1].plot(preprocess_time, preprocess.quantile_filter(window_width=window, q=0.10)[1], c='grey')
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_filter.png')
plt.show()
# plot 1e - addition
fig, axs = plt.subplots(2, 1)
fig.subplots_adjust(hspace=0.4)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
axs[0].set_title('Preprocess Smoothing Demonstration')
axs[1].set_title('Zoomed Region')
axs[0].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[0].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[0].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[0].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
downsampled_and_decimated = preprocess.downsample()
axs[0].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 11))
downsampled = preprocess.downsample(decimate=False)
axs[0].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), -3 * np.ones(101), '--', c='black',
label=textwrap.fill('Zoomed region', 10))
axs[0].plot(np.linspace(0.85 * np.pi, 1.15 * np.pi, 101), 3 * np.ones(101), '--', c='black')
axs[0].plot(0.85 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].plot(1.15 * np.pi * np.ones(101), np.linspace(-3, 3, 101), '--', c='black')
axs[0].set_yticks(ticks=[-2, 0, 2])
axs[0].set_xticks(ticks=[0, np.pi, 2 * np.pi])
axs[0].set_xticklabels(labels=['0', r'$\pi$', r'$2\pi$'])
axs[1].plot(preprocess_time, preprocess_time_series, label='x(t)')
axs[1].plot(pseudo_alg_time, pseudo_alg_time_series, '--', c='purple',
label=textwrap.fill('Noiseless time series', 12))
axs[1].plot(preprocess_time, preprocess.hp()[1],
label=textwrap.fill('Hodrick-Prescott smoothing', 12))
axs[1].plot(preprocess_time, preprocess.hw(order=51)[1],
label=textwrap.fill('Henderson-Whittaker smoothing', 13))
axs[1].plot(downsampled_and_decimated[0], downsampled_and_decimated[1],
label=textwrap.fill('Downsampled & decimated', 13))
axs[1].plot(downsampled[0], downsampled[1],
label=textwrap.fill('Downsampled', 13))
axs[1].set_xlim(0.85 * np.pi, 1.15 * np.pi)
axs[1].set_ylim(-3, 3)
axs[1].set_yticks(ticks=[-2, 0, 2])
axs[1].set_xticks(ticks=[np.pi])
axs[1].set_xticklabels(labels=[r'$\pi$'])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.06, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, -0.15))
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.06, box_1.y0, box_1.width * 0.85, box_1.height])
plt.savefig('jss_figures/preprocess_smooth.png')
plt.show()
# plot 2
fig, axs = plt.subplots(1, 2, sharey=True)
axs[0].set_title('Cubic B-Spline Bases')
axs[0].plot(time, b_spline_basis[2, :].T, '--', label='Basis 1')
axs[0].plot(time, b_spline_basis[3, :].T, '--', label='Basis 2')
axs[0].plot(time, b_spline_basis[4, :].T, '--', label='Basis 3')
axs[0].plot(time, b_spline_basis[5, :].T, '--', label='Basis 4')
axs[0].legend(loc='upper left')
axs[0].plot(5 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].plot(6 * np.ones(100), np.linspace(-0.2, 0.8, 100), 'k-')
axs[0].set_xticks([5, 6])
axs[0].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[0].set_xlim(4.5, 6.5)
axs[1].set_title('Cubic Hermite Spline Bases')
axs[1].plot(time, chsi_basis[10, :].T, '--')
axs[1].plot(time, chsi_basis[11, :].T, '--')
axs[1].plot(time, chsi_basis[12, :].T, '--')
axs[1].plot(time, chsi_basis[13, :].T, '--')
axs[1].plot(5 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].plot(6 * np.ones(100), np.linspace(-0.2, 1.2, 100), 'k-')
axs[1].set_xticks([5, 6])
axs[1].set_xticklabels([r'$ \tau_k $', r'$ \tau_{k+1} $'])
axs[1].set_xlim(4.5, 6.5)
plt.savefig('jss_figures/comparing_bases.png')
plt.show()
# plot 3
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_dash = maxima_y[-1] * np.ones_like(max_dash_time)
min_dash_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_dash = minima_y[-1] * np.ones_like(min_dash_time)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
max_discard = maxima_y[-1]
max_discard_time = minima_x[-1] - maxima_x[-1] + minima_x[-1]
max_discard_dash_time = np.linspace(max_discard_time - width, max_discard_time + width, 101)
max_discard_dash = max_discard * np.ones_like(max_discard_dash_time)
dash_2_time = np.linspace(minima_x[-1], max_discard_time, 101)
dash_2 = np.linspace(minima_y[-1], max_discard, 101)
end_point_time = time[-1]
end_point = time_series[-1]
time_reflect = np.linspace((5 - a) * np.pi, (5 + a) * np.pi, 101)
time_series_reflect = np.flip(np.cos(np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)) + np.cos(5 * np.linspace((5 - 2.6 * a) * np.pi,
(5 - a) * np.pi, 101)))
time_series_anti_reflect = time_series_reflect[0] - time_series_reflect
utils = emd_utils.Utility(time=time, time_series=time_series_anti_reflect)
anti_max_bool = utils.max_bool_func_1st_order_fd()
anti_max_point_time = time_reflect[anti_max_bool]
anti_max_point = time_series_anti_reflect[anti_max_bool]
utils = emd_utils.Utility(time=time, time_series=time_series_reflect)
no_anchor_max_time = time_reflect[utils.max_bool_func_1st_order_fd()]
no_anchor_max = time_series_reflect[utils.max_bool_func_1st_order_fd()]
point_1 = 5.4
length_distance = np.linspace(maxima_y[-1], minima_y[-1], 101)
length_distance_time = point_1 * np.pi * np.ones_like(length_distance)
length_time = np.linspace(point_1 * np.pi - width, point_1 * np.pi + width, 101)
length_top = maxima_y[-1] * np.ones_like(length_time)
length_bottom = minima_y[-1] * np.ones_like(length_time)
point_2 = 5.2
length_distance_2 = np.linspace(time_series[-1], minima_y[-1], 101)
length_distance_time_2 = point_2 * np.pi * np.ones_like(length_distance_2)
length_time_2 = np.linspace(point_2 * np.pi - width, point_2 * np.pi + width, 101)
length_top_2 = time_series[-1] * np.ones_like(length_time_2)
length_bottom_2 = minima_y[-1] * np.ones_like(length_time_2)
symmetry_axis_1_time = minima_x[-1] * np.ones(101)
symmetry_axis_2_time = time[-1] * np.ones(101)
symmetry_axis = np.linspace(-2, 2, 101)
end_time = np.linspace(time[-1] - width, time[-1] + width, 101)
end_signal = time_series[-1] * np.ones_like(end_time)
anti_symmetric_time = np.linspace(time[-1] - 0.5, time[-1] + 0.5, 101)
anti_symmetric_signal = time_series[-1] * np.ones_like(anti_symmetric_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, LineWidth=2, label='Signal')
plt.title('Symmetry Edge Effects Example')
plt.plot(time_reflect, time_series_reflect, 'g--', LineWidth=2, label=textwrap.fill('Symmetric signal', 10))
plt.plot(time_reflect[:51], time_series_anti_reflect[:51], '--', c='purple', LineWidth=2,
label=textwrap.fill('Anti-symmetric signal', 10))
plt.plot(max_dash_time, max_dash, 'k-')
plt.plot(min_dash_time, min_dash, 'k-')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(length_distance_time, length_distance, 'k--')
plt.plot(length_distance_time_2, length_distance_2, 'k--')
plt.plot(length_time, length_top, 'k-')
plt.plot(length_time, length_bottom, 'k-')
plt.plot(length_time_2, length_top_2, 'k-')
plt.plot(length_time_2, length_bottom_2, 'k-')
plt.plot(end_time, end_signal, 'k-')
plt.plot(symmetry_axis_1_time, symmetry_axis, 'r--', zorder=1)
plt.plot(anti_symmetric_time, anti_symmetric_signal, 'r--', zorder=1)
plt.plot(symmetry_axis_2_time, symmetry_axis, 'r--', label=textwrap.fill('Axes of symmetry', 10), zorder=1)
plt.text(5.1 * np.pi, -0.7, r'$\beta$L')
plt.text(5.34 * np.pi, -0.05, 'L')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(max_discard_time, max_discard, c='purple', zorder=4, label=textwrap.fill('Symmetric Discard maxima', 10))
plt.scatter(end_point_time, end_point, c='orange', zorder=4, label=textwrap.fill('Symmetric Anchor maxima', 10))
plt.scatter(anti_max_point_time, anti_max_point, c='green', zorder=4, label=textwrap.fill('Anti-Symmetric maxima', 10))
plt.scatter(no_anchor_max_time, no_anchor_max, c='gray', zorder=4, label=textwrap.fill('Symmetric maxima', 10))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_symmetry_anti.png')
plt.show()
# plot 4
a = 0.21
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
max_dash_1 = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_dash_2 = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_dash_time_1 = maxima_x[-1] * np.ones_like(max_dash_1)
max_dash_time_2 = maxima_x[-2] * np.ones_like(max_dash_1)
min_dash_1 = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_dash_2 = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_dash_time_1 = minima_x[-1] * np.ones_like(min_dash_1)
min_dash_time_2 = minima_x[-2] * np.ones_like(min_dash_1)
dash_1_time = np.linspace(maxima_x[-1], minima_x[-1], 101)
dash_1 = np.linspace(maxima_y[-1], minima_y[-1], 101)
dash_2_time = np.linspace(maxima_x[-1], minima_x[-2], 101)
dash_2 = np.linspace(maxima_y[-1], minima_y[-2], 101)
s1 = (minima_y[-2] - maxima_y[-1]) / (minima_x[-2] - maxima_x[-1])
slope_based_maximum_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
slope_based_maximum = minima_y[-1] + (slope_based_maximum_time - minima_x[-1]) * s1
max_dash_time_3 = slope_based_maximum_time * np.ones_like(max_dash_1)
max_dash_3 = np.linspace(slope_based_maximum - width, slope_based_maximum + width, 101)
dash_3_time = np.linspace(minima_x[-1], slope_based_maximum_time, 101)
dash_3 = np.linspace(minima_y[-1], slope_based_maximum, 101)
s2 = (minima_y[-1] - maxima_y[-1]) / (minima_x[-1] - maxima_x[-1])
slope_based_minimum_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
slope_based_minimum = slope_based_maximum - (slope_based_maximum_time - slope_based_minimum_time) * s2
min_dash_time_3 = slope_based_minimum_time * np.ones_like(min_dash_1)
min_dash_3 = np.linspace(slope_based_minimum - width, slope_based_minimum + width, 101)
dash_4_time = np.linspace(slope_based_maximum_time, slope_based_minimum_time)
dash_4 = np.linspace(slope_based_maximum, slope_based_minimum)
maxima_dash = np.linspace(2.5 - width, 2.5 + width, 101)
maxima_dash_time_1 = maxima_x[-2] * np.ones_like(maxima_dash)
maxima_dash_time_2 = maxima_x[-1] * np.ones_like(maxima_dash)
maxima_dash_time_3 = slope_based_maximum_time * np.ones_like(maxima_dash)
maxima_line_dash_time = np.linspace(maxima_x[-2], slope_based_maximum_time, 101)
maxima_line_dash = 2.5 * np.ones_like(maxima_line_dash_time)
minima_dash = np.linspace(-3.4 - width, -3.4 + width, 101)
minima_dash_time_1 = minima_x[-2] * np.ones_like(minima_dash)
minima_dash_time_2 = minima_x[-1] * np.ones_like(minima_dash)
minima_dash_time_3 = slope_based_minimum_time * np.ones_like(minima_dash)
minima_line_dash_time = np.linspace(minima_x[-2], slope_based_minimum_time, 101)
minima_line_dash = -3.4 * np.ones_like(minima_line_dash_time)
# slightly edit signal to make difference between slope-based method and improved slope-based method more clear
time_series[time >= minima_x[-1]] = 1.5 * (time_series[time >= minima_x[-1]] - time_series[time == minima_x[-1]]) + \
time_series[time == minima_x[-1]]
improved_slope_based_maximum_time = time[-1]
improved_slope_based_maximum = time_series[-1]
improved_slope_based_minimum_time = slope_based_minimum_time
improved_slope_based_minimum = improved_slope_based_maximum + s2 * (improved_slope_based_minimum_time -
improved_slope_based_maximum_time)
min_dash_4 = np.linspace(improved_slope_based_minimum - width, improved_slope_based_minimum + width, 101)
min_dash_time_4 = improved_slope_based_minimum_time * np.ones_like(min_dash_4)
dash_final_time = np.linspace(improved_slope_based_maximum_time, improved_slope_based_minimum_time, 101)
dash_final = np.linspace(improved_slope_based_maximum, improved_slope_based_minimum, 101)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.gcf().subplots_adjust(bottom=0.10)
plt.plot(time, time_series, LineWidth=2, label='Signal')
plt.title('Slope-Based Edge Effects Example')
plt.plot(max_dash_time_1, max_dash_1, 'k-')
plt.plot(max_dash_time_2, max_dash_2, 'k-')
plt.plot(max_dash_time_3, max_dash_3, 'k-')
plt.plot(min_dash_time_1, min_dash_1, 'k-')
plt.plot(min_dash_time_2, min_dash_2, 'k-')
plt.plot(min_dash_time_3, min_dash_3, 'k-')
plt.plot(min_dash_time_4, min_dash_4, 'k-')
plt.plot(maxima_dash_time_1, maxima_dash, 'k-')
plt.plot(maxima_dash_time_2, maxima_dash, 'k-')
plt.plot(maxima_dash_time_3, maxima_dash, 'k-')
plt.plot(minima_dash_time_1, minima_dash, 'k-')
plt.plot(minima_dash_time_2, minima_dash, 'k-')
plt.plot(minima_dash_time_3, minima_dash, 'k-')
plt.text(4.34 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.74 * np.pi, -3.2, r'$\Delta{t^{min}_{m}}$')
plt.text(4.12 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.50 * np.pi, 2, r'$\Delta{t^{max}_{M}}$')
plt.text(4.30 * np.pi, 0.35, r'$s_1$')
plt.text(4.43 * np.pi, -0.20, r'$s_2$')
plt.text(4.30 * np.pi + (minima_x[-1] - minima_x[-2]), 0.35 + (minima_y[-1] - minima_y[-2]), r'$s_1$')
plt.text(4.43 * np.pi + (slope_based_minimum_time - minima_x[-1]),
-0.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.text(4.50 * np.pi + (slope_based_minimum_time - minima_x[-1]),
1.20 + (slope_based_minimum - minima_y[-1]), r'$s_2$')
plt.plot(minima_line_dash_time, minima_line_dash, 'k--')
plt.plot(maxima_line_dash_time, maxima_line_dash, 'k--')
plt.plot(dash_1_time, dash_1, 'k--')
plt.plot(dash_2_time, dash_2, 'k--')
plt.plot(dash_3_time, dash_3, 'k--')
plt.plot(dash_4_time, dash_4, 'k--')
plt.plot(dash_final_time, dash_final, 'k--')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(slope_based_maximum_time, slope_based_maximum, c='orange', zorder=4,
label=textwrap.fill('Slope-based maximum', 11))
plt.scatter(slope_based_minimum_time, slope_based_minimum, c='purple', zorder=4,
label=textwrap.fill('Slope-based minimum', 11))
plt.scatter(improved_slope_based_maximum_time, improved_slope_based_maximum, c='deeppink', zorder=4,
label=textwrap.fill('Improved slope-based maximum', 11))
plt.scatter(improved_slope_based_minimum_time, improved_slope_based_minimum, c='dodgerblue', zorder=4,
label=textwrap.fill('Improved slope-based minimum', 11))
plt.xlim(3.9 * np.pi, 5.5 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-3, -2, -1, 0, 1, 2), ('-3', '-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_slope_based.png')
plt.show()
# plot 5
a = 0.25
width = 0.2
time = np.linspace(0, (5 - a) * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
A2 = np.abs(maxima_y[-2] - minima_y[-2]) / 2
A1 = np.abs(maxima_y[-1] - minima_y[-1]) / 2
P2 = 2 * np.abs(maxima_x[-2] - minima_x[-2])
P1 = 2 * np.abs(maxima_x[-1] - minima_x[-1])
Huang_time = (P1 / P2) * (time[time >= maxima_x[-2]] - time[time == maxima_x[-2]]) + maxima_x[-1]
Huang_wave = (A1 / A2) * (time_series[time >= maxima_x[-2]] - time_series[time == maxima_x[-2]]) + maxima_y[-1]
Coughlin_time = Huang_time
Coughlin_wave = A1 * np.cos(2 * np.pi * (1 / P1) * (Coughlin_time - Coughlin_time[0]))
Average_max_time = maxima_x[-1] + (maxima_x[-1] - maxima_x[-2])
Average_max = (maxima_y[-2] + maxima_y[-1]) / 2
Average_min_time = minima_x[-1] + (minima_x[-1] - minima_x[-2])
Average_min = (minima_y[-2] + minima_y[-1]) / 2
utils_Huang = emd_utils.Utility(time=time, time_series=Huang_wave)
Huang_max_bool = utils_Huang.max_bool_func_1st_order_fd()
Huang_min_bool = utils_Huang.min_bool_func_1st_order_fd()
utils_Coughlin = emd_utils.Utility(time=time, time_series=Coughlin_wave)
Coughlin_max_bool = utils_Coughlin.max_bool_func_1st_order_fd()
Coughlin_min_bool = utils_Coughlin.min_bool_func_1st_order_fd()
Huang_max_time = Huang_time[Huang_max_bool]
Huang_max = Huang_wave[Huang_max_bool]
Huang_min_time = Huang_time[Huang_min_bool]
Huang_min = Huang_wave[Huang_min_bool]
Coughlin_max_time = Coughlin_time[Coughlin_max_bool]
Coughlin_max = Coughlin_wave[Coughlin_max_bool]
Coughlin_min_time = Coughlin_time[Coughlin_min_bool]
Coughlin_min = Coughlin_wave[Coughlin_min_bool]
max_2_x_time = np.linspace(maxima_x[-2] - width, maxima_x[-2] + width, 101)
max_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
max_2_x = maxima_y[-2] * np.ones_like(max_2_x_time)
min_2_x_time = np.linspace(minima_x[-2] - width, minima_x[-2] + width, 101)
min_2_x_time_side = np.linspace(5.3 * np.pi - width, 5.3 * np.pi + width, 101)
min_2_x = minima_y[-2] * np.ones_like(min_2_x_time)
dash_max_min_2_x = np.linspace(minima_y[-2], maxima_y[-2], 101)
dash_max_min_2_x_time = 5.3 * np.pi * np.ones_like(dash_max_min_2_x)
max_2_y = np.linspace(maxima_y[-2] - width, maxima_y[-2] + width, 101)
max_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
max_2_y_time = maxima_x[-2] * np.ones_like(max_2_y)
min_2_y = np.linspace(minima_y[-2] - width, minima_y[-2] + width, 101)
min_2_y_side = np.linspace(-1.8 - width, -1.8 + width, 101)
min_2_y_time = minima_x[-2] * np.ones_like(min_2_y)
dash_max_min_2_y_time = np.linspace(minima_x[-2], maxima_x[-2], 101)
dash_max_min_2_y = -1.8 * np.ones_like(dash_max_min_2_y_time)
max_1_x_time = np.linspace(maxima_x[-1] - width, maxima_x[-1] + width, 101)
max_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
max_1_x = maxima_y[-1] * np.ones_like(max_1_x_time)
min_1_x_time = np.linspace(minima_x[-1] - width, minima_x[-1] + width, 101)
min_1_x_time_side = np.linspace(5.4 * np.pi - width, 5.4 * np.pi + width, 101)
min_1_x = minima_y[-1] * np.ones_like(min_1_x_time)
dash_max_min_1_x = np.linspace(minima_y[-1], maxima_y[-1], 101)
dash_max_min_1_x_time = 5.4 * np.pi * np.ones_like(dash_max_min_1_x)
max_1_y = np.linspace(maxima_y[-1] - width, maxima_y[-1] + width, 101)
max_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
max_1_y_time = maxima_x[-1] * np.ones_like(max_1_y)
min_1_y = np.linspace(minima_y[-1] - width, minima_y[-1] + width, 101)
min_1_y_side = np.linspace(-2.1 - width, -2.1 + width, 101)
min_1_y_time = minima_x[-1] * np.ones_like(min_1_y)
dash_max_min_1_y_time = np.linspace(minima_x[-1], maxima_x[-1], 101)
dash_max_min_1_y = -2.1 * np.ones_like(dash_max_min_1_y_time)
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Characteristic Wave Effects Example')
plt.plot(time, time_series, LineWidth=2, label='Signal')
plt.scatter(Huang_max_time, Huang_max, c='magenta', zorder=4, label=textwrap.fill('Huang maximum', 10))
plt.scatter(Huang_min_time, Huang_min, c='lime', zorder=4, label=textwrap.fill('Huang minimum', 10))
plt.scatter(Coughlin_max_time, Coughlin_max, c='darkorange', zorder=4,
label=textwrap.fill('Coughlin maximum', 14))
plt.scatter(Coughlin_min_time, Coughlin_min, c='dodgerblue', zorder=4,
label=textwrap.fill('Coughlin minimum', 14))
plt.scatter(Average_max_time, Average_max, c='orangered', zorder=4,
label=textwrap.fill('Average maximum', 14))
plt.scatter(Average_min_time, Average_min, c='cyan', zorder=4,
label=textwrap.fill('Average minimum', 14))
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.plot(Huang_time, Huang_wave, '--', c='darkviolet', label=textwrap.fill('Huang Characteristic Wave', 14))
plt.plot(Coughlin_time, Coughlin_wave, '--', c='darkgreen', label=textwrap.fill('Coughlin Characteristic Wave', 14))
plt.plot(max_2_x_time, max_2_x, 'k-')
plt.plot(max_2_x_time_side, max_2_x, 'k-')
plt.plot(min_2_x_time, min_2_x, 'k-')
plt.plot(min_2_x_time_side, min_2_x, 'k-')
plt.plot(dash_max_min_2_x_time, dash_max_min_2_x, 'k--')
plt.text(5.16 * np.pi, 0.85, r'$2a_2$')
plt.plot(max_2_y_time, max_2_y, 'k-')
plt.plot(max_2_y_time, max_2_y_side, 'k-')
plt.plot(min_2_y_time, min_2_y, 'k-')
plt.plot(min_2_y_time, min_2_y_side, 'k-')
plt.plot(dash_max_min_2_y_time, dash_max_min_2_y, 'k--')
plt.text(4.08 * np.pi, -2.2, r'$\frac{p_2}{2}$')
plt.plot(max_1_x_time, max_1_x, 'k-')
plt.plot(max_1_x_time_side, max_1_x, 'k-')
plt.plot(min_1_x_time, min_1_x, 'k-')
plt.plot(min_1_x_time_side, min_1_x, 'k-')
plt.plot(dash_max_min_1_x_time, dash_max_min_1_x, 'k--')
plt.text(5.42 * np.pi, -0.1, r'$2a_1$')
plt.plot(max_1_y_time, max_1_y, 'k-')
plt.plot(max_1_y_time, max_1_y_side, 'k-')
plt.plot(min_1_y_time, min_1_y, 'k-')
plt.plot(min_1_y_time, min_1_y_side, 'k-')
plt.plot(dash_max_min_1_y_time, dash_max_min_1_y, 'k--')
plt.text(4.48 * np.pi, -2.5, r'$\frac{p_1}{2}$')
plt.xlim(3.9 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/edge_effects_characteristic_wave.png')
plt.show()
# plot 6
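# Single-neuron neural network edge-effect example: a one-neuron linear model is fitted to the end
# of the signal and then used to extrapolate it beyond the boundary until new extrema are found.
# Note that the short cubic-spline block immediately below (signal_orig, cs_max, cs_min) does not
# appear to be used later in this example.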
t = np.linspace(5, 95, 100)
signal_orig = np.cos(2 * np.pi * t / 50) + 0.6 * np.cos(2 * np.pi * t / 25) + 0.5 * np.sin(2 * np.pi * t / 200)
util_nn = emd_utils.Utility(time=t, time_series=signal_orig)
maxima = signal_orig[util_nn.max_bool_func_1st_order_fd()]
minima = signal_orig[util_nn.min_bool_func_1st_order_fd()]
cs_max = CubicSpline(t[util_nn.max_bool_func_1st_order_fd()], maxima)
cs_min = CubicSpline(t[util_nn.min_bool_func_1st_order_fd()], minima)
time = np.linspace(0, 5 * np.pi, 1001)
lsq_signal = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 101)
time_extended = time_extension(time)
time_series_extended = np.full_like(time_extended, np.nan)
time_series_extended[int(len(lsq_signal) - 1):int(2 * (len(lsq_signal) - 1) + 1)] = lsq_signal
neural_network_m = 200
neural_network_k = 100
# forward ->
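# Build the regression matrix P for forward extrapolation: each of the neural_network_m columns is
# a window of neural_network_k consecutive samples taken from the tail of the signal, with a
# constant 1 in the final row so the neuron can learn an additive bias. The targets t are the last
# neural_network_m samples of the signal.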
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[(-(neural_network_m + neural_network_k - col)):(-(neural_network_m - col))]
P[-1, col] = 1 # for additive constant
t = lsq_signal[-neural_network_m:]
# test - top
seed_weights = np.ones(neural_network_k) / neural_network_k
weights = 0 * seed_weights.copy()
train_input = P[:-1, :]
lr = 0.01
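# Fit the single neuron (linear activation) by plain batch gradient descent on the squared error.
# The commented-out max_gradient_vector update below is an alternative that steps only along the
# largest-magnitude gradient component.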
for iterations in range(1000):
output = np.matmul(weights, train_input)
error = (t - output)
gradients = error * (- train_input)
# guess average gradients
average_gradients = np.mean(gradients, axis=1)
# steepest descent
max_gradient_vector = average_gradients * (np.abs(average_gradients) == max(np.abs(average_gradients)))
adjustment = - lr * average_gradients
# adjustment = - lr * max_gradient_vector
weights += adjustment
# test - bottom
weights_right = np.hstack((weights, 0))
max_count_right = 0
min_count_right = 0
i_right = 0
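# Extrapolate forward one sample at a time using the learned weights (with a zero appended as the
# bias weight) until the extended section contains at least one maximum and one minimum, or the
# extension reaches the length of the original signal.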
while ((max_count_right < 1) or (min_count_right < 1)) and (i_right < len(lsq_signal) - 1):
time_series_extended[int(2 * (len(lsq_signal) - 1) + 1 + i_right)] = \
sum(weights_right * np.hstack((time_series_extended[
int(2 * (len(lsq_signal) - 1) + 1 - neural_network_k + i_right):
int(2 * (len(lsq_signal) - 1) + 1 + i_right)], 1)))
i_right += 1
if i_right > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_right += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)],
time_series=time_series_extended[int(2 * (len(lsq_signal) - 1) + 1):
int(2 * (len(lsq_signal) - 1) + 1 + i_right + 1)])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_right += 1
# backward <-
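# For the left edge the same single-neuron model is instead fitted as a convex optimisation problem
# in cvxpy (minimising the 2-norm of the residual, solved with ECOS), and the resulting weights are
# used to extrapolate backwards until at least one maximum and one minimum have been generated.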
P = np.zeros((int(neural_network_k + 1), neural_network_m))
for col in range(neural_network_m):
P[:-1, col] = lsq_signal[int(col + 1):int(col + neural_network_k + 1)]
P[-1, col] = 1 # for additive constant
t = lsq_signal[:neural_network_m]
vx = cvx.Variable(int(neural_network_k + 1))
objective = cvx.Minimize(cvx.norm((2 * (vx * P) + 1 - t), 2)) # linear activation function is arbitrary
prob = cvx.Problem(objective)
result = prob.solve(verbose=True, solver=cvx.ECOS)
weights_left = np.array(vx.value)
max_count_left = 0
min_count_left = 0
i_left = 0
while ((max_count_left < 1) or (min_count_left < 1)) and (i_left < len(lsq_signal) - 1):
time_series_extended[int(len(lsq_signal) - 2 - i_left)] = \
2 * sum(weights_left * np.hstack((time_series_extended[int(len(lsq_signal) - 1 - i_left):
int(len(lsq_signal) - 1 - i_left + neural_network_k)],
1))) + 1
i_left += 1
if i_left > 1:
emd_utils_max = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_max.max_bool_func_1st_order_fd()) > 0:
max_count_left += 1
emd_utils_min = \
emd_utils.Utility(time=time_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))],
time_series=time_series_extended[int(len(lsq_signal) - 1 - i_left):int(len(lsq_signal))])
if sum(emd_utils_min.min_bool_func_1st_order_fd()) > 0:
min_count_left += 1
lsq_utils = emd_utils.Utility(time=time, time_series=lsq_signal)
utils_extended = emd_utils.Utility(time=time_extended, time_series=time_series_extended)
maxima = lsq_signal[lsq_utils.max_bool_func_1st_order_fd()]
maxima_time = time[lsq_utils.max_bool_func_1st_order_fd()]
maxima_extrapolate = time_series_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
maxima_extrapolate_time = time_extended[utils_extended.max_bool_func_1st_order_fd()][-1]
minima = lsq_signal[lsq_utils.min_bool_func_1st_order_fd()]
minima_time = time[lsq_utils.min_bool_func_1st_order_fd()]
minima_extrapolate = time_series_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
minima_extrapolate_time = time_extended[utils_extended.min_bool_func_1st_order_fd()][-2:]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Single Neuron Neural Network Example')
plt.plot(time, lsq_signal, zorder=2, label='Signal')
plt.plot(time_extended, time_series_extended, c='g', zorder=1, label=textwrap.fill('Extrapolated signal', 12))
plt.scatter(maxima_time, maxima, c='r', zorder=3, label='Maxima')
plt.scatter(minima_time, minima, c='b', zorder=3, label='Minima')
plt.scatter(maxima_extrapolate_time, maxima_extrapolate, c='magenta', zorder=3,
label=textwrap.fill('Extrapolated maxima', 12))
plt.scatter(minima_extrapolate_time, minima_extrapolate, c='cyan', zorder=4,
label=textwrap.fill('Extrapolated minima', 12))
plt.plot(((time[-302] + time[-301]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k',
label=textwrap.fill('Neural network inputs', 13))
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time[-302] + time[-301]) / 2), ((time[-302] + time[-301]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='k')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1002]) / 2),
((time_extended[-1001] + time_extended[-1002]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='k')
plt.plot(((time_extended[-1001] + time_extended[-1002]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='k')
plt.plot(((time[-202] + time[-201]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray', linestyle='dashed',
label=textwrap.fill('Neural network targets', 13))
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
-2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time[-202] + time[-201]) / 2), ((time[-202] + time[-201]) / 2) + 0.1, 100),
2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), -2.75 * np.ones(100), c='gray')
plt.plot(np.linspace(((time_extended[-1001] + time_extended[-1000]) / 2),
((time_extended[-1001] + time_extended[-1000]) / 2) - 0.1, 100), 2.75 * np.ones(100), c='gray')
plt.plot(((time_extended[-1001] + time_extended[-1000]) / 2) * np.ones(100), np.linspace(-2.75, 2.75, 100), c='gray',
linestyle='dashed')
plt.xlim(3.4 * np.pi, 5.6 * np.pi)
plt.xticks((4 * np.pi, 5 * np.pi), (r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/neural_network.png')
plt.show()
# plot 6a
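# Compare the trends extracted from the same noisy signal when the decomposition uses 51, 31 and 11
# knots; the printed values are variances of the detrended residuals, used here as a DFA-style
# measure of fit.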
np.random.seed(0)
time = np.linspace(0, 5 * np.pi, 1001)
knots_51 = np.linspace(0, 5 * np.pi, 51)
time_series = np.cos(2 * time) + np.cos(4 * time) + np.cos(8 * time)
noise = np.random.normal(0, 1, len(time_series))
time_series += noise
advemdpy = EMD(time=time, time_series=time_series)
imfs_51, hts_51, ifs_51 = advemdpy.empirical_mode_decomposition(knots=knots_51, max_imfs=3,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_31 = np.linspace(0, 5 * np.pi, 31)
imfs_31, hts_31, ifs_31 = advemdpy.empirical_mode_decomposition(knots=knots_31, max_imfs=2,
edge_effect='symmetric_anchor', verbose=False)[:3]
knots_11 = np.linspace(0, 5 * np.pi, 11)
imfs_11, hts_11, ifs_11 = advemdpy.empirical_mode_decomposition(knots=knots_11, max_imfs=1,
edge_effect='symmetric_anchor', verbose=False)[:3]
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
print(f'DFA fluctuation with 51 knots: {np.round(np.var(time_series - (imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :])), 3)}')
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[0].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[0].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[0].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
print(f'DFA fluctuation with 31 knots: {np.round(np.var(time_series - (imfs_31[1, :] + imfs_31[2, :])), 3)}')
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[1].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[1].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[1].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
print(f'DFA fluctuation with 11 knots: {np.round(np.var(time_series - imfs_51[3, :]), 3)}')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[2].set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$', r'$5\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), 5.5 * np.ones(101), 'k--')
axs[2].plot(np.linspace(0.95 * np.pi, 1.55 * np.pi, 101), -5.5 * np.ones(101), 'k--')
axs[2].plot(0.95 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--')
axs[2].plot(1.55 * np.pi * np.ones(101), np.linspace(-5.5, 5.5, 101), 'k--', label='Zoomed region')
plt.savefig('jss_figures/DFA_different_trends.png')
plt.show()
# plot 6b
fig, axs = plt.subplots(3, 1)
plt.suptitle(textwrap.fill('Comparison of Trends Extracted with Different Knot Sequences Zoomed Region', 40))
plt.subplots_adjust(hspace=0.1)
axs[0].plot(time, time_series, label='Time series')
axs[0].plot(time, imfs_51[1, :] + imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 1, IMF 2, & IMF 3 with 51 knots', 21))
for knot in knots_51:
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[0].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[0].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[0].set_xticklabels(['', '', '', '', '', ''])
box_0 = axs[0].get_position()
axs[0].set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.85, box_0.height])
axs[0].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[0].set_ylim(-5.5, 5.5)
axs[0].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[1].plot(time, time_series, label='Time series')
axs[1].plot(time, imfs_31[1, :] + imfs_31[2, :], label=textwrap.fill('Sum of IMF 1 and IMF 2 with 31 knots', 19))
axs[1].plot(time, imfs_51[2, :] + imfs_51[3, :], label=textwrap.fill('Sum of IMF 2 and IMF 3 with 51 knots', 19))
for knot in knots_31:
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[1].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[1].set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi])
axs[1].set_xticklabels(['', '', '', '', '', ''])
box_1 = axs[1].get_position()
axs[1].set_position([box_1.x0 - 0.05, box_1.y0, box_1.width * 0.85, box_1.height])
axs[1].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[1].set_ylim(-5.5, 5.5)
axs[1].set_xlim(0.95 * np.pi, 1.55 * np.pi)
axs[2].plot(time, time_series, label='Time series')
axs[2].plot(time, imfs_11[1, :], label='IMF 1 with 11 knots')
axs[2].plot(time, imfs_31[2, :], label='IMF 2 with 31 knots')
axs[2].plot(time, imfs_51[3, :], label='IMF 3 with 51 knots')
for knot in knots_11:
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1)
axs[2].plot(knot * np.ones(101), np.linspace(-5, 5, 101), '--', c='grey', zorder=1, label='Knots')
axs[2].set_xticks([np.pi, (3 / 2) * np.pi])
axs[2].set_xticklabels([r'$\pi$', r'$\frac{3}{2}\pi$'])
box_2 = axs[2].get_position()
axs[2].set_position([box_2.x0 - 0.05, box_2.y0, box_2.width * 0.85, box_2.height])
axs[2].legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
axs[2].set_ylim(-5.5, 5.5)
axs[2].set_xlim(0.95 * np.pi, 1.55 * np.pi)
plt.savefig('jss_figures/DFA_different_trends_zoomed.png')
plt.show()
hs_outputs = hilbert_spectrum(time, imfs_51, hts_51, ifs_51, max_frequency=12, plot=False)
# plot 6c
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 0.9
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Simple Sinusoidal Time Series with Added Noise', 50))
x_hs, y, z = hs_outputs
z_min, z_max = 0, np.abs(z).max()
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.plot(x_hs[0, :], 8 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 8$', linewidth=3)
ax.plot(x_hs[0, :], 4 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 4$', linewidth=3)
ax.plot(x_hs[0, :], 2 * np.ones_like(x_hs[0, :]), '--', label=r'$\omega = 2$', linewidth=3)
ax.set_xticks([0, np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi])
ax.set_xticklabels(['$0$', r'$\pi$', r'$2\pi$', r'$3\pi$', r'$4\pi$'])
plt.ylabel(r'Frequency (rad.s$^{-1}$)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.85, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/DFA_hilbert_spectrum.png')
plt.show()
# plot 6c
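# Schoenberg-Whitney conditions example: the figure below contrasts unsmoothed and smoothed extrema
# envelopes when the knot sequence is too fine for the available extrema, so the Schoenberg-Whitney
# conditions are not satisfied and the unsmoothed envelopes become unreliable.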
time = np.linspace(0, 5 * np.pi, 1001)
time_series = np.cos(time) + np.cos(5 * time)
knots = np.linspace(0, 5 * np.pi, 51)
fluc = Fluctuation(time=time, time_series=time_series)
max_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=False)
max_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='maxima', smooth=True)
min_unsmoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=False)
min_smoothed = fluc.envelope_basis_function_approximation(knots_for_envelope=knots, extrema_type='minima', smooth=True)
util = Utility(time=time, time_series=time_series)
maxima = util.max_bool_func_1st_order_fd()
minima = util.min_bool_func_1st_order_fd()
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title(textwrap.fill('Plot Demonstrating Unsmoothed Extrema Envelopes if Schoenberg–Whitney Conditions are Not Satisfied', 50))
plt.plot(time, time_series, label='Time series', zorder=2, linewidth=2)
plt.scatter(time[maxima], time_series[maxima], c='r', label='Maxima', zorder=10)
plt.scatter(time[minima], time_series[minima], c='b', label='Minima', zorder=10)
plt.plot(time, max_unsmoothed[0], label=textwrap.fill('Unsmoothed maxima envelope', 10), c='darkorange')
plt.plot(time, max_smoothed[0], label=textwrap.fill('Smoothed maxima envelope', 10), c='red')
plt.plot(time, min_unsmoothed[0], label=textwrap.fill('Unsmoothed minima envelope', 10), c='cyan')
plt.plot(time, min_smoothed[0], label=textwrap.fill('Smoothed minima envelope', 10), c='blue')
for knot in knots[:-1]:
plt.plot(knot * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', zorder=1)
plt.plot(knots[-1] * np.ones(101), np.linspace(-3.0, -2.0, 101), '--', c='grey', label='Knots', zorder=1)
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi),
(r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$', r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Schoenberg_Whitney_Conditions.png')
plt.show()
# plot 7
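# Detrended fluctuation analysis examples: the local mean of the signal is estimated in several
# ways (standard EMD envelopes, smoothed SEMD envelopes, envelopes through the optimal extrema
# labelled EEMD, an inflection-point envelope and a binomial-average envelope) and each is plotted
# against the known true mean cos(t).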
a = 0.25
width = 0.2
time = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 1001)
knots = np.linspace((0 + a) * np.pi, (5 - a) * np.pi, 11)
time_series = np.cos(time) + np.cos(5 * time)
utils = emd_utils.Utility(time=time, time_series=time_series)
max_bool = utils.max_bool_func_1st_order_fd()
maxima_x = time[max_bool]
maxima_y = time_series[max_bool]
min_bool = utils.min_bool_func_1st_order_fd()
minima_x = time[min_bool]
minima_y = time_series[min_bool]
inflection_bool = utils.inflection_point()
inflection_x = time[inflection_bool]
inflection_y = time_series[inflection_bool]
fluctuation = emd_mean.Fluctuation(time=time, time_series=time_series)
maxima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
maxima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'maxima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=False,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
minima_envelope_smooth = fluctuation.envelope_basis_function_approximation(knots, 'minima', smooth=True,
smoothing_penalty=0.2, edge_effect='none',
spline_method='b_spline')[0]
inflection_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='inflection_points')[0]
binomial_points_envelope = fluctuation.direct_detrended_fluctuation_estimation(knots,
smooth=True,
smoothing_penalty=0.2,
technique='binomial_average', order=21,
increment=20)[0]
derivative_of_lsq = utils.derivative_forward_diff()
derivative_time = time[:-1]
derivative_knots = np.linspace(knots[0], knots[-1], 31)
# change (1) detrended_fluctuation_technique and (2) max_internal_iter and (3) debug (confusing with external debugging)
emd = AdvEMDpy.EMD(time=derivative_time, time_series=derivative_of_lsq)
imf_1_of_derivative = emd.empirical_mode_decomposition(knots=derivative_knots,
knot_time=derivative_time, text=False, verbose=False)[0][1, :]
utils = emd_utils.Utility(time=time[:-1], time_series=imf_1_of_derivative)
optimal_maxima = np.r_[False, utils.derivative_forward_diff() < 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
optimal_minima = np.r_[False, utils.derivative_forward_diff() > 0, False] & \
np.r_[utils.zero_crossing() == 1, False]
EEMD_maxima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'maxima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
EEMD_minima_envelope = fluctuation.envelope_basis_function_approximation_fixed_points(knots, 'minima',
optimal_maxima,
optimal_minima,
smooth=False,
smoothing_penalty=0.2,
edge_effect='none')[0]
ax = plt.subplot(111)
plt.gcf().subplots_adjust(bottom=0.10)
plt.title('Detrended Fluctuation Analysis Examples')
plt.plot(time, time_series, linewidth=2, label='Time series')
plt.scatter(maxima_x, maxima_y, c='r', zorder=4, label='Maxima')
plt.scatter(minima_x, minima_y, c='b', zorder=4, label='Minima')
plt.scatter(time[optimal_maxima], time_series[optimal_maxima], c='darkred', zorder=4,
label=textwrap.fill('Optimal maxima', 10))
plt.scatter(time[optimal_minima], time_series[optimal_minima], c='darkblue', zorder=4,
label=textwrap.fill('Optimal minima', 10))
plt.scatter(inflection_x, inflection_y, c='magenta', zorder=4, label=textwrap.fill('Inflection points', 10))
plt.plot(time, maxima_envelope, c='darkblue', label=textwrap.fill('EMD envelope', 10))
plt.plot(time, minima_envelope, c='darkblue')
plt.plot(time, (maxima_envelope + minima_envelope) / 2, c='darkblue')
plt.plot(time, maxima_envelope_smooth, c='darkred', label=textwrap.fill('SEMD envelope', 10))
plt.plot(time, minima_envelope_smooth, c='darkred')
plt.plot(time, (maxima_envelope_smooth + minima_envelope_smooth) / 2, c='darkred')
plt.plot(time, EEMD_maxima_envelope, c='darkgreen', label=textwrap.fill('EEMD envelope', 10))
plt.plot(time, EEMD_minima_envelope, c='darkgreen')
plt.plot(time, (EEMD_maxima_envelope + EEMD_minima_envelope) / 2, c='darkgreen')
plt.plot(time, inflection_points_envelope, c='darkorange', label=textwrap.fill('Inflection point envelope', 10))
plt.plot(time, binomial_points_envelope, c='deeppink', label=textwrap.fill('Binomial average envelope', 10))
plt.plot(time, np.cos(time), c='black', label='True mean')
plt.xticks((0, 1 * np.pi, 2 * np.pi, 3 * np.pi, 4 * np.pi, 5 * np.pi), (r'$0$', r'$\pi$', r'2$\pi$', r'3$\pi$',
r'4$\pi$', r'5$\pi$'))
plt.yticks((-2, -1, 0, 1, 2), ('-2', '-1', '0', '1', '2'))
plt.xlim(-0.25 * np.pi, 5.25 * np.pi)
box_0 = ax.get_position()
ax.set_position([box_0.x0 - 0.05, box_0.y0, box_0.width * 0.84, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/detrended_fluctuation_analysis.png')
plt.show()
# Duffing Equation Example
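# The function below defines a double-well Duffing oscillator as a first-order system,
# x' = v, v' = x - epsilon * x**3 + gamma * cos(omega * t), with gamma = 0.1, epsilon = 1 and
# omega = 2 * pi / 25, integrated by odeint from the initial condition [1, 1].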
def duffing_equation(xy, ts):
gamma = 0.1
epsilon = 1
omega = ((2 * np.pi) / 25)
return [xy[1], xy[0] - epsilon * xy[0] ** 3 + gamma * np.cos(omega * ts)]
t = np.linspace(0, 150, 1501)
XY0 = [1, 1]
solution = odeint(duffing_equation, XY0, t)
x = solution[:, 0]
dxdt = solution[:, 1]
x_points = [0, 50, 100, 150]
x_names = [0, 50, 100, 150]  # a list keeps the tick labels in order
y_points_1 = [-2, 0, 2]
y_points_2 = [-1, 0, 1]
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.2)
axs[0].plot(t, x)
axs[0].set_title('Duffing Equation Displacement')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, dxdt)
axs[1].set_title('Duffing Equation Velocity')
axs[1].set_ylim([-1.5, 1.5])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel('x(t)')
ax.set_yticks(y_points_1)
if axis == 1:
ax.set_ylabel(r'$ \dfrac{dx(t)}{dt} $')
ax.set(xlabel='t')
ax.set_yticks(y_points_2)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation.png')
plt.show()
# compare other packages Duffing - top
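# Decompose the Duffing displacement with two other packages (PyEMD and emd) and plot their
# Gaussian-filtered Hilbert spectra, with the Hamiltonian frequency approximation (0.124 Hz) and
# the driving-function frequency (0.04 Hz) overlaid for reference.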
pyemd = pyemd0215()
py_emd = pyemd(x)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd.T, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using PyEMD 0.2.10', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(x)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift, 10, 'hilbert')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 0.2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
ax = plt.subplot(111)
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using emd 0.3.3', 40))
plt.pcolormesh(t, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht_emd.png')
plt.show()
# compare other packages Duffing - bottom
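# Compare the IMFs from AdvEMDpy, PyEMD and emd against the known driving component
# 0.1 * cos(0.08 * pi * t); the printed values are the summed absolute errors of each package's
# second IMF relative to that driving function.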
emd_duffing = AdvEMDpy.EMD(time=t, time_series=x)
emd_duff, emd_ht_duff, emd_if_duff, _, _, _, _ = emd_duffing.empirical_mode_decomposition(verbose=False)
fig, axs = plt.subplots(2, 1)
plt.subplots_adjust(hspace=0.3)
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
axs[0].plot(t, emd_duff[1, :], label='AdvEMDpy')
axs[0].plot(t, py_emd[0, :], '--', label='PyEMD 0.2.10')
axs[0].plot(t, emd_sift[:, 0], '--', label='emd 0.3.3')
axs[0].set_title('IMF 1')
axs[0].set_ylim([-2, 2])
axs[0].set_xlim([0, 150])
axs[1].plot(t, emd_duff[2, :], label='AdvEMDpy')
print(f'AdvEMDpy driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_duff[2, :])), 3)}')
axs[1].plot(t, py_emd[1, :], '--', label='PyEMD 0.2.10')
print(f'PyEMD driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - py_emd[1, :])), 3)}')
axs[1].plot(t, emd_sift[:, 1], '--', label='emd 0.3.3')
print(f'emd driving function error: {np.round(sum(abs(0.1 * np.cos(0.04 * 2 * np.pi * t) - emd_sift[:, 1])), 3)}')
axs[1].plot(t, 0.1 * np.cos(0.04 * 2 * np.pi * t), '--', label=r'$0.1$cos$(0.08{\pi}t)$')
axs[1].set_title('IMF 2')
axs[1].set_ylim([-0.2, 0.4])
axs[1].set_xlim([0, 150])
axis = 0
for ax in axs.flat:
ax.label_outer()
if axis == 0:
ax.set_ylabel(r'$\gamma_1(t)$')
ax.set_yticks([-2, 0, 2])
if axis == 1:
ax.set_ylabel(r'$\gamma_2(t)$')
ax.set_yticks([-0.2, 0, 0.2])
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0, box_0.width * 0.85, box_0.height])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5), fontsize=8)
ax.set_xticks(x_points)
ax.set_xticklabels(x_names)
axis += 1
plt.savefig('jss_figures/Duffing_equation_imfs.png')
plt.show()
hs_outputs = hilbert_spectrum(t, emd_duff, emd_ht_duff, emd_if_duff, max_frequency=1.3, plot=False)
ax = plt.subplot(111)
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of Duffing Equation using AdvEMDpy', 40))
x, y, z = hs_outputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
figure_size = plt.gcf().get_size_inches()
factor = 1.0
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
plt.plot(t[:-1], 0.124 * np.ones_like(t[:-1]), '--', label=textwrap.fill('Hamiltonian frequency approximation', 15))
plt.plot(t[:-1], 0.04 * np.ones_like(t[:-1]), 'g--', label=textwrap.fill('Driving function frequency', 15))
plt.xticks([0, 50, 100, 150])
plt.yticks([0, 0.1, 0.2])
plt.ylabel('Frequency (Hz)')
plt.xlabel('Time (s)')
box_0 = ax.get_position()
ax.set_position([box_0.x0, box_0.y0 + 0.05, box_0.width * 0.75, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/Duffing_equation_ht.png')
plt.show()
# Carbon Dioxide Concentration Example
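# Real-data example: monthly mean atmospheric CO2 concentrations read from Data/co2_mm_mlo.csv
# (presumably the NOAA Mauna Loa monthly record) are plotted and then decomposed to isolate the
# annual cycle.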
CO2_data = pd.read_csv('Data/co2_mm_mlo.csv', header=51)
plt.plot(CO2_data['month'], CO2_data['decimal date'])
plt.title(textwrap.fill('Mean Monthly Concentration of Carbon Dioxide in the Atmosphere', 35))
plt.ylabel('Parts per million')
plt.xlabel('Time (years)')
plt.savefig('jss_figures/CO2_concentration.png')
plt.show()
signal = CO2_data['decimal date']
signal = np.asarray(signal)
time = CO2_data['month']
time = np.asarray(time)
# compare other packages Carbon Dioxide - top
pyemd = pyemd0215()
py_emd = pyemd(signal)
IP, IF, IA = emd040.spectra.frequency_transform(py_emd[:2, :].T, 12, 'hilbert')
print(f'PyEMD annual frequency error: {np.round(sum(np.abs(IF[:, 0] - np.ones_like(IF[:, 0]))), 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using PyEMD 0.2.10', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_pyemd.png')
plt.show()
emd_sift = emd040.sift.sift(signal)
IP, IF, IA = emd040.spectra.frequency_transform(emd_sift[:, :1], 12, 'hilbert')
print(f'emd annual frequency error: {np.round(sum(np.abs(IF - np.ones_like(IF)))[0], 3)}')
freq_edges, freq_bins = emd040.spectra.define_hist_bins(0, 2, 100)
hht = emd040.spectra.hilberthuang(IF, IA, freq_edges)
hht = gaussian_filter(hht, sigma=1)
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.8
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
plt.title(textwrap.fill('Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using emd 0.3.3', 45))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.pcolormesh(time, freq_bins, hht, cmap='gist_rainbow', vmin=0, vmax=np.max(np.max(np.abs(hht))))
plt.plot(time, np.ones_like(time), 'k--', label=textwrap.fill('Annual cycle', 10))
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert_emd.png')
plt.show()
# compare other packages Carbon Dioxide - bottom
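# AdvEMDpy decomposition of the CO2 series over 200 knots; the printed value is the summed absolute
# deviation of IMF 1's instantaneous frequency (converted to cycles per year) from the expected
# annual cycle.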
knots = np.linspace(time[0], time[-1], 200)
emd_example = AdvEMDpy.EMD(time=time, time_series=signal)
imfs, hts, ifs, _, _, _, _ = \
emd_example.empirical_mode_decomposition(knots=knots, knot_time=time, verbose=False)
print(f'AdvEMDpy annual frequency error: {np.round(sum(np.abs(ifs[1, :] / (2 * np.pi) - np.ones_like(ifs[1, :]))), 3)}')
fig, axs = plt.subplots(2, 2)
plt.subplots_adjust(hspace=0.5)
axs[0, 0].plot(time, signal)
axs[0, 1].plot(time, signal)
axs[0, 1].plot(time, imfs[0, :], label='Smoothed')
axs[0, 1].legend(loc='lower right')
axs[1, 0].plot(time, imfs[1, :])
axs[1, 1].plot(time, imfs[2, :])
axis = 0
for ax in axs.flat:
if axis == 0:
ax.set(ylabel=r'CO$_2$ concentration')
if axis == 1:
pass
if axis == 2:
ax.set(ylabel=r'CO$_2$ concentration')
ax.set(xlabel='Time (years)')
if axis == 3:
ax.set(xlabel='Time (years)')
axis += 1
plt.gcf().subplots_adjust(bottom=0.15)
axs[0, 0].set_title(r'Original CO$_2$ Concentration')
axs[0, 1].set_title('Smoothed CO$_2$ Concentration')
axs[1, 0].set_title('IMF 1')
axs[1, 1].set_title('Residual')
plt.gcf().subplots_adjust(bottom=0.15)
plt.savefig('jss_figures/CO2_EMD.png')
plt.show()
hs_outputs = hilbert_spectrum(time, imfs, hts, ifs, max_frequency=10, which_imfs=[1], plot=False)
x_hs, y, z = hs_outputs
y = y / (2 * np.pi)
z_min, z_max = 0, np.abs(z).max()
fig, ax = plt.subplots()
figure_size = plt.gcf().get_size_inches()
factor = 0.7
plt.gcf().set_size_inches((figure_size[0], factor * figure_size[1]))
ax.pcolormesh(x_hs, y, np.abs(z), cmap='gist_rainbow', vmin=z_min, vmax=z_max)
ax.set_title(textwrap.fill(r'Gaussian Filtered Hilbert Spectrum of CO$_{2}$ Concentration using AdvEMDpy', 40))
plt.ylabel('Frequency (year$^{-1}$)')
plt.xlabel('Time (years)')
plt.plot(x_hs[0, :], np.ones_like(x_hs[0, :]), 'k--', label=textwrap.fill('Annual cycle', 10))
ax.axis([x_hs.min(), x_hs.max(), y.min(), y.max()])
box_0 = ax.get_position()
ax.set_position([box_0.x0 + 0.0125, box_0.y0 + 0.075, box_0.width * 0.8, box_0.height * 0.9])
ax.legend(loc='center left', bbox_to_anchor=(1, 0.5))
plt.savefig('jss_figures/CO2_Hilbert.png')
plt.show()
| 48.949835
| 135
| 0.664141
| 12,802
| 74,159
| 3.621856
| 0.043196
| 0.016909
| 0.027498
| 0.012768
| 0.784416
| 0.728988
| 0.665711
| 0.613152
| 0.567192
| 0.53523
| 0
| 0.065736
| 0.149732
| 74,159
| 1,514
| 136
| 48.982166
| 0.66959
| 0.012662
| 0
| 0.425532
| 0
| 0.007092
| 0.114509
| 0.018518
| 0
| 0
| 0
| 0
| 0
| 1
| 0.000788
| false
| 0.000788
| 0.016548
| 0
| 0.018125
| 0.007092
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6adeb529cfb4e14bdceab8619cd0e9f75dad5fb6
| 615
|
py
|
Python
|
migrations/versions/0158_remove_rate_limit_default.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 41
|
2019-11-28T16:58:41.000Z
|
2022-01-28T21:11:16.000Z
|
migrations/versions/0158_remove_rate_limit_default.py
|
cds-snc/notification-api
|
b1c1064f291eb860b494c3fa65ac256ad70bf47c
|
[
"MIT"
] | 1,083
|
2019-07-08T12:57:24.000Z
|
2022-03-08T18:53:40.000Z
|
migrations/versions/0158_remove_rate_limit_default.py
|
cds-snc/notifier-api
|
90b385ec49efbaee7e607516fc7d9f08991af813
|
[
"MIT"
] | 9
|
2020-01-24T19:56:43.000Z
|
2022-01-27T21:36:53.000Z
|
"""
Revision ID: 0158_remove_rate_limit_default
Revises: 0157_add_rate_limit_to_service
Create Date: 2018-01-09 14:33:08.313893
"""
import sqlalchemy as sa
from alembic import op
revision = "0158_remove_rate_limit_default"
down_revision = "0157_add_rate_limit_to_service"
def upgrade():
op.execute("ALTER TABLE services ALTER rate_limit DROP DEFAULT")
op.execute("ALTER TABLE services_history ALTER rate_limit DROP DEFAULT")
def downgrade():
op.execute("ALTER TABLE services ALTER rate_limit SET DEFAULT '3000'")
op.execute("ALTER TABLE services_history ALTER rate_limit SET DEFAULT '3000'")
| 26.73913
| 82
| 0.785366
| 93
| 615
| 4.924731
| 0.430108
| 0.157205
| 0.122271
| 0.165939
| 0.720524
| 0.558952
| 0.388646
| 0.388646
| 0.209607
| 0
| 0
| 0.082552
| 0.133333
| 615
| 22
| 83
| 27.954545
| 0.776735
| 0.2
| 0
| 0
| 0
| 0
| 0.59751
| 0.124481
| 0
| 0
| 0
| 0
| 0
| 1
| 0.2
| false
| 0
| 0.2
| 0
| 0.4
| 0
| 0
| 0
| 0
| null | 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
6ae016a3900fe6ed337451d458c99fc65e3be76f
| 888
|
py
|
Python
|
backend/core/api_urls.py
|
albeiks/omaralbeik.com
|
8d096130393919612863aac6280dffaf6e00961d
|
[
"MIT"
] | 10
|
2020-05-05T16:20:04.000Z
|
2021-07-22T15:15:13.000Z
|
backend/core/api_urls.py
|
albeiks/omaralbeik.com
|
8d096130393919612863aac6280dffaf6e00961d
|
[
"MIT"
] | null | null | null |
backend/core/api_urls.py
|
albeiks/omaralbeik.com
|
8d096130393919612863aac6280dffaf6e00961d
|
[
"MIT"
] | 1
|
2020-05-06T22:31:48.000Z
|
2020-05-06T22:31:48.000Z
|
from django.conf.urls import url, include
from core.routers import OptionalTrailingSlashRouter
from blog import views as blogViews
from snippets import views as snippetsViews
from projects import views as projectsViews
from tags import views as tagsViews
from contents import views as contentsViews
from contact import views as contactViews
router = OptionalTrailingSlashRouter()
router.register(r"blog", blogViews.PostViewSet)
router.register(r"snippets", snippetsViews.SnippetViewSet)
router.register(r"languages", snippetsViews.ProgrammingLanguageViewSet)
router.register(r"projects", projectsViews.ProjectViewSet)
router.register(r"tags", tagsViews.TagViewSet)
router.register(r"contents", contentsViews.ContentViewSet)
router.register(r"contact", contactViews.MessageViewSet)
# List of url patterns for the api subdomain
urlpatterns = [
url(r"^v2/", include(router.urls)),
]
| 35.52
| 71
| 0.824324
| 107
| 888
| 6.841122
| 0.411215
| 0.13388
| 0.143443
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.001245
| 0.095721
| 888
| 24
| 72
| 37
| 0.910336
| 0.047297
| 0
| 0
| 0
| 0
| 0.061611
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0
| 0.421053
| 0
| 0.421053
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
|
0
| 1
|
6ae3ed28439c3795f0a3092e3b0da325e69356b7
| 1,590
|
py
|
Python
|
tools/perf/contrib/oop_raster/oop_raster.py
|
zipated/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 2,151
|
2020-04-18T07:31:17.000Z
|
2022-03-31T08:39:18.000Z
|
tools/perf/contrib/oop_raster/oop_raster.py
|
cangulcan/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 395
|
2020-04-18T08:22:18.000Z
|
2021-12-08T13:04:49.000Z
|
tools/perf/contrib/oop_raster/oop_raster.py
|
cangulcan/src
|
2b8388091c71e442910a21ada3d97ae8bc1845d3
|
[
"BSD-3-Clause"
] | 338
|
2020-04-18T08:03:10.000Z
|
2022-03-29T12:33:22.000Z
|
# Copyright 2018 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
from benchmarks import smoothness,thread_times
import page_sets
from telemetry import benchmark
# pylint: disable=protected-access
def CustomizeBrowserOptionsForOopRasterization(options):
"""Enables flags needed for out of process rasterization."""
options.AppendExtraBrowserArgs('--force-gpu-rasterization')
options.AppendExtraBrowserArgs('--enable-oop-rasterization')
@benchmark.Owner(emails=['[email protected]'])
class SmoothnessOopRasterizationTop25(smoothness._Smoothness):
"""Measures rendering statistics for the top 25 with oop rasterization.
"""
tag = 'oop_rasterization'
page_set = page_sets.Top25SmoothPageSet
def SetExtraBrowserOptions(self, options):
CustomizeBrowserOptionsForOopRasterization(options)
@classmethod
def Name(cls):
return 'smoothness.oop_rasterization.top_25_smooth'
@benchmark.Owner(emails=['[email protected]'])
class ThreadTimesOopRasterKeyMobile(thread_times._ThreadTimes):
"""Measure timeline metrics for key mobile pages while using out of process
raster."""
tag = 'oop_rasterization'
page_set = page_sets.KeyMobileSitesSmoothPageSet
options = {'story_tag_filter': 'fastpath'}
def SetExtraBrowserOptions(self, options):
super(ThreadTimesOopRasterKeyMobile, self).SetExtraBrowserOptions(options)
CustomizeBrowserOptionsForOopRasterization(options)
@classmethod
def Name(cls):
return 'thread_times.oop_rasterization.key_mobile'
| 34.565217
| 78
| 0.796855
| 170
| 1,590
| 7.341176
| 0.547059
| 0.076923
| 0.019231
| 0.038462
| 0.251603
| 0.251603
| 0.251603
| 0.133013
| 0
| 0
| 0
| 0.008553
| 0.11761
| 1,590
| 45
| 79
| 35.333333
| 0.880969
| 0.249686
| 0
| 0.461538
| 0
| 0
| 0.192833
| 0.114334
| 0
| 0
| 0
| 0
| 0
| 1
| 0.192308
| false
| 0
| 0.115385
| 0.076923
| 0.653846
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
|
0
| 1
|
0a77fdb1c15169709a632c8652ce9cffd62abd68
| 491
|
py
|
Python
|
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | 5
|
2020-05-19T07:32:39.000Z
|
2022-03-14T09:09:48.000Z
|
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | null | null | null |
jnpy/experiments/Qt/pyqtgraph_tutorial/codeloop_org_materials/c4_drawing_curves.py
|
jojoquant/jnpy
|
c874060af4b129ae09cee9f8542517b7b2f6573b
|
[
"MIT"
] | 3
|
2020-04-02T08:30:17.000Z
|
2020-05-03T12:12:05.000Z
|
# !/usr/bin/env python3
# -*- coding:utf-8 -*-
# @Datetime : 2019/11/14 上午2:26
# @Author : Fangyang
# @Software : PyCharm
import sys
from PyQt5.QtWidgets import QApplication
import pyqtgraph as pg
import numpy as np
app = QApplication(sys.argv)
x = np.arange(1000)
y = np.random.normal(size=(3, 1000))
plotWidget = pg.plot(title='Three plot curves')
for i in range(3):
plotWidget.plot(x, y[i], pen=(i, 3))
status = app.exec_()
sys.exit(status)
if __name__ == '__main__':
pass
| 19.64
| 47
| 0.678208
| 77
| 491
| 4.207792
| 0.714286
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0.060976
| 0.164969
| 491
| 24
| 48
| 20.458333
| 0.729268
| 0.230143
| 0
| 0
| 0
| 0
| 0.067204
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| false
| 0.071429
| 0.285714
| 0
| 0.285714
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
|
0
| 1
|
0a7c17bb65b9c51d7ea399323ecb512289bae204
| 8,155
|
py
|
Python
|
sdk/python/pulumi_kubernetes/coordination/v1/_inputs.py
|
polivbr/pulumi-kubernetes
|
36a5fb34240a38a60b52a5f4e55e66e248d9305f
|
[
"Apache-2.0"
] | 277
|
2018-06-18T14:57:09.000Z
|
2022-03-29T04:05:06.000Z
|
sdk/python/pulumi_kubernetes/coordination/v1/_inputs.py
|
polivbr/pulumi-kubernetes
|
36a5fb34240a38a60b52a5f4e55e66e248d9305f
|
[
"Apache-2.0"
] | 1,447
|
2018-06-20T00:58:34.000Z
|
2022-03-31T21:28:43.000Z
|
sdk/python/pulumi_kubernetes/coordination/v1/_inputs.py
|
polivbr/pulumi-kubernetes
|
36a5fb34240a38a60b52a5f4e55e66e248d9305f
|
[
"Apache-2.0"
] | 95
|
2018-06-30T03:30:05.000Z
|
2022-03-29T04:05:09.000Z
|
# coding=utf-8
# *** WARNING: this file was generated by pulumigen. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from ... import meta as _meta
__all__ = [
'LeaseSpecArgs',
'LeaseArgs',
]
@pulumi.input_type
class LeaseSpecArgs:
def __init__(__self__, *,
acquire_time: Optional[pulumi.Input[str]] = None,
holder_identity: Optional[pulumi.Input[str]] = None,
lease_duration_seconds: Optional[pulumi.Input[int]] = None,
lease_transitions: Optional[pulumi.Input[int]] = None,
renew_time: Optional[pulumi.Input[str]] = None):
"""
LeaseSpec is a specification of a Lease.
:param pulumi.Input[str] acquire_time: acquireTime is a time when the current lease was acquired.
:param pulumi.Input[str] holder_identity: holderIdentity contains the identity of the holder of a current lease.
:param pulumi.Input[int] lease_duration_seconds: leaseDurationSeconds is a duration that candidates for a lease need to wait to force acquire it. This is measured against time of last observed RenewTime.
:param pulumi.Input[int] lease_transitions: leaseTransitions is the number of transitions of a lease between holders.
:param pulumi.Input[str] renew_time: renewTime is a time when the current holder of a lease has last updated the lease.
"""
if acquire_time is not None:
pulumi.set(__self__, "acquire_time", acquire_time)
if holder_identity is not None:
pulumi.set(__self__, "holder_identity", holder_identity)
if lease_duration_seconds is not None:
pulumi.set(__self__, "lease_duration_seconds", lease_duration_seconds)
if lease_transitions is not None:
pulumi.set(__self__, "lease_transitions", lease_transitions)
if renew_time is not None:
pulumi.set(__self__, "renew_time", renew_time)
@property
@pulumi.getter(name="acquireTime")
def acquire_time(self) -> Optional[pulumi.Input[str]]:
"""
acquireTime is a time when the current lease was acquired.
"""
return pulumi.get(self, "acquire_time")
@acquire_time.setter
def acquire_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "acquire_time", value)
@property
@pulumi.getter(name="holderIdentity")
def holder_identity(self) -> Optional[pulumi.Input[str]]:
"""
holderIdentity contains the identity of the holder of a current lease.
"""
return pulumi.get(self, "holder_identity")
@holder_identity.setter
def holder_identity(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "holder_identity", value)
@property
@pulumi.getter(name="leaseDurationSeconds")
def lease_duration_seconds(self) -> Optional[pulumi.Input[int]]:
"""
leaseDurationSeconds is a duration that candidates for a lease need to wait to force acquire it. This is measured against time of last observed RenewTime.
"""
return pulumi.get(self, "lease_duration_seconds")
@lease_duration_seconds.setter
def lease_duration_seconds(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "lease_duration_seconds", value)
@property
@pulumi.getter(name="leaseTransitions")
def lease_transitions(self) -> Optional[pulumi.Input[int]]:
"""
leaseTransitions is the number of transitions of a lease between holders.
"""
return pulumi.get(self, "lease_transitions")
@lease_transitions.setter
def lease_transitions(self, value: Optional[pulumi.Input[int]]):
pulumi.set(self, "lease_transitions", value)
@property
@pulumi.getter(name="renewTime")
def renew_time(self) -> Optional[pulumi.Input[str]]:
"""
renewTime is a time when the current holder of a lease has last updated the lease.
"""
return pulumi.get(self, "renew_time")
@renew_time.setter
def renew_time(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "renew_time", value)
@pulumi.input_type
class LeaseArgs:
def __init__(__self__, *,
api_version: Optional[pulumi.Input[str]] = None,
kind: Optional[pulumi.Input[str]] = None,
metadata: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']] = None,
spec: Optional[pulumi.Input['LeaseSpecArgs']] = None):
"""
Lease defines a lease concept.
:param pulumi.Input[str] api_version: APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
:param pulumi.Input[str] kind: Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
:param pulumi.Input['_meta.v1.ObjectMetaArgs'] metadata: More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
:param pulumi.Input['LeaseSpecArgs'] spec: Specification of the Lease. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
"""
if api_version is not None:
pulumi.set(__self__, "api_version", 'coordination.k8s.io/v1')
if kind is not None:
pulumi.set(__self__, "kind", 'Lease')
if metadata is not None:
pulumi.set(__self__, "metadata", metadata)
if spec is not None:
pulumi.set(__self__, "spec", spec)
@property
@pulumi.getter(name="apiVersion")
def api_version(self) -> Optional[pulumi.Input[str]]:
"""
APIVersion defines the versioned schema of this representation of an object. Servers should convert recognized schemas to the latest internal value, and may reject unrecognized values. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#resources
"""
return pulumi.get(self, "api_version")
@api_version.setter
def api_version(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "api_version", value)
@property
@pulumi.getter
def kind(self) -> Optional[pulumi.Input[str]]:
"""
Kind is a string value representing the REST resource this object represents. Servers may infer this from the endpoint the client submits requests to. Cannot be updated. In CamelCase. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#types-kinds
"""
return pulumi.get(self, "kind")
@kind.setter
def kind(self, value: Optional[pulumi.Input[str]]):
pulumi.set(self, "kind", value)
@property
@pulumi.getter
def metadata(self) -> Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]:
"""
More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#metadata
"""
return pulumi.get(self, "metadata")
@metadata.setter
def metadata(self, value: Optional[pulumi.Input['_meta.v1.ObjectMetaArgs']]):
pulumi.set(self, "metadata", value)
@property
@pulumi.getter
def spec(self) -> Optional[pulumi.Input['LeaseSpecArgs']]:
"""
Specification of the Lease. More info: https://git.k8s.io/community/contributors/devel/sig-architecture/api-conventions.md#spec-and-status
"""
return pulumi.get(self, "spec")
@spec.setter
def spec(self, value: Optional[pulumi.Input['LeaseSpecArgs']]):
pulumi.set(self, "spec", value)
| 46.073446
| 335
| 0.682649
| 1,021
| 8,155
| 5.322233
| 0.15573
| 0.076923
| 0.094406
| 0.060729
| 0.7212
| 0.544902
| 0.478285
| 0.44424
| 0.44424
| 0.419948
| 0
| 0.00233
| 0.210423
| 8,155
| 176
| 336
| 46.335227
| 0.84159
| 0.383691
| 0
| 0.150943
| 1
| 0
| 0.116269
| 0.033433
| 0
| 0
| 0
| 0
| 0
| 1
| 0.188679
| false
| 0
| 0.056604
| 0
| 0.349057
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a7c48d84a538009f1d4846a3bf1ffec3626caf1
| 1,005
|
py
|
Python
|
Components/Align All Components.py
|
davidtahim/Glyphs-Scripts
|
5ed28805b5fe03c63d904ad2f79117844c22aa44
|
[
"Apache-2.0"
] | 1
|
2021-09-04T18:41:30.000Z
|
2021-09-04T18:41:30.000Z
|
Components/Align All Components.py
|
davidtahim/Glyphs-Scripts
|
5ed28805b5fe03c63d904ad2f79117844c22aa44
|
[
"Apache-2.0"
] | null | null | null |
Components/Align All Components.py
|
davidtahim/Glyphs-Scripts
|
5ed28805b5fe03c63d904ad2f79117844c22aa44
|
[
"Apache-2.0"
] | null | null | null |
#MenuTitle: Align All Components
# -*- coding: utf-8 -*-
__doc__="""
Fakes auto-alignment in glyphs that cannot be auto-aligned.
"""
import GlyphsApp
thisFont = Glyphs.font # frontmost font
thisFontMaster = thisFont.selectedFontMaster # active master
thisFontMasterID = thisFont.selectedFontMaster.id # active master
listOfSelectedLayers = thisFont.selectedLayers # active layers of selected glyphs
def process( thisLayer ):
advance = 0.0
for thisComponent in thisLayer.components:
thisComponent.position = NSPoint( advance, 0.0 )
advance += thisComponent.component.layers[thisFontMasterID].width
thisLayer.width = advance
thisFont.disableUpdateInterface() # suppresses UI updates in Font View
for thisLayer in listOfSelectedLayers:
thisGlyph = thisLayer.parent
print "Aligning components in:", thisGlyph.name
thisGlyph.beginUndo() # begin undo grouping
process( thisLayer )
thisGlyph.endUndo() # end undo grouping
thisFont.enableUpdateInterface() # re-enables UI updates in Font View
| 32.419355
| 81
| 0.78607
| 112
| 1,005
| 7.017857
| 0.5625
| 0.066158
| 0.022901
| 0.038168
| 0.048346
| 0
| 0
| 0
| 0
| 0
| 0
| 0.005727
| 0.131343
| 1,005
| 30
| 82
| 33.5
| 0.894616
| 0.235821
| 0
| 0
| 0
| 0
| 0.110818
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | null | 0
| 0.045455
| null | null | 0.045455
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| 0
| null | 0
| 0
| 0
| 0
| 1
| 0
| 0
| 0
| 0
| 0
| 0
| 0
|
0
| 1
|
0a7cd64e2508df91e539f1a6f804bc5eb4b0ea83
| 12,372
|
py
|
Python
|
audio/audio_server.py
|
artigianitecnologici/marrtino_apps
|
b58bf4daa1d06db2f1c8a47be02b29948d41f48d
|
[
"BSD-4-Clause"
] | null | null | null |
audio/audio_server.py
|
artigianitecnologici/marrtino_apps
|
b58bf4daa1d06db2f1c8a47be02b29948d41f48d
|
[
"BSD-4-Clause"
] | null | null | null |
audio/audio_server.py
|
artigianitecnologici/marrtino_apps
|
b58bf4daa1d06db2f1c8a47be02b29948d41f48d
|
[
"BSD-4-Clause"
] | null | null | null |
# Only PCM 16 bit wav 44100 Hz - Use audacity or sox to convert audio files.
# WAV generation
# Synth
# sox -n --no-show-progress -G --channels 1 -r 44100 -b 16 -t wav bip.wav synth 0.25 sine 800
# sox -n --no-show-progress -G --channels 1 -r 44100 -b 16 -t wav bop.wav synth 0.25 sine 400
# Voices
# pico2wave -l "it-IT" -w start.wav "Bene! Si Parte!"
# Then convert wav files to to 44100 Hz
# Note: some initial sound may not be played.
# alsaaudio examples
# https://larsimmisch.github.io/pyalsaaudio/libalsaaudio.html
import threading
import time
import socket
import sys, os, platform
import re
import wave
import argparse
import rospy
use_sound_play = False
use_alsaaudio = True
try:
from sound_play.msg import SoundRequest
from sound_play.libsoundplay import SoundClient
except:
print('ROS package sound_play required.')
print('Install with: sudo apt-get install ros-kinetic-audio-common libasound2')
use_sound_play = False
#sys.exit(0)
try:
import sox
except:
print('sox required. Install with: pip install --user sox')
sys.exit(0)
try:
import alsaaudio
except:
print('alsaaudio required. Install with: pip install --user pyalsaaudio')
use_alsaaudio = False
#sys.exit(0)
from asr_server import ASRServer
SOUNDS_DIR = "sounds/" # dir with sounds
soundfile = None # sound file
tts_server = None
asr_server = None
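# Note: TTS_callback below returns pyaudio.paContinue although pyaudio is not imported in this
# file; it appears to be a leftover from an earlier PyAudio-based playback path and does not seem
# to be used by the alsaaudio/sound_play players configured in TTSServer.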
def TTS_callback(in_data, frame_count, time_info, status):
global soundfile
if (soundfile==None):
return (None, True)
else:
data = soundfile.readframes(frame_count)
return (data, pyaudio.paContinue)
class TTSServer(threading.Thread):
def __init__(self, port, output_device):
global use_alsaaudio, use_sound_play
threading.Thread.__init__(self)
# Initialize audio player
self.streaming = False
self.output_device = output_device
self.soundhandle = None
m = platform.machine()
print "Machine type:" , m
if (m[0:3]=='arm'):
use_sound_play = False
if (use_sound_play):
os.system('roslaunch sound_play.launch &')
time.sleep(5)
rospy.init_node('sound_client', disable_signals=True)
use_alsaaudio = False
elif (use_alsaaudio):
self.init_alsaaudio()
else:
print('Cannot initialize audio interface')
# Create a TCP/IP socket
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
self.sock.settimeout(3)
# Bind the socket to the port
server_address = ('', port)
self.sock.bind(server_address)
self.sock.listen(1)
print "TTS Server running on port ", port, " ..."
self.dorun = True
self.connection = None
# Dictionary of sounds
self.Sounds = {}
self.Sounds['bip'] = wave.open(SOUNDS_DIR+'bip.wav', 'rb')
self.idcache = 0
def init_alsaaudio(self):
print("Audio devices available")
pp = alsaaudio.pcms()
if (self.output_device=='sysdefault'):
# select proper sysdefault name
for l in pp:
print(' %s' %l)
if (l[0:10]=='sysdefault'):
print "choose ",l
self.output_device = l # choose default device
break
print("Audio device used: %s" %self.output_device)
self.aa_stream = None
retry = 3
while retry>0:
try:
self.aa_stream = alsaaudio.PCM(alsaaudio.PCM_PLAYBACK, alsaaudio.PCM_NORMAL, self.output_device)
retry = 0
except Exception as e:
print(e)
retry -= 1
time.sleep(2)
if self.aa_stream == None:
retry = 3
while retry>0:
try:
self.output_device='default'
print("Audio device used: %s" %self.output_device)
self.aa_stream = alsaaudio.PCM(alsaaudio.PCM_PLAYBACK, alsaaudio.PCM_NORMAL, self.output_device)
retry = 0
except Exception as e:
print(e)
retry -= 1
time.sleep(2)
self.audio_rate = 44100
self.periodsize = self.audio_rate / 8
if self.aa_stream != None:
self.aa_stream.setformat(alsaaudio.PCM_FORMAT_S16_LE)
self.aa_stream.setchannels(1)
self.aa_stream.setrate(self.audio_rate)
self.aa_stream.setperiodsize(self.periodsize)
def stop(self):
self.dorun = False
def connect(self):
connected = False
while (self.dorun and not connected):
try:
# print 'Waiting for a connection ...'
# Wait for a connection
self.connection, client_address = self.sock.accept()
self.connection.settimeout(3) # timeout when listening (exit with CTRL+C)
connected = True
print 'TTS Server Connection from ', client_address
except:
pass #print "Listen again ..."
def reply(self,mstr):
if (self.connection != None):
try:
mstr = mstr.encode('utf-8')
self.connection.send(mstr+'\n\r')
except:
print('Connection closed')
def setVolume(self,volperc): # volume in percentag [0-100]
cmdstr = 'amixer set PCM %d%%' %volperc
os.system(cmdstr)
def run(self):
global asr_server
if (use_sound_play and self.soundhandle == None):
self.soundhandle = SoundClient()
time.sleep(3)
self.setVolume(99) # set volume (99% = +3 dB)
#print('bip')
#self.play('bip')
#time.sleep(3)
self.say('Hello!', 'en')
self.say('Audio server is running.', 'en')
time.sleep(3)
while (self.dorun):
self.connect()
try:
# Receive the data in small chunks
while (self.dorun):
try:
data = self.connection.recv(320)
data = data.strip()
except socket.timeout:
data = "***"
except:
data = None
if (data!=None and data !="" and data!="***"):
if data!="ASR":
print 'TTS Received [%s]' % data
if (data.startswith('TTS')):
lang = 'en-US' # default language
strsay = data[4:]
if (data[3]=='['):
vd = re.split('\[|\]',data)
lang = vd[1]
strsay = vd[2]
self.say(strsay,lang)
self.reply('OK')
elif (data=="ASR"):
#print('asr request')
bh = asr_server.get_asr()
self.reply(bh)
if bh!='':
print('ASR sent [%s]' %bh)
elif (data.startswith('SOUND')):
self.play(data[6:]) # play this sound
self.reply('OK')
#print 'sending data back to the client'
#self.connection.sendall("OK")
else:
print('Message not understood: %s' %data)
self.reply('ERR')
elif (data == None or data==""):
break
finally:
print 'TTS Server Connection closed.'
# Clean up the connection
if (self.connection != None):
self.connection.close()
self.connection = None
self.say('Audio server has been closed.', 'en')
time.sleep(2)
self.aa_stream = None
def say(self, data, lang):
print 'Say ',data
if (use_sound_play):
voice = 'voice_kal_diphone'
volume = 1.0
print 'Saying: %s' % data
print 'Voice: %s' % voice
print 'Volume: %s' % volume
self.soundhandle.say(data, voice, volume)
rospy.sleep(3)
elif (use_alsaaudio):
cachefile = 'cache'+str(self.idcache)
self.idcache = (self.idcache+1)%10
tmpfile = "/tmp/cache.wav"
ofile = "%s%s.wav" %(SOUNDS_DIR, cachefile)
cmd = 'rm %s %s' %(tmpfile, ofile)
os.system(cmd)
if (lang=='en'):
lang = 'en-US'
elif (len(lang)==2):
lang = lang+'-'+lang.upper()
time.sleep(0.2)
cmd = 'pico2wave -l "%s" -w %s " , %s"' %(lang,tmpfile, data)
print cmd
os.system(cmd)
time.sleep(0.2)
# convert samplerate
tfm = sox.Transformer()
tfm.rate(samplerate=self.audio_rate)
tfm.build(tmpfile, ofile)
time.sleep(0.2)
self.play(cachefile)
else:
print('Cannot play audio. No infrastructure available.')
def play(self, name):
if (use_alsaaudio):
print('Playing %s ...' %name)
soundfile = None
i = 0
while (i<3): #((not name in self.Sounds) and (i<3)):
try:
soundfile = wave.open(SOUNDS_DIR+name+".wav", 'rb')
#self.Sounds[name] = soundfile
except:
print "File %s%s.wav not found." %(SOUNDS_DIR,name)
time.sleep(1)
i += 1
if (soundfile != None and use_alsaaudio): #(name in self.Sounds):
self.playwav_aa(soundfile)
print('Play completed.')
def playwav_aa(self, soundfile):
soundfile.setpos(0)
data = soundfile.readframes(self.periodsize)
while (len(data)>0):
# print('stream data %d' %(len(data)))
if self.aa_stream != None:
self.aa_stream.write(data)
data = soundfile.readframes(self.periodsize)
# def playwav_pa(self, sfile):
# global soundfile
# self.streaming = True
# self.stream = self.pa.open(format = 8, #self.pa.get_format_from_width(f.getsampwidth#()),
# channels = 1, #f.getnchannels(),
# rate = 44100, #f.getframerate(),
# output = True,
# stream_callback = TTS_callback,
# output_device_index = self.output_device)
# soundfile = sfile
# soundfile.setpos(0)
# self.stream.start_stream()
# while self.stream.is_active():
# time.sleep(1.0)
# self.stream.stop_stream()
# self.stream.close()
# self.streaming = False
if __name__ == "__main__":
parser = argparse.ArgumentParser(description='audio_server')
parser.add_argument('-ttsport', type=int, help='TTS server port [default: 9001]', default=9001)
parser.add_argument('-asrport', type=int, help='ASR server port [default: 9002]', default=9002)
parser.add_argument('-device', type=str, help='audio device [default: \'sysdefault\']', default='sysdefault')
args = parser.parse_args()
tts_server = TTSServer(args.ttsport,args.device)
asr_server = ASRServer(args.asrport)
tts_server.start()
time.sleep(1)
asr_server.start()
run = True
while (run):
try:
time.sleep(3)
#if (not tts_server.streaming):
# cmd = 'play -n --no-show-progress -r 44100 -c1 synth 0.1 sine 50 vol 0.01' # keep sound alive
# os.system(cmd)
except KeyboardInterrupt:
print "Exit"
run = False
tts_server.stop()
asr_server.stop()
sys.exit(0)
0a7f1dd168a64e7f7f19d3324731c892ec275922 | 1,845 | py | Python
patch.py | silverhikari/romtools | 2a09290fef85f35502a95c5c2874317029f0439c | ["Apache-2.0"] | 5 | 2018-02-02T06:36:56.000Z | 2020-12-21T20:17:20.000Z
patch.py | silverhikari/romtools | 2a09290fef85f35502a95c5c2874317029f0439c | ["Apache-2.0"] | 8 | 2017-10-10T17:50:47.000Z | 2021-06-02T00:02:58.000Z
patch.py | silverhikari/romtools | 2a09290fef85f35502a95c5c2874317029f0439c | ["Apache-2.0"] | 2 | 2017-10-10T20:15:24.000Z | 2021-12-17T04:50:16.000Z
"""
Utils for creating xdelta patches.
"""
import logging
from subprocess import check_output, CalledProcessError
from shutil import copyfile
from os import remove, path
class PatchChecksumError(Exception):
def __init__(self, message, errors):
super(PatchChecksumError, self).__init__(message)
class Patch:
# TODO: Abstract out the need for "edited" by just copying the original
# file.
def __init__(self, original, filename, edited=None, xdelta_dir='.'):
self.original = original
self.edited = edited
self.filename = filename
# Need to have this absolute path for xdelta3 to be found.
self.xdelta_path = path.join(xdelta_dir, 'xdelta3')
# self.xdelta_path = 'xdelta3'
def create(self):
if self.edited is None:
raise Exception
cmd = [
self.xdelta_path,
'-f',
'-s',
self.original,
self.edited,
self.filename,
]
print(cmd)
logging.info(cmd)
try:
check_output(cmd)
except CalledProcessError as e:
raise Exception(e.output)
def apply(self):
if not self.edited:
copyfile(self.original, self.original + "_temp")
self.edited = self.original
self.original = self.original + "_temp"
cmd = [
self.xdelta_path,
'-f',
'-d',
'-s',
self.original,
self.filename,
self.edited,
]
logging.info(cmd)
try:
check_output(cmd)
except CalledProcessError:
raise PatchChecksumError('Target file had incorrect checksum', [])
finally:
if self.original.endswith('_temp'):
remove(self.original)
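# Minimal usage sketch for the Patch class above, assuming an xdelta3 binary
# in the working directory; the file names ('original.rom', 'edited.rom',
# 'fix.xdelta') are hypothetical. The function is only an illustration and
# is never called.
def _patch_example():
    # build a patch that turns original.rom into edited.rom
    creator = Patch('original.rom', 'fix.xdelta', edited='edited.rom', xdelta_dir='.')
    creator.create()
    # apply the patch onto a fresh copy of the original; PatchChecksumError
    # is raised if the target file has the wrong checksum
    applier = Patch('original.rom', 'fix.xdelta')
    applier.apply()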
0a854fbf5fe92dd3c9a7f42e69f796c6cc578917 | 333 | py | Python
bluebottle/tasks/migrations/0012_merge.py | terrameijar/bluebottle | b4f5ba9c4f03e678fdd36091b29240307ea69ffd | ["BSD-3-Clause"] | 10 | 2015-05-28T18:26:40.000Z | 2021-09-06T10:07:03.000Z
bluebottle/tasks/migrations/0012_merge.py | terrameijar/bluebottle | b4f5ba9c4f03e678fdd36091b29240307ea69ffd | ["BSD-3-Clause"] | 762 | 2015-01-15T10:00:59.000Z | 2022-03-31T15:35:14.000Z
bluebottle/tasks/migrations/0012_merge.py | terrameijar/bluebottle | b4f5ba9c4f03e678fdd36091b29240307ea69ffd | ["BSD-3-Clause"] | 9 | 2015-02-20T13:19:30.000Z | 2022-03-08T14:09:17.000Z
# -*- coding: utf-8 -*-
# Generated by Django 1.9.6 on 2016-09-27 15:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('tasks', '0011_auto_20160919_1508'),
('tasks', '0011_auto_20160920_1019'),
]
operations = [
]
0a85751a815d71753d3e2aaa3ccbd06b815ba219 | 5,200 | py | Python
bat_train/evaluate.py | bgotthold-usgs/batdetect | 0d4a70f1cda9f6104f6f785f0d953f802fddf0f1 | ["BSD-Source-Code"] | 59 | 2018-03-05T08:58:59.000Z | 2022-03-19T17:33:14.000Z
bat_train/evaluate.py | bgotthold-usgs/batdetect | 0d4a70f1cda9f6104f6f785f0d953f802fddf0f1 | ["BSD-Source-Code"] | 11 | 2018-03-16T21:46:51.000Z | 2021-12-14T16:07:55.000Z
bat_train/evaluate.py | bgotthold-usgs/batdetect | 0d4a70f1cda9f6104f6f785f0d953f802fddf0f1 | ["BSD-Source-Code"] | 24 | 2018-03-15T14:48:08.000Z | 2022-01-09T01:12:51.000Z
import numpy as np
from sklearn.metrics import roc_curve, auc
def compute_error_auc(op_str, gt, pred, prob):
# classification error
pred_int = (pred > prob).astype(np.int)
class_acc = (pred_int == gt).mean() * 100.0
# ROC - area under curve
fpr, tpr, thresholds = roc_curve(gt, pred)
roc_auc = auc(fpr, tpr)
print op_str, ', class acc = %.3f, ROC AUC = %.3f' % (class_acc, roc_auc)
#return class_acc, roc_auc
def calc_average_precision(recall, precision):
precision[np.isnan(precision)] = 0
recall[np.isnan(recall)] = 0
# pascal'12 way
mprec = np.hstack((0, precision, 0))
mrec = np.hstack((0, recall, 1))
for ii in range(mprec.shape[0]-2, -1,-1):
mprec[ii] = np.maximum(mprec[ii], mprec[ii+1])
inds = np.where(np.not_equal(mrec[1:], mrec[:-1]))[0]+1
ave_prec = ((mrec[inds] - mrec[inds-1])*mprec[inds]).sum()
return ave_prec
def remove_end_preds(nms_pos_o, nms_prob_o, gt_pos_o, durations, win_size):
# this filters out predictions and gt that are close to the end
# this is a bit messy because of the shapes of gt_pos_o
nms_pos = []
nms_prob = []
gt_pos = []
for ii in range(len(nms_pos_o)):
valid_time = durations[ii] - win_size
gt_cur = gt_pos_o[ii]
if gt_cur.shape[0] > 0:
gt_pos.append(gt_cur[:, 0][gt_cur[:, 0] < valid_time][..., np.newaxis])
else:
gt_pos.append(gt_cur)
valid_preds = nms_pos_o[ii] < valid_time
nms_pos.append(nms_pos_o[ii][valid_preds])
nms_prob.append(nms_prob_o[ii][valid_preds, 0][..., np.newaxis])
return nms_pos, nms_prob, gt_pos
def prec_recall_1d(nms_pos_o, nms_prob_o, gt_pos_o, durations, detection_overlap, win_size, remove_eof=True):
"""
nms_pos, nms_prob, and gt_pos are lists of numpy arrays specifying detection
position, detection probability and GT position.
Each list entry is a different file.
Each entry in nms_pos is an array of length num_entries. For nms_prob and
gt_pos it is an array of size (num_entries, 1).
durations is an array whose length is the number of files, with each entry
containing that file's length in seconds.
detection_overlap determines if a prediction is counted as correct or not.
win_size is used to ignore predictions and ground truth at the end of an
audio file.
returns
precision: fraction of retrieved instances that are relevant.
recall: fraction of relevant instances that are retrieved.
"""
if remove_eof:
# filter out the detections in both ground truth and predictions that are too
# close to the end of the file - dont count them during eval
nms_pos, nms_prob, gt_pos = remove_end_preds(nms_pos_o, nms_prob_o, gt_pos_o, durations, win_size)
else:
nms_pos = nms_pos_o
nms_prob = nms_prob_o
gt_pos = gt_pos_o
# loop through each file
true_pos = [] # correctly predicts the ground truth
false_pos = [] # says there is a detection but isn't
for ii in range(len(nms_pos)):
num_preds = nms_pos[ii].shape[0]
if num_preds > 0: # check to make sure it contains something
num_gt = gt_pos[ii].shape[0]
# for each set of predictions label them as true positive or false positive (i.e. 1-tp)
tp = np.zeros(num_preds)
distance_to_gt = np.abs(gt_pos[ii].ravel()-nms_pos[ii].ravel()[:, np.newaxis])
within_overlap = (distance_to_gt <= detection_overlap)
# remove duplicate detections - assign to valid detection with highest prob
for jj in range(num_gt):
inds = np.where(within_overlap[:, jj])[0] # get the indices of all valid predictions
if inds.shape[0] > 0:
max_prob = np.argmax(nms_prob[ii][inds])
selected_pred = inds[max_prob]
within_overlap[selected_pred, :] = False
tp[selected_pred] = 1 # set as true positives
true_pos.append(tp)
false_pos.append(1 - tp)
# calc precision and recall - sort confidence in descending order
# PASCAL style
conf = np.concatenate(nms_prob)[:, 0]
num_gt = np.concatenate(gt_pos).shape[0]
inds = np.argsort(conf)[::-1]
true_pos_cat = np.concatenate(true_pos)[inds].astype(float)
false_pos_cat = np.concatenate(false_pos)[inds].astype(float) # i.e. 1-true_pos_cat
if (conf == conf[0]).sum() == conf.shape[0]:
# all the probability values are the same therefore we will not sweep
# the curve and instead will return a single value
true_pos_sum = true_pos_cat.sum()
false_pos_sum = false_pos_cat.sum()
recall = np.asarray([true_pos_sum / float(num_gt)])
precision = np.asarray([(true_pos_sum / (false_pos_sum + true_pos_sum))])
elif inds.shape[0] > 0:
# otherwise produce a list of values
true_pos_cum = np.cumsum(true_pos_cat)
false_pos_cum = np.cumsum(false_pos_cat)
recall = true_pos_cum / float(num_gt)
precision = (true_pos_cum / (false_pos_cum + true_pos_cum))
return precision, recall
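# Toy worked example for the precision/recall code above (made-up numbers,
# not part of the original evaluation pipeline): one 10-second file with two
# ground-truth calls and two detections, of which only the first falls
# inside the 0.1 s overlap window. Defined only as an illustration.
def _toy_prec_recall_example():
    gt_pos = [np.array([[2.0], [5.0]])]        # ground-truth times, shape (num_gt, 1)
    nms_pos = [np.array([2.05, 7.0])]          # detection times, length num_entries
    nms_prob = [np.array([[0.9], [0.4]])]      # detection probabilities, shape (num_entries, 1)
    durations = np.array([10.0])               # file durations in seconds
    precision, recall = prec_recall_1d(nms_pos, nms_prob, gt_pos, durations,
                                       detection_overlap=0.1, win_size=1.0)
    ave_prec = calc_average_precision(recall, precision)
    # expected: precision [1.0, 0.5], recall [0.5, 0.5], average precision 0.75
    return precision, recall, ave_prec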
0a86094f8b6e8a0e12d48278a3971b48591f4ec2 | 27,399 | py | Python
azure-mgmt/tests/test_mgmt_network.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | ["MIT"] | 2 | 2020-07-29T14:22:17.000Z | 2020-11-06T18:47:40.000Z
azure-mgmt/tests/test_mgmt_network.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | ["MIT"] | 1 | 2016-08-01T07:37:04.000Z | 2016-08-01T07:37:04.000Z
azure-mgmt/tests/test_mgmt_network.py | SUSE/azure-sdk-for-python | 324f99d26dd6f4ee9793b9bf1d4d5f928e4b6c2f | ["MIT"] | 1 | 2020-12-12T21:04:41.000Z | 2020-12-12T21:04:41.000Z
# coding: utf-8
#-------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#--------------------------------------------------------------------------
import unittest
import azure.mgmt.network.models
from testutils.common_recordingtestcase import record
from tests.mgmt_testcase import HttpStatusCode, AzureMgmtTestCase
class MgmtNetworkTest(AzureMgmtTestCase):
def setUp(self):
super(MgmtNetworkTest, self).setUp()
self.network_client = self.create_mgmt_client(
azure.mgmt.network.NetworkManagementClient
)
if not self.is_playback():
self.create_resource_group()
@record
def test_network_interface_card(self):
vnet_name = self.get_resource_name('pyvnet')
subnet_name = self.get_resource_name('pysubnet')
nic_name = self.get_resource_name('pynic')
# Create VNet
async_vnet_creation = self.network_client.virtual_networks.create_or_update(
self.group_name,
vnet_name,
{
'location': self.region,
'address_space': {
'address_prefixes': ['10.0.0.0/16']
}
}
)
async_vnet_creation.wait()
# Create Subnet
async_subnet_creation = self.network_client.subnets.create_or_update(
self.group_name,
vnet_name,
subnet_name,
{'address_prefix': '10.0.0.0/24'}
)
subnet_info = async_subnet_creation.result()
# Create NIC
async_nic_creation = self.network_client.network_interfaces.create_or_update(
self.group_name,
nic_name,
{
'location': self.region,
'ip_configurations': [{
'name': 'MyIpConfig',
'subnet': {
'id': subnet_info.id
}
}]
}
)
nic_info = async_nic_creation.result()
nic_info = self.network_client.network_interfaces.get(
self.group_name,
nic_info.name
)
nics = list(self.network_client.network_interfaces.list(
self.group_name
))
self.assertEqual(len(nics), 1)
nics = list(self.network_client.network_interfaces.list_all())
self.assertGreater(len(nics), 0)
async_delete = self.network_client.network_interfaces.delete(
self.group_name,
nic_info.name
)
async_delete.wait()
@record
def test_load_balancers(self):
public_ip_name = self.get_resource_name('pyipname')
frontend_ip_name = self.get_resource_name('pyfipname')
addr_pool_name = self.get_resource_name('pyapname')
probe_name = self.get_resource_name('pyprobename')
lb_name = self.get_resource_name('pylbname')
front_end_id = ('/subscriptions/{}'
'/resourceGroups/{}'
'/providers/Microsoft.Network'
'/loadBalancers/{}'
'/frontendIPConfigurations/{}').format(
self.settings.SUBSCRIPTION_ID,
self.group_name,
lb_name,
frontend_ip_name
)
back_end_id = ('/subscriptions/{}'
'/resourceGroups/{}'
'/providers/Microsoft.Network'
'/loadBalancers/{}'
'/backendAddressPools/{}').format(
self.settings.SUBSCRIPTION_ID,
self.group_name,
lb_name,
addr_pool_name
)
probe_id = ('/subscriptions/{}'
'/resourceGroups/{}'
'/providers/Microsoft.Network'
'/loadBalancers/{}'
'/probes/{}').format(
self.settings.SUBSCRIPTION_ID,
self.group_name,
lb_name,
probe_name
)
# Create PublicIP
public_ip_parameters = {
'location': self.region,
'public_ip_allocation_method': 'static',
'idle_timeout_in_minutes': 4
}
async_publicip_creation = self.network_client.public_ip_addresses.create_or_update(
self.group_name,
public_ip_name,
public_ip_parameters
)
public_ip_info = async_publicip_creation.result()
# Building a FrontEndIpPool
frontend_ip_configurations = [{
'name': frontend_ip_name,
'private_ip_allocation_method': 'Dynamic',
'public_ip_address': {
'id': public_ip_info.id
}
}]
# Building a BackEnd adress pool
backend_address_pools = [{
'name': addr_pool_name
}]
# Building a HealthProbe
probes = [{
'name': probe_name,
'protocol': 'Http',
'port': 80,
'interval_in_seconds': 15,
'number_of_probes': 4,
'request_path': 'healthprobe.aspx'
}]
# Building a LoadBalancer rule
load_balancing_rules = [{
'name': 'azure-sample-lb-rule',
'protocol': 'tcp',
'frontend_port': 80,
'backend_port': 80,
'idle_timeout_in_minutes': 4,
'enable_floating_ip': False,
'load_distribution': 'Default',
'frontend_ip_configuration': {
'id': front_end_id
},
'backend_address_pool': {
'id': back_end_id
},
'probe': {
'id': probe_id
}
}]
# Building InboundNATRule1
inbound_nat_rules = [{
'name': 'azure-sample-netrule1',
'protocol': 'tcp',
'frontend_port': 21,
'backend_port': 22,
'enable_floating_ip': False,
'idle_timeout_in_minutes': 4,
'frontend_ip_configuration': {
'id': front_end_id
}
}]
# Building InboundNATRule2
inbound_nat_rules.append({
'name': 'azure-sample-netrule2',
'protocol': 'tcp',
'frontend_port': 23,
'backend_port': 22,
'enable_floating_ip': False,
'idle_timeout_in_minutes': 4,
'frontend_ip_configuration': {
'id': front_end_id
}
})
# Creating Load Balancer
lb_async_creation = self.network_client.load_balancers.create_or_update(
self.group_name,
lb_name,
{
'location': self.region,
'frontend_ip_configurations': frontend_ip_configurations,
'backend_address_pools': backend_address_pools,
'probes': probes,
'load_balancing_rules': load_balancing_rules,
'inbound_nat_rules' :inbound_nat_rules
}
)
lb_info = lb_async_creation.result()
# Get it
lb_info = self.network_client.load_balancers.get(
self.group_name,
lb_name
)
# List all
lbs = self.network_client.load_balancers.list_all()
lbs = list(lbs)
self.assertGreater(len(lbs), 0)
# List RG
lbs = self.network_client.load_balancers.list(self.group_name)
lbs = list(lbs)
self.assertGreater(len(lbs), 0)
# Delete
async_lb_delete = self.network_client.load_balancers.delete(
self.group_name,
lb_name
)
async_lb_delete.wait()
@record
def test_public_ip_addresses(self):
public_ip_name = self.get_resource_name('pyipname')
params_create = azure.mgmt.network.models.PublicIPAddress(
location=self.region,
public_ip_allocation_method=azure.mgmt.network.models.IPAllocationMethod.dynamic,
tags={
'key': 'value',
},
)
result_create = self.network_client.public_ip_addresses.create_or_update(
self.group_name,
public_ip_name,
params_create,
)
result_create.wait() # AzureOperationPoller
#self.assertEqual(result_create.status_code, HttpStatusCode.OK)
result_get = self.network_client.public_ip_addresses.get(
self.group_name,
public_ip_name,
)
#self.assertEqual(result_get.status_code, HttpStatusCode.OK)
self.assertEqual(result_get.location, self.region)
self.assertEqual(result_get.tags['key'], 'value')
result_list = self.network_client.public_ip_addresses.list(self.group_name)
#self.assertEqual(result_list.status_code, HttpStatusCode.OK)
result_list = list(result_list)
self.assertEqual(len(result_list), 1)
result_list_all = self.network_client.public_ip_addresses.list_all()
#self.assertEqual(result_list_all.status_code, HttpStatusCode.OK)
result_list_all = list(result_list_all)
self.assertGreater(len(result_list_all), 0)
result_delete = self.network_client.public_ip_addresses.delete(
self.group_name,
public_ip_name,
)
result_delete.wait() # AzureOperationPoller
#self.assertEqual(result_delete.status_code, HttpStatusCode.OK)
result_list = self.network_client.public_ip_addresses.list(self.group_name)
#self.assertEqual(result_list.status_code, HttpStatusCode.OK)
result_list = list(result_list)
self.assertEqual(len(result_list), 0)
@record
def test_virtual_networks(self):
network_name = self.get_resource_name('pyvnet')
subnet1_name = self.get_resource_name('pyvnetsubnetone')
subnet2_name = self.get_resource_name('pyvnetsubnettwo')
params_create = azure.mgmt.network.models.VirtualNetwork(
location=self.region,
address_space=azure.mgmt.network.models.AddressSpace(
address_prefixes=[
'10.0.0.0/16',
],
),
dhcp_options=azure.mgmt.network.models.DhcpOptions(
dns_servers=[
'10.1.1.1',
'10.1.2.4',
],
),
subnets=[
azure.mgmt.network.models.Subnet(
name=subnet1_name,
address_prefix='10.0.1.0/24',
),
azure.mgmt.network.models.Subnet(
name=subnet2_name,
address_prefix='10.0.2.0/24',
),
],
)
result_create = self.network_client.virtual_networks.create_or_update(
self.group_name,
network_name,
params_create,
)
vnet = result_create.result()
vnet = self.network_client.virtual_networks.get(
self.group_name,
vnet.name,
)
ip_availability = self.network_client.virtual_networks.check_ip_address_availability(
self.group_name,
vnet.name,
'10.0.1.35' # Should be available since new VNet sor Subnet 1
)
self.assertTrue(ip_availability.available)
result_list = list(self.network_client.virtual_networks.list(
self.group_name,
))
self.assertEqual(len(result_list), 1)
result_list_all = list(self.network_client.virtual_networks.list_all())
async_delete = self.network_client.virtual_networks.delete(
self.group_name,
network_name,
)
async_delete.wait()
@record
def test_dns_availability(self):
result_check = self.network_client.check_dns_name_availability(
self.region,
'pydomain',
)
#self.assertEqual(result_check.status_code, HttpStatusCode.OK)
self.assertTrue(result_check)
@record
def test_subnets(self):
network_name = self.get_resource_name('pysubnet')
subnet1_name = self.get_resource_name('pysubnetone')
subnet2_name = self.get_resource_name('pysubnettwo')
params_create = azure.mgmt.network.models.VirtualNetwork(
location=self.region,
address_space=azure.mgmt.network.models.AddressSpace(
address_prefixes=[
'10.0.0.0/16',
],
),
dhcp_options=azure.mgmt.network.models.DhcpOptions(
dns_servers=[
'10.1.1.1',
'10.1.2.4',
],
),
subnets=[
azure.mgmt.network.models.Subnet(
name=subnet1_name,
address_prefix='10.0.1.0/24',
),
],
)
result_create = self.network_client.virtual_networks.create_or_update(
self.group_name,
network_name,
params_create,
)
result_create.wait() # AzureOperationPoller
params_create = azure.mgmt.network.models.Subnet(
name=subnet2_name,
address_prefix='10.0.2.0/24',
)
result_create = self.network_client.subnets.create_or_update(
self.group_name,
network_name,
subnet2_name,
params_create,
)
result_create.wait() # AzureOperationPoller
result_get = self.network_client.virtual_networks.get(
self.group_name,
network_name,
)
self.assertEqual(len(result_get.subnets), 2)
result_get = self.network_client.subnets.get(
self.group_name,
network_name,
subnet2_name,
)
result_list = self.network_client.subnets.list(
self.group_name,
network_name,
)
subnets = list(result_list)
result_delete = self.network_client.subnets.delete(
self.group_name,
network_name,
subnet2_name,
)
result_delete.wait()
@record
def test_network_security_groups(self):
security_group_name = self.get_resource_name('pysecgroup')
security_rule_name = self.get_resource_name('pysecgrouprule')
params_create = azure.mgmt.network.models.NetworkSecurityGroup(
location=self.region,
security_rules=[
azure.mgmt.network.models.SecurityRule(
name=security_rule_name,
access=azure.mgmt.network.models.SecurityRuleAccess.allow,
description='Test security rule',
destination_address_prefix='*',
destination_port_range='123-3500',
direction=azure.mgmt.network.models.SecurityRuleDirection.inbound,
priority=500,
protocol=azure.mgmt.network.models.SecurityRuleProtocol.tcp,
source_address_prefix='*',
source_port_range='655',
),
],
)
result_create = self.network_client.network_security_groups.create_or_update(
self.group_name,
security_group_name,
params_create,
)
result_create.wait() # AzureOperationPoller
result_get = self.network_client.network_security_groups.get(
self.group_name,
security_group_name,
)
result_list = list(self.network_client.network_security_groups.list(
self.group_name,
))
self.assertEqual(len(result_list), 1)
result_list_all = list(self.network_client.network_security_groups.list_all())
# Security Rules
new_security_rule_name = self.get_resource_name('pynewrule')
async_security_rule = self.network_client.security_rules.create_or_update(
self.group_name,
security_group_name,
new_security_rule_name,
{
'access':azure.mgmt.network.models.SecurityRuleAccess.allow,
'description':'New Test security rule',
'destination_address_prefix':'*',
'destination_port_range':'123-3500',
'direction':azure.mgmt.network.models.SecurityRuleDirection.outbound,
'priority':400,
'protocol':azure.mgmt.network.models.SecurityRuleProtocol.tcp,
'source_address_prefix':'*',
'source_port_range':'655',
}
)
security_rule = async_security_rule.result()
security_rule = self.network_client.security_rules.get(
self.group_name,
security_group_name,
security_rule.name
)
self.assertEqual(security_rule.name, new_security_rule_name)
new_security_rules = list(self.network_client.security_rules.list(
self.group_name,
security_group_name
))
self.assertEqual(len(new_security_rules), 2)
result_delete = self.network_client.security_rules.delete(
self.group_name,
security_group_name,
new_security_rule_name
)
result_delete.wait()
# Delete NSG
result_delete = self.network_client.network_security_groups.delete(
self.group_name,
security_group_name,
)
result_delete.wait()
@record
def test_routes(self):
route_table_name = self.get_resource_name('pyroutetable')
route_name = self.get_resource_name('pyroute')
async_route_table = self.network_client.route_tables.create_or_update(
self.group_name,
route_table_name,
{'location': self.region}
)
route_table = async_route_table.result()
route_table = self.network_client.route_tables.get(
self.group_name,
route_table.name
)
self.assertEqual(route_table.name, route_table_name)
route_tables = list(self.network_client.route_tables.list(
self.group_name
))
self.assertEqual(len(route_tables), 1)
route_tables = list(self.network_client.route_tables.list_all())
self.assertGreater(len(route_tables), 0)
async_route = self.network_client.routes.create_or_update(
self.group_name,
route_table.name,
route_name,
{
'address_prefix': '10.1.0.0/16',
'next_hop_type': 'None'
}
)
route = async_route.result()
route = self.network_client.routes.get(
self.group_name,
route_table.name,
route.name
)
self.assertEqual(route.name, route_name)
routes = list(self.network_client.routes.list(
self.group_name,
route_table.name
))
self.assertEqual(len(routes), 1)
async_route_delete = self.network_client.routes.delete(
self.group_name,
route_table.name,
route.name
)
async_route_delete.wait()
async_route_table_delete = self.network_client.route_tables.delete(
self.group_name,
route_table_name
)
async_route_table_delete.wait()
@record
def test_usages(self):
usages = list(self.network_client.usages.list(self.region))
self.assertGreater(len(usages), 1)
self.assertTrue(all(hasattr(u, 'name') for u in usages))
@record
def test_express_route_service_providers(self):
ersp = list(self.network_client.express_route_service_providers.list())
self.assertGreater(len(ersp), 0)
self.assertTrue(all(hasattr(u, 'bandwidths_offered') for u in ersp))
@record
def test_express_route_circuit(self):
express_route_name = self.get_resource_name('pyexpressroute')
async_express_route = self.network_client.express_route_circuits.create_or_update(
self.group_name,
express_route_name,
{
"location": self.region,
"sku": {
"name": "Standard_MeteredData",
"tier": "Standard",
"family": "MeteredData"
},
"service_provider_properties": {
"service_provider_name": "Comcast",
"peering_location": "Chicago",
"bandwidth_in_mbps": 100
}
}
)
express_route = async_express_route.result()
express_route = self.network_client.express_route_circuits.get(
self.group_name,
express_route_name
)
routes = list(self.network_client.express_route_circuits.list(
self.group_name
))
self.assertEqual(len(routes), 1)
routes = list(self.network_client.express_route_circuits.list_all())
self.assertGreater(len(routes), 0)
stats = self.network_client.express_route_circuits.get_stats(
self.group_name,
express_route_name
)
self.assertIsNotNone(stats)
async_peering = self.network_client.express_route_circuit_peerings.create_or_update(
self.group_name,
express_route_name,
'AzurePublicPeering',
{
"peering_type": "AzurePublicPeering",
"peer_asn": 100,
"primary_peer_address_prefix": "192.168.1.0/30",
"secondary_peer_address_prefix": "192.168.2.0/30",
"vlan_id": 200,
}
)
peering = async_peering.result()
peering = self.network_client.express_route_circuit_peerings.get(
self.group_name,
express_route_name,
'AzurePublicPeering'
)
peerings = list(self.network_client.express_route_circuit_peerings.list(
self.group_name,
express_route_name
))
self.assertEqual(len(peerings), 1)
stats = self.network_client.express_route_circuits.get_peering_stats(
self.group_name,
express_route_name,
'AzurePublicPeering'
)
self.assertIsNotNone(stats)
auth_name = self.get_resource_name('pyauth')
async_auth = self.network_client.express_route_circuit_authorizations.create_or_update(
self.group_name,
express_route_name,
auth_name,
{}
)
auth = async_auth.result()
auth = self.network_client.express_route_circuit_authorizations.get(
self.group_name,
express_route_name,
auth_name
)
auths = list(self.network_client.express_route_circuit_authorizations.list(
self.group_name,
express_route_name
))
self.assertEqual(len(auths), 1)
async_auth = self.network_client.express_route_circuit_authorizations.delete(
self.group_name,
express_route_name,
auth_name
)
async_auth.wait()
async_peering = self.network_client.express_route_circuit_peerings.delete(
self.group_name,
express_route_name,
'AzurePublicPeering'
)
async_peering.wait()
async_express_route = self.network_client.express_route_circuits.delete(
self.group_name,
express_route_name
)
async_express_route.wait()
@record
def test_virtual_network_gateway_operations(self):
# https://docs.microsoft.com/en-us/azure/vpn-gateway/vpn-gateway-howto-site-to-site-resource-manager-portal
vnet_name = self.get_resource_name('pyvirtnet')
fe_name = self.get_resource_name('pysubnetfe')
be_name = self.get_resource_name('pysubnetbe')
gateway_name = self.get_resource_name('pysubnetga')
# Create VNet
async_vnet_creation = self.network_client.virtual_networks.create_or_update(
self.group_name,
vnet_name,
{
'location': self.region,
'address_space': {
'address_prefixes': [
'10.11.0.0/16',
'10.12.0.0/16'
]
}
}
)
async_vnet_creation.wait()
# Create Front End Subnet
async_subnet_creation = self.network_client.subnets.create_or_update(
self.group_name,
vnet_name,
fe_name,
{'address_prefix': '10.11.0.0/24'}
)
fe_subnet_info = async_subnet_creation.result()
# Create Back End Subnet
async_subnet_creation = self.network_client.subnets.create_or_update(
self.group_name,
vnet_name,
be_name,
{'address_prefix': '10.12.0.0/24'}
)
be_subnet_info = async_subnet_creation.result()
# Create Gateway Subnet
async_subnet_creation = self.network_client.subnets.create_or_update(
self.group_name,
vnet_name,
'GatewaySubnet',
{'address_prefix': '10.12.255.0/27'}
)
gateway_subnet_info = async_subnet_creation.result()
# Public IP Address
public_ip_name = self.get_resource_name('pyipname')
params_create = azure.mgmt.network.models.PublicIPAddress(
location=self.region,
public_ip_allocation_method=azure.mgmt.network.models.IPAllocationMethod.dynamic,
tags={
'key': 'value',
},
)
result_create = self.network_client.public_ip_addresses.create_or_update(
self.group_name,
public_ip_name,
params_create,
)
public_ip_address = result_create.result()
# Gateway itself
vng_name = self.get_resource_name('pyvng')
gw_params = {
'location': self.region,
'gateway_type': 'VPN',
'vpn_type': 'RouteBased',
'enable_bgp': False,
'sku': {
'tier': 'Standard',
'capacity': 2,
'name': 'Standard'},
'ip_configurations':[{
'name': 'default',
'private_ip_allocation_method': 'Dynamic',
'subnet': {
'id': gateway_subnet_info.id
},
'public_ip_address': {
'id': public_ip_address.id
}
}],
}
async_create = self.network_client.virtual_network_gateways.create_or_update(
self.group_name,
vng_name,
gw_params
)
vng = async_create.result()
self.assertEquals(vng.name, vng_name)
#------------------------------------------------------------------------------
if __name__ == '__main__':
unittest.main()
0a8741dde6ef103d06812289a7da5d5ee4748c1d | 2,427 | py | Python
src/tkdialog/dialog.py | KosukeMizuno/tkdialog | 082fc106908bbbfa819d1a129929165f11d4e944 | ["MIT"] | null | null | null
src/tkdialog/dialog.py | KosukeMizuno/tkdialog | 082fc106908bbbfa819d1a129929165f11d4e944 | ["MIT"] | null | null | null
src/tkdialog/dialog.py | KosukeMizuno/tkdialog | 082fc106908bbbfa819d1a129929165f11d4e944 | ["MIT"] | null | null | null
from pathlib import Path
import pickle
import tkinter as tk
import tkinter.filedialog
def open_dialog(**opt):
"""Parameters
----------
Options will be passed to `tkinter.filedialog.askopenfilename`.
See also tkinter's documentation.
The following are examples of frequently used options.
- filetypes=[(label, ext), ...]
- label: str
- ext: str, semicolon-separated extensions
- initialdir: str, default Path.cwd()
- multiple: bool, default False
Returns
--------
filename, str
"""
root = tk.Tk()
root.withdraw()
root.wm_attributes("-topmost", True)
opt_default = dict(initialdir=Path.cwd())
_opt = dict(opt_default, **opt)
return tk.filedialog.askopenfilename(**_opt)
def saveas_dialog(**opt):
"""Parameters
----------
Options will be passed to `tkinter.filedialog.asksaveasfilename`.
See also tkinter's documentation.
The following are examples of frequently used options.
- filetypes=[(label, ext), ...]
- label: str
- ext: str, semicolon-separated extensions
- initialdir: str, default Path.cwd()
- initialfile: str, default isn't set
Returns
--------
filename, str
"""
root = tk.Tk()
root.withdraw()
root.wm_attributes("-topmost", True)
opt_default = dict(initialdir=Path.cwd())
_opt = dict(opt_default, **opt)
return tk.filedialog.asksaveasfilename(**_opt)
def load_pickle_with_dialog(mode='rb', **opt):
"""Load a pickled object with a filename assigned by tkinter's open dialog.
kwargs will be passed to saveas_dialog.
"""
opt_default = dict(filetypes=[('pickled data', '*.pkl'), ('all', '*')])
_opt = dict(opt_default, **opt)
fn = open_dialog(**_opt)
if fn == '': # canceled
return None
with Path(fn).open(mode) as f:
data = pickle.load(f)
return data
def dump_pickle_with_dialog(obj, mode='wb', **opt):
"""Pickle an object with a filename assigned by tkinter's saveas dialog.
kwargs will be passed to saveas_dialog.
Returns
--------
filename: str
"""
opt_default = dict(filetypes=[('pickled data', '*.pkl'), ('all', '*')])
_opt = dict(opt_default, **opt)
fn = saveas_dialog(**_opt)
if fn == '': # canceled
return ''
# note: 上書き確認はtkinterがやってくれるのでここではチェックしない
with Path(fn).open(mode) as f:
pickle.dump(obj, f)
return fn
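# Minimal usage sketch for the helpers above: pick a target file with the
# save dialog, pickle an object to it, then read it back with the open
# dialog. The payload is a hypothetical example object and the function is
# only an illustration, never called.
def _pickle_roundtrip_example():
    payload = {'answer': 42}
    saved_to = dump_pickle_with_dialog(payload)   # returns '' if the user cancels
    if saved_to:
        restored = load_pickle_with_dialog()      # returns None if the user cancels
        return saved_to, restored
    return saved_to, None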
0a89d9e3455e77e62d24b044c32fc90cbc464fc1 | 368 | py | Python
setup.py | SilicalNZ/canvas | 44d1eee02c334aae6b41aeba01ed0ecdf83aed21 | ["MIT"] | 7 | 2019-08-04T20:37:55.000Z | 2020-03-05T08:36:10.000Z
setup.py | SilicalNZ/canvas | 44d1eee02c334aae6b41aeba01ed0ecdf83aed21 | ["MIT"] | 1 | 2019-10-21T05:43:28.000Z | 2019-10-21T05:43:28.000Z
setup.py | SilicalNZ/canvas | 44d1eee02c334aae6b41aeba01ed0ecdf83aed21 | ["MIT"] | null | null | null
import setuptools
setuptools.setup(
name = 'sili-canvas',
version = '0.0.1',
license = 'MIT',
url = 'https://github.com/SilicalNZ/canvas',
description = 'A series of easy to use classes to perform complex 2D array transformations',
long_description = '',
author = 'SilicalNZ',
packages = ['canvas', 'canvas.common', 'canvas.tools']
)
0a8b4fc2b42148f674fa2146ee9800ea9e96f927 | 2,614 | py | Python
surname_rnn/surname/containers.py | sudarshan85/nlpbook | 41e59d706fb31f5185a0133789639ccffbddb41f | ["Apache-2.0"] | null | null | null
surname_rnn/surname/containers.py | sudarshan85/nlpbook | 41e59d706fb31f5185a0133789639ccffbddb41f | ["Apache-2.0"] | null | null | null
surname_rnn/surname/containers.py | sudarshan85/nlpbook | 41e59d706fb31f5185a0133789639ccffbddb41f | ["Apache-2.0"] | null | null | null
#!/usr/bin/env python
import pandas as pd
from pathlib import Path
from torch.utils.data import DataLoader
class ModelContainer(object):
def __init__(self, model, optimizer, loss_fn, scheduler=None):
self.model = model
self.optimizer = optimizer
self.loss_fn = loss_fn
self.scheduler = scheduler
class DataContainer(object):
def __init__(self, df_with_split: pd.DataFrame, dataset_class, vectorizer_file: Path, batch_size:
int, with_test=True, is_load: bool=True) -> None:
self.train_df = df_with_split.loc[df_with_split['split'] == 'train']
self.val_df = df_with_split.loc[df_with_split['split'] == 'val']
self._bs = batch_size
self.with_test = with_test
self.is_load = is_load
self._lengths = {'train_size': len(self.train_df), 'val_size': len(self.val_df)}
self._n_batches = [self._lengths['train_size'] // self._bs, self._lengths['val_size'] //
self._bs]
if not self.is_load:
print("Creating and saving vectorizer")
train_ds = dataset_class.load_data_and_create_vectorizer(self.train_df)
train_ds.save_vectorizer(vectorizer_file)
self.train_ds = dataset_class.load_data_and_vectorizer_from_file(self.train_df, vectorizer_file)
self.vectorizer = self.train_ds.vectorizer
self.surname_vocab = self.vectorizer.surname_vocab
self.nationality_vocab = self.vectorizer.nationality_vocab
self.train_dl = DataLoader(self.train_ds, self._bs, shuffle=True, drop_last=True)
self.val_ds = dataset_class.load_data_and_vectorizer(self.val_df, self.vectorizer)
self.val_dl = DataLoader(self.val_ds, self._bs, shuffle=True, drop_last=True)
if self.with_test:
self.test_df = df_with_split.loc[df_with_split['split'] == 'test']
self._lengths['test_size'] = len(self.test_df)
self._n_batches.append(self._lengths['test_size'] // self._bs)
self.test_ds = dataset_class.load_data_and_vectorizer(self.test_df, self.vectorizer)
self.test_dl = DataLoader(self.test_ds, self._bs, shuffle=True, drop_last=True)
def get_loaders(self):
return self.train_dl, self.val_dl, self.test_dl
@property
def train_batches(self):
return self._n_batches[0]
@property
def val_batches(self):
return self._n_batches[1]
@property
def test_batches(self):
if not self.with_test:
raise NameError("No test dataset was provided")
return self._n_batches[2]
@property
def vocab_size(self):
return len(self.surname_vocab)
@property
def n_classes(self):
return len(self.nationality_vocab)
@property
def sizes(self):
return self._lengths
0a95cfa206f2acf8636e2a3399ef4362d43aa15a | 3,092 | py | Python
pybm/commands/compare.py | nicholasjng/pybm | 13e256ca5c2c8239f9d611b9849dab92f70b2834 | ["Apache-2.0"] | 12 | 2021-10-10T20:00:07.000Z | 2022-02-09T11:29:07.000Z
pybm/commands/compare.py | nicholasjng/pybm | 13e256ca5c2c8239f9d611b9849dab92f70b2834 | ["Apache-2.0"] | 20 | 2021-10-13T09:37:20.000Z | 2022-03-07T15:14:00.000Z
pybm/commands/compare.py | nicholasjng/pybm | 13e256ca5c2c8239f9d611b9849dab92f70b2834 | ["Apache-2.0"] | 1 | 2022-02-09T10:09:41.000Z | 2022-02-09T10:09:41.000Z
from typing import List
from pybm import PybmConfig
from pybm.command import CLICommand
from pybm.config import get_reporter_class
from pybm.exceptions import PybmError
from pybm.reporters import BaseReporter
from pybm.status_codes import ERROR, SUCCESS
from pybm.util.path import get_subdirs
class CompareCommand(CLICommand):
"""
Report benchmark results from specified sources.
"""
usage = "pybm compare <run> <anchor-ref> <compare-refs> [<options>]\n"
def __init__(self):
super(CompareCommand, self).__init__(name="compare")
self.config = PybmConfig.load()
def add_arguments(self):
self.parser.add_argument(
"run",
type=str,
metavar="<run>",
help="Benchmark run to report results for. "
"To report the preceding run, use the "
'"latest" keyword. To report results '
"of the n-th preceding run "
"(i.e., n runs ago), "
'use the "latest^{n}" syntax.',
)
self.parser.add_argument(
"refs",
nargs="+",
metavar="<refs>",
help="Benchmarked refs to compare. The first "
"given ref will be treated as the "
"anchor ref, relative to which all "
"differences are reported. An error is "
"raised if any of the given "
"refs are not present in the run.",
)
reporter: BaseReporter = get_reporter_class(config=self.config)
reporter_args = reporter.additional_arguments()
if reporter_args:
reporter_name = self.config.get_value("reporter.name")
reporter_group_desc = (
f"Additional options from configured reporter class {reporter_name!r}"
)
reporter_group = self.parser.add_argument_group(reporter_group_desc)
# add builder-specific options into the group
for arg in reporter_args:
reporter_group.add_argument(arg.pop("flags"), **arg)
def run(self, args: List[str]) -> int:
if not args:
self.parser.print_help()
return ERROR
self.add_arguments()
options = self.parser.parse_args(args)
reporter: BaseReporter = get_reporter_class(config=self.config)
# TODO: Parse run to fit schema
run = options.run
refs: List[str] = options.refs
result_dir = reporter.result_dir
# TODO: Make this dynamic to support other run identifiers
result = sorted(get_subdirs(result_dir))[-1]
result_path = result_dir / result
if result_path.exists():
reporter.compare(
*refs,
result=result,
target_filter=options.target_filter,
benchmark_filter=options.benchmark_filter,
context_filter=options.context_filter,
)
else:
raise PybmError(
f"No benchmark results found for the requested run {run!r}."
)
return SUCCESS
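# Hedged usage sketch, not part of pybm itself: how the command might be
# driven programmatically. It assumes a pybm configuration whose reporter
# provides the target/benchmark/context filter options, and the run and ref
# names below are hypothetical. Only an illustration, never invoked here.
def _compare_example():
    command = CompareCommand()
    # compare the most recent run, treating 'main' as the anchor ref,
    # mirroring the usage string "pybm compare <run> <anchor-ref> <compare-refs>"
    return command.run(["latest", "main", "my-feature-branch"])  # SUCCESS or ERROR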
0a9605df608e45d997ef3a777c5490c843c12343 | 1,728 | py | Python
dddm/recoil_rates/halo.py | JoranAngevaare/dddm | 3461e37984bac4d850beafecc9d1881b84fb226c | ["MIT"] | null | null | null
dddm/recoil_rates/halo.py | JoranAngevaare/dddm | 3461e37984bac4d850beafecc9d1881b84fb226c | ["MIT"] | 85 | 2021-09-20T12:08:53.000Z | 2022-03-30T12:48:06.000Z
dddm/recoil_rates/halo.py | JoranAngevaare/dddm | 3461e37984bac4d850beafecc9d1881b84fb226c | ["MIT"] | null | null | null
"""
For a given detector, get a WIMPrate (not taking into
account any detector effects).
"""
import numericalunits as nu
import wimprates as wr
import dddm
export, __all__ = dddm.exporter()
@export
class SHM:
"""
class used to pass a halo model to the rate computation
must contain:
:param v_esc -- escape velocity (multiplied by units)
:param rho_dm -- density in mass/volume of dark matter at the Earth (multiplied by units)
The standard halo model also allows variation of v_0
:param v_0 -- v0 of the velocity distribution (multiplied by units)
:function velocity_dist -- function taking v,t giving normalised
velocity distribution in earth rest-frame.
"""
def __init__(self, v_0=None, v_esc=None, rho_dm=None):
self.v_0 = 230 * nu.km / nu.s if v_0 is None else v_0
self.v_esc = 544 * nu.km / nu.s if v_esc is None else v_esc
self.rho_dm = (0.3 * nu.GeV / nu.c0 ** 2 / nu.cm ** 3
if rho_dm is None else rho_dm)
def __str__(self):
# Standard Halo Model (shm)
return 'shm'
def velocity_dist(self, v, t):
"""
Get the velocity distribution in units of per velocity,
:param v: v is in units of velocity
:return: observed velocity distribution at earth
"""
return wr.observed_speed_dist(v, t, self.v_0, self.v_esc)
def parameter_dict(self):
"""Return a dict of readable parameters of the current settings"""
return dict(
v_0=self.v_0 / (nu.km / nu.s),
v_esc=self.v_esc / (nu.km / nu.s),
rho_dm=self.rho_dm / (nu.GeV / nu.c0 ** 2 / nu.cm ** 3),
)
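# Minimal usage sketch for the SHM class above: the constructor defaults
# written out explicitly with numericalunits, then read back without units
# via parameter_dict(). Shown only as an illustration, never called.
def _shm_example():
    shm = SHM(v_0=230 * nu.km / nu.s,
              v_esc=544 * nu.km / nu.s,
              rho_dm=0.3 * nu.GeV / nu.c0 ** 2 / nu.cm ** 3)
    return shm.parameter_dict()  # e.g. {'v_0': 230.0, 'v_esc': 544.0, 'rho_dm': 0.3}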