blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 2 616 | content_id stringlengths 40 40 | detected_licenses listlengths 0 69 | license_type stringclasses 2 values | repo_name stringlengths 5 118 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringlengths 4 63 | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 2.91k 686M ⌀ | star_events_count int64 0 209k | fork_events_count int64 0 110k | gha_license_id stringclasses 23 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 213 values | src_encoding stringclasses 30 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 2 10.3M | extension stringclasses 246 values | content stringlengths 2 10.3M | authors listlengths 1 1 | author_id stringlengths 0 212 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
e466f7785d0653d87a42c6c611519124213450a4 | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03804/s238311565.py | d24c8c937628264b69ec9b555924c8f24f8947e1 | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 272 | py | N,M = map(int, input().split())
# A is the N x N pattern grid, B the M x M template to search for
# (N and M are parsed from stdin on the preceding line).
A = [input() for _ in range(N)]
B = [input() for _ in range(M)]
# Slide an M x M window over A; i is the column offset, j the row offset.
for i in range(N-M+1):
    for j in range(N-M+1):
        # Rows j..j+M-1 of A, each trimmed to columns i..i+M-1.
        C = [A[k][i:i+M] for k in range(j, j+M)]
        if B == C:
            print('Yes')
            exit()
print('No') | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
f006304aea65a77abfd6fcb17a77b3924912097f | 285cadabf9496439b4542fcf45ffab92e0de5e4c | /sbin/ruamel-json | 5e6ed395d3f033b8da4e24d44b26a2f37b24d3e2 | [] | no_license | asomov/yaml-editor | c1d54280b7902f211545e9b24b72f2922558edae | 48b2f6712987db5ca987b83aacd83303b9232706 | refs/heads/master | 2023-08-21T16:31:53.639876 | 2018-02-12T21:59:36 | 2018-02-12T21:59:36 | 122,829,888 | 0 | 0 | null | 2018-02-25T11:27:41 | 2018-02-25T11:27:41 | null | UTF-8 | Python | false | false | 302 | #!/usr/bin/env python
# Read every YAML document from stdin and print each one as pretty JSON.
# NOTE(review): Python 2 syntax (print statements); `pprint` is imported
# but never used.
import sys
import pprint
import ruamel.yaml
import json
# load_all yields one Python object per YAML document in the stream.
for doc in ruamel.yaml.load_all(sys.stdin.read(), Loader=ruamel.yaml.Loader):
    try:
        print json.dumps(doc, sort_keys=True, indent=2)
    except:
        # Bare except: report which exception type broke serialization,
        # then re-raise so the failure is still fatal.
        print "Error json.dumps:", sys.exc_info()[0]
        raise
| [
"cpan2@tinita.de"
] | cpan2@tinita.de | |
ca83ecd1bf929adff5bb0b13ed678c5e20523e67 | 8f62f80e981e66aa0b0dffb0d5d6a78dbbd9bf37 | /scraper/scrape.py | d1254c7005625f1b0d39fd710e05a35e89ece2ad | [] | no_license | navidkanaani/Python-Codes | fdb0ae8560474540cbb2b3e6b7f739d9be396155 | f8112e12847b4b93bb10300261c078bac4ebd291 | refs/heads/main | 2023-04-28T17:52:30.950480 | 2021-05-12T13:35:34 | 2021-05-12T13:35:34 | 340,845,243 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 784 | py | import requests
from bs4 import BeautifulSoup
import pprint
# Fetch the Hacker News front page and select the story links and their
# metadata rows (CSS classes match the site's older markup).
res = requests.get('https://news.ycombinator.com/')
soup = BeautifulSoup(res.text, 'html.parser')
links = soup.select('.storylink')
subtext = soup.select('.subtext')
def sort_news(hnlist):
    """Return the stories ordered by their 'votes' value, highest first."""
    vote_count = lambda story: story['votes']
    return sorted(hnlist, key=vote_count, reverse=True)
def extract_news(links, subtext):
    """Collect front-page stories with more than 99 points.

    `links` and `subtext` are the parallel `.storylink` / `.subtext` tag
    lists scraped above; the result is sorted by votes (descending) via
    sort_news().
    """
    stories = []
    for position, link_tag in enumerate(links):
        score_tags = subtext[position].select('.score')
        if not score_tags:
            continue
        points = int(score_tags[0].getText().replace(' points', ''))
        if points > 99:
            stories.append({
                'title': link_tag.getText(),
                'link': link_tag.get('href', None),
                'votes': points,
            })
    return sort_news(stories)
# Print the filtered (>99 points), vote-sorted front-page stories.
pprint.pprint(extract_news(links, subtext))
| [
"navidkaen@gmail.com"
] | navidkaen@gmail.com |
d5bdddaa0535d3e596e4a5c59c2125ca69bf75a7 | 53fab060fa262e5d5026e0807d93c75fb81e67b9 | /backup/user_367/ch8_2020_03_02_19_35_41_766365.py | 90c50d0003f62e4bfadd4d65843b2c9d5316886e | [] | no_license | gabriellaec/desoft-analise-exercicios | b77c6999424c5ce7e44086a12589a0ad43d6adca | 01940ab0897aa6005764fc220b900e4d6161d36b | refs/heads/main | 2023-01-31T17:19:42.050628 | 2020-12-16T05:21:31 | 2020-12-16T05:21:31 | 306,735,108 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 65 | py | def f(x):
    # NOTE(review): reads the globals `so`, `v`, `t`; parameter x is unused.
    s=so+v*t
    return s
v=10
t=1
# NOTE(review): this call raises NameError — `x` is never defined, and `so`
# is only assigned on the *next* line, after f() has already been called.
s=f(x)
so=0
print(s)
"you@example.com"
] | you@example.com |
832504a27211323bfbdddfbc5b3733a6f5ac8d47 | 5d9c4e85c773b784f52d6ea0a492d1cd2ddbcf6f | /Project3/orders/views.py | b082fa0ef3e75aa4eff9fd67331855fa36e93a1e | [] | no_license | ntouev/CS50_Web_Programming_with_Python_and_Javascript | cc5320c5ba8f178c010883b42946eec88bfcc020 | 5ce0cd65a41166a1083a0a092447834ee2d61182 | refs/heads/master | 2020-08-18T05:08:55.621469 | 2019-12-24T23:48:45 | 2019-12-24T23:48:45 | 215,750,820 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 627 | py | from django.http import HttpResponse
from django.shortcuts import render
from .models import Regular, Sicilian, Topping, Sub, Addon, Dinner_platter, Salad, Pasta
# Create your views here.
def home(request):
    """Render the menu page with every item of each food category."""
    # One queryset per menu section; the template reads these keys.
    context = {
        "regulars": Regular.objects.all(),
        "sicilians": Sicilian.objects.all(),
        "pastas": Pasta.objects.all(),
        "salads": Salad.objects.all(),
        "dinner_platters": Dinner_platter.objects.all(),
        "subs": Sub.objects.all(),
        "addons": Addon.objects.all(),
        "toppings": Topping.objects.all(),
    }
    return render(request, 'orders/home.html', context)
| [
"evntouros@gmail.com"
] | evntouros@gmail.com |
c7deac3ce546c1bd060412133c463d7faabfa784 | 06f0ae3ecaaf47b1c23e231838afa524d8446f5e | /account/migrations/0004_auto_20161013_1836.py | ad9f35d08a9de1a95a6918cf821e8ea4841cb673 | [] | no_license | nakamotohideyoshi/draftboard-web | c20a2a978add93268617b4547654b89eda11abfd | 4796fa9d88b56f80def011e2b043ce595bfce8c4 | refs/heads/master | 2022-12-15T06:18:24.926893 | 2017-09-17T12:40:03 | 2017-09-17T12:40:03 | 224,877,650 | 0 | 0 | null | 2022-12-08T00:02:57 | 2019-11-29T15:20:17 | Python | UTF-8 | Python | false | false | 2,078 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.9.7 on 2016-10-13 18:36
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    """Auto-generated migration (Django 1.9): adds the Trulioo ``Identity``
    model and widens the ``UserLog`` action/type choice sets."""

    dependencies = [
        ('account', '0003_userlog'),
    ]

    operations = [
        # One identity record per user, filled from Trulioo verification data.
        migrations.CreateModel(
            name='Identity',
            fields=[
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, primary_key=True, serialize=False, to=settings.AUTH_USER_MODEL)),
                ('first_name', models.CharField(max_length=100)),
                ('last_name', models.CharField(max_length=100)),
                ('birth_day', models.PositiveSmallIntegerField()),
                ('birth_month', models.PositiveSmallIntegerField()),
                ('birth_year', models.PositiveSmallIntegerField()),
                ('postal_code', models.CharField(max_length=16)),
                ('created', models.DateTimeField(auto_now_add=True)),
            ],
            options={
                'verbose_name': 'Trulioo User Identity',
            },
        ),
        migrations.AlterField(
            model_name='userlog',
            name='action',
            field=models.SmallIntegerField(choices=[(0, 'Country check failed'), (1, 'State check failed'), (2, 'VPN check failed'), (3, 'IP check status'), (4, 'IP check bypassed, user on local network'), (5, 'User Login'), (6, 'Lineup creation'), (7, 'Lineup edited'), (8, 'Contest entered'), (9, 'Contest deregistered'), (10, 'Deposit funds'), (11, 'Deposit pageview'), (12, 'Withdraw request - paypal'), (13, 'Trulioo verification failed'), (14, 'Trulioo verification success'), (15, 'User identity is already claimed.')]),
        ),
        # NOTE(review): 'User authenntication' below is a typo in the stored
        # choice label; it is kept verbatim because migrations are immutable
        # history — fix it in a follow-up migration, not here.
        migrations.AlterField(
            model_name='userlog',
            name='type',
            field=models.SmallIntegerField(choices=[(0, 'Location verification'), (1, 'Contest actions'), (2, 'User funds actions'), (3, 'User authenntication')]),
        ),
    ]
| [
"nakamoto.guru.0991@gmail.com"
] | nakamoto.guru.0991@gmail.com |
6c55e591a731959b085370e51d16a8f589994063 | 4317d2ef985f1a8f033be328c18586e5fdedd426 | /cam_preview.py | 4c1db1015173a264054874336b5981830f307eab | [] | no_license | jeromebyrne/rasp_motion_cam | bb5573bf5a8e0c7c8b062fd8eba89ee3849e7534 | 76f1fc5495bd6cbd0dc0a06cd8577a4036ebed08 | refs/heads/master | 2021-01-12T09:14:35.127511 | 2016-12-19T09:34:22 | 2016-12-19T09:34:22 | 76,804,677 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 256 | py | # -*- coding: utf-8 -*-
# Show a two-minute full-screen camera preview on a Raspberry Pi.
from picamera import PiCamera
import datetime
import time
from time import sleep
camera = PiCamera()
# Rotate the image 180 degrees (camera presumably mounted upside down).
camera.rotation = 180
camera.resolution = (1280, 720)
# Give the sensor a moment to settle before starting the preview.
sleep(1)
# alpha < 255 leaves the preview slightly transparent over the desktop.
camera.start_preview(alpha=190)
sleep(120)
camera.stop_preview()
| [
"jeromebyrne21@hotmail.com"
] | jeromebyrne21@hotmail.com |
1d762be616913683414ba9d4fde53ba80aec78a9 | f4ae4fbdad07bc3e1953066281cf2f0fbed9d7e8 | /src/kompos/helpers/himl_helper.py | 377b5bced0618592a5dd892023a41949c976fa4f | [
"Apache-2.0"
] | permissive | adobe/kompos | de2739cb9acc2ce90029416d3676551ba9c6455c | e2d1e24b5d7f57b7e692360b5137b5b0c14e4824 | refs/heads/main | 2023-09-01T12:59:08.743202 | 2023-04-27T13:52:13 | 2023-04-27T13:52:13 | 222,483,798 | 19 | 14 | Apache-2.0 | 2023-04-10T08:21:18 | 2019-11-18T15:39:35 | Python | UTF-8 | Python | false | false | 4,086 | py | # Copyright 2019 Adobe. All rights reserved.
# This file is licensed to you under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. You may obtain a copy
# of the License at http://www.apache.org/licenses/LICENSE-2.0
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR REPRESENTATIONS
# OF ANY KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
from himl.config_generator import ConfigProcessor
from kompos import display
COMPOSITION_KEY = "composition"
class HierarchicalConfigGenerator:
    """Wrapper around himl's ConfigProcessor.

    Generates the hierarchical configuration for a path and, for
    reproducibility, echoes the equivalent ``kompos ... config`` shell
    command before delegating to himl.
    """

    def __init__(self):
        self.config_processor = ConfigProcessor()

    def generate_config(
        self,
        config_path,
        filters=(),
        exclude_keys=(),
        enclosing_key=None,
        remove_enclosing_key=None,
        output_format="yaml",
        print_data=False,
        output_file=None,
        skip_interpolation_resolving=False,
        skip_interpolation_validation=False,
        skip_secrets=False,
        multi_line_string=False,
        type_strategies=None,
        fallback_strategies=None,
        type_conflict_strategies=None,
    ):
        """Process the hierarchical config rooted at ``config_path``.

        The keyword arguments mirror himl's ``ConfigProcessor.process``;
        the three merge-strategy arguments default to the same values the
        original code hard-coded.

        Returns whatever ``ConfigProcessor.process`` returns.
        """
        # BUG FIX: the strategy defaults used to be *mutable default
        # arguments* (lists shared between every call); build fresh ones
        # per call so a callee that mutates them cannot leak state.
        if type_strategies is None:
            type_strategies = [(list, ["append"]), (dict, ["merge"])]
        if fallback_strategies is None:
            fallback_strategies = ["override"]
        if type_conflict_strategies is None:
            type_conflict_strategies = ["override"]

        cmd = self.get_sh_command(
            config_path,
            filters,
            exclude_keys,
            enclosing_key,
            remove_enclosing_key,
            output_format,
            print_data,
            output_file,
            skip_interpolation_resolving,
            skip_interpolation_validation,
            skip_secrets,
            multi_line_string,
        )
        # Echo the equivalent CLI invocation so runs can be reproduced.
        display(cmd, color="yellow")

        return self.config_processor.process(
            path=config_path,
            filters=filters,
            exclude_keys=exclude_keys,
            enclosing_key=enclosing_key,
            remove_enclosing_key=remove_enclosing_key,
            output_format=output_format,
            output_file=output_file,
            print_data=print_data,
            skip_interpolations=skip_interpolation_resolving,
            skip_interpolation_validation=skip_interpolation_validation,
            skip_secrets=skip_secrets,
            multi_line_string=multi_line_string,
            type_strategies=type_strategies,
            fallback_strategies=fallback_strategies,
            type_conflict_strategies=type_conflict_strategies
        )

    @staticmethod
    def get_sh_command(
        config_path,
        filters=(),
        exclude_keys=(),
        enclosing_key=None,
        remove_enclosing_key=None,
        output_format="yaml",
        print_data=False,
        output_file=None,
        skip_interpolation_resolving=False,
        skip_interpolation_validation=False,
        skip_secrets=False,
        multi_line_string=False,
    ):
        """Build the ``kompos`` CLI invocation equivalent to a
        ``generate_config`` call (used for logging only)."""
        command = "kompos {} config --format {}".format(
            config_path, output_format)
        # Loop variable renamed: the original shadowed the builtin `filter`.
        for config_filter in filters:
            command += " --filter {}".format(config_filter)
        for exclude in exclude_keys:
            command += " --exclude {}".format(exclude)
        if enclosing_key:
            command += " --enclosing-key {}".format(enclosing_key)
        if remove_enclosing_key:
            command += " --remove-enclosing-key {}".format(remove_enclosing_key)
        if output_file:
            command += " --output-file {}".format(output_file)
        if print_data:
            command += " --print-data"
        if skip_interpolation_resolving:
            command += " --skip-interpolation-resolving"
        if skip_interpolation_validation:
            command += " --skip-interpolation-validation"
        if skip_secrets:
            command += " --skip-secrets"
        if multi_line_string:
            command += " --multi-line-string"

        return command
| [
"noreply@github.com"
] | adobe.noreply@github.com |
cdbb5cbd49309f187bcb448bb7ba1b7eca201413 | 1eeb2bb39c2d3222542c3b9767a2c1e11b4259c5 | /知乎Win7端/18_QLabel/QLabel(movie).py | c8f4029b46a3b4ee7eaa7bb6ef96c383ab88e5d5 | [] | no_license | kingdelee/PyQt5 | 9e58cebed3436e7696914a7ead180f82534315dc | 01128372dc93300198998455d2e4574af33e3487 | refs/heads/master | 2020-06-25T13:27:59.935234 | 2019-04-29T14:02:05 | 2019-04-29T14:02:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,233 | py | #coding=utf-8
from PyQt5.QtWidgets import QWidget, QApplication, QLabel, QPushButton
from PyQt5.QtGui import QMovie, QPixmap
import sys
class Example(QWidget):
    """Demo window: a QLabel that shows movie.gif either as a static
    pixmap or as a running QMovie animation."""
    def __init__(self):
        super().__init__()
        self.initUI()
    def initUI(self):
        """Build the window: one label for the gif plus start/stop buttons."""
        self.resize(550,300)
        # Window title kept verbatim (Chinese): "follow the WeChat account
        # 'learn some programming' -- tag: animation (QLabel)".
        self.setWindowTitle('关注微信公众号:学点编程吧--标签:动画(QLabel)')
        self.lb = QLabel(self)
        self.lb.setGeometry(100,50,300,200)
        # bt1 caption means "start", bt2 means "stop".
        self.bt1 = QPushButton('开始',self)
        self.bt2 = QPushButton('停止',self)
        self.bt1.move(100,20)
        self.bt2.move(280,20)
        # Show the first frame as a static image until "start" is pressed.
        self.pix = QPixmap('movie.gif')
        self.lb.setPixmap(self.pix)
        self.lb.setScaledContents(True)
        # Both buttons share one slot; run() inspects sender() to decide.
        self.bt1.clicked.connect(self.run)
        self.bt2.clicked.connect(self.run)
        self.show()
    def run(self):
        """Start or stop the gif depending on which button emitted the signal."""
        # A fresh QMovie per click; setMovie() keeps it alive via the label.
        movie = QMovie("movie.gif")
        self.lb.setMovie(movie)
        if self.sender() == self.bt1:
            movie.start()
        else:
            movie.stop()
            # Revert to the static first-frame pixmap when stopped.
            self.lb.setPixmap(self.pix)
if __name__ == '__main__':
    # Standard Qt bootstrap: create the app, show the window, run the loop.
    app = QApplication(sys.argv)
    ex = Example()
    sys.exit(app.exec_())
"falcon81321@gmail.com"
] | falcon81321@gmail.com |
6a7ae2173add022eeabb80f6eec8c301f4847aed | 1368a4f7d33d0a577d2aaa8aaf03bbf55ca8814a | /Python/Week-6.py | 3b309db895fd315a40bf0a09e91793e4dd9129f4 | [] | no_license | AyaMutlaq/Facoders | 362eee0bda764a48a9bd96a1fc46181337265137 | 84e9c0770ddd5b57290631e2c4ad56ab0bb88661 | refs/heads/master | 2020-04-02T22:52:03.737015 | 2018-11-21T21:49:55 | 2018-11-21T21:49:55 | 152,479,054 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,420 | py | grade_one= {'Sami': [19, 18, 19.5, 30, 10],
'Ahmad': [15, 14, 16, 21, 7],
'Faris': [18, 19, 17, 26, 9],
'Salem': [20, 20, 19, 30, 10],
'Mahmoud': [12, 13, 11, 18, 7]}
grade_two= {'Lana': [17, 19, 20, 28, 9],
'Dina': [18.5, 19.5, 20, 29, 10],
'Maha': [20, 20, 18, 26, 9],
'Abeer': [16, 18, 19.5, 25, 8]}
grade_three= {'Rima': [18, 19, 18, 26, 9],
'Tala': [20, 20, 19, 29, 10],
'Lamar': [19, 20, 18, 26, 9],
'Rola': [15, 14, 16, 19, 7],
'Naya': [9, 6, 11, 14, 7],
'Dalal': [1, 5, 2, 6, 7],
'Ola': [19.5, 20, 20, 29.5, 10]}
def _grade_book(name):
    """Resolve a grade name typed by the user to its score dict.

    Maps 'grade_one' / 'grade_two' / 'grade_three' to the dicts defined at
    the top of this script.  Resolved lazily inside the call so the
    module-level dicts are only needed when a lookup actually happens.
    Raises KeyError for an unknown grade name.
    """
    books = {'grade_one': grade_one,
             'grade_two': grade_two,
             'grade_three': grade_three}
    return books[name]

def students_names():
    """Prompt for a grade name and print the list of its students."""
    grade = input('Enter grade: ')
    # BUG FIX: input() returns a *string*; the original called
    # grade.keys() on that string, which raised AttributeError.
    a = list(_grade_book(grade).keys())
    print(a)

def students_score():
    """Prompt for a grade and a student, then print the student's total score."""
    grade, name = input('Enter grade: '), input('Enter name: ')
    # BUG FIX: look the student up in the grade dict instead of indexing
    # the raw input string (which raised TypeError).
    a = sum(_grade_book(grade)[name])
    print(a)

def students_count():
    """Prompt for a grade name and print how many students it has."""
    grade = input('Enter grade: ')
    a = len(list(_grade_book(grade).keys()))
    print(a)
# Main menu: keep re-prompting while the user answers 'More'.
# NOTE(review): the chosen name is stored in `x` but never *called*, so
# none of the report functions above ever runs; `x` is effectively unused.
print('Choose one: students_names(), students_score(), students_count()')
x= input('Choose one: ')
y= input("If you finished write 'Done' or If you not write 'More': ")
while True:
    if y=='More':
        print('Choose one: students_names, students_score, students_count')
        x= input('Choose one: ')
        y= input("If you finished write 'Done' or If you not write 'More': ")
    else:
        break
| [
"aya.hashem93@gmail.com"
] | aya.hashem93@gmail.com |
b58656031eee017e9eafa9bc8c065da38fd67326 | b2b40bfb0a6a6b0899f5185be165e6907abf0b27 | /My-codes/vasp/hetero_2d-2d.py | 45cf83b7c21ebe11af208897667f64d84b40450b | [] | no_license | ritesh001/Related_to_research | 00d0f45fd5943811e2de9203981e5abe978c415d | 310876da18e20b3a60639d730b6e40ff848576eb | refs/heads/master | 2023-04-13T05:30:47.269278 | 2023-04-02T09:33:36 | 2023-04-02T09:33:36 | 78,549,166 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,722 | py | # coding: utf-8
# Copyright (c) Henniggroup.
# Distributed under the terms of the MIT License.
from __future__ import division, print_function, unicode_literals, absolute_import
import sys
"""
Compute the reduced matching lattice vectors for heterostructure
interfaces as described in the paper by Zur and McGill:
Journal of Applied Physics 55, 378 (1984); doi: 10.1063/1.333084
"""
__author__ = "Kiran Mathew, Arunima Singh, Ritesh Kumar(modified for 2d-2d heterostructures)"
from mpinterfaces.calibrate import CalibrateSlab
from mpinterfaces.interface import Interface
from mpinterfaces.transformations import *
from mpinterfaces.utils import *
# Interface construction parameters.
separation = 3 # in angstroms
nlayers_2d = 1
nlayers_substrate = 1
# Load the two 2D layers (structure files given on the command line),
# both sliced along the [001] direction.
substrate_1 = slab_from_file([0, 0, 1], sys.argv[1])
# NOTE(review): sa_sub is computed but never used below.
sa_sub = SpacegroupAnalyzer(substrate_1)
substrate_2 = slab_from_file([0, 0, 1], sys.argv[2])
# Find commensurate (strain-matched) supercells for the two lattices,
# per Zur & McGill (J. Appl. Phys. 55, 378, 1984).
substrate_1_aligned, substrate_2_aligned = get_aligned_lattices(
    substrate_1,
    substrate_2,
    max_area=400,
    max_mismatch=0.05,
    max_angle_diff=1,
    r1r2_tol=0.01)
# substrate_1_aligned.to(fmt='poscar',
#                      filename='POSCAR_gr_aligned.vasp')
# substrate_2_aligned.to(fmt='poscar',
#                      filename='POSCAR_mos2_aligned.vasp')
# merge substrate and mat2d in all possible ways
hetero_interfaces = generate_all_configs(substrate_1_aligned,
                                         substrate_2_aligned,
                                         nlayers_2d, nlayers_substrate,
                                         separation)
# generate all poscars, one VASP POSCAR file per stacking configuration
for i, iface in enumerate(hetero_interfaces):
    poscar = Poscar(iface)
    poscar.write_file(
        filename='POSCAR_final_{}.vasp'.format(i))
| [
"dasdeya@gmail.com"
] | dasdeya@gmail.com |
0b8be68f52a1d5b67bb14a360d8b2a529fd34cef | 47cf5a6945da3b4f469794dc210e40e8971fa80f | /examples/basic/boundaries.py | 3fedf7980f0804a677ae36d3d4c9005337413e06 | [
"MIT",
"OFL-1.1",
"CC0-1.0",
"LicenseRef-scancode-public-domain"
] | permissive | charliekind/vtkplotter | 53aee8f8e599129dbcd768c0810b6caa95addfc9 | e16daac258dc0b383043575f2916ac4ea84a60b1 | refs/heads/master | 2022-03-08T15:29:42.218590 | 2022-02-16T21:01:16 | 2022-02-16T21:01:16 | 164,445,473 | 0 | 0 | MIT | 2019-01-07T14:35:42 | 2019-01-07T14:35:42 | null | UTF-8 | Python | false | false | 354 | py | """Extract points on the boundary of a mesh.
Add an ID label to all vertices."""
from vedo import *
# Load the mesh, compute normals, drop unused points, use a thin wireframe.
b = Mesh(dataurl+'290.vtk')
b.computeNormals().clean().lw(0.1)
# Ids of the vertices lying on the open boundaries of the mesh.
pids = b.boundaries(returnPointIds=True)
bpts = b.points()[pids]
# Render the boundary points in red; label vertices with their id.
pts = Points(bpts, r=10, c='red')
labels = b.labels('id', scale=10).c('dg')
show(b, pts, labels, __doc__, zoom=2).close()
"marco.musy@gmail.com"
] | marco.musy@gmail.com |
f3449d2e86ca5de75ba09d32a574ad3032a4f81c | dda8bc29ad456fd944b9129afcbf1754523fe1ed | /users/vasile/stl2gcode/stl2gcode | fec91efa93b3bff46c15fff03fd6b86d72716cc5 | [] | no_license | carlossantiagoredbeltteam/teste_212390-redTeam | 8a6da08dcb8319ae2db5228c3b6070753f6be67c | aacd0d9e799ed8f7aa898496af754459c31855dd | refs/heads/master | 2021-01-10T13:27:21.150868 | 2011-07-06T17:36:46 | 2011-07-06T17:36:46 | 55,693,486 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,168 | #!/usr/bin/python
## stl2gcode generates gcode from stereolithographic triangles. This
## gcode can be fed to CNC fab machines to create physical 3D models.
## stl2gcode also generates intermediate file formats (i.e. pov and
## png files).
## Copyright (C) 2008 James Vasile <james@hackervisions.org>. This is
## free software; you can redistribute it and/or modify it under the
## terms of the GNU General Public License as published by the Free
## Software Foundation; either version 3 of the License, or any later
## version.
## This is distributed in the hope that it will be useful, but WITHOUT
## ANY WARRANTY; without even the implied warranty of MERCHANTABILITY
## or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public
## License for more details. You should have received a copy of the
## GNU General Public License along with the work; if not, write to
## the Free Software Foundation, Inc., 51 Franklin Street, 5th Floor,
## Boston, MA 02110-1301 USA
##
## Not tested on Windows or Mac boxes.
##
## Needs PIL, the Python Imaging Library (aptitude install
## python-imaging)
##
## stl2gcode includes some code from image-to-gcode.py. That code is
## Copyright (C) 2005 Chris Radek (chris@timeguy.com)
## TODO: Handle file globs for -f
import sys
import getopt
import stl2gcode
## Defaults
step = 0.002 # this is the x and y step length for the gcode conversion
depth = 0.008 # I think this is color depth
x, y = 0, 0 # starting offset for x and y for gcode
#tolerance = 0.000000001
# NOTE(review): this first value is immediately overwritten by the next
# line, so only [0, 0, 0] ever takes effect.
inside_vector = [-0.5, 0.68, 0.5] # TODO: calculate this
inside_vector = [0, 0, 0]
target = 'gcode' # target format should be one of inc, pov, png, gcode
filename=''
step_x = step_y = step_z = 1 # crude resolution, but we need a default
VERSION = 0.1
def help():
    """Print the extended help text (partial-pipeline usage notes).

    NOTE(review): shadows the builtin help(); kept as-is because
    do_args() calls it by this name.
    """
    print '''\nBy specifying a file with [inc|pov|png] extension, you can skip
initial steps. And by using the --target option, you can end the
processing at whatever step you need. If intermediary files are what
you're looking for, there's no need to keep processing all the way to
gcode.
Example: 'stl2gcode -f object.inc -t png' generates the pov and png
files but not the inc or gcode files.'''
def usage():
    """Print the one-screen command-line option summary (Python 2)."""
    print '''Slices 3D stl models into layers and generates gcode for each layer.
stl2gcode [dstxy] --file
-d --depth d\t\t\tcolor depth (not implemented)
-f --file f\t\t\t\tfilename (required)
-h --help\t\t\thelp
-s --step s\t\t\t\tx/y step size of your fab machine (not implemented)
-t --target [inc|pov|png|gcode]\ttarget format
-x --x x\t\t\t\tinitial x value (not implemented)
-y --y y\t\t\t\tinitial y value (not implemented)'''
def do_args(argv):
if len(argv) == 0: usage(); sys.exit()
try:
opts, args = getopt.getopt(argv, "hd:f:s:t:x:y:",
["help", "file=", "depth=", "step=", "target=",
"x=", "y="])
except getopt.GetoptError, err:
print str(err); usage(); sys.exit(2)
global filename
for o, a in opts:
if o in ("-h", "--help"): usage(); help(); sys.exit()
elif o in ("-d", "--depth"): global depth; depth = float(a)
elif o in ("-f", "--filename"): global filename; filename = a;
elif o in ("-s", "--step"): global step; step = float(a);
elif o in ("-t", "--target"): global target; target = a;
elif o in ("-x", "--x"): global x; x = float(a)
elif o in ("-y", "--y"): global y; y = float(a)
else: assert False, "unhandled option"
if filename == "":
print "Must specify filename. Use -f option."
usage()
sys.exit(2)
if target not in (['inc', 'pov', 'png', 'gcode']):
print "Target must be inc|pov|png|gcode."
usage()
sys.exit(2)
###############################################################################
if __name__ == "__main__":
    do_args(sys.argv[1:])
    # NOTE(review): the parsed -s/--step value (`step`) is not forwarded;
    # only the fixed step_x/step_y/step_z defaults reach the converter.
    convert_object = stl2gcode.stl2gcode({
        'depth': depth, 'filename': filename,
        'target': target, 'step_x': step_x, 'step_y': step_y, 'step_z': step_z,
        'x': x, 'y': y})
    convert_object.convert()
| [
"jvasile@cb376a5e-1013-0410-a455-b6b1f9ac8223"
] | jvasile@cb376a5e-1013-0410-a455-b6b1f9ac8223 | |
d76ea5e54be61c71c5b587175467fb37e20c5824 | 60a831fb3c92a9d2a2b52ff7f5a0f665d4692a24 | /IronPythonStubs/release/stubs.min/System/Windows/Forms/__init___parts/ToolStripItemRenderEventArgs.py | 5d8f56d8057176f9be000cf9e427ae8bd776780e | [
"MIT"
] | permissive | shnlmn/Rhino-Grasshopper-Scripts | a9411098c5d1bbc55feb782def565d535b27b709 | 0e43c3c1d09fb12cdbd86a3c4e2ba49982e0f823 | refs/heads/master | 2020-04-10T18:59:43.518140 | 2020-04-08T02:49:07 | 2020-04-08T02:49:07 | 161,219,695 | 11 | 2 | null | null | null | null | UTF-8 | Python | false | false | 1,200 | py | class ToolStripItemRenderEventArgs(EventArgs):
"""
Provides data for the events that render the background of objects derived from System.Windows.Forms.ToolStripItem in the System.Windows.Forms.ToolStripRenderer class.
ToolStripItemRenderEventArgs(g: Graphics,item: ToolStripItem)
"""
def __getitem__(self,*args):
""" x.__getitem__(y) <==> x[y] """
pass
@staticmethod
def __new__(self,g,item):
""" __new__(cls: type,g: Graphics,item: ToolStripItem) """
pass
Graphics=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the graphics used to paint the System.Windows.Forms.ToolStripItem.
Get: Graphics(self: ToolStripItemRenderEventArgs) -> Graphics
"""
Item=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the System.Windows.Forms.ToolStripItem to paint.
Get: Item(self: ToolStripItemRenderEventArgs) -> ToolStripItem
"""
ToolStrip=property(lambda self: object(),lambda self,v: None,lambda self: None)
"""Gets the value of the System.Windows.Forms.ToolStripItem.Owner property for the System.Windows.Forms.ToolStripItem to paint.
Get: ToolStrip(self: ToolStripItemRenderEventArgs) -> ToolStrip
"""
| [
"magnetscoil@gmail.com"
] | magnetscoil@gmail.com |
4be1d222e9b1fc87b3dfde002aba17f9c12e2a5a | 9d38d7cd271eba95578e36a6d5c8f0d3dda39fe5 | /fdutils/db/vendors.py | 31aa9f4674ec4e5a0df8add64bca64ce3720918f | [
"MIT"
] | permissive | filintod/pyremotelogin | 8e8be07b49e86b79e7fb0776c7462995a01c422e | 3661a686be58415f72db5800431f945ff48324a0 | refs/heads/master | 2022-11-10T09:38:54.522089 | 2022-10-21T20:01:39 | 2022-10-21T20:01:39 | 137,921,045 | 1 | 1 | MIT | 2022-10-21T20:01:40 | 2018-06-19T17:02:17 | Python | UTF-8 | Python | false | false | 3,074 | py | import logging
from .db import SQLDB
SQLITE_CONNECT_STRING = 'sqlite://{file_path}'
log = logging.getLogger(__name__)
class OracleDB(SQLDB):
    """Oracle flavour of SQLDB.

    Connects either by SID or by service name; when a service name is
    given, the connect string is built as a multi-address TNS descriptor
    so clustered (RAC) hosts get fail-over and load balancing.
    """
    DATABASE_NAME = 'oracle'
    PORT = 1521
    DRIVER = ''

    def __init__(self, *args, **kwargs):
        # service_name is Oracle-specific; pop it before delegating so the
        # base class does not see an unknown keyword.
        self.service_name = kwargs.pop('service_name', None)
        super(OracleDB, self).__init__(*args, **kwargs)

    def __repr__(self, **kwargs):
        return super(OracleDB, self).__repr__(', {sid_or_service}="{db}"'.format(
            sid_or_service=('sid' if not self.service_name else 'service_name'), db=self.db))

    def get_connect_string(self, template=None):
        # Only build the TNS descriptor when no explicit template was given
        # and we connect by service name; otherwise pass the caller's
        # template through untouched.  (The original code carried a
        # redundant `else: template = template` branch here.)
        if not template and self.service_name is not None:
            # create the multi address url for clusters
            address_template = '(ADDRESS = (PROTOCOL = TCP)(HOST = {host})(PORT = {port}))'
            hosts_info = ''.join(address_template.format(host=host, port=port or self.port)
                                 for (host, port) in self.cluster)
            # https://docs.oracle.com/database/121/HABPT/config_fcf.htm#HABPT5381
            template = ("oracle+cx_oracle://{username}:{password}@(DESCRIPTION = "
                        "(FAILOVER=ON) " +
                        "(ADDRESS_LIST=" +
                        "(LOAD_BALANCE=on)" +
                        "(CONNECT_TIMEOUT=3)(RETRY_COUNT=3)" +
                        hosts_info +
                        ")" +  # from address_list
                        "(CONNECT_DATA = (SERVER = DEDICATED) (SERVICE_NAME = {db})))")
        return super(OracleDB, self).get_connect_string(template=template)
class MySQLDB(SQLDB):
    """MySQL flavour of SQLDB; uses the pure-Python pymysql driver."""
    DATABASE_NAME = 'mysql'
    DRIVER = 'pymysql' # other could be 'mysqldb' that is c-based
    PORT = 3306
    def __repr__(self, **kwargs):
        # Delegate to the base repr, appending the database name.
        return super(MySQLDB, self).__repr__(', dbname="{}"'.format(self.db))
class PostgresDB(SQLDB):
    """PostgreSQL flavour of SQLDB; uses the psycopg2 driver."""
    DATABASE_NAME = 'postgresql'
    DRIVER = 'psycopg2'
    PORT = 5432
    def __repr__(self, **kwargs):
        # Delegate to the base repr, appending the database name.
        return super(PostgresDB, self).__repr__(', dbname="{}"'.format(self.db))
class SQLite(SQLDB):
    """SQLite flavour of SQLDB; here ``db`` is the database file path."""
    DATABASE_NAME = 'sqlite'
    DRIVER = ''
    TEMPLATE = 'sqlite:///{db}'

    def __repr__(self, **kwargs):
        # %-formatting yields the same text as the original .format() call.
        return 'SQLite(db_file_path=%s)' % (self.db,)
def create_from_vendor(vendor='', host='', db='', port=0, username='', password='', **db_kwargs):
    """Instantiate the SQLDB subclass matching a vendor name.

    Arguments:
        vendor (str): 'sqlite'/'sqllite', 'mysql', 'oracle', or anything
            starting with 'postgres' (matched case-insensitively).
        host, db, port, username, password, **db_kwargs: forwarded to the
            chosen vendor class.

    Raises:
        NotImplementedError: for any unrecognized vendor.
    """
    vendor = vendor.lower()
    if vendor in ('sqllite', 'sqlite'):
        sqldb = SQLite
    elif vendor.startswith('postgres'):
        sqldb = PostgresDB
    elif vendor == 'oracle':
        sqldb = OracleDB
    elif vendor == 'mysql':
        # BUG FIX: this previously referenced the undefined name `MysqlDB`
        # (the class above is `MySQLDB`), raising NameError for mysql.
        sqldb = MySQLDB
    else:
        raise NotImplementedError('We have only implemented this method for sqllite, mysql, oracle and postgresql')
    return sqldb(username=username, password=password, host=host, port=port, db=db, **db_kwargs)
"duranto@gmail.com"
] | duranto@gmail.com |
acbc656b372797c74a08ae745e3ede54e7fc6b37 | 7e98a457a6c6056320d1d6a5ee7cd0cace3b1638 | /Foods/urls.py | 3773628c6ff14cd7c5085009feef32163a3f636d | [] | no_license | buddy9747/Korean-Forum | 97afeb19f009142f6b26375ff6a16608b9321f46 | 825c05e1abf53dbaf3af6a29777eacc8c36e5287 | refs/heads/master | 2020-04-20T07:43:46.367641 | 2019-02-01T15:32:43 | 2019-02-01T15:32:43 | 168,718,145 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 420 | py | from django.contrib import admin
from django.urls import path
from .import views
from KoreanForum import settings
from django.conf.urls.static import static
from django.contrib.auth.views import login
urlpatterns = [
path('dishes',views.food,name='food'),
path('dish/detail',views.food_detail,name='food_detail')
]
if settings.DEBUG:
urlpatterns+=static(settings.STATIC_URL,document_root=settings.MEDIA_ROOT) | [
"40247883+buddy9747@users.noreply.github.com"
] | 40247883+buddy9747@users.noreply.github.com |
d40f19e123fa94db5564765f5ef675feef08ff99 | f492c537db522a7df129cce0dc75d17d33e382c9 | /Day 42/EVENDIFF.py | 38a79985d1d30a408a27dd04caa3ca49400df8f9 | [] | no_license | rahul-s-bhatt/ProgrammingTraning | f9f6b34649a7d7d21a0b0d4af4a862dd385e6752 | dd96c2e10ad14549a37025aae7c165e2bbb64b54 | refs/heads/master | 2023-02-26T14:48:51.024988 | 2021-02-06T16:31:30 | 2021-02-06T16:31:30 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 326 | py | def solve(n, arr):
even, odd = 0, 0
for i in arr:
if i%2 == 0:
even += 1
else:
odd += 1
# if odd == 0 or even == 0:
# return 0
if odd > even:
return even
return odd
# One test case per group of lines: first T, then n, then the array values.
for _ in range(int(input())):
    n = int(input())
    arr = list(map(int, input().split()))
    print(solve(n, arr))
"noreply@github.com"
] | rahul-s-bhatt.noreply@github.com |
2daa9888d7ecb2b4011a321c57530ebc68eed284 | 6491795a70aef2176b2b2ab49b539b8cb823dc34 | /AtCoder/ABC/000-159/ABC157_C.py | 6557c36465e6fd4b60388095b9118bf28e0e69c2 | [
"MIT"
] | permissive | sireline/PyCode | 5eacc410b23ca93344a9d095df00e1586c64b1cb | 8578467710c3c1faa89499f5d732507f5d9a584c | refs/heads/master | 2021-06-25T13:47:05.608192 | 2021-03-31T07:57:01 | 2021-03-31T07:57:01 | 221,617,081 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 890 | py | ans = [-1, -1, -1]
# `ans` (initialized to [-1, -1, -1] on the preceding line) holds the
# digit chosen for each of up to three positions; -1 means unconstrained.
N, M = [int(n) for n in input().split()]
for i in range(M):
    s, c = [int(n) for n in input().split()]
    if N == 1:
        # NOTE(review): for N == 1, any constraint on position 1 or 2 is
        # treated as a contradiction — rejecting s == 1 looks wrong.
        if s in [1, 2]:
            ans = -1
            break
        else:
            if ans[s-1] == -1 or c < ans[s-1]:
                ans[s-1] = c
    elif N == 2:
        if s in [1]:
            ans = -1
            break
        else:
            if ans[s-1] == -1 or c < ans[s-1]:
                ans[s-1] = c
    else:
        # Keep the smallest digit requested for this position.
        # NOTE(review): conflicting constraints should arguably be a
        # contradiction, not a minimum.
        if ans[s-1] == -1 or c < ans[s-1]:
            ans[s-1] = c
# NOTE(review): when the loop `break`s, ans is the int -1 and len(ans)
# raises TypeError; the check was presumably meant to be `ans == -1`.
if len(ans) == 1:
    print(-1)
else:
    ans = "".join([str(c) for c in ans])
    if N == 3:
        # NOTE(review): nothing is printed for a *valid* N == 3 answer.
        if ans[0] == '-' or (ans[0] == '-' and ans[2] == '-'):
            print(-1)
            exit()
    elif N == 2:
        if ans[0] == '-' or ans[2] == '0':
            print(-1)
            exit()
    else:
        print(ans.replace('-1', '0'))
| [
"m_sireline@hotmail.com"
] | m_sireline@hotmail.com |
5d831dd561f618d3094e2ca79c93b7ffb966d35c | 66ae73dff4bf55ff19aefabb4a79b3370aaf00cd | /modules.py | 7a8e872ffa7528fee1b9aab60792874b0785095d | [] | no_license | arunprithviraj/JustAnotherRepo | 39e8ec5d732e8c3693b56a3c3b1fa7c473985212 | 69bb6eab1381c3f471891b4724e8477660ce0402 | refs/heads/master | 2020-12-04T19:45:12.119124 | 2016-08-24T00:40:02 | 2016-08-24T00:40:02 | 66,413,382 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 210 | py | def gcd(x,y):
found = False
count = y
while not found & count >=2:
if x%count == 0 and y%count ==0:
return count
elif count == 2:
return 1
count -=1
| [
"arunprithviraj@gmail.com"
] | arunprithviraj@gmail.com |
75fc9f672b0add31f6054701a480f93d327f5ef7 | f2bb082e3d84558f91e1e7dcc1b85d80686cab52 | /experiments/rmdn_comparison/utils.py | 94a4098caa2711394b1248e24ac6dbe6d09e16c5 | [
"MIT"
] | permissive | Tobias-Fischer/dreyeve | 2cd3027c3fec0845e5e8f8efc9b8721780a902f6 | a65342d9c503ce3ec932e2229b90aaeebfd82944 | refs/heads/master | 2022-12-12T14:58:13.778138 | 2020-09-14T05:26:49 | 2020-09-14T05:26:49 | 295,305,865 | 0 | 0 | MIT | 2020-09-14T04:42:47 | 2020-09-14T04:42:46 | null | UTF-8 | Python | false | false | 822 | py | import numpy as np
import scipy.stats
def gmm_to_probability_map(gmm, image_size):
h, w = image_size
y, x = np.mgrid[0:h:1, 0:w:1]
pos = np.empty(x.shape + (2,))
pos[:, :, 0] = y
pos[:, :, 1] = x
out = np.zeros(shape=(h, w))
for g in range(0, gmm.shape[0]):
w = gmm[g, 0]
normal = scipy.stats.multivariate_normal(mean=gmm[g, 1:3], cov=[[gmm[g, 3], gmm[g, 5]], [gmm[g, 5], gmm[g, 4]]])
out += w * normal.pdf(pos)
out /= out.sum()
return out
if __name__ == '__main__':
gmm = np.array([[0.5, 50, 0, 100, 100, 0], [0.5, 100, 100, 10, 10, -1]], dtype='float32')
map = gmm_to_probability_map(gmm, image_size=(128, 171))
from computer_vision_utils.io_helper import normalize
import cv2
cv2.imshow('GMM', normalize(map))
cv2.waitKey()
| [
"btadvd@gmail.com"
] | btadvd@gmail.com |
f065452b9ca4c551d15e102fa72384d1b75b5b1e | 5a2396488632964495582d70326ae911b53c2105 | /utils.py | c75a5461538482ed44c0b63772cd36e5a849de86 | [
"MIT"
] | permissive | ashishsnaik/CarND-Traffic-Sign-Classifier-Project-P3 | 46d4c3d7d4a6f386d6fa89c43d369ecbcb24f56e | a35b2cdb14068bf0f501aff0279faf168508d043 | refs/heads/master | 2020-04-17T06:47:51.506724 | 2019-11-02T00:12:57 | 2019-11-02T00:12:57 | 166,340,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,346 | py | # Includes
import os
import csv
import math
import numpy as np
import matplotlib.pyplot as plt
# plot images
def plot_images(images, titles=None, cols=3, fontsize=12):
n_imgs = len(images)
if images is None or n_imgs < 1:
print("No images to display.")
return
img_h, img_w = images[0].shape[:2]
rows = math.ceil(n_imgs / cols)
width = 21 # 15
row_height = math.ceil((width/cols)*(img_h/img_w)) # they are 1280*720
plt.figure(1, figsize=(width, row_height * rows))
for i, image in enumerate(images):
if len(image.shape) > 2:
cmap = None
else:
cmap = 'gray'
title = ""
if titles is not None and i < len(titles):
title = titles[i]
plt.subplot(rows, cols, i+1)
plt.title(title, fontsize=fontsize)
plt.imshow(image, cmap=cmap)
plt.tight_layout()
plt.show()
# read a csv file to dict
def csv_to_dict(csv_file, print_keys=False):
with open(csv_file, mode='r') as infile:
csv_reader = csv.reader(infile)
# skip the header 'ClassId, SignName'
_ = next(csv_reader)
# read the sign id and names
ret_dict = {rows[0]:rows[1] for rows in csv_reader}
if print_keys is True:
print('Dict keys:', ret_dict.keys())
return ret_dict
| [
"ashish.usf@gmail.com"
] | ashish.usf@gmail.com |
4412244a638580cd00ff9838a86adeb2fe0aabd8 | 88d49250f1a2024d8046f8a5bdc2a82431d019ae | /election/main.py | bbef4f1c9093a6f76e1d22155e2d5d095f3687dd | [] | no_license | atiratree/pv248 | 7148dbecbe384c93f5077738f1815cc20fc8b037 | cd1cbae29448bb6db828ed1fe42243391af2aeaa | refs/heads/master | 2021-08-22T17:17:03.679407 | 2017-11-30T19:32:24 | 2017-11-30T19:32:24 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 367 | py | from metadata.settings import Settings
from parse.parser import Parser
from plot.printer import show_max_percent_parties_bar, show_pie_chart_results
if __name__ == "__main__":
with open(Settings.election_file, 'r') as election:
parties = Parser.import_data(election)
show_max_percent_parties_bar(parties)
show_pie_chart_results(parties)
| [
"suomiy@gmail.com"
] | suomiy@gmail.com |
ed281c5055faf749262fb227155472a298769d3b | 8a8db00750eace66e181b6570501e2db8f2e5851 | /bridge/jobs/management/__init__.py | cd51710109a92b371acf14057a71b097923c6b05 | [
"Apache-2.0"
] | permissive | vmordan/klever | 1c81f2d0e84b9a1bfe64c64ec085c2aae85edbc9 | 52877601e252279375091e049d096d8b302717a6 | refs/heads/master | 2023-07-08T18:08:08.720430 | 2019-09-03T13:25:30 | 2019-09-03T13:25:30 | 214,371,652 | 0 | 0 | null | 2019-10-11T07:27:10 | 2019-10-11T07:27:06 | null | UTF-8 | Python | false | false | 682 | py | #
# Copyright (c) 2018 ISP RAS (http://www.ispras.ru)
# Ivannikov Institute for System Programming of the Russian Academy of Sciences
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
| [
"ilja.zakharov@ispras.ru"
] | ilja.zakharov@ispras.ru |
94888e140b6d56c13b41e77a0342af4f11934c5f | 36d9f12f40ee37d8693f419c7200da0ffb70efc8 | /django_project/users/migrations/0002_auto_20200413_1305.py | 34df19872c4f44ba5d9d0766416ba8702b94faec | [] | no_license | Grizzlyblack/Django-blog | 637e36819b86c49935cb3b6b346f82b85b41b567 | e070ec66a5e090007395f2845e050191ff532bd1 | refs/heads/master | 2022-04-20T14:56:44.017740 | 2020-04-15T15:34:49 | 2020-04-15T15:34:49 | 255,962,332 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 421 | py | # Generated by Django 3.0.5 on 2020-04-13 17:05
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('users', '0001_initial'),
]
operations = [
migrations.AlterField(
model_name='profile',
name='image',
field=models.ImageField(default='profile_pics/default.jpg', upload_to='profile_pics'),
),
]
| [
"ethantoth@gmail.com"
] | ethantoth@gmail.com |
d656dc9394e276db60070a357ce507d0a88a2529 | 7a0efb7f3bfe2694253f7e0e353cd7cedd2ccddc | /test/language/optional_members/python/OptionalRecursionTest.py | b3f2f9b191168cf6552400609cd89d6f2e103fd6 | [
"BSD-3-Clause"
] | permissive | gmdelc66/zserio | c8d89c99628d0ce22d06ca2dc87efffef1021e20 | 9e4c62a9ed10b955f6f44b2322771c1c69c16160 | refs/heads/master | 2023-02-24T20:33:50.299829 | 2021-01-20T21:35:09 | 2021-01-20T21:35:09 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,296 | py | import unittest
import zserio
from testutils import getZserioApi
class OptionalRecursionTest(unittest.TestCase):
@classmethod
def setUpClass(cls):
cls.api = getZserioApi(__file__, "optional_members.zs").optional_recursion
def testParamConstructor(self):
emptyBlock1 = self.api.Block(0)
self.assertEqual(0, emptyBlock1.getByteCount())
self.assertEqual(0, len(emptyBlock1.getDataBytes()))
self.assertEqual(0, emptyBlock1.getBlockTerminator())
self.assertEqual(None, emptyBlock1.getNextData())
def testFromFields(self):
emptyBlock1 = self.api.Block.fromFields(0, [], 0, None)
self.assertEqual(0, emptyBlock1.getByteCount())
self.assertEqual(0, len(emptyBlock1.getDataBytes()))
self.assertEqual(0, emptyBlock1.getBlockTerminator())
self.assertEqual(None, emptyBlock1.getNextData())
def testEq(self):
emptyBlock1 = self._createEmptyBlock()
emptyBlock2 = self._createEmptyBlock()
self.assertTrue(emptyBlock1 == emptyBlock2)
block1 = self._createBlock(self.BLOCK1_DATA)
self.assertFalse(block1 == emptyBlock1)
block2 = self._createBlock(self.BLOCK1_DATA)
self.assertTrue(block2 == block1)
block12 = self._createBlock12(self.BLOCK1_DATA, self.BLOCK2_DATA)
self.assertFalse(block12 == block1)
def testHash(self):
emptyBlock1 = self._createEmptyBlock()
emptyBlock2 = self._createEmptyBlock()
self.assertEqual(hash(emptyBlock1), hash(emptyBlock2))
block1 = self._createBlock(self.BLOCK1_DATA)
self.assertTrue(hash(block1) != hash(emptyBlock1))
block2 = self._createBlock(self.BLOCK1_DATA)
self.assertEqual(hash(block2), hash(block1))
block12 = self._createBlock12(self.BLOCK1_DATA, self.BLOCK2_DATA)
self.assertTrue(hash(block12) != hash(block1))
def testHasNextData(self):
block1 = self._createBlock(self.BLOCK1_DATA)
self.assertFalse(block1.hasNextData())
block12 = self._createBlock12(self.BLOCK1_DATA, self.BLOCK2_DATA)
self.assertTrue(block12.hasNextData())
def testBitSizeOf(self):
block1 = self._createBlock(self.BLOCK1_DATA)
self.assertEqual(OptionalRecursionTest._getBlockBitSize(self.BLOCK1_DATA), block1.bitSizeOf())
block12 = self._createBlock12(self.BLOCK1_DATA, self.BLOCK2_DATA)
self.assertEqual(OptionalRecursionTest._getBlock12BitSize(self.BLOCK1_DATA, self.BLOCK2_DATA),
block12.bitSizeOf())
def testInitializeOffsets(self):
block1 = self._createBlock(self.BLOCK1_DATA)
bitPosition = 1
self.assertEqual(bitPosition + OptionalRecursionTest._getBlockBitSize(self.BLOCK1_DATA),
block1.initializeOffsets(bitPosition))
block12 = self._createBlock12(self.BLOCK1_DATA, self.BLOCK2_DATA)
self.assertEqual(bitPosition +
OptionalRecursionTest._getBlock12BitSize(self.BLOCK1_DATA, self.BLOCK2_DATA),
block12.initializeOffsets(bitPosition))
def testWriteBlock1(self):
block1 = self._createBlock(self.BLOCK1_DATA)
writer = zserio.BitStreamWriter()
block1.write(writer)
reader = zserio.BitStreamReader(writer.getByteArray())
self._checkBlockInStream(reader, self.BLOCK1_DATA)
reader.setBitPosition(0)
readBlock1 = self.api.Block.fromReader(reader, len(self.BLOCK1_DATA))
self.assertEqual(block1, readBlock1)
def testWriteBlock12(self):
block12 = self._createBlock12(self.BLOCK1_DATA, self.BLOCK2_DATA)
writer = zserio.BitStreamWriter()
block12.write(writer)
reader = zserio.BitStreamReader(writer.getByteArray())
self._checkBlock12InStream(reader, self.BLOCK1_DATA, self.BLOCK2_DATA)
reader.setBitPosition(0)
readBlock12 = self.api.Block.fromReader(reader, len(self.BLOCK1_DATA))
self.assertEqual(block12, readBlock12)
def _createEmptyBlock(self):
return self.api.Block.fromFields(0, None, 0, None)
def _createBlock(self, blockData):
return self.api.Block.fromFields(len(blockData), blockData, 0, None)
def _createBlock12(self, block1Data, block2Data):
block2 = self._createBlock(block2Data)
return self.api.Block.fromFields(len(block1Data), block1Data, len(block2Data), block2)
@staticmethod
def _getBlockBitSize(blockData):
return 8 * len(blockData) + 8
@staticmethod
def _getBlock12BitSize(block1Data, block2Data):
return (OptionalRecursionTest._getBlockBitSize(block1Data) +
OptionalRecursionTest._getBlockBitSize(block2Data))
def _checkBlockInStream(self, reader, blockData):
for element in blockData:
self.assertEqual(element, reader.readBits(8))
self.assertEqual(0, reader.readBits(8))
def _checkBlock12InStream(self, reader, block1Data, block2Data):
for element in block1Data:
self.assertEqual(element, reader.readBits(8))
self.assertEqual(len(block2Data), reader.readBits(8))
self._checkBlockInStream(reader, block2Data)
BLOCK1_DATA = [1, 2, 3, 4, 5, 6]
BLOCK2_DATA = [10, 9, 8, 7]
| [
"mikulas.rozloznik@eccam.com"
] | mikulas.rozloznik@eccam.com |
7dddb6c500238d0cea8f035c1c7e42e0df68a696 | 87f618ef1f40ccfb357d680f767296df810fe40e | /sorting/insertion_sort.py | 0ea7e7d89029521d5aa2cbeff9a05d6dfa3a2aa0 | [] | no_license | michaeljh619/pygorithms | 0c23416aba83dae5a29fcfd23f3f986334a9de50 | eb1a77ca17f4b9fcb2d8a681b5b86679a1ee6515 | refs/heads/master | 2020-04-17T15:29:09.139126 | 2019-03-08T19:30:14 | 2019-03-08T19:30:14 | 166,700,705 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,504 | py | # import base class
from base_sort import BaseSort
class InsertionSort(BaseSort):
@staticmethod
def sort(list_to_sort_arg, smallest_first=True):
# post filter list
list_to_sort = super(InsertionSort,
InsertionSort).sort(list_to_sort_arg,
smallest_first)
# begin at second index and insert working backwards
for i_insertee in range(1, len(list_to_sort)):
# work backwards to find spot to insert
i_insert_search = i_insertee-1
insertee = list_to_sort[i_insertee]
while i_insert_search >= 0:
# compare and set flag if swap is needed
needs_swap = False
current_element = list_to_sort[i_insert_search]
if smallest_first and insertee < current_element:
needs_swap = True
elif not smallest_first and insertee > current_element:
needs_swap = True
# if needs swap, then swap
if needs_swap:
list_to_sort[i_insert_search+1] = current_element
list_to_sort[i_insert_search] = insertee
# else break, since the insertee is in the right spot
else:
break
# decrement to keep searching backwards
i_insert_search -= 1
# return sorted list
return list_to_sort
| [
"michaeljh619@yahoo.com"
] | michaeljh619@yahoo.com |
6bd5b1eb329ca5ae11e3df252c0b0fd4fe966ec3 | 26be4ca25300fd0fe8a797203e384b71e48ea818 | /plugin_tests/python_client_tests/extractID.py | d8592de368f5e8133d5f25645ed4d2f5c88aa854 | [
"Apache-2.0"
] | permissive | jcfr/slicer_extension_manager | fc223bbb1d16c514212b482eb834616e9c985c83 | 9ab568ad091694e5a9d1079b8aa74b24847a0f8f | refs/heads/master | 2023-05-26T09:42:52.030244 | 2018-02-16T00:24:33 | 2018-02-16T00:24:33 | 121,784,446 | 0 | 0 | null | 2018-02-16T18:16:58 | 2018-02-16T18:16:58 | null | UTF-8 | Python | false | false | 297 | py | #!/usr/bin/env python
import re
def extractID(file):
with open(file, 'r') as f:
res = re.search(r'\([a-z0-9]*\)', f.read())
if res:
id = res.group(0)
return id[1:-1]
if __name__ == '__main__':
from sys import argv
print(extractID(argv[1]))
| [
"pierre.assemat@kitware.com"
] | pierre.assemat@kitware.com |
0344baddd74942f6dfa9887cb43835b91fa0091c | c01c931a3fd279f3f97efc6e57482f7e5ffb261f | /app/movie.py | a9616958495c9e0ad697e8f6a41bbb6a83a08db5 | [] | no_license | Erikun/votatron | 7fe5e8aec9f4c40f587a590a155d68df5ead1908 | c5691c6f864086ac51f8739d919cc790d0e8d8ff | refs/heads/master | 2020-04-16T21:16:53.856300 | 2019-12-31T09:58:36 | 2019-12-31T09:58:36 | 165,919,430 | 1 | 1 | null | 2019-02-10T18:54:26 | 2019-01-15T20:33:55 | Python | UTF-8 | Python | false | false | 562 | py | from flask import Blueprint, render_template, request
from .movie_search import find_movie
movie = Blueprint('movie', __name__, template_folder='templates')
@movie.route('/', methods=["GET", "POST"])
def index():
if request.method == "GET":
# User has not searched yet
return render_template('movie.html.jinja2')
else:
# User has entered a search string
search_string = request.form["search"]
hits = find_movie("title.basics.tsv", search_string)
return render_template("movie.html.jinja2", hits=hits)
| [
"johan.forsberg@gmail.com"
] | johan.forsberg@gmail.com |
577ad28e0d8d54fa64296f446e5c7859be5ef502 | c77d62bca1ac466e8d11da90f90937cf6cfb7a80 | /python/cmd/cmd_file.py | 1fcd85efc6ec3ba25465585a3708c2a31186718e | [
"MIT"
] | permissive | tleonhardt/CodingPlayground | bd77cfc600edc383dedec12f12ae3f3829ad674c | c21bf5a7e3cfcf64d75122ea83f41daa9b2e73d4 | refs/heads/master | 2022-09-24T14:52:09.513488 | 2022-08-25T02:26:17 | 2022-08-25T02:26:17 | 67,437,634 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 809 | py | #!/usr/bin/env python
import cmd
class HelloWorld(cmd.Cmd):
"""Simple command processor example."""
# Disable rawinput module use
use_rawinput = False
# Do not show a prompt after each command read
prompt = ''
def do_greet(self, person):
"""greet [person]
Greet the named person"""
if person:
print("hi, {}".format(person))
else:
print('hi')
def do_EOF(self, line):
""" Called when <Ctrl>-d is pressed to exit """
return True
def postloop(self):
""" Called at end just to ensure final newline when exiting """
print()
if __name__ == '__main__':
import sys
input = open(sys.argv[1], 'rt')
try:
HelloWorld(stdin=input).cmdloop()
finally:
input.close()
| [
"todd.leonhardt@gmail.com"
] | todd.leonhardt@gmail.com |
ab5d49d700059376c4757cd72b98eda77a4aa424 | af7ab3c9d189caf4a22b2a83a03da4560dba6166 | /generated/administration_data/ProjectGroupNVL.py | ecfbd7d52940e8b4e4ed99db2d462d107784fe38 | [] | no_license | Eggwise/unit4_python_api | 43418d16d84abb73ddd843b8b268883f02ff996b | 421195392e408bd9e14bda0851817c5ab835ebaf | refs/heads/master | 2021-01-15T13:43:21.486918 | 2016-09-19T14:10:02 | 2016-09-19T14:10:02 | 68,611,588 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | import requests, json
from generated.base.unit4_base import Unit4Base
class ProjectGroupNVL(Unit4Base):
def get_projectGroupNVL(self, database, ):
request_args = locals()
url_template = 'api/{database}/ProjectGroupNVL'
url = url_template.format(**request_args)
#print(url)
url = self.authorize(url)
response = requests.get(url=url)
print(response.text)
return json.loads(response.text)
| [
"dev@eggwise.com"
] | dev@eggwise.com |
3397b7a6f1fc43ae58b57648be387a037cb8dd77 | bb6bcc19922db5da3a9411e2dff07a0e10f6a760 | /CCC/tally.py | 7626e9dbcd2d3183798162268448550013b77e89 | [] | no_license | sayali7242/Masters-Project | bc38f7fcadf7c14d595a02bf64ef8b24b83df62e | c344821feebff8b1073a5c6884c37607ef2f0b00 | refs/heads/master | 2023-03-04T20:03:08.939360 | 2021-02-09T23:23:07 | 2021-02-09T23:23:07 | 327,779,254 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,791 | py | #This is a multi-threaded implementation of CCC calcualtion using bit-arrays.
import pandas as pd
import numpy as np
import itertools
import sys
import time
import networkx as nx
import threading
df = pd.read_csv('combined_'+sys.argv[1]+'_case_ctrl', header=None)
nanlist = list(open('NaNs_'+sys.argv[1]+'_combined', 'r').readlines())
q=1.5
f0=open(sys.argv[1]+'_g0', 'a')
f1=open(sys.argv[1]+'_g1', 'a')
f2=open(sys.argv[1]+'_g2', 'a')
f3=open(sys.argv[1]+'_g3', 'a')
f4=open(sys.argv[1]+'_g4', 'a')
file_dict={
0: f0,
1: f1,
2: f2,
3: f3,
4: f4
}
#print(np.fromstring(nanlist[0][1:-2], sep=',', dtype=int))
def scoreouts(first, start, end, tnum):
s1 = np.array(list(df.iloc[first][0]), dtype=int)
nan1 = set(np.fromstring(nanlist[first][1:-2], sep=',', dtype=int))
for j in range(start, end):
ij_matrix=np.array([[0,0],[0,0]])
s2 = np.array(list(df.iloc[j][0]), dtype=int)
nan2 = np.fromstring(nanlist[j][1:-2], sep=',', dtype=int)
nans_union = sorted( nan1.union(set(nan2)) )
num_nans=int(len(nans_union)/2)
s1_temp = np.delete(s1, nans_union)
s2_temp = np.delete(s2, nans_union)
s1_1 = s1_temp[::2]
not_s1_1 = (1+s1_1)%2
s1_2 = s1_temp[1::2]
not_s1_2 = (1+s1_2)%2
s2_1 = s2_temp[::2]
not_s2_1 = (1+s2_1)%2
s2_2 = s2_temp[1::2]
not_s2_2 = (1+s2_2)%2
ij_matrix[1][1] += np.dot(s1_1, s2_1)
ij_matrix[1][1] += np.dot(s1_1, s2_2)
ij_matrix[1][1] += np.dot(s1_2, s2_1)
ij_matrix[1][1] += np.dot(s1_2, s2_2)
ij_matrix[0][1] += np.dot(not_s1_1, s2_1)
ij_matrix[0][1] += np.dot(not_s1_1, s2_2)
ij_matrix[0][1] += np.dot(not_s1_2, s2_1)
ij_matrix[0][1] += np.dot(not_s1_2, s2_2)
ij_matrix[1][0] += np.dot(s1_1, not_s2_1)
ij_matrix[1][0] += np.dot(s1_1, not_s2_2)
ij_matrix[1][0] += np.dot(s1_2, not_s2_1)
ij_matrix[1][0] += np.dot(s1_2, not_s2_2)
ij_matrix[0][0] += np.dot(not_s1_1, not_s2_1)
ij_matrix[0][0] += np.dot(not_s1_1, not_s2_2)
ij_matrix[0][0] += np.dot(not_s1_2, not_s2_1)
ij_matrix[0][0] += np.dot(not_s1_2, not_s2_2)
f_1_0 = 1- ((ij_matrix[0][0]+ij_matrix[0][1])/(2*q*((len(s1))-(num_nans*2))))
f_1_1 = 1- ((ij_matrix[1][0]+ij_matrix[1][1])/(2*q*((len(s1))-(num_nans*2))))
f_2_0 = 1- ((ij_matrix[0][0]+ij_matrix[1][0])/(2*q*((len(s1))-(num_nans*2))))
f_2_1 = 1- ((ij_matrix[0][1]+ij_matrix[1][1])/(2*q*((len(s1))-(num_nans*2))))
ij_matrix = ij_matrix/((len(s1)/2)-num_nans)
ij_matrix = ij_matrix/4
ij_matrix[0][0] = ij_matrix[0][0]*f_1_0*f_2_0
ij_matrix[0][1] = ij_matrix[0][1]*f_1_0*f_2_1
ij_matrix[1][0] = ij_matrix[1][0]*f_1_1*f_2_0
ij_matrix[1][1] = ij_matrix[1][1]*f_1_1*f_2_1
CCC = 4.5*ij_matrix
file_dict[tnum].write(str(i) + '_0 ' + str(j) + '_0 ' + str(CCC[0][0]) + '\n')
file_dict[tnum].write(str(i) + '_1 ' + str(j) + '_0 ' + str(CCC[1][0]) + '\n')
file_dict[tnum].write(str(i) + '_0 ' + str(j) + '_1 ' + str(CCC[0][1]) + '\n')
file_dict[tnum].write(str(i) + '_1 ' + str(j) + '_1 ' + str(CCC[1][1]) + '\n')
print('Thread : ' + str(first)+'_'+str(start)+'_'+str(end)+'_'+str(tnum))
popsize=len(df)
for i in range(popsize-1):
threadlist = []
n=popsize-i-1
thr_indices=[i+1, 1+i+int(n/4), 1+i+int(n/2), 1+i+int((3*n)/4), popsize]
if thr_indices[0]!=thr_indices[1]:
threadlist.append(threading.Thread(target=scoreouts, args=(i, thr_indices[0], thr_indices[1], 0)))
threadlist.append(threading.Thread(target=scoreouts, args=(i, thr_indices[1], thr_indices[2], 1)))
threadlist.append(threading.Thread(target=scoreouts, args=(i, thr_indices[2], thr_indices[3], 2)))
threadlist.append(threading.Thread(target=scoreouts, args=(i, thr_indices[3], thr_indices[4], 3)))
for t in threadlist:
t.start()
for t in threadlist:
t.join()
else:
scoreouts(i, i+1, popsize, 4)
f0.close()
f1.close()
f2.close()
f3.close()
f4.close() | [
"sayali.patil@wustl.edu"
] | sayali.patil@wustl.edu |
b50dbcd8ef4284854ab218413bc4eb9053e13840 | bf0d17d391727934f72111d4276edbf826101558 | /second.py | 5af6749d13f830317375974fec6f972837179d0e | [] | no_license | AlSavva/OpenEduCourses | 065c1e89ae0c315a6b3b8e818c3579f6ce95c0c0 | ac82e7e7f9e953019a06645155e6a8a709263ad6 | refs/heads/master | 2023-01-19T22:10:17.237315 | 2020-11-24T19:47:36 | 2020-11-24T19:47:36 | 309,606,657 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 721 | py | # Дан файл, каждая строка которого может содержать одно или несколько целых
# чисел, разделенных одним или несколькими пробелами.
# Вычислите сумму чисел в каждой строке и выведите эти суммы через пробел
# (для каждой строки выводится сумма чисел в этой строке).
with open('input 2.txt', 'r', encoding='utf-8') as infile:
for line in infile:
total = 0
lst = list(line.split())
for n in lst:
if n.isdigit():
total += int(n)
print(total, end=' ')
| [
"savva2003@gmail.com"
] | savva2003@gmail.com |
0d10bdf8508fb39ac7efdf6bd9797a558f348bc8 | bb150497a05203a718fb3630941231be9e3b6a32 | /framework/e2e/jit_legacy/api/test_expm1.py | e7e0445fa77c3a48d93e6a09ca4071cb9a28bda3 | [] | no_license | PaddlePaddle/PaddleTest | 4fb3dec677f0f13f7f1003fd30df748bf0b5940d | bd3790ce72a2a26611b5eda3901651b5a809348f | refs/heads/develop | 2023-09-06T04:23:39.181903 | 2023-09-04T11:17:50 | 2023-09-04T11:17:50 | 383,138,186 | 42 | 312 | null | 2023-09-13T11:13:35 | 2021-07-05T12:44:59 | Python | UTF-8 | Python | false | false | 2,636 | py | #!/bin/env python
# -*- coding: utf-8 -*-
# encoding=utf-8 vi:ts=4:sw=4:expandtab:ft=python
"""
test expm1
"""
import pytest
import paddle
import numpy as np
from jitbase import Runner
from jitbase import randtool
@pytest.mark.jit_expm1_vartype
def test_jit_expm1_base():
"""
@paddle.jit.to_static
def fun(inputs):
return paddle.expm1(inputs)
inputs=np.array([1.5, 2.1, 3.2])
dtype=["float32", "float64", "int32", "int64"]
"""
@paddle.jit.to_static
def func(inputs):
"""
paddle.expm1
"""
return paddle.expm1(inputs)
inps = np.array([1.5, 2.1, 3.2])
runner = Runner(func=func, name="expm1_base", dtype=["float32", "float64"], ftype="func")
runner.add_kwargs_to_dict("params_group1", inputs=inps)
runner.run()
@pytest.mark.jit_expm1_vartype
def test_jit_expm1_1():
"""
@paddle.jit.to_static
def fun(inputs):
return paddle.expm1(inputs)
inputs=np.array([1.5, 2.1, 3.2])
dtype=["float32", "float64", "int32", "int64"]
"""
@paddle.jit.to_static
def func(inputs):
"""
paddle.expm1
"""
a = paddle.expm1(inputs)
return a
inps = np.array([1.5, 2.1, 3.2])
runner = Runner(func=func, name="expm1_1", dtype=["float32", "float64"], ftype="func")
runner.add_kwargs_to_dict("params_group1", inputs=inps)
runner.run()
@pytest.mark.jit_expm1_parameters
def test_jit_expm1_2():
"""
@paddle.jit.to_static
def fun(inputs):
return paddle.expm1(inputs)
inputs=paddle.rand([3, 6, 2, 2, 2, 1, 5, 4, 2])
dtype=["float32"]
"""
@paddle.jit.to_static
def func(inputs):
"""
paddle.expm1
"""
return paddle.expm1(inputs)
inps = randtool("float", -2, 2, shape=[3, 6, 2, 2, 2, 1, 5, 4, 2])
runner = Runner(func=func, name="expm1_2", dtype=["float32"], ftype="func")
runner.add_kwargs_to_dict("params_group1", inputs=inps)
runner.run()
@pytest.mark.jit_expm1_parameters
def test_jit_expm1_3():
"""
@paddle.jit.to_static
def fun(inputs):
return paddle.expm1(inputs)
inputs=paddle.rand([3, 6, 2, 2, 2, 1, 5, 4, 2])
dtype=["float32"]
"""
@paddle.jit.to_static
def func(inputs):
"""
paddle.expm1
"""
return paddle.expm1(inputs)
inps = randtool("float", -2, 2, shape=[3, 6, 2, 2, 2, 1, 5, 4, 2])
runner = Runner(func=func, name="expm1_3", dtype=["float16"], ftype="func")
runner.add_kwargs_to_dict("params_group1", inputs=inps)
if paddle.device.is_compiled_with_cuda() is True:
runner.run()
| [
"noreply@github.com"
] | PaddlePaddle.noreply@github.com |
bb8805a0c049ef3c8a48b08db0c87e10b7aa861f | 962243439b52e556b58ebf28b33b7b47030108de | /exp/exp.py | 38952a71f8afe162cbc2471a4cd209904ecee931 | [] | no_license | shuochen365/FreeViewSynthesis | aa42b448d6dc219741b45a31b84ff53dfd055300 | 36011f10dd51ed29eb9893e3471f4a62833ce23c | refs/heads/master | 2023-01-03T03:54:55.797875 | 2020-10-27T08:03:10 | 2020-10-27T08:03:10 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 16,299 | py | import torch
import numpy as np
import sys
import logging
from pathlib import Path
import PIL
import dataset
import modules
sys.path.append("../")
import co
import ext
import config
class Worker(co.mytorch.Worker):
def __init__(
self,
train_dsets,
eval_dsets="",
train_n_nbs=1,
train_nbs_mode="argmax",
train_scale=1,
train_patch=192,
eval_n_nbs=1,
eval_scale=-1,
n_train_iters=750000,
num_workers=8,
**kwargs,
):
super().__init__(
n_train_iters=n_train_iters,
num_workers=num_workers,
train_device=config.train_device,
eval_device=config.eval_device,
**kwargs,
)
self.train_dsets = train_dsets
self.eval_dsets = eval_dsets
self.train_n_nbs = train_n_nbs
self.train_nbs_mode = train_nbs_mode
self.train_scale = train_scale
self.train_patch = train_patch
self.eval_n_nbs = eval_n_nbs
self.eval_scale = train_scale if eval_scale <= 0 else eval_scale
self.bwd_depth_thresh = 0.01
self.invalid_depth_to_inf = True
self.train_loss = modules.VGGPerceptualLoss()
if config.lpips_root:
self.eval_loss = modules.LPIPS()
else:
self.eval_loss = self.train_loss
def get_pw_dataset(
self,
*,
name,
ibr_dir,
im_size,
patch,
pad_width,
n_nbs,
nbs_mode,
train,
tgt_ind=None,
n_max_sources=-1,
):
logging.info(f" create dataset for {name}")
im_paths = sorted(ibr_dir.glob(f"im_*.png"))
im_paths += sorted(ibr_dir.glob(f"im_*.jpg"))
im_paths += sorted(ibr_dir.glob(f"im_*.jpeg"))
dm_paths = sorted(ibr_dir.glob("dm_*.npy"))
count_paths = sorted(ibr_dir.glob("count_*.npy"))
counts = []
for count_path in count_paths:
counts.append(np.load(count_path))
counts = np.array(counts)
Ks = np.load(ibr_dir / "Ks.npy")
Rs = np.load(ibr_dir / "Rs.npy")
ts = np.load(ibr_dir / "ts.npy")
if tgt_ind is None:
tgt_ind = np.arange(len(im_paths))
src_ind = np.arange(len(im_paths))
else:
src_ind = [
idx for idx in range(len(im_paths)) if idx not in tgt_ind
]
counts = counts[tgt_ind]
counts = counts[:, src_ind]
dset = dataset.Dataset(
name=name,
tgt_im_paths=[im_paths[idx] for idx in tgt_ind],
tgt_dm_paths=[dm_paths[idx] for idx in tgt_ind],
tgt_Ks=Ks[tgt_ind],
tgt_Rs=Rs[tgt_ind],
tgt_ts=ts[tgt_ind],
tgt_counts=counts,
src_im_paths=[im_paths[idx] for idx in src_ind],
src_dm_paths=[dm_paths[idx] for idx in src_ind],
src_Ks=Ks[src_ind],
src_Rs=Rs[src_ind],
src_ts=ts[src_ind],
im_size=im_size,
pad_width=pad_width,
patch=patch,
n_nbs=n_nbs,
nbs_mode=nbs_mode,
bwd_depth_thresh=self.bwd_depth_thresh,
invalid_depth_to_inf=self.invalid_depth_to_inf,
train=train,
)
return dset
def get_track_dataset(
self,
name,
src_ibr_dir,
tgt_ibr_dir,
n_nbs,
im_size=None,
pad_width=16,
patch=None,
nbs_mode="argmax",
train=False,
):
logging.info(f" create dataset for {name}")
src_im_paths = sorted(src_ibr_dir.glob(f"im_*.png"))
src_im_paths += sorted(src_ibr_dir.glob(f"im_*.jpg"))
src_im_paths += sorted(src_ibr_dir.glob(f"im_*.jpeg"))
src_dm_paths = sorted(src_ibr_dir.glob("dm_*.npy"))
src_Ks = np.load(src_ibr_dir / "Ks.npy")
src_Rs = np.load(src_ibr_dir / "Rs.npy")
src_ts = np.load(src_ibr_dir / "ts.npy")
tgt_im_paths = sorted(tgt_ibr_dir.glob(f"im_*.png"))
tgt_im_paths += sorted(tgt_ibr_dir.glob(f"im_*.jpg"))
tgt_im_paths += sorted(tgt_ibr_dir.glob(f"im_*.jpeg"))
if len(tgt_im_paths) == 0:
tgt_im_paths = None
tgt_dm_paths = sorted(tgt_ibr_dir.glob("dm_*.npy"))
count_paths = sorted(tgt_ibr_dir.glob("count_*.npy"))
counts = []
for count_path in count_paths:
counts.append(np.load(count_path))
counts = np.array(counts)
tgt_Ks = np.load(tgt_ibr_dir / "Ks.npy")
tgt_Rs = np.load(tgt_ibr_dir / "Rs.npy")
tgt_ts = np.load(tgt_ibr_dir / "ts.npy")
dset = dataset.Dataset(
name=name,
tgt_im_paths=tgt_im_paths,
tgt_dm_paths=tgt_dm_paths,
tgt_Ks=tgt_Ks,
tgt_Rs=tgt_Rs,
tgt_ts=tgt_ts,
tgt_counts=counts,
src_im_paths=src_im_paths,
src_dm_paths=src_dm_paths,
src_Ks=src_Ks,
src_Rs=src_Rs,
src_ts=src_ts,
im_size=im_size,
pad_width=pad_width,
patch=patch,
n_nbs=n_nbs,
nbs_mode=nbs_mode,
bwd_depth_thresh=self.bwd_depth_thresh,
invalid_depth_to_inf=self.invalid_depth_to_inf,
train=train,
)
return dset
def get_train_set_tat(self, dset):
dense_dir = config.tat_root / dset / "dense"
ibr_path = dense_dir / f"ibr3d_pw_{self.train_scale:.2f}"
dset = self.get_pw_dataset(
name=f'tat_{dset.replace("/", "_")}',
ibr_dir=ibr_dir,
im_size=None,
pad_width=16,
patch=(self.train_patch, self.train_patch),
n_nbs=self.train_n_nbs,
nbs_mode=self.train_nbs_mode,
train=True,
)
return dset
def get_train_set(self):
logging.info("Create train datasets")
dsets = co.mytorch.MultiDataset(name="train")
if "tat" in self.train_dsets:
for dset in config.tat_train_sets:
dsets.append(self.get_train_set_tat(dset))
return dsets
def get_eval_set_tat(self, dset, mode):
dense_dir = config.tat_root / dset / "dense"
ibr_dir = dense_dir / f"ibr3d_pw_{self.eval_scale:.2f}"
if mode == "all":
tgt_ind = None
elif mode == "subseq":
tgt_ind = config.tat_eval_tracks[dset]
else:
raise Exception("invalid mode for get_eval_set_tat")
dset = self.get_pw_dataset(
name=f'tat_{mode}_{dset.replace("/", "_")}',
ibr_dir=ibr_dir,
im_size=None,
pad_width=16,
patch=None,
n_nbs=self.eval_n_nbs,
nbs_mode="argmax",
tgt_ind=tgt_ind,
train=False,
)
return dset
def get_eval_sets(self):
logging.info("Create eval datasets")
eval_sets = []
if "tat" in self.eval_dsets:
for dset in config.tat_eval_sets:
dset = self.get_eval_set_tat(dset, "all")
eval_sets.append(dset)
for dset in self.eval_dsets:
if dset.startswith("tat-scene-"):
dset = dset[len("tat-scene-") :]
dset = self.get_eval_set_tat(dset, "all")
eval_sets.append(dset)
if "tat-subseq" in self.eval_dsets:
for dset in config.tat_eval_sets:
dset = self.get_eval_set_tat(dset, "subseq")
eval_sets.append(dset)
for dset in eval_sets:
dset.logging_rate = 1
dset.vis_ind = np.arange(len(dset))
return eval_sets
def copy_data(self, data, device, train):
self.data = {}
for k, v in data.items():
self.data[k] = v.to(device).requires_grad_(requires_grad=False)
def net_forward(self, net, train, iter):
return net(**self.data)
def loss_forward(self, output, train, iter):
errs = {}
tgt = self.data["tgt"]
est = output["out"]
est = est[..., : tgt.shape[-2], : tgt.shape[-1]]
if train:
for lidx, loss in enumerate(self.train_loss(est, tgt)):
errs[f"rgb{lidx}"] = loss
else:
est = torch.clamp(est, -1, 1)
est = 255 * (est + 1) / 2
est = est.type(torch.uint8)
est = est.type(torch.float32)
est = (est / 255 * 2) - 1
errs["rgb"] = self.eval_loss(est, tgt)
output["out"] = est
return errs
def callback_eval_start(self, **kwargs):
self.metric = None
def im_to2np(self, im):
im = im.detach().to("cpu").numpy()
im = (np.clip(im, -1, 1) + 1) / 2
im = im.transpose(0, 2, 3, 1)
return im
    def callback_eval_add(self, **kwargs):
        """Accumulate metrics for one eval batch and dump debug images.

        Expects kwargs: output (net output dict), batch_idx, iter, eval_set.
        Lazily creates the metric set on the first batch, then writes one
        JPEG per selected sample under the experiment output directory.
        """
        output = kwargs["output"]
        batch_idx = kwargs["batch_idx"]
        iter = kwargs["iter"]
        eval_set = kwargs["eval_set"]
        eval_set_name = eval_set.name.replace("/", "_")
        eval_set_name = f"{eval_set_name}_{self.eval_scale}"
        ta = self.im_to2np(self.data["tgt"])
        es = self.im_to2np(output["out"])
        # record metrics (created lazily on the first batch of the pass)
        if self.metric is None:
            self.metric = {}
            self.metric["rgb"] = co.metric.MultipleMetric(
                metrics=[
                    co.metric.DistanceMetric(p=1, vec_length=3),
                    co.metric.PSNRMetric(),
                    co.metric.SSIMMetric(),
                ]
            )
        self.metric["rgb"].add(es, ta)
        # write debug images for the samples listed in eval_set.vis_ind
        out_dir = self.exp_out_root / f"{eval_set_name}_n{self.eval_n_nbs}"
        out_dir.mkdir(parents=True, exist_ok=True)
        for b in range(ta.shape[0]):
            # global sample index across batches
            bidx = batch_idx * ta.shape[0] + b
            if bidx not in eval_set.vis_ind:
                continue
            tgt_dm = self.data["tgt_dm"][b].detach().to("cpu").numpy()
            out_im = (255 * es[b]).astype(np.uint8)
            # White out pixels with invalid depth when the set asks for it.
            if hasattr(eval_set, "mask_via_depth") and eval_set.mask_via_depth:
                out_im[tgt_dm <= 0] = 255
                out_im[tgt_dm >= 1e6] = 255
            # PIL.Image.fromarray(out_im).save(out_dir / f"{bidx:04d}_es.png")
            PIL.Image.fromarray(out_im).save(out_dir / f"s{bidx:04d}_es.jpg")
            # out_im = (255 * ta[b]).astype(np.uint8)
            # PIL.Image.fromarray(out_im).save(out_dir / f"{bidx:04d}_ta.png")
            # tgt_dm[tgt_dm >= 1e9] = np.NaN
            # tgt_dm[tgt_dm <= 0] = np.NaN
            # tgt_dm = co.plt.image_colorcode(tgt_dm)
            # diff = np.abs(ta[b] - es[b]).max(axis=2)
            # diff = co.plt.image_colorcode(diff, vmin=0, vmax=50 / 255)
            # valid_depth_mask = (
            #     self.data["valid_depth_masks"][b].detach().to("cpu").numpy()
            # )
            # valid_depth_mask = np.clip(valid_depth_mask.sum(axis=0), 0, 1)
            # valid_depth_mask = co.plt.image_colorcode(
            #     valid_depth_mask[0], vmin=0, vmax=1
            # )
            # valid_map_mask = (
            #     self.data["valid_map_masks"][b].detach().to("cpu").numpy()
            # )
            # valid_map_mask = np.clip(valid_map_mask.sum(axis=0), 0, 1)
            # valid_map_mask = co.plt.image_colorcode(
            #     valid_map_mask[0], vmin=0, vmax=1
            # )
            # out_im = co.plt.image_cat2(
            #     [
            #         [ta[b], es[b]],
            #         [tgt_dm, diff],
            #         [valid_depth_mask, valid_map_mask],
            #     ]
            # )
            # out_im = (255 * out_im).astype(np.uint8)
            # PIL.Image.fromarray(out_im).save(out_dir / f"{bidx:04d}.jpg")
    def callback_eval_stop(self, **kwargs):
        """Flush accumulated metrics at the end of an evaluation pass.

        Records the summed mean loss and every metric value via
        self.metric_add_eval, tagged with the eval-set name and method id.
        """
        eval_set = kwargs["eval_set"]
        iter = kwargs["iter"]
        mean_loss = kwargs["mean_loss"]
        eval_set_name = eval_set.name.replace("/", "_")
        eval_set_name = f"{eval_set_name}_{self.eval_scale}"
        # Method id distinguishes runs with different neighbor counts.
        method = self.experiment_name + f"_n{self.eval_n_nbs}"
        for key in self.metric:
            self.metric_add_eval(
                iter,
                eval_set_name,
                f"loss_{key}",
                sum(np.asarray(mean_loss[key]).ravel()),
                method=method,
            )
            metric = self.metric[key]
            logging.info(f"\n{key}\n{metric}")
            for k, v in metric.items():
                self.metric_add_eval(
                    iter, eval_set_name, f"{k}", v, method=method
                )
if __name__ == "__main__":
    # Command-line entry point: parse options, configure a Worker, pick the
    # network architecture by name, and launch training/evaluation.
    parser = co.mytorch.get_parser()
    parser.add_argument("--net", type=str, required=True)
    parser.add_argument("--train-dsets", nargs="+", type=str, default=["tat"])
    parser.add_argument(
        "--eval-dsets", nargs="+", type=str, default=["tat", "tat-subseq"]
    )
    parser.add_argument("--train-n-nbs", type=int, default=5)
    parser.add_argument("--train-scale", type=float, default=0.25)
    parser.add_argument("--train-patch", type=int, default=192)
    parser.add_argument("--eval-n-nbs", type=int, default=5)
    parser.add_argument("--eval-scale", type=float, default=-1)
    parser.add_argument("--log-debug", type=str, nargs="*", default=[])
    args = parser.parse_args()
    # Experiment name encodes the main hyper-parameters for bookkeeping.
    experiment_name = f"{'+'.join(args.train_dsets)}_nbs{args.train_n_nbs}_s{args.train_scale}_p{args.train_patch}_{args.net}"
    worker = Worker(
        experiments_root=args.experiments_root,
        experiment_name=experiment_name,
        train_dsets=args.train_dsets,
        eval_dsets=args.eval_dsets,
        train_n_nbs=args.train_n_nbs,
        train_scale=args.train_scale,
        train_patch=args.train_patch,
        eval_n_nbs=args.eval_n_nbs,
        eval_scale=args.eval_scale,
    )
    worker.log_debug = args.log_debug
    worker.save_frequency = co.mytorch.Frequency(hours=2)
    worker.eval_frequency = co.mytorch.Frequency(hours=2)
    worker.train_batch_size = 1
    worker.eval_batch_size = 1
    worker.train_batch_acc_steps = 1
    worker_objects = co.mytorch.WorkerObjects(
        optim_f=lambda net: torch.optim.Adam(net.parameters(), lr=1e-4)
    )
    # Architecture selection: "fixed_*" variants require exactly 4 views.
    if args.net == "fixed_identity_unet4.64.3":
        worker_objects.net_f = lambda: modules.get_fixed_net(
            enc_net="identity", dec_net="unet4.64.3", n_views=4
        )
        worker.train_n_nbs = 4
        worker.eval_n_nbs = 4
    elif args.net == "fixed_vgg16unet3_unet4.64.3":
        worker_objects.net_f = lambda: modules.get_fixed_net(
            enc_net="vgg16unet3", dec_net="unet4.64.3", n_views=4
        )
        worker.train_n_nbs = 4
        worker.eval_n_nbs = 4
    elif args.net == "aggr_vgg16unet3_unet4.64.3_mean":
        worker_objects.net_f = lambda: modules.get_aggr_net(
            enc_net="vgg16unet3", merge_net="unet4.64.3", aggr_mode="mean"
        )
    elif args.net == "rnn_identity_gruunet4.64.3":
        worker_objects.net_f = lambda: modules.get_rnn_net(
            enc_net="identity", merge_net="gruunet4.64.3"
        )
    elif args.net == "rnn_vgg16unet3_gruunet4.64.3_single":
        worker_objects.net_f = lambda: modules.get_rnn_net(
            enc_net="vgg16unet3", merge_net="gruunet4.64.3", mode="single"
        )
    elif args.net == "rnn_vgg16unet3_unet4.64.3":
        worker_objects.net_f = lambda: modules.get_rnn_net(
            enc_net="vgg16unet3", merge_net="unet4.64.3"
        )
    elif args.net == "rnn_vgg16unet3_gruunet4.64.3_nomasks":
        worker_objects.net_f = lambda: modules.get_rnn_net(
            enc_net="vgg16unet3", merge_net="gruunet4.64.3", cat_masks=False
        )
    elif args.net == "rnn_vgg16unet3_gruunet4.64.3_noinfdepth":
        worker_objects.net_f = lambda: modules.get_rnn_net(
            enc_net="vgg16unet3", merge_net="gruunet4.64.3"
        )
        worker.invalid_depth_to_inf = False
    elif args.net == "rnn_vgg16unet3_gruunet4.64.3":
        worker_objects.net_f = lambda: modules.get_rnn_net(
            enc_net="vgg16unet3", merge_net="gruunet4.64.3"
        )
    else:
        raise Exception("invalid net in exp.py")
    worker.do(args, worker_objects)
| [
"gernotriegler@gmail.com"
] | gernotriegler@gmail.com |
8bc2a9f9de60ea39da1602e62b943f17a7d40338 | 084684d625db6c51c2c1d6a7f9c163d621dc9f11 | /test/testngpp-1.1/scripts/testngppgen/DataProviderParser.py | 62620d46ab71798ca9019bb0c84e44d4bd142160 | [
"MIT",
"GPL-3.0-or-later",
"LGPL-3.0-only"
] | permissive | mswdwk/code_test_records | a9938f356d140aa921402d4f049d408bc9c6d2a5 | aec5c92d48b07c834b66d4f8b17a789e9bac7531 | refs/heads/master | 2023-08-23T09:39:01.706418 | 2023-08-17T06:10:01 | 2023-08-17T06:10:01 | 122,560,853 | 1 | 0 | MIT | 2023-02-25T00:13:07 | 2018-02-23T02:04:29 | C++ | UTF-8 | Python | false | false | 4,022 | py | #!/usr/bin/python
from Message import *
from Phase1Result import *
from DataProvider import DataProvider
import re
##########################################################
class DataProviderParser:
    """Character-level parser for a DATA_PROVIDER(...) definition.

    Consumes the definition one character at a time (via parse_line /
    handle_char), tracking unmatched parentheses until the closing ')' and
    the terminating ';' are seen, then splits the collected text into
    DATA_GROUP parameter lists and yields a DataProvider.
    """
    #######################################################
    def __init__(self, provider, file, line_number):
        # provider: sequence of (name, ?, remaining-line-text); file/line
        # are kept only for error reporting.
        self.name = provider[0]
        self.file = file
        self.line = line_number
        self.end = None
        self.done = None
        # The opening '(' of the provider was already consumed by the caller.
        self.numberOfUnclosedParens = 1
        self.chars = ""
        self.params = []
        self.data_provider = None
        self.number_of_groups = 0
        self.parse_line(Unknown(line_number, provider[2]))
    #######################################################
    def should_parse_sub_scopes(self):
        # A data provider never contains nested scopes.
        return False
    #######################################################
    def verify_scope(self, scope):
        return False
    #######################################################
    def get_container(self):
        return None
    #######################################################
    def get_elem_parser(self, scope, file, line):
        return None
    #######################################################
    def __handle_space(self, line, c):
        # Whitespace is accepted anywhere; report it as handled.
        if c.isspace():
            return True
        return None
    #######################################################
    def __parse_param(self, param):
        # Extract the text between the parentheses of one DATA_GROUP(...).
        matched = re.match(r'\s*\(\s*(?P<param>.+)\s*\)\s*$', param)
        if not matched:
            return
        self.params.append(matched.group("param"))
    #######################################################
    def __parse_data_groups(self):
        # Split the accumulated definition into its DATA_GROUP(...) pieces
        # and sanity-check that every group parsed successfully.
        matched = re.match(r'(?P<groups>.+)\)\s*;\s*$', self.chars)
        if not matched:
            raw_fatal(self.file, self.line, "grammar error in data provider definition 1")
        groups = matched.group("groups")
        self.number_of_groups = len(re.findall(r',\s*DATA_GROUP\s*\(', groups))
        raw_params = re.split(r'\s*,\s*DATA_GROUP\s*', groups)
        for param in raw_params:
            self.__parse_param(param)
        if len(self.params) != self.number_of_groups:
            raw_fatal(self.file, self.line, "grammar error in data provider definition 2")
    #######################################################
    def __handle_end(self, line, c):
        # After the closing ')', only the terminating ';' is legal.
        if not self.end:
            return None
        if c == ';':
            self.done = True
            self.__parse_data_groups()
            return True
        fatal(self.file, line, "unexpected char '" + c + "' in data provider definition")
    #######################################################
    def __handle_done(self, line, c):
        # Nothing may follow a completed definition.
        if not self.done:
            return None
        fatal(self.file, line, "unexpected char '" + c + "' in data provider definition")
    #######################################################
    def __handle_others(self, line, c):
        # Track parenthesis nesting; depth 0 marks the end of the definition.
        if c == '(':
            self.numberOfUnclosedParens += 1
        elif c == ')':
            self.numberOfUnclosedParens -= 1
            if self.numberOfUnclosedParens == 0:
                self.end = True
    #######################################################
    def handle_char(self, line, c):
        # Accumulate the raw text, then dispatch to the first handler that
        # claims the character (short-circuit 'or' chain).
        self.chars += c
        self.__handle_space(line, c) or \
           self.__handle_end(line, c) or \
           self.__handle_done(line, c) or \
           self.__handle_others(line, c)
    #######################################################
    def is_done(self):
        # Returns the finished DataProvider, or None while still parsing.
        if self.done: return DataProvider(self.name, self.params)
        return None
    #######################################################
    def parse_line(self, line):
        if self.done:
            fatal(self.file, line, "internal error while parsing a data provider")
        for c in line.get_content():
            self.handle_char(line, c)
        return self.is_done()
    #######################################################
    def handle_tag(self, tag):
        warning(self.file, tag, "annotation is not allowed in data provider definition")
##########################################################
| [
"mswdwk@outlook.com"
] | mswdwk@outlook.com |
46927816b49f900a06d9d75dea661ad5d224e625 | 5a64af98434fa5802150e0bbc72d0a4c2aac49bb | /test_z_flow_three.py | ee41772d2677b89026b9375c1b21165d5050d736 | [] | no_license | doujs666/zsph_ft_ui | cf59b5e519d7c058bf2d272c120570bd44814790 | 9904e47cfc4bce8759b4ff158e8a85391d8c7772 | refs/heads/master | 2022-02-14T03:47:30.877904 | 2019-07-29T06:33:15 | 2019-07-29T06:33:15 | 198,366,492 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,371 | py | # coding=UTF-8
from base import BaseSeleniumTestCase
from page.total_flow import TotalFlow
from page.loan_list import LoanList
from page.contract_list import ContractList
from page.credit_audit_loan_list import CreditAuditLoanList
from page.historical_list import HistoricalList
from page.index import Index
from utilities.my_sql import select_customer, clear_customer, clear_credit_report, clear_info_verify, clear_contract, \
clear_sign_page, get_credit_person_login_name
import time
class TestTotalFlow(BaseSeleniumTestCase):
    # Scenario: the credit-audit supervisor rejects the application, which is
    # then reconsidered, approved, signed and finally disbursed.
    customer_name = u'张博5'
    card_no = '131023197908293345'
    mobile = '13522241005'
    repulse_status = 'repulse'
    pass_status = 'pass'
    risk_management = 'gaohf'
    judge_manager = 'zhangb'
    credit_person = 'wanqh'
    manager_login_name = 'gesy'
    loan_manager = 'zhangy'
    super_script_manager = 'dulr'
    review_manager = 'zhangbb'
    approved_product = '3'
    bank_number = '6215590200000919787'
    project_number = '998556'
    def test_loan_status(self):
        '''Credit-audit supervisor rejects, then the loan is reconsidered,
        approved and disbursed end to end.'''
        # Create a new customer
        TotalFlow(self.selenium).risk_management_new_customer(self.risk_management, self.customer_name, self.card_no,
                                                              self.mobile)
        Index(self.selenium).click_user_list().click_user_quit()
        # Risk-control officer review
        TotalFlow(self.selenium).risk_management_submit(self.risk_management, self.customer_name)
        Index(self.selenium).click_user_list().click_user_quit()
        # # Credit-audit manager review (disabled)
        # TotalFlow(self.selenium).judge_manager_allocation_role(self.judge_manager, self.customer_name)
        # status1 = CreditAuditLoanList(self.selenium).get_loan_status(self.customer_name, self.judge_manager)
        # self.assertEqual(status1, u'审批中')
        # Index(self.selenium).click_user_list().click_user_quit()
        # Credit-audit officer review
        get_customer_id = select_customer(self.customer_name)['id']
        time.sleep(2)
        credit_person_login_name = get_credit_person_login_name(get_customer_id)['login_name']
        TotalFlow(self.selenium).risk_management_other(credit_person_login_name, self.customer_name)
        Index(self.selenium).click_user_list().click_user_quit()
        TotalFlow(self.selenium).risk_management_submit_audit(self.credit_person, self.customer_name, self.pass_status)
        Index(self.selenium).click_user_list().click_user_quit()
        # Credit-audit supervisor review (rejects)
        TotalFlow(self.selenium).manager_contract_form(self.manager_login_name, self.customer_name,
                                                       self.approved_product, self.repulse_status)
        status2 = HistoricalList(self.selenium).get_loan_status(self.customer_name, self.manager_login_name)
        self.assertEqual(status2, u'拒绝')
        Index(self.selenium).click_user_list().click_user_quit()
        # Risk-control manager requests reconsideration
        TotalFlow(self.selenium).certificates_flow(self.review_manager, self.customer_name)
        status3 = LoanList(self.selenium).get_loan_status(self.customer_name)
        self.assertEqual(status3, u'复核中')
        Index(self.selenium).click_user_list().click_user_quit()
        # Credit-audit supervisor review (approves after reconsideration)
        TotalFlow(self.selenium).manager_contract_form(self.manager_login_name, self.customer_name,
                                                       self.approved_product, self.pass_status)
        status2 = HistoricalList(self.selenium).get_loan_status(self.customer_name, self.manager_login_name)
        self.assertEqual(status2, u'待签约')
        Index(self.selenium).click_user_list().click_user_quit()
        # Risk-control officer submits the contract
        TotalFlow(self.selenium).submit_sign_page(self.risk_management, self.customer_name, self.bank_number)
        status3 = LoanList(self.selenium).get_loan_status(self.customer_name)
        self.assertEqual(status3, u'合同审核中')
        approved_product1 = LoanList(self.selenium).get_approved_product(self.customer_name)
        self.assertEqual(approved_product1, u'公积金类')
        Index(self.selenium).click_user_list().click_user_quit()
        # Contract officer review
        TotalFlow(self.selenium).loan_sign_page(self.loan_manager, self.customer_name)
        status4 = ContractList(self.selenium).get_loan_status(self.customer_name)
        self.assertEqual(status4, u'放款中')
        Index(self.selenium).click_user_list().click_user_quit()
        # Listing officer review
        TotalFlow(self.selenium).super_script_flow(self.super_script_manager, self.customer_name, self.project_number)
        Index(self.selenium).click_user_list().click_user_quit()
        # Contract officer disburses the loan
        TotalFlow(self.selenium).make_loan_sign_page(self.loan_manager, self.customer_name)
        status6 = ContractList(self.selenium).get_loan_status(self.customer_name)
        self.assertEqual(status6, u'已放款')
        Index(self.selenium).click_user_list().click_user_quit()
    def tearDown(self):
        # Remove every database record created by the flow so the scenario
        # can be re-run with the same customer data.
        super(TestTotalFlow, self).tearDown()
        customer_id = select_customer(self.customer_name)['id']
        clear_credit_report(customer_id)
        clear_info_verify(customer_id)
        clear_contract(customer_id)
        clear_sign_page(customer_id)
        clear_customer(customer_id)
| [
"281545444@qq.com"
] | 281545444@qq.com |
bebfef255f99252a4039a33872bee1b9d8ed755b | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p03303/s057999643.py | 9314276089795c5e76ee4d045719ed4ff71127cb | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 128 | py | from math import ceil
S = input()
w = int(input())
# The answer is every w-th character of S starting at index 0.  Slicing
# replaces the original loop, which rebuilt the string with "".join on each
# kept character (quadratic in the worst case).
print(S[::w])
| [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
91ac74029c7acf545a80322780898230f2e207a8 | 4217dbba217f6ec58ba631ce499c53a65260f3af | /Lab Activity/Murtadha/Plots/stackbar.py | 4dbfd094a69c69cb306dd29f674bf5ea7de8fee9 | [] | no_license | XCaramellX/ITSC-3155 | cfb1044fa1251404a29f428b39d9558f7fa8eca4 | 425d4b5e2ff9f9f121de1f591bb18e36713a598a | refs/heads/main | 2023-08-28T00:49:55.804588 | 2021-11-09T21:32:00 | 2021-11-09T21:32:00 | 404,490,011 | 0 | 0 | null | 2021-11-09T21:32:00 | 2021-09-08T20:44:12 | HTML | UTF-8 | Python | false | false | 1,662 | py | import pandas as pd
import plotly.offline as pyo
import plotly.graph_objs as go
# Load the cumulative coronavirus case counts from the Datasets folder.
df = pd.read_csv('../Datasets/CoronavirusTotal.csv')
# Strip surrounding whitespace from every string column to avoid mismatches.
df = df.apply(lambda x: x.str.strip() if x.dtype == "object" else x)
# Derive the still-unrecovered cases from the other three columns.
df['Unrecovered'] = df['Confirmed'] - df['Deaths'] - df['Recovered']
# Exclude China from the data frame.
df = df[(df['Country'] != 'China')]
# Sum the case counts per country.
new_df = df.groupby(['Country']).agg(
    {'Confirmed': 'sum', 'Deaths': 'sum', 'Recovered': 'sum', 'Unrecovered': 'sum'}).reset_index()
# Keep the 20 countries with the most confirmed cases.
new_df = new_df.sort_values(by=['Confirmed'], ascending=[False]).head(20).reset_index()
# Prepare the traces: one Bar per outcome.  The y values come from the
# aggregated columns and each trace's colour is set via its marker attribute.
trace1 = go.Bar(x=new_df['Country'], y=new_df['Unrecovered'], name='Unrecovered', marker={'color': '#CD7F32'})
trace2 = go.Bar(x=new_df['Country'], y=new_df['Recovered'], name='Recovered', marker={'color': '#9EA0A1'})
trace3 = go.Bar(x=new_df['Country'], y=new_df['Deaths'], name='Deaths', marker={'color': '#FFD700'})
data = [trace1, trace2, trace3]
# Prepare the layout: stack the three traces per country.
layout = go.Layout(title='Corona Virus Cases in the first 20 country expect China', xaxis_title="Country",
                   yaxis_title="Number of cases", barmode='stack')
# Plot the figure and save it to an html file.
fig = go.Figure(data=data, layout=layout)
pyo.plot(fig, filename='stackbarchart.html')
| [
"45076915+MurtadhaM@users.noreply.github.com"
] | 45076915+MurtadhaM@users.noreply.github.com |
844c023a3f996d35ed9bbbb6c3c0bf3c3828dbdf | 26ae1682ae6c063e1bf0f98144845da05ef285c9 | /FCN/Assignments/Upreti-Divyansh-HW3/PartC/start.py | 81a9f2b6eea9d0abda6762951d7ea192b7f14b4a | [] | no_license | dupreti93/Stony-Brook-University | b1af66999703c87ae5c437effd7a0c5747bb4ce1 | 2bc2c959c8fcac56f5b7f7dd710b28b6d3f1694f | refs/heads/master | 2023-07-12T02:44:06.651759 | 2019-05-04T23:07:51 | 2019-05-04T23:07:51 | 184,950,745 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,299 | py | #!/usr/bin/python
"""
Example network of Quagga routers
(QuaggaTopo + QuaggaService)
"""
import sys
import atexit
# patch isShellBuiltin
import mininet.util
import mininext.util
mininet.util.isShellBuiltin = mininext.util.isShellBuiltin
sys.modules['mininet.util'] = mininet.util
from mininet.util import dumpNodeConnections
from mininet.node import OVSController
from mininet.log import setLogLevel, info
from mininext.cli import CLI
from mininext.net import MiniNExT
from topo import QuaggaTopo
net = None
def startNetwork():
    "instantiates a topo, then starts the network and prints debug information"
    info('** Creating Quagga network topology\n')
    topo = QuaggaTopo()
    info('** Starting the network\n')
    # Kept in the module-level `net` so stopNetwork() can tear it down.
    global net
    net = MiniNExT(topo, controller=OVSController)
    net.start()
    info('** Dumping host connections\n')
    dumpNodeConnections(net.hosts)
    # Enable IPv4 forwarding on every router and host, then assign the
    # inter-router interface addresses.
    net.get("R1").cmd("sysctl net.ipv4.ip_forward=1")
    net.get("R2").cmd("sysctl net.ipv4.ip_forward=1")
    net.get("R3").cmd("sysctl net.ipv4.ip_forward=1")
    net.get("R4").cmd("sysctl net.ipv4.ip_forward=1")
    net.get("H1").cmd("sysctl net.ipv4.ip_forward=1")
    net.get("H2").cmd("sysctl net.ipv4.ip_forward=1")
    net.get("R1").cmd("ifconfig R1-eth1 192.0.1.2/24")
    net.get("R1").cmd("ifconfig R1-eth2 194.0.1.1/24")
    net.get("R2").cmd("ifconfig R2-eth1 192.0.1.1/24")
    net.get("R3").cmd("ifconfig R3-eth1 194.0.1.2/24")
    net.get("R4").cmd("ifconfig R4-eth1 195.0.1.2/24")
    net.get("R4").cmd("ifconfig R4-eth2 193.0.1.2/24")
    #net.get("H1").cmd("route add default gw 190.0.1.2")
    #net.get("H2").cmd("route add default gw 191.0.1.2")
    info('** Testing network connectivity\n')
    net.ping(net.hosts)
    info('** Dumping host processes\n')
    for host in net.hosts:
        host.cmdPrint("ps aux")
    # Drop into the interactive mininet CLI; blocks until the user exits.
    info('** Running CLI\n')
    CLI(net)
def stopNetwork():
    "stops a network (only called on a forced cleanup)"
    # `net` is the module-level handle populated by startNetwork().
    if net is not None:
        info('** Tearing down Quagga network\n')
        net.stop()
if __name__ == '__main__':
    # Force cleanup on exit by registering a cleanup function
    atexit.register(stopNetwork)
    # Tell mininet to print useful information
    setLogLevel('info')
    # Build, exercise and hand over the emulated network to the CLI.
    startNetwork()
| [
"noreply@github.com"
] | dupreti93.noreply@github.com |
9b1c65c72f1bdc01f7f7853556d0f7f3d2eab80c | 5d65097828ce3029c458126f7ce93facb80944dd | /demo/urls.py | 276c4aa60144f8eab1437b63b68e0ac6123c0d7f | [
"CECILL-B",
"LicenseRef-scancode-cecill-b-en"
] | permissive | breard-r/django-npb | 45c14524cedaa4e4b7ca09403b2bea42fbb535db | c70e1677e40a1a27d357377f3156ffea051ba56c | refs/heads/main | 2021-11-14T17:52:00.295839 | 2021-09-15T10:40:13 | 2021-09-15T10:40:13 | 121,009,317 | 1 | 0 | NOASSERTION | 2021-05-04T04:10:59 | 2018-02-10T11:22:57 | Python | UTF-8 | Python | false | false | 1,128 | py | """demo URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.views.generic.base import RedirectView
from django.conf.urls.i18n import i18n_patterns
from django.urls import include, path, reverse_lazy
from django.contrib import admin
# Redirect the bare site root to the paste application's index.
npb_index = reverse_lazy("npb:index")
urlpatterns = [path("", RedirectView.as_view(permanent=False, url=npb_index))]
# Language-prefixed URLs: root redirect, the npb paste app, and the admin.
urlpatterns += i18n_patterns(
    path("", RedirectView.as_view(permanent=False, url=npb_index)),
    path("paste/", include("npb.urls", namespace="npb")),
    path("admin/", admin.site.urls),
)
| [
"rodolphe@what.tf"
] | rodolphe@what.tf |
dfa7b7317a9f3883b650a9344d3199730f7b4308 | 77a014156f639330f10162b79e6fa31fce7e88f9 | /adminacttools/acttools/KAP_actions.py | ca060a0fda7c0a5c9fde9cb762749deabcf23207 | [
"Apache-2.0"
] | permissive | tmbx/teambox-acttools | 1d50e6cdc2c78fb31f1bf8da05897971439645c5 | 4ca2e0a59d37cfa9756047e9655f6a769b688a9e | refs/heads/master | 2016-09-05T19:12:05.439175 | 2013-05-16T19:56:42 | 2013-05-16T19:56:42 | 3,780,293 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,589 | py | #
# Copyright (C) 2010-2012 Opersys inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# -*- encoding: utf-8 -*-
# KAP_actions.py --- Application of KAP data on a KPS system
# Copyright (C) 2006-2012 Opersys inc.
# Author: François-Denis Gonthier
# Application of a KAP doesn't automatically make a KPS functionnal.
# This class doesn't try to determine if a KPS will work after all the
# provided KAP elements are applied on the system.
#
# Doing so would require checking the whole system database and
# configuration file to determine what is missing, if anything. This
# code is not the right place to do that.
import tempfile
from kctllib.kkeys import *
from kctllib.ktbxsosdconfig import *
from kctllib.ktbxsosdconfigdaemon import *
from kctllib.kdatabase import *
# kpython
from kfile import *
from krun import *
# Registry of (description, action-callable) pairs; populated at the bottom
# of this module and exported for the activation driver.
KAPActions = []
# Fatal error. This means the activation failed.
class KAPActionException(Exception): pass
# Warning. Might not mean a failed activation. Might mean a partial
# KAP was sent.
class KAPActionWarning(Exception): pass
db_init()
__all__ = ["KAPActions", "KAPActionException", "KAPActionWarning"]
def openconfig():
    """Return a KTbxsosdConfig over tbxsosd.conf with web.conf as user file."""
    return KTbxsosdConfig(source_file = "/etc/teambox/tbxsosd/tbxsosd.conf",
                          user_file = "/etc/teambox/tbxsosd/web.conf")
def action_sig_skey(activator, kap):
    """Import the KAP's private e-mail signature key (keyset + database)."""
    if kap.email_sig_skey:
        key = kap.email_sig_skey
        activator.keyset.set_sig_skey(key)
        sdb_importprivkey("sig", key.id, key.owner, key.key)
        db_commit()
    else:
        raise KAPActionWarning("No private signature key provided.")
def action_sig_pkey(activator, kap):
    """Import the KAP's public e-mail signature key (keyset + database)."""
    if kap.email_sig_pkey:
        key = kap.email_sig_pkey
        activator.keyset.set_sig_pkey(key)
        sdb_importpubkey("sig", key.id, key.owner, key.key)
        db_commit()
    else:
        raise KAPActionWarning("No public signature key provided.")
def action_enc_skey(activator, kap):
    """Import the private encryption key selected by the KAP's key ID."""
    if kap.key_id:
        # Placeholder owner when the KAP did not ship the public signature
        # key (which carries the real owner name).
        key_owner = "Unknown owner %d" % kap.key_id
        if kap.email_sig_pkey:
            key_owner = kap.email_sig_pkey.owner
        activator.keyset.set_keyid_and_owner(kap.key_id, key_owner)
        key = activator.keyset.enc_skey
        sdb_importprivkey("enc", key.id, key.owner, key.key)
        db_commit()
    else:
        raise KAPActionWarning("No key ID provided.")
def action_enc_pkey(activator, kap):
    """Import the public encryption key selected by the KAP's key ID."""
    if kap.key_id:
        # Same owner-name fallback as action_enc_skey.
        key_owner = "Unknown owner %d" % kap.key_id
        if kap.email_sig_pkey:
            key_owner = kap.email_sig_pkey.owner
        activator.keyset.set_keyid_and_owner(kap.key_id, key_owner)
        key = activator.keyset.enc_pkey
        sdb_importpubkey("enc", key.id, key.owner, key.key)
        db_commit()
    else:
        raise KAPActionWarning("No key ID provided.")
def action_license(activator, kap):
if kap.license:
(_, tmp_path) = tempfile.mkstemp()
write_file(tmp_path, kap.license)
# FIXME: This is a copy of some code in kctlcmd.
kctlbin = kparams_get("kctlbin")
cmd = [kctlbin, "showlicensefile", tmp_path]
proc = KPopen("", cmd)
lines = re.split("\n", proc.stdout)
try:
# Get the first line.
(v, kdn) = re.split(": ", lines[0])
if v != "kdn":
raise KAPActionException("Unable to guess which KDN to use to import the license")
else:
sdb_set_org_license(kdn, kap.license)
db_commit()
except KctlException, ex:
raise KAPActionException("Unable to apply license: %s", ex.message)
finally:
os.unlink(tmp_path)
else:
raise KAPActionWarning("No license provided.")
def action_kdn(activator, kap):
    """Record the KAP's KDN on the activator identity and in the tbxsosd
    configuration (server.kdn holds a space-separated list of KDNs)."""
    if kap.kdn:
        activator.identity.kdn = kap.kdn
        activator.identity.save()
        config = openconfig()
        cfg_kdn = config.get("server.kdn")
        # Append the KDN only when it is not already listed; replace an
        # empty/missing value outright.  (Bug fix: the original test was
        # inverted -- it duplicated an already-present KDN and overwrote the
        # whole list when the KDN was absent.)
        # NOTE(review): find() is a substring match, so a KDN that is a
        # prefix of an existing entry is treated as present -- confirm.
        if cfg_kdn and cfg_kdn.find(kap.kdn) < 0:
            config.set("server.kdn", " ".join([cfg_kdn, kap.kdn]))
        elif not cfg_kdn:
            config.set("server.kdn", kap.kdn)
        config.save(target_file = "/etc/teambox/tbxsosd/web.conf")
    else:
        raise KAPActionWarning("No KDN provided.")
def action_neworg(activator, kap):
    """Create (or reuse) the organization record for the KAP's KDN and bind
    its ID to the activator identity."""
    if kap.kdn:
        orgs = sdb_lsorg()[1]
        doAdd = True
        if orgs:
            # Reuse an existing org whose KDN matches.
            for o in orgs:
                if o[1] == kap.kdn:
                    doAdd = False
                    org_id = o[0]
        # The database returns a long integer here but we can usually
        # treat the org_id as an integer.
        if doAdd:
            org_id = sdb_addorg(kap.kdn)
            sdb_set_org_status(org_id, 2)
            db_commit()
            activator.identity.org_id = int(org_id)
            activator.identity.save()
            activator.save()
        else:
            # Existing org: still bind it, but warn the caller that this
            # part of the KAP was redundant.
            activator.identity.org_id = int(org_id)
            activator.identity.save()
            activator.save()
            raise KAPActionWarning("Organization %s already exists." % kap.kdn)
    else:
        raise KAPActionWarning("No KDN provided.")
def action_bundle(activator, kap):
    """Install the KPS bundle through the tbxsosd configuration daemon.

    NOTE(review): unlike the other actions, a missing bundle is silently
    skipped instead of raising KAPActionWarning -- confirm this is intended.
    """
    if kap.bundle:
        (_, tmp_file) = tempfile.mkstemp()
        try:
            write_file(tmp_file, kap.bundle)
            kcfgdaemon = TbxsosdConfigDaemon()
            if kcfgdaemon.present():
                kcfgdaemon.install_bundle(tmp_file)
            else:
                raise KAPActionException("No configuration daemon present.")
        finally:
            # Always remove the temporary copy of the bundle.
            os.unlink(tmp_file)
def action_closing(activator, kap):
    """Mark the activation as complete by advancing the activator to step 7."""
    activator.step = 7
    activator.save()
# Ordered list of activation actions: each entry pairs a human-readable
# description with the function applying that part of the KAP.
KAPActions += [("Private signature key", action_sig_skey),
               ("Public signature key", action_sig_pkey),
               ("Private encryption key", action_enc_skey),
               ("Public encryption key", action_enc_pkey),
               ("KDN", action_kdn),
               ("New organization", action_neworg),
               ("License", action_license),
               ("KPS bundle", action_bundle),
               ("End of activation", action_closing)]
| [
"karim.yaghmour@opersys.com"
] | karim.yaghmour@opersys.com |
c0f08d3cdba32cd4ce1ac2444183d881175a00c1 | 7ddc5d778a5d7d866b60af7549cfe1d20c7e46b9 | /shell/communication_serialization/python/serialization.py | 6045fc486e79c3337f763809bbc4d847fb523f22 | [] | no_license | 05dirnbe/neural_network_control | cc4de3466493831b5f015596d912b6a864dd4f37 | 9041364004c1a71fec85222f00af8e45efe27135 | refs/heads/master | 2021-03-22T03:28:20.881654 | 2017-05-11T02:01:31 | 2017-05-11T02:01:31 | 88,581,468 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,108 | py | import logging
from collections import defaultdict
import numpy as np
import flatbuffers
import configuration
import Buffers.Integer
import Buffers.IntegerArray
import Buffers.IntegerMatrix
import Buffers.Spike
import Buffers.SpikesArray
import Buffers.String
class Serializer_Operations(object):
    def __init__(self):
        # Dedicated logger; DEBUG level so every (de)serialization is traced.
        self.logger = logging.getLogger("serializer")
        self.logger.setLevel(logging.DEBUG)
    def deserialize_weights(self, data_buffer, initial_buffer_size = 1024):
        """Decode a weight-matrix buffer (identical layout to a topology)."""
        # weights and topology have the same buffer layout
        return self.deserialize_topology(data_buffer, initial_buffer_size)
    def deserialize_parameters(self, data_buffer, initial_buffer_size = 1024):
        """Decode an IntegerArray flatbuffer into a 1-D numpy int array."""
        # receive a flatbuffer and deserialize it into a numpy array of ints
        assert type(data_buffer) == str
        array = Buffers.IntegerArray.IntegerArray.GetRootAsIntegerArray(data_buffer, initial_buffer_size)
        # Copy the `values` FlatBuffer `vector` element by element.
        data = np.zeros(array.ListLength(), dtype = int)
        for i in xrange(array.ListLength()):
            data[i] = array.List(i)
        self.logger.debug("Deserializing to obtain: %s", data)
        return data
    def deserialize_spikes(self, data_buffer, initial_buffer_size = 1024):
        """Decode a SpikesArray flatbuffer into a dict with parallel
        "timestamp" and "address" lists (one entry per spike)."""
        # deserialize flatbuffer into lists of ints stored in a dict
        assert type(data_buffer) == str
        data = defaultdict(list)
        l = Buffers.SpikesArray.SpikesArray.GetRootAsSpikesArray(data_buffer, initial_buffer_size)
        # Walk the `values` FlatBuffer `vector`, one Spike table at a time.
        for i in xrange(l.ListLength()):
            spike = l.List(i)
            data["timestamp"].append(spike.Timestamp())
            data["address"].append(spike.Address())
        self.logger.debug("Deserializing to obtain: %s", data)
        return data
    def deserialize_topology(self, data_buffer, initial_buffer_size = 1024):
        """Decode an IntegerMatrix flatbuffer into an N x M numpy int matrix."""
        # deserialize flatbuffer into numpy matrix of ints
        assert type(data_buffer) == str
        container = Buffers.IntegerMatrix.IntegerMatrix.GetRootAsIntegerMatrix(data_buffer, initial_buffer_size)
        flat_matrix = np.array([ container.List(i) for i in xrange(container.ListLength()) ])
        # The buffer stores the matrix flattened alongside its dimensions.
        data = flat_matrix.reshape(container.N(),container.M())
        self.logger.debug("Deserializing to obtain: %s", data)
        return data
    def deserialize_camera(self, data_buffer, initial_buffer_size = 1024):
        """Decode an Integer flatbuffer into a plain int."""
        # receive a flatbuffer and deserialize it into an int
        assert type(data_buffer) == str
        integer = Buffers.Integer.Integer.GetRootAsInteger(data_buffer, initial_buffer_size)
        data = integer.Value()
        self.logger.debug("Deserializing to obtain: %s", data)
        return data
    def deserialize_command(self, data_buffer, initial_buffer_size = 1024):
        """Decode a String flatbuffer into a Python string.

        NOTE(review): this passes offset 0 to GetRootAsString while the other
        deserializers pass initial_buffer_size -- confirm which is intended.
        """
        # deserialize flatbuffer into string
        assert type(data_buffer) == str
        string = Buffers.String.String.GetRootAsString(data_buffer, 0)
        data = string.Message()
        self.logger.debug("Deserializing to obtain: %s", data)
        return data
def dummy_deserialize(self, data_buffer, initial_buffer_size = 0):
if data_buffer == "None":
return None
self.logger.debug("Deserializing to obtain: %s", data)
return data
    def serialize_weights(self, data, initial_buffer_size = 1024):
        """Encode a weight matrix (identical container layout to a topology)."""
        # weights and topology have the same containers: a np 2darray containing ints
        return self.serialize_topology(data,initial_buffer_size)
def serialize_parameters(self, data, initial_buffer_size = 1024):
    """Serialize a 1-D numpy array of ints into an IntegerArray flatbuffer."""
    # turn list an np array of ints into flatbuffer
    assert isinstance(data, (np.ndarray, np.generic) )
    assert data.dtype == int
    n = len(data)
    builder = flatbuffers.Builder(initial_buffer_size)
    # Flatbuffer vectors are filled back-to-front.
    Buffers.IntegerArray.IntegerArrayStartListVector(builder, n)
    # Note: Since we prepend the items, this loop iterates in reverse order.
    for i in reversed(xrange(n)):
        builder.PrependUint32(data[i])
    l = builder.EndVector(n)
    # Wrap the vector in the IntegerArray table and finalize the buffer.
    Buffers.IntegerArray.IntegerArrayStart(builder)
    Buffers.IntegerArray.IntegerArrayAddList(builder, l)
    l = Buffers.IntegerArray.IntegerArrayEnd(builder)
    builder.Finish(l)
    data_buffer = builder.Output()
    self.logger.debug("Serializing: %s", data)
    return data_buffer
def serialize_spikes(self, data, initial_buffer_size = 1024):
    """Serialize an (n, 2) numpy int array of [timestamp, address] rows
    into a SpikesArray flatbuffer."""
    assert isinstance(data, (np.ndarray, np.generic) )
    assert len(data.shape) == 2
    assert data.shape[1] == 2
    assert data.dtype == int
    n = data.shape[0]
    spikes = []
    builder = flatbuffers.Builder(initial_buffer_size)
    # first we build the n spikes themselves
    for i in xrange(n):
        Buffers.Spike.SpikeStart(builder)
        Buffers.Spike.SpikeAddTimestamp(builder,data[i][0])
        Buffers.Spike.SpikeAddAddress(builder,data[i][1])
        spike = Buffers.Spike.SpikeEnd(builder)
        spikes.append(spike)
    # next we build a vector that holds the spikes
    # (flatbuffer vectors are filled back-to-front, hence the reversed loop)
    Buffers.SpikesArray.SpikesArrayStartListVector(builder, n)
    for s in reversed(spikes):
        builder.PrependUOffsetTRelative(s)
    spikes_offset = builder.EndVector(n)
    # then we add the vector to the buffer
    Buffers.SpikesArray.SpikesArrayStart(builder)
    Buffers.SpikesArray.SpikesArrayAddList(builder, spikes_offset)
    l = Buffers.SpikesArray.SpikesArrayEnd(builder)
    builder.Finish(l)
    data_buffer = builder.Output()
    self.logger.debug("Serializing: %s", data)
    return data_buffer
def serialize_topology(self, data, initial_buffer_size = 1024):
    """Serialize a 2-D numpy int array into an IntegerMatrix flatbuffer.

    The matrix is stored row-major as a flat vector together with its
    (n, m) dimensions so the receiver can reshape it.
    """
    # serialize numpy 2d array of ints to flatbuffer
    assert isinstance(data, (np.ndarray, np.generic) )
    assert len(data.shape) == 2
    assert data.dtype == int
    n, m = data.shape
    flat_matrix = data.flatten()
    # Serialize the FlatBuffer data.
    # Note: Since we prepend the items, this loop iterates in reverse order.
    builder = flatbuffers.Builder(initial_buffer_size)
    Buffers.IntegerMatrix.IntegerMatrixStartListVector(builder, len(flat_matrix))
    for value in reversed(flat_matrix):
        builder.PrependUint32(value)
    # Bug fix: the original rebound `data` to this vector offset, so the
    # debug log below reported an int offset instead of the matrix.
    vector_offset = builder.EndVector(len(flat_matrix))
    Buffers.IntegerMatrix.IntegerMatrixStart(builder)
    Buffers.IntegerMatrix.IntegerMatrixAddN(builder, n)
    Buffers.IntegerMatrix.IntegerMatrixAddM(builder, m)
    Buffers.IntegerMatrix.IntegerMatrixAddList(builder, vector_offset)
    matrix_offset = Buffers.IntegerMatrix.IntegerMatrixEnd(builder)
    builder.Finish(matrix_offset)
    data_buffer = builder.Output()
    self.logger.debug("Serializing: %s", data)
    return data_buffer
def serialize_camera(self, data, initial_buffer_size = 1024):
    """Serialize a plain int into an Integer flatbuffer."""
    # here we want to serialize an int to a flatbuffer
    assert type(data) == int
    self.logger.debug("Serializing: %d", data)
    builder = flatbuffers.Builder(initial_buffer_size)
    Buffers.Integer.IntegerStart(builder)
    Buffers.Integer.IntegerAddValue(builder,data)
    integer = Buffers.Integer.IntegerEnd(builder)
    builder.Finish(integer)
    data_buffer = builder.Output()
    return data_buffer
def serialize_command(self, data, initial_buffer_size = 1024):
    """Serialize a command string into a String flatbuffer."""
    # turn string representation of command to flatbuffer
    assert type(data) == str
    self.logger.debug("Serializing: %s", data)
    builder = flatbuffers.Builder(initial_buffer_size)
    # Strings must be created before the enclosing table is started.
    message = builder.CreateString(data)
    Buffers.String.StringStart(builder)
    Buffers.String.StringAddMessage(builder,message)
    string = Buffers.String.StringEnd(builder)
    builder.Finish(string)
    data_buffer = builder.Output()
    return data_buffer
def dummy_serialize(self, data, initial_buffer_size = 0):
    """Fallback serializer: ignore the payload and emit the sentinel "None"."""
    sentinel = "None"
    self.logger.debug("Serializing: %s", sentinel)
    return sentinel
class Serializer_Adapter(object):
    """Shared wiring for serializer front-ends: logger, configuration, operator."""

    def __init__(self, operator, config):
        self.logger = logging.getLogger("serialization")
        self.logger.setLevel(logging.DEBUG)
        self.settings = config
        # Mirror the configuration fields that callers consult directly.
        for attribute in ("read_commands", "write_commands", "topics"):
            setattr(self, attribute, getattr(self.settings, attribute))
        self.operator = operator
class Serializer(Serializer_Adapter):
    """Topic-aware facade that routes (de)serialization to the operator."""

    def __init__(self, operator = Serializer_Operations(), config=configuration.Config()):
        super(Serializer, self).__init__(operator, config)

    def read_buffer(self, data_buffer, topic=None):
        """Deserialize data_buffer according to its topic (dummy fallback)."""
        self.logger.debug("Deserialzing topic: %s", topic )
        handlers = {
            "weights": self.operator.deserialize_weights,
            "parameters": self.operator.deserialize_parameters,
            "spikes": self.operator.deserialize_spikes,
            "topology": self.operator.deserialize_topology,
            "camera": self.operator.deserialize_camera,
            "command": self.operator.deserialize_command,
        }
        return handlers.get(topic, self.operator.dummy_deserialize)(data_buffer)

    def write_buffer(self, data, topic=None):
        """Serialize data according to its topic (dummy fallback)."""
        self.logger.debug("Serializing topic: %s", topic )
        handlers = {
            "weights": self.operator.serialize_weights,
            "parameters": self.operator.serialize_parameters,
            "spikes": self.operator.serialize_spikes,
            "topology": self.operator.serialize_topology,
            "camera": self.operator.serialize_camera,
            "command": self.operator.serialize_command,
        }
        return handlers.get(topic, self.operator.dummy_serialize)(data)
| [
"mtd@mpi-inf.mpg.de"
] | mtd@mpi-inf.mpg.de |
1328c1cea5c5eeecd8a910a65bfc89740039a841 | d6190abda288086f6e8e1497bd595c2447c81788 | /for_1.py | 8ec56284f6d48f1e10e1fb8fefdb6d3683d7dfd9 | [] | no_license | user801/aid1807 | 2fdd3c9a806e285f15a901f384d96d1bca3f4bc1 | c5ad0e250b9398a50d4bec5d9ee4baa29fdd3879 | refs/heads/master | 2021-03-29T16:46:43.630204 | 2020-03-17T10:49:53 | 2020-03-17T10:49:53 | 247,968,026 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,878 | py |
# s = "ABCDE"
# for ch in s:
# print("ch -->", ch) # 重複執行 ch 綁定 A B C D E
# else:
# print("for 語句執行else子句")
# print("程序退出")
#上面跑出的程式
# ch --> A
# ch --> B
# ch --> C
# ch --> D
# ch --> E
# for 語句執行else子句
# 程序退出
# 練習:
# 任意輸入一個字符串,判斷這個字符串中有幾個空格' '
# (要求不允許用S.count方法)
# 建議使用for語句實現
# s = input("請輸入一段字符串: ")
# count = 0 # 此變量用來記錄空格的個數
# for ch in s:
#如果ch綁定空格,則將count做加1操作
# if ch == ' ':
# count += 1
# print("空格的個數是:", count)
# 此示例示意range函數的用法
# for x in range(4):
# print(x)
# 0
# 1
# 2
# 3
# 練習:
# 用for語句打印 1~20的整數,打印在一行
# 1 2 3 4 5 6 ... 18 19 20
# for x in range(1, 21):
# print(x, end=' ')
# else:
# print()
# 練習:
# 1. 求 100 以內有哪兒些整數與 自身 + 1 的乘積再對11 求餘結果等於8?
# 2. 計算 1 + 3 + 5 + 7 +.... + 99的和
# 用while 和 for語句兩種方法來實現
# 1.
# for x in range(100):
# if x * (x + 1) % 11 == 8:
# print(x)
# 2.
# s = 0
# i = 1
# while i < 100:
# 把i累加到s變量中#
# s += i
# i += 2 #因為後一前簡前一項為2
# 用for語句來實現
# for i in range(1, 100, 2):
# s += i
#
# print("和為", s)
# 練習:
# 1 寫程序.輸入一個整數n 代表正方形的寬度和高度.
# 打印數字組成的正方形:
# 如
# 輸入: 5
# 打印:
# 1 2 3 4 5
# 1 2 3 4 5
# 1 2 3 4 5
# 1 2 3 4 5
# 1 2 3 4 5
# 輸入: 4
# 打印:
# 1 2 3 4
# 1 2 3 4
# 1 2 3 4
# 1 2 3 4
# w = int(input("請輸入寬度: "))
# for _ in range(w):
# for x in range(1, w + 1):
# print(x, end=' ')
# print( )
| [
"lvze@tedu.cn"
] | lvze@tedu.cn |
0c3733d1070912526214d127e0345cc62e7fa825 | 8da91c26d423bacbeee1163ac7e969904c7e4338 | /pyvisdk/do/host_virtual_switch_bond_bridge.py | f3c52636e92fe47048e84abef1415d561aeb96d2 | [] | no_license | pexip/os-python-infi-pyvisdk | 5d8f3a3858cdd61fb76485574e74ae525cdc7e25 | 1aadea0afbc306d09f6ecb9af0e683dbbf961d20 | refs/heads/master | 2023-08-28T02:40:28.789786 | 2020-07-16T04:00:53 | 2020-07-16T04:00:53 | 10,032,240 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 1,104 | py |
import logging
from pyvisdk.exceptions import InvalidArgumentError
########################################
# Automatically generated, do not edit.
########################################
log = logging.getLogger(__name__)
def HostVirtualSwitchBondBridge(vim, *args, **kwargs):
    '''This data object type describes a bridge that provides network adapter teaming
    capabilities.

    Positional args fill the required field(s) first, then the optional ones,
    in declared order; keyword args must name a known field.
    '''
    obj = vim.client.factory.create('{urn:vim25}HostVirtualSwitchBondBridge')
    # do some validation checking...
    if (len(args) + len(kwargs)) < 1:
        # Bug fix: the message claimed "at least 2" while the guard requires 1.
        raise IndexError('Expected at least 1 arguments got: %d' % len(args))
    required = [ 'nicDevice' ]
    optional = [ 'beacon', 'linkDiscoveryProtocolConfig', 'dynamicProperty', 'dynamicType' ]
    for name, arg in zip(required+optional, args):
        setattr(obj, name, arg)
    for name, value in kwargs.items():
        if name in required + optional:
            setattr(obj, name, value)
        else:
            raise InvalidArgumentError("Invalid argument: %s. Expected one of %s" % (name, ", ".join(required + optional)))
    return obj
| [
"jmb@pexip.com"
] | jmb@pexip.com |
7b871c3927796dc5c8f7832c9f12853a9ad8aefd | f7f705a5fc5b622b6ecec1d52643b59bc07c77ba | /ibis/__init__.py | 2695fcec5ebaa6b0f2f95685de335d9edc9195c6 | [
"MIT"
] | permissive | 0x174/ibis | 0ab0a2c76dc34f5ead43647f260d0c2a7a2288e7 | 5108848c1d45326b3c65213b2f8deaa88fd29be6 | refs/heads/main | 2023-06-05T14:00:10.170620 | 2021-07-02T13:44:16 | 2021-07-02T13:44:16 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 54 | py | from ibis.utility.graphing import (
plot_graph,
)
| [
"jackson@justjackson.dev"
] | jackson@justjackson.dev |
9d5ff74a624e8e7f092708078ba2bb94c79a64ad | 6a14702c187139b9189f0d7d5468c98ce03b6560 | /study/printf.py | 4a47b6c5116a84a045385be0255b5471e4251535 | [] | no_license | boboivo/python-learning | 902b736fee24facbd773ccac6e6af42ad4663e5e | 67a4267d12b8fab94e955d76cacd29af697288d3 | refs/heads/master | 2023-04-28T09:51:10.850980 | 2019-09-29T10:27:33 | 2019-09-29T10:27:33 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 46 | py | printf = "printf"
print(f"hello {printf}")
| [
"zhaihw0417@163.com"
] | zhaihw0417@163.com |
ac32b412881960fdd135b19553dcbd0e51766288 | 7ab6b53f5b359fe779113de39c758735664f0bd3 | /robinhoodbot/transact.py | 11f51c47d80cd9d76c5c1962bad0034812f8dd23 | [
"MIT"
] | permissive | tstromberg/naivete | 74426ce52b590e407661774b1835b535301c45a9 | 7c731cf63ccb7e0a1218a3e0839f3a9933a3bcf8 | refs/heads/master | 2023-07-21T20:06:28.230397 | 2021-09-09T04:11:47 | 2021-09-09T04:11:47 | 347,766,497 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,832 | py | import logging
import typing
import robin_stocks.robinhood as rh
def sell(symbol: str, holdings, dry_run: bool):
    """Place a market order liquidating the full position in `symbol`.

    Args:
        symbol: ticker of the stock to sell
        holdings: mapping of symbol -> holding info (must expose "quantity")
        dry_run: when True, log the intent but place no order
    """
    quantity = int(float(holdings[symbol].get("quantity")))
    logging.info("####### Selling " + str(quantity) + " shares of " + symbol + " #######")
    if dry_run:
        return
    rh.order_sell_market(symbol, quantity)
def buy(syms: typing.List[str], profile, holdings, dry_run: bool):
    """Buy holdings of stock, matching average holdings in the rest of your portfolio (magic?).

    Args:
        syms: candidate symbols to buy
        profile: account profile (must expose "cash" and "equity")
        holdings: current holdings mapping; only its size is used here
        dry_run: when True, log intended orders but place none
    """
    cash = float(profile.get('cash'))
    # Equity includes cash, so subtract it to get the invested value.
    portfolio_value = float(profile.get('equity')) - cash
    ideal_position_size = 0
    if len(holdings) == 0:
        logging.info("found no holdings: set ideal position to 0?")
    else:
        # NOTE(review): the cash term divides by len(syms) twice; presumably a
        # conservative sizing heuristic -- confirm the intended formula.
        ideal_position_size = (portfolio_value / len(holdings)) + (cash/len(syms) / (2 * len(syms)))
    logging.info("I think the ideal position size may be %s", ideal_position_size)
    prices = rh.get_latest_price(syms)
    for i in range(0, len(syms)):
        stock_price = float(prices[i])
        if(ideal_position_size < stock_price < ideal_position_size*1.5):
            # Price slightly above target: size the position at 1.5x instead.
            num_shares = int(ideal_position_size*1.5/stock_price)
        elif (stock_price < ideal_position_size):
            num_shares = int(ideal_position_size/stock_price)
        else:
            # NOTE(review): `break` abandons ALL remaining symbols once one is
            # too expensive -- `continue` may have been intended; confirm.
            logging.warning("####### Tried buying shares of " + syms[i] + ", but not enough buying power to do so#######")
            break
        logging.info("####### Buying " + str(num_shares) + " shares of " + syms[i] + " #######")
        if not dry_run:
            rh.order_buy_market(syms[i], num_shares)
| [
"t+github@stromberg.org"
] | t+github@stromberg.org |
b42158d184982e9ad4ca929ed7ead3ec46d58790 | 5749fb2dbb0e5489e1d06a835f89fbc9aff8de67 | /wakeuproulette/urls.py | 122e4611d01e8c4afcefbce23bdd944fba0b1d82 | [] | no_license | axsaucedo/wakeuproulette | 73d37abbd08765c2627b752528a7ddf57a102996 | ecc9a6972e6bb419b71f7a97c21402e0a09ba74a | refs/heads/master | 2021-06-11T15:50:18.612512 | 2017-01-24T17:10:01 | 2017-01-24T17:10:01 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,022 | py | from django.conf.urls import patterns, include, url
from wakeup import views
from django.conf import settings
from django.contrib import admin
admin.autodiscover()

# Legacy (pre-Django-1.10) patterns()-style URL configuration.
urlpatterns = patterns('',
    url(r'^$', views.home, name='home'),

    # Serving Calls
    # Telephony webhooks; each receives the schedule id embedded in the URL.
    url(r'^wakeuprequest/(?P<schedule>.+)$', views.wakeUpRequest, name='wakeup'),
    url(r'^answercallback/(?P<schedule>.+)$', views.answerCallback, name='answer'),
    url(r'^privaterequest/(?P<schedule>.+)$', views.sendToPrivateRoom, name='private'),
    url(r'^ratingrequest/(?P<schedule>.+)$', views.ratingRequest, name='rating'),
    url(r'^anymatchrequest/(?P<schedule>.+)$', views.tryAnyMatch, name='anymatch'),
    url(r'^finishrequest/(?P<schedule>.+)$', views.finishRequest, name='finish'),
    url(r'^waitingrequest/(?P<username>.*)$', views.waitingRequest, name='waiting'),

    # Handling fallback errors
    url(r'^fallback/(?P<schedule>.+)$', views.fallbackRequest, name='fallback'),

    # Call verification
    url(r'^callverification$', views.callVerifcation, name='callverification'),

    # Handling Incoming Calls and Text messages
    # url(r'^call/initial/', views.callInitial, name='call'),
    # url(r'^call/register/', views.callRegister, name='call'),
    # url(r'^call/setup/', views.callSetup, name='call'),
    # url(r'newsletter/', views.newsletter, name='newsletter'),

    url(r'^sharedwakeup/(?P<shareid>\d+)/$', views.shared_wakeup, name='shared_wakeup'),
    url(r'^beta/', views.beta, name='beta'),
    url(r'^evening/', views.eveningRoulette, name='evening' ),
    url(r'^survey/', views.survey, name='survey'),
    url(r'^admin/', include(admin.site.urls)),
    url(r'^accounts/', include('accounts.urls')),
)

# Static and media files served by Django itself (development setups only).
urlpatterns += patterns('',
    (r'^static/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.STATIC_ROOT}),
    (r'^media/(?P<path>.*)$', 'django.views.static.serve', {'document_root': settings.MEDIA_ROOT}),
)

# Catch-all: any unmatched URL renders the not-found view.
# NOTE(review): must stay last -- it shadows any pattern appended after it.
urlpatterns += patterns(''
    , url(r'^.*/', views.notFound, name='notFound')
)
| [
"axsauze@gmail.com"
] | axsauze@gmail.com |
5027b0fb17f1ba02e70846105a5c9ac76f5424c3 | 3f8af8c8d5efd90c0a09bb38b2a980ced3e38117 | /ml_play_template_SVM.py | 8e031ab59aa09090fe5d0b4b1bb3477cd3372456 | [] | no_license | z50215z/homework4 | fc85d115fd991a9e17f8961e8ea439b6a6e2a255 | a6dfd652127d014a5228770d92783990238b4552 | refs/heads/master | 2020-09-15T11:36:28.175507 | 2019-11-22T16:03:32 | 2019-11-22T16:03:32 | 223,433,469 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,876 | py | """The template of the main script of the machine learning process
python MLGame.py -r arkanoid 1 -i ml_play_template_20191007.py
"""
import games.arkanoid.communication as comm
from games.arkanoid.communication import ( \
SceneInfo, GameInstruction, GameStatus, PlatformAction
)
def ml_loop():
    """The main loop of the machine learning process

    This loop is run in a separate process, and communicates with the game process.

    Note that the game process won't wait for the ml process to generate the
    GameInstruction. It is possible that the frame of the GameInstruction
    is behind of the current frame in the game process. Try to decrease the fps
    to avoid this situation.
    """
    # === Here is the execution order of the loop === #
    # 1. Put the initialization code here.
    ball_position_history = [ ]  # past ball positions; the last two give the velocity

    # 2. Inform the game process that ml process is ready before start the loop.
    import pickle
    import numpy as np
    # NOTE(review): hard-coded absolute Windows path to the pickled SVM model.
    filename = 'C:\\MLGame-master\\SVM.sav'
    model = pickle.load(open(filename,'rb'))
    comm.ml_ready()

    # 3. Start an endless loop.
    while True:
        # 3.1. Receive the scene information sent from the game process.
        scene_info = comm.get_scene_info()
        platform_center_x = scene_info.platform[0]
        ball_position_history.append(scene_info.ball)
        if (len(ball_position_history) > 1):
            # Ball velocity estimated from the last two observed positions.
            vy = ball_position_history[-1][1] - ball_position_history[-2][1]
            vx = ball_position_history[-1][0] - ball_position_history[-2][0]
            # Feature row for the SVM: ball x, ball y, platform x, vx, vy.
            inp_temp = np.array([scene_info.ball[0], scene_info.ball[1], scene_info.platform[0],vx,vy])
            input = inp_temp[np.newaxis, :]
        # 3.2. If the game is over or passed, the game process will reset
        # the scene and wait for ml process doing resetting job.
        if scene_info.status == GameStatus.GAME_OVER:
            print("game over")
            comm.ml_ready()
            continue
        elif scene_info.status == GameStatus.GAME_PASS:
            #scene_info = comm.get_scene_info()
            # Do some stuff if needed
            # 3.2.1. Inform the game process that ml process is ready
            print("game pass")
            comm.ml_ready()
            continue
        # 3.3. Put the code here to handle the scene information
        # Predict a move only once two frames are available (velocity known).
        if (len(ball_position_history) > 1):
            move = model.predict(input)
        else:
            move = 0
        # 3.4. Send the instruction for this frame to the game process
        if move < 0:
            comm.send_instruction(scene_info.frame, PlatformAction.MOVE_LEFT)
        elif move > 0:
            comm.send_instruction(scene_info.frame, PlatformAction.MOVE_RIGHT)
        else:
            comm.send_instruction(scene_info.frame, PlatformAction.NONE)
"noreply@github.com"
] | z50215z.noreply@github.com |
58602f9b202b25c612a5d109093a409a38444000 | 4ebd36dafd421ca9988515b74891df8a03deb1b2 | /nerodia/locators/class_helpers.py | 5530916f410882d74bc6db44ec61aff8aacd980d | [
"MIT"
] | permissive | rbojjasba/nerodia | dac4a6610a5418ed5949467c5ac244394cc35bd0 | 2ad87cfd813f5d6201fe6b6aec6ae3ff5a39b7d9 | refs/heads/master | 2020-03-11T15:15:15.592182 | 2018-04-10T19:57:05 | 2018-04-10T19:57:05 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,066 | py | from importlib import import_module
import nerodia
class ClassHelpers(object):
    """Mixin resolving the Locator/Validator/SelectorBuilder classes for an element.

    Each property first looks for an element-specific class in the locator
    namespace module and falls back to the generic element implementation.
    """

    # private

    @property
    def _locator_class(self):
        from .element.locator import Locator
        # Element-specific Locator if the resolved module defines one, generic otherwise.
        return getattr(self._import_module, 'Locator', Locator)

    @property
    def _element_validator_class(self):
        from .element.validator import Validator
        return getattr(self._import_module, 'Validator', Validator)

    @property
    def _selector_builder_class(self):
        from .element.selector_builder import SelectorBuilder
        return getattr(self._import_module, 'SelectorBuilder', SelectorBuilder)

    @property
    def _import_module(self):
        """Module '<locator namespace>.<mapped element name>', falling back to
        '<locator namespace>.element' when no element-specific module exists."""
        from ..module_mapping import map_module
        modules = [nerodia.locator_namespace.__name__, map_module(self._element_class_name)]
        try:
            return import_module('{}.{}'.format(*modules))
        except ImportError:
            # Only modules[:1] is unpacked here, so the mapped element name is
            # intentionally dropped and the generic element module is imported.
            return import_module('{}.element'.format(*modules[:1]))

    @property
    def _element_class_name(self):
        # Name of the concrete element class this mixin is attached to.
        return self._element_class.__name__
| [
"lucast1533@gmail.com"
] | lucast1533@gmail.com |
d02975c051cf9929e7b311cc6691548310b7b564 | f3b233e5053e28fa95c549017bd75a30456eb50c | /thrombin_input/1b/1b-7a_MD_NVT_rerun/set_6.py | 881a105f8345f565a65eb50af369073a75f4648b | [] | no_license | AnguseZhang/Input_TI | ddf2ed40ff1c0aa24eea3275b83d4d405b50b820 | 50ada0833890be9e261c967d00948f998313cb60 | refs/heads/master | 2021-05-25T15:02:38.858785 | 2020-02-18T16:57:04 | 2020-02-18T16:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 778 | py | import os
# Driver for one step of a thermodynamic-integration run (1b -> 7a ligand pair):
# for every lambda window, instantiate the production input and PBS script
# from templates by substituting the XXX placeholder with the window's lambda.
# NOTE: `dir` shadows the builtin of the same name.
dir = '/mnt/ls15/scratch/users/songlin3/run/thrombin/ligand_final/final/1b_resp/MD_NVT_rerun/ti_one-step/1b_7a/'
filesdir = dir + 'files/'
# Template production input and PBS submission script.
temp_prodin = filesdir + 'temp_prod_6.in'
temp_pbs = filesdir + 'temp_6.pbs'
# Lambda windows of the TI schedule (one working directory per value).
lambd = [ 0.00922, 0.04794, 0.11505, 0.20634, 0.31608, 0.43738, 0.56262, 0.68392, 0.79366, 0.88495, 0.95206, 0.99078]
for j in lambd:
    os.chdir("%6.5f" %(j))
    workdir = dir + "%6.5f" %(j) + '/'
    #prodin: copy the template and inject this window's lambda value
    prodin = workdir + "%6.5f_prod_6.in" %(j)
    os.system("cp %s %s" %(temp_prodin, prodin))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, prodin))
    #PBS: same substitution for the batch script
    pbs = workdir + "%6.5f_6.pbs" %(j)
    os.system("cp %s %s" %(temp_pbs, pbs))
    os.system("sed -i 's/XXX/%6.5f/g' %s" %(j, pbs))
    #submit pbs (submission deliberately left commented out)
    #os.system("qsub %s" %(pbs))
    os.chdir(dir)
| [
"songlin3@msu.edu"
] | songlin3@msu.edu |
36618e6283984050bcdc80283bae3eac795fcf08 | d10c6b9095744daf0c5057b233b7fef46ca7c0f5 | /board/migrations/0002_board_delete.py | 294d3a4f1b9c10d739b0c58201e9b9184b3fae31 | [] | no_license | lucy74310/pysite | 8cad7d9c9e694deadf0285243c31edb7c94c5c99 | 2d0d2bf39e8689b25ddd5a7639ec092fb22f4489 | refs/heads/master | 2020-06-07T02:46:21.591985 | 2019-06-25T10:37:05 | 2019-06-25T10:37:05 | 192,876,774 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 369 | py | # Generated by Django 2.2.2 on 2019-06-21 08:41
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add an integer soft-delete flag `delete` (default 0) to the Board model."""

    # Must run after the initial board schema migration.
    dependencies = [
        ('board', '0001_initial'),
    ]

    operations = [
        migrations.AddField(
            model_name='board',
            name='delete',
            field=models.IntegerField(default=0),
        ),
    ]
| [
"lucy74310@gmail.com"
] | lucy74310@gmail.com |
a2f3efc48199e3f32398aa20acd4b4332eb5fbae | 98637e287a2513a29fde79e68d7ae859061cfaaf | /scripts/python/skin-depth.py | 0b37ea058a0d4c158f9820ab317112ee3dd8fd32 | [] | no_license | mtgeoma/geomamt | 0c8075089fad5e8e997b269105b6895ed69408ae | 827079e295c0641f7236b276f7951bedb99e3a9f | refs/heads/master | 2016-09-01T22:09:18.886194 | 2015-06-15T12:09:55 | 2015-06-15T12:09:55 | 32,728,686 | 5 | 3 | null | null | null | null | UTF-8 | Python | false | false | 1,414 | py | #!/usr/bin/env python
# coding=utf-8
# para calcular o skin-depth (sd)
# ./skin-depth.py rho=100.0 T=3.94784176044E-4
# para calcular o rho
# ./skin-depth.py sd=100.0 T=3.94784176044E-4
# para calcular o período
# ./skin-depth.py sd=100.0 rho=100.0
import sys
import string
import math
if (__name__=="__main__"):
    # sd = C*sqrt(rho*T); where C = 1000.0*sqrt(10.0)/(2*PI); in SI units
    C=1.0E3*math.sqrt(10.0)/(2.0*math.pi)
    rho=0.0
    sd=0.0
    T=0.0
    f=0.0
    # Parse key=value command-line arguments: rho, T (or f), sd.
    for i in range(1,len(sys.argv)):
        if(sys.argv[i].find("=")!=-1):
            parametro=sys.argv[i].split("=")
            if(parametro[0]=="rho"):
                rho=float(parametro[1])
            elif(parametro[0]=="T"):
                T=float(parametro[1])
                f=1./T
            elif(parametro[0]=="f"):
                f=float(parametro[1])
                T=1./f
            elif(parametro[0]=="sd"):
                sd=float(parametro[1])
            else:
                print "erro: \""+parametro[0]+"\" parâmetro desconhecido"
        else:
            print "erro: todos os parâmetros devem ter um sinal de igual"
    # Given two positive parameters, solve sd = C*sqrt(rho*T) for the third:
    # rho & T -> skin depth; rho & sd -> period T; otherwise -> resistivity rho.
    if(rho>0.0 and T > 0.0):
        print C*math.sqrt(rho*T)
    elif(rho>0.0 and sd > 0.0):
        print (sd/C)**2/rho
    else:
        print (sd/C)**2/T
| [
"mtgeoma@gmail.com"
] | mtgeoma@gmail.com |
b3f0cb2910d9bd6bedc063fcf011924df10a94dd | a38a295b2538537e04d9865c003e1f611544cff8 | /Les 3/3_4.py | ba5fd4c4f6c8a6b109ced59c9ae261a874f466cc | [] | no_license | LuukHendriks99/Python | 0bfd3b9584d0fd18742d985fd6c2e465b2f5b13c | 970b1e189a589c817750bb3e966e77e6bdf3f00e | refs/heads/master | 2021-07-20T06:33:18.130667 | 2017-10-30T13:39:36 | 2017-10-30T13:39:36 | 103,111,085 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | lijst = [ 'maandag', 'dinsdag', 'woensdag' ]
for l in lijst:
print(l[:2]) | [
"luukhendriks13"
] | luukhendriks13 |
8c05e056ded7e00bcc15f720813339c9efb1cf94 | 834394ad9550fe0f2f016b9ad2f8ba9a40513519 | /Models/Model_1/Tests/TestRunner.py | 0cd7a0b400ce251f9cd4a2010d8681837ab44028 | [
"MIT"
] | permissive | akanimax/toxic-comment-identification-tensorflow | fd7239df03de11eb1d3964b79b7257c953126123 | a1d065639d8b518c0ac1dc53e98e09642e258bb6 | refs/heads/master | 2021-05-15T04:16:26.674628 | 2018-03-19T14:39:35 | 2018-03-19T14:39:35 | 119,345,551 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 600 | py | """ The main script that combines all the tests for this package in a suite and runs it
"""
import unittest
from test_GraphCreator import TestGraphCreator
def create_suite():
    """Assemble all of this package's test cases into one unittest suite."""
    combined = unittest.TestSuite()
    # Pull every test method of the TestCase class into the suite.
    combined.addTest(unittest.makeSuite(TestGraphCreator))
    return combined
# the main test runner script
if __name__ == '__main__':
runner = unittest.TextTestRunner()
test_suite = create_suite()
# run all the tests in the test suite
runner.run(test_suite) | [
"animesh.karnewar@mobiliya.com"
] | animesh.karnewar@mobiliya.com |
5f84763466fcb2b5ad06c86609b902fe79823db0 | bec56b9cc4691dd221a7aafd96cabdf83fe974a5 | /web-containerised/tests/test_information.py | 2819a7509fc34f6bf4174531e011bd194064028d | [] | no_license | foad/bootcamp-project-monorepo | 48be3a5489a73309c6c0ef8e720e07850eea1c7b | 66ec4d46d3f3685bd1738a090640305e6935cfe6 | refs/heads/master | 2022-01-12T20:57:32.735457 | 2019-08-05T14:49:44 | 2019-08-05T14:49:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 188 | py | from skyjourneys import create_app
def test_privacy_page_load(client):
response = client.get('/privacy')
assert response.status_code == 200
assert b'Privacy' in response.data | [
"daniel.foad@sky.uk"
] | daniel.foad@sky.uk |
3f0fb8acec1c4a040c53734126341421e6a699b5 | a1b7381d396af61cdb11e9d2eeabf3c3d84f516f | /olive/store/mongo_connection.py | ddb75fc41d9360430cd110b7be42d9aec0f54516 | [] | no_license | alirezastack/olive | 7771503d4d796d114938610cadd8a8e501348a13 | 3a3496ed58328e0eb412513d1764f1e20b368461 | refs/heads/master | 2023-05-26T02:45:43.866082 | 2019-11-24T15:15:32 | 2019-11-24T15:15:32 | 219,418,765 | 0 | 0 | null | 2023-05-22T22:32:13 | 2019-11-04T04:42:38 | Python | UTF-8 | Python | false | false | 855 | py | from olive.patterns import singleton
from pymongo import MongoClient
@singleton
class MongoConnection:
    """Process-wide MongoDB connection for a single service.

    The service name (cfgs['appname']) doubles as the database name.
    NOTE(review): 'appname' remains inside cfgs when it is unpacked into
    MongoClient(**cfgs) -- confirm MongoClient tolerates that keyword.
    """
    def __init__(self, cfgs, app):
        self._app = app
        self._service = cfgs['appname']
        self._client = MongoClient(**cfgs)
        self._app.log.debug('connecting to {} mongoDB server...'.format(self._service))
        # The ismaster command is cheap and does not require auth.
        # ConnectionFailure/ServerSelectionTimeoutError will be raised if MongoDB is not reachable
        self._client.admin.command('ismaster')
        self._app.log.info('connected to {} mongoDB server...'.format(self._service))
    @property
    def service_db(self):
        """Return the database named after the current service."""
        return getattr(self._client, self._service)
    def __str__(self):
        return self._service + ' -> {}'.format(self._client)
| [
"alireza.hosseini@zoodroom.com"
] | alireza.hosseini@zoodroom.com |
67b65c2e358b25ecfa254975629b4a3d6533cb96 | 0b7fefef8f7173003adcd2ac3b9067763f7ac79d | /kik.py | 90a54466aa54327a6e2e25e11d967a3784913253 | [
"MIT"
] | permissive | earthexploration/Masters-thesis | e2b854e60a4cb2fc953b6bb0cdd4d6ac1552a441 | 56d1086f54cfa5887f5ea4bb8bb31a4670a83bfb | refs/heads/main | 2023-06-10T15:04:07.589551 | 2021-07-07T20:29:46 | 2021-07-07T20:29:46 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 757 | py | # -*- coding: utf-8 -*-
"""
Created on Wed Oct 7 20:49:34 2020
@author: erlin
"""
import hyperspy.api as hs
import matplotlib.pyplot as plt
import numpy as np
import kikuchipy as kp
# EBSD pattern preprocessing pipeline with kikuchipy: load raw patterns,
# remove static and dynamic backgrounds, neighbour-average, and save.
filepath = 'filename' #Filepath to raw data (placeholder -- set before running)
s = kp.load(filepath, lazy=False) #Loading raw EBSD patterns
s.remove_static_background(operation="subtract", relative=True) #Removing static background from patterns
s.remove_dynamic_background() #Removing dynamic background from patterns
w_gauss = kp.filters.Window(window="gaussian",std=1) #Creating gaussian filter (std = 1)
s.average_neighbour_patterns(window=w_gauss) #Neighbour pattern averaging the patterns, using the gaussian filter above
s.save("output_filepath") #Saving the averaged patterns (placeholder output path)
| [
"noreply@github.com"
] | earthexploration.noreply@github.com |
db365a21716847e846eadc04734267b71ab23c69 | 69fd09dcbd3f251c756b8d87492976bf849dd5ef | /Environment.py | c50ed20b7db23f1d7424163fff05823be689ebd6 | [] | no_license | ejalaa12/MLRobot | 5259e03ec475110cfca57f3145c58406dcb0fa94 | 736525b208e45d23b0be02c542391f5642ada96c | refs/heads/master | 2020-04-09T20:06:46.492355 | 2016-09-14T11:48:22 | 2016-09-14T11:48:22 | 68,200,878 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,170 | py | import numpy as np
import matplotlib.pyplot as plt
from DubinsCar import DubinsCar
from geometry_toolkit import *
from regulator import *
from ai_regulator import AI_regulator
class Environment():
"Environment where the Dubins car will wander"
def __init__(self, width=300, height=300, dt=0.1, regulator=RandomReg()):
self.width = width
self.height = height
self.initEnv(dt)
# Regulation
self.regulator = regulator
def initEnv(self, dt):
self.time = 0
self.dt = dt
self.car = DubinsCar(x=self.width / 2, y=self.height / 2, theta=0)
self.carPathX = []
self.carPathY = []
def reset(self):
self.initEnv(0.1)
def sim1dt(self):
"""
Simulate the car for 1 dt
"""
# Update the new command
self.update_cmd()
# Call euler method to simulate the car given the cmd
self.car.sim_for_dt(self.cmd)
self.time += self.dt # increment time
# Append the new car position to the car path
self.carPathX.append(self.car.x)
self.carPathY.append(self.car.y)
# Check for collision and return True if collision
return self.checkCollision()
def checkCollision(self):
"""
Check if the car has hit any of the walls
(the walls are the limit of the canvas)
"""
if not 0 < self.car.x < self.width:
return True
elif not 0 < self.car.y < self.height:
return True
return False
def simUntilCollision(self):
"""
Recursively call sim1dt until there is a collision, then stops
and set the Score (fitness) which is the duration without collision
"""
while not self.sim1dt():
# Stop simulation if car survived for 100 sec
if self.time > 100:
print 'No colission for a long time'
break
print 'collision after', self.time
self.regulator.setScore(self.time)
def plotPath(self):
"""
Helper method to plot the path of the car using matplotlib
"""
plt.plot(self.carPathX, self.carPathY)
plt.axis([0, self.width, 0, self.height])
plt.plot(self.car.x, self.car.y, 'og', markersize=10)
plt.show()
def update_cmd(self):
"""
Generates the new cmd for the car by asking the given regulator
"""
if isinstance(self.regulator, AI_regulator):
self.cmd = self.regulator.generate_cmd(
[self.width / 2 - self.car.x])
else:
self.cmd = self.regulator.generate_cmd()
# if self.regulator is None:
# self.cmd = 0
# elif self.regulator == 'random':
# self.cmd = np.radians(np.random.randint(0, 180))
# self.cmd *= np.random.choice([-1, 1])
# else:
# self.cmd = self.regulator(self.front_distance())
def find_angles(self):
"""
__ Helper method for front distance
.. todo: move to geometry_tookit module
"""
x, y = self.car.x, self.car.y
# Angle 1: corner width, height
alpha1 = np.arctan((self.height - y) / (self.width - x))
# Angle 2: corner 0 , height
alpha2 = np.radians(90) + np.arctan(x / (self.height - y))
# Angle 3: corner 0 , 0
alpha3 = np.radians(180) + np.arctan(y / x)
# Angle 4: corner width, 0
alpha4 = np.radians(270) + np.arctan((self.width - x) / y)
return [alpha1, alpha2, alpha3, alpha4]
def front_distance(self):
"""
Calculate the distance in front of the car to the closest wall
"""
angle = self.find_angles()
if not (0 < self.car.x < self.width and 0 < self.car.y < self.height):
return 0
a, b = find_equation(self.car.x, self.car.y, self.car.theta)
# car cross with top side: y = height
if angle[0] < self.car.theta <= angle[1]:
if self.car.theta == np.radians(90):
return dist(self.car.x, self.car.y, self.car.x, self.height)
y = self.height
x = (y - b) / a
return dist(self.car.x, self.car.y, x, y)
# car cross with left side: x = 0
elif angle[1] < self.car.theta <= angle[2]:
if self.car.theta == np.radians(180):
return dist(self.car.x, self.car.y, 0, self.car.y)
x = 0
y = b
return dist(self.car.x, self.car.y, x, y)
# car cross with bottom side: y = 0
elif angle[2] < self.car.theta <= angle[3]:
if self.car.theta == np.radians(270):
return dist(self.car.x, self.car.y, self.car.x, 0)
y = 0
x = (y - b) / a
return dist(self.car.x, self.car.y, x, y)
# car cross with right side: x = width
else:
if self.car.theta == np.radians(0):
return dist(self.car.x, self.car.y, 0, self.car.y)
x = self.width
y = a * x + b
return dist(self.car.x, self.car.y, x, y)
| [
"ejalaa12@gmail.com"
] | ejalaa12@gmail.com |
a4c061df7c9980b76d9d90f453e302d42614969f | 8d7142cd15bb05572a69ed5a434c8d9a2da94c4d | /dgsite/settings.py | b3802ecfe40f9beddb19ab3d8ed9923853364145 | [] | no_license | ar-gis/dgblog | 9d8129985eaebdf348a7cbbeac31c6ed076c2225 | c8b5e207e5ac27addcc923ec6c97afd306b98b78 | refs/heads/master | 2020-04-02T01:47:57.192192 | 2018-10-28T09:49:29 | 2018-10-28T09:49:29 | 153,875,664 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,279 | py | """
Django settings for dgsite project.
Generated by 'django-admin startproject' using Django 2.1.2.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'e5$w469*7nrdi&ra4si63mx@d3e1&grbm$e(j!6&3pij_ft2rn'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'blog',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'dgsite.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'dgsite.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'pl-pl'
TIME_ZONE = 'Europe/Warsaw'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
STATIC_URL = '/static/'
STATIC_ROOT = os.path.join(BASE_DIR, 'static')
| [
"andrzej.rogozinski@astra.software"
] | andrzej.rogozinski@astra.software |
66da406ee8824118201d45a6b3adc201dd249228 | ea11b4dc13405f759ecf729dc9b0e0d9b13d82e7 | /player.py | 8427a9eb6a4a046b215fa0e4a0e8fb266b313014 | [] | no_license | Jkinsella497/PyMaze | 04bc5d84f99dc0c249b731c8f9f5b248e453b61b | 1322dc45772b7f746a2a5513fdac6dd6e80a2d4d | refs/heads/master | 2020-03-23T13:55:37.252027 | 2018-07-20T01:07:23 | 2018-07-20T01:07:23 | 141,645,511 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 77 | py | class Player():
def __init__(self):
self.x = 0
self.y = 0 | [
"jake@teamzerolabs.com"
] | jake@teamzerolabs.com |
28739d09f8c5e7a24cf9f76d52af2e798bbbb708 | 380a1f6279d9543b7f89357744082293e289ff77 | /mysite/zotify/migrations/0008_member_role.py | d94914ebaca18ad8f0f2d6e003fcbc5e48e6ca39 | [] | no_license | wgliwa/Zotify | 8ea6658c5ddb11485c4c78173590d95e0c64a420 | 4b7499c5e3580f927a104daeefc43e3f51e8cb34 | refs/heads/main | 2023-05-23T01:35:54.887012 | 2021-06-08T10:29:43 | 2021-06-08T10:29:43 | 374,969,928 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,134 | py | # Generated by Django 3.1.7 on 2021-05-24 09:50
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('zotify', '0007_auto_20210524_0133'),
]
operations = [
migrations.CreateModel(
name='Role',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('role_name', models.CharField(max_length=50)),
],
),
migrations.CreateModel(
name='Member',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=50)),
('surname', models.CharField(max_length=50)),
('pseudonym', models.CharField(max_length=50)),
('artist', models.ManyToManyField(to='zotify.Artist')),
('role', models.ForeignKey(on_delete=django.db.models.deletion.PROTECT, to='zotify.role')),
],
),
]
| [
"58432170+wgliwa@users.noreply.github.com"
] | 58432170+wgliwa@users.noreply.github.com |
241ab1bfa79f249ee15c2e4016344a84b52cee7b | 4f1218079f90a65befbf658679721886d71f4ee8 | /python/atcoder/ABC/B/b126/b089.py | 359ced93c84b3de6272e57227a50c51dfd50b0a0 | [] | no_license | Escaity/Library | 9f57767617422a7930caf48718d18f7ebef81547 | b34d8600e0a65845f1b3a16eb4b98fc7087a3160 | refs/heads/master | 2022-07-29T16:18:33.073738 | 2022-07-17T10:25:22 | 2022-07-17T10:25:22 | 238,588,249 | 0 | 0 | null | 2021-08-17T03:02:34 | 2020-02-06T02:04:08 | Python | UTF-8 | Python | false | false | 102 | py | n = int(input())
s = set(input().split())
if len(s) == 3:
print("Three")
else:
print("Four")
| [
"esk2306@gmail.com"
] | esk2306@gmail.com |
c50d0fe2e4dd816c273f42f943cc81c4394df939 | 9331d1a084c03e8243c2355def0d5b2636f23199 | /Chapter1/Ch1P3.py | 395ec8dd5038a7ee923089692ed767f5220d20cb | [] | no_license | PrabuddhaBanerjee/Python | e489fa5088560682af40b84c3fa86071c854e757 | 9472592a8286f432d66c33e8c6ab46cb928f9f32 | refs/heads/master | 2022-07-10T22:00:00.547223 | 2020-05-18T22:02:15 | 2020-05-18T22:02:15 | 258,084,056 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 163 | py | def main():
print("This program")
x = input("Enter num between 0 and 1:")
for i in range(10):
x = 2.0 * x * (1 - x)
print( x )
main()
| [
"prabudh994@gmail.com"
] | prabudh994@gmail.com |
867d3283eedb6f0f2fff27d4eadf093f82f3715e | afbb6f97e35884744597c764133e798a1490787a | /Python/ex13.py | 0bf1782cad3e5c1e4225c66702e002d2e6cc9bef | [] | no_license | akmoore85/newcode | 6f74259ef4c70ca4bc86c431c8d7e465106501d1 | 306b6e02d96cdc3d5833125c407159d3291f9896 | refs/heads/master | 2020-03-07T07:35:42.124554 | 2018-04-01T03:30:34 | 2018-04-01T03:30:34 | 127,353,412 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 478 | py | # Write a program that asks the user how many Fibonnaci numbers to generate
# and then generates them. Take this opportunity to think about how you can use functions.
# Make sure to ask the user to enter the number of numbers in the sequence to generate.
# (Hint: The Fibonnaci seqence is a sequence of numbers where the next number in the sequence
# is the sum of the previous two numbers in the sequence.
# The sequence looks like this: 1, 1, 2, 3, 5, 8, 13, …)
| [
"akmoore85@gmail.com"
] | akmoore85@gmail.com |
ea7a23f4530621019e15f8f1344c873833802e67 | 41dbe386398222d83eb392528dc738d30c31ecf1 | /src/oxford.py | 05601e6d3bc363a5b9ec1fedca6973625a65b9f1 | [] | no_license | spidgorny/learn-english-by-youtube | dc00fe6f6948bd7416faf4ea84badf9e20d1706a | d7ea46035cadf95ef3a66e8b965b8503088f4267 | refs/heads/master | 2023-01-07T01:23:16.318036 | 2022-02-21T09:56:53 | 2022-02-21T09:56:53 | 188,993,746 | 1 | 0 | null | 2023-01-05T23:58:02 | 2019-05-28T09:06:30 | HTML | UTF-8 | Python | false | false | 484 | py | import requests
import json
from dotenv import load_dotenv
import os
load_dotenv()
app_id = os.getenv("OXFORD_APP")
app_key = os.getenv("OXFORD_KEY")
language = "en-gb"
word_id = "example"
base = "https://od-api.oxforddictionaries.com:443/api/v2"
url = base + "/entries/" + language + "/" + word_id.lower()
url = base + "/translations/en/ru/hello"
r = requests.get(url, headers={"app_id": app_id, "app_key": app_key})
print(r.status_code, r.headers['content-type'])
print(r.json())
| [
"depidsvy@nintendo.de"
] | depidsvy@nintendo.de |
21e1393d3ea043c5743c87df810d454dda2836e0 | fc168aacda192a323f128190fce61890af64204e | /PlottingDrivers/PlotterABC.py | ad491bab95a8c4dc35810b2d64468895b0d07451 | [] | no_license | mandersonGH/MOONJAM | ee7fe0fde2c9091e431c40ed354b9419d1cc5fa9 | 52d9d83270f9c1102daf4ad5379faa043596ff2d | refs/heads/master | 2023-07-19T05:09:57.350697 | 2021-09-09T07:29:44 | 2021-09-09T07:29:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 302 | py | '''
Created on Sep 8, 2017
@author: Mande
'''
import abc
class PlotterABC(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def plot(self, direc, galaxy, plotType):
"""Create and save plots of a certain type, of a certain galaxy in a certain directory."""
return
| [
"mra21@duke.edu"
] | mra21@duke.edu |
53f4a5dbd995c8d0d06462adf2753293500b5cd1 | d734ea42794064f04fc7d20c02103b46e557cf83 | /WikiSummary.py | 1ad53060f68dce62737de58cfbe0bbeecf057ae9 | [] | no_license | Rishoban/AbstractiveTextSummarization | 9d0d7dc628067bda54a1cbbbdc00d5398c924d5c | 44a928b8f4423ade23baaacc9d3bbec239d751c1 | refs/heads/master | 2020-05-04T17:41:37.673608 | 2019-12-28T07:03:08 | 2019-12-28T07:03:08 | 179,322,301 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,079 | py | import numpy as np
import nltk
nltk.download('punkt') # one time execution
import csv
import itertools
import networkx as nx
import math
from nltk.stem import PorterStemmer
#Read this documents
file = open("Evaluation/achu5.txt", "r", encoding = "utf-8")
filedata = file.readlines()
stock_keywords = ['gain','annual', 'report', 'arbitrag', 'averag', 'down', 'bear', 'market', 'beta', 'sharehold', 'manag', 'strategi','sale', 'financi', 'solvenc', 'buy', 'sell', 'stock', 'invest', 'share', 'trade', 'price', 'stabl', 'dividend', 'fiscal', 'exchang', 'bourse', 'bull', 'broker', 'bid', 'close', 'execut', 'high', 'index', 'ipo', 'public', 'offer', 'leverag', 'low', 'margin', 'purchas', 'minimum', 'balanc', 'margin', 'account', 'open', 'order', 'portfolio', 'ralli', 'quot', 'sector', 'spread', 'volatil', 'volum', 'yield', 'bottom', 'line','perform','revenu','loss', 'profit', 'grow', 'increas', 'decreas', 'multipl', 'roe', 'roa', 'p/e', 'alpha', 'rel', 'nasdaq', 'msci', 'hangseng', 'world', 'indic', 'ep', 'quarterli', 'forward', 'contract', 'profit', 'take', 'equiti', 'market']
Multiple_keys = ['nikkei 225', 'forward P/BV', 'Dividend Yield', 'Penny stocks', 'Value stocks', 'Growth stocks', 'risk adjusted return', 'mean reverting', 'S&P 500', 'FTSE 100', 'MSCI Emerging markets', 'technical charts', 'moving averages', 'book value', 'EBITDA growth', 'EBITDA margin', 'all time high', 'all time low', 'price gains', 'Earnings exceeding forecasts', 'Last Twelve Months', 'intrinsic value', 'upside potential', 'stock futures']
from nltk.tokenize import sent_tokenize, word_tokenize
sentences = []
for g in filedata:
sentence = sent_tokenize(g)
sentences += sentence
#Read the glove file
word_embeddings = {}
f = open('glove.6B.100d.txt', encoding='utf-8')
for line in f:
values = line.split()
word = values[0]
coefs = np.asarray(values[1:], dtype='float32')
word_embeddings[word] = coefs
f.close()
from nltk.corpus import stopwords
stop_words = stopwords.words('english')
nltk.download('stopwords')
def remove_stopwords(sen):
sen_new = " ".join([i for i in sen if i not in stop_words])
return sen_new
def stockKey_calculation(sen):
count = 0
words = word_tokenize(sen)
keys = []
ps = PorterStemmer()
stem_words = []
for e in words:
g = ps.stem(e)
stem_words.append(g)
for word in stem_words:
if word in stock_keywords:
count += 1
keys.append(word)
for kes in Multiple_keys:
if kes in sen:
count += 1
keys.append(kes)
return count
def calculate_keys(list_keys):
total_count = sum(list_keys)
ratio_keys = []
if total_count == 0:
return ratio_keys
for stock_num in list_keys:
ratios = stock_num/total_count
ratio_keys.append(ratios)
return ratio_keys
def inverseRank_generator(g, smatrix):
nodes_list = list(g.nodes)
final_dict = {}
for x in nodes_list:
rank = 0
for sim_value in nodes_list:
rank += smatrix[x,sim_value]
final_dict[x] = rank
nx.set_node_attributes(g, final_dict, 'inverseRank')
return g, final_dict
def graph_reduction(g, rankDict, smatrix):
key_max = max(rankDict.keys(), key=(lambda k: rankDict[k])) #Find node which has maximum inverse rank
g.remove_node(key_max) #Remove node from the graph
rest_rank = nx.get_node_attributes(g, 'inverseRank') #get the inverse rank of the rest of the nodes
key_max2 = min(rest_rank.keys(), key=(lambda k: rest_rank[k])) #Find minimum inverse rank node
connected_component = nx.node_connected_component(g, key_max2) #Pick the suggraph whish has minimum inverse rank
s = list(connected_component) #Convert the set of nodes to list of nodes
s_max = max(s)
G_ex = nx.Graph()
G_ex.add_nodes_from(s)
G_ex.add_edges_from(itertools.combinations(s, 2)) #Generate graph from existing nodes and add weights
new_matrix = np.zeros([s_max+1, s_max+1])
for x in s:
for y in s:
new_matrix[x][y] = smatrix[x][y]
G_ex.add_weighted_edges_from([(x, y, smatrix[x][y])])
add_inverseRank, inverRank_dict = inverseRank_generator(G_ex, new_matrix) #find the inverse rank for the rest of the nodes
return G_ex, inverRank_dict
clean_sentences = [remove_stopwords(r.split()) for r in sentences]
#Creating word vectors form each sentences
sentence_vectors = []
stockcounts = []
for i in clean_sentences:
if len(i) != 0:
v = sum([word_embeddings.get(w, np.zeros((100,))) for w in i.split()])/(len(i.split())+0.001)
stockcounts.append(stockKey_calculation(i))
else:
v = np.zeros((100,))
sentence_vectors.append(v)
sim_mat = np.zeros([len(sentences), len(sentences)])
from sklearn.metrics.pairwise import cosine_similarity
#Cosine similarities
for i in range(len(sentences)):
for j in range(len(sentences)):
if i != j:
sim_mat[i][j] = cosine_similarity(sentence_vectors[i].reshape(1,100), sentence_vectors[j].reshape(1,100))[0,0]
ratio_keys = []
ratio_keys = calculate_keys(stockcounts)
mul_ratio_keys = [x * 10 for x in ratio_keys]
for val in range(len(clean_sentences)):
sim_mat[val][val] = mul_ratio_keys[val]
G = nx.from_numpy_matrix(sim_mat)
#with open('Edges.csv', 'w') as csvfile:
# fieldName = ['Source', 'Target', 'Weight', 'Type']
# theWriter = csv.DictWriter(csvfile, fieldnames= fieldName)
# theWriter.writeheader()
#
# for n1, n2, attr in G.edges(data=True):
# lines = [n1, n2, attr.get('weight')]
# if n1 == n2:
# continue
# else:
# theWriter.writerow({fieldName[0]:n1, fieldName[1]: n2, fieldName[2]: attr.get('weight'),fieldName[3]:'Undirected'})
# labels = dict((n, d['inverseRank']) for n, d in iter_G.nodes(data=True))
# nx.draw(iter_G, labels=labels, node_size=1000)
# pylab.show()
s1 = 'Sent'
#with open('Nodes.csv', 'w') as csvfile:
# fieldName = ['Id', 'Label']
# theWriter = csv.DictWriter(csvfile, fieldnames=fieldName)
# theWriter.writeheader()
#
# f = list(G.nodes(data=True))
# for node_label, attribute in f:
# linew = [node_label, attribute.get('inverseRank')]
# theWriter.writerow({fieldName[0]:linew[0], fieldName[1]:s1+str(node_label)})
#Using pageRank algorithm
#nx_graph = nx.from_numpy_array(sim_mat)
#scores = nx.pagerank(nx_graph)
#ranked_sentences = sorted(((scores[i],s) for i,s in enumerate(sentences)), reverse=True)
#summary = []
#for i in range(10):
# summary.append(ranked_sentences[i][1])
add_inverseRank, inverseRank_dict = inverseRank_generator(G, sim_mat)
num_nodes = len(G)
summary_content = math.ceil(num_nodes/2)
iter_con = num_nodes
iter_G = G
rank_dict = inverseRank_dict
while(iter_con > summary_content):
iter_G, rank_dict = graph_reduction(iter_G, rank_dict, sim_mat)
iter_con = len(iter_G)
print(list(iter_G.nodes(data=True)))
summary = list(iter_G.nodes)
for summary_sen in summary:
print(sentences[summary_sen]) | [
"rishoban27@gmail.com"
] | rishoban27@gmail.com |
4940940acafc3286cbd1263ff3ba521ea825fc8b | 96912c23d8ad0ab0e8c65cf2894b8d98e68b7b61 | /explore/latex_basic.py | 2a3b9926a622b4277b4ffe6d3d10739e09c63ae8 | [] | no_license | hemanta212/hello-manim | c68f1f15ad67f252f652232e98e84c686b2054ed | 30e82d6d1a144c87a392a947ad9a2b4bb6b9e253 | refs/heads/master | 2023-06-08T12:13:47.130624 | 2021-06-30T23:37:17 | 2021-06-30T23:37:17 | 301,414,235 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | # Latex
#!/usr/bin/env python3
from manimlib.imports import *
class BasicEquations(Scene):
# A short script showing how to use Latex commands
def construct(self):
eq1 = TextMobject("$\\vec{X}_0 \\cdot \\vec{Y}_1 = 3$")
eq1.shift(2 * UP)
eq2 = TexMobject(r"\vec{F}_{net} = \sum_i \vec{F}_i")
eq2.shift(2 * DOWN)
self.play(Write(eq1))
self.play(Write(eq2))
| [
"sharmahemanta.212@gmail.com"
] | sharmahemanta.212@gmail.com |
f4721af768f72b7661c4e6b7710c1b48368619b0 | 651ed932dcc3af273e2c5639db03ebf3688d833f | /07_classes_and_object/2_self_parameter.py | 5dbffe394e932ec53c08d84f0ef02caa24b92e93 | [] | no_license | adamcanray/canrayLearnPython | b3b470d675d786fc3efc5fcf2b43f5293c77a783 | 00e91d2ce080b845a642a554471b0a14cbc6e046 | refs/heads/master | 2020-08-20T05:44:31.815991 | 2019-10-23T15:27:12 | 2019-10-23T15:27:22 | 215,987,927 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py | # ----- header
# Saatnya untuk menjelaskan parameter self yang digunakan dalam tugas sebelumnya.
# ----- start reading
# Perhatikan:
# * Parameter self adalah konvensi Python.
# * self adalah parameter pertama yang diteruskan ke metode kelas apa pun.
# * Python akan menggunakan parameter mandiri untuk merujuk ke objek yang sedang dibuat.
# ----- end.
# ----- start coding
# Implementasi:
# class
class Calculator:
# variabel
current = 0
def add(self, amount):
while True:
self.current += amount
if amount > 0:
break
return self.current
def get_current(self):
return self.current
# panggil
print(Calculator().add(100))
# ----- end.
# note: semoga penjelasan diatas bisa membuat kalian paham fungsi dari parameter self itu sendiri. | [
"adamcool180@gmail.com"
] | adamcool180@gmail.com |
7303c13be3c7f4bc28364efa9549c14622515aa2 | 5216fa3332dd7400f64d8e359aa12c803b9615a0 | /scripts/get_luts.py | 83ec9328400efb7f0515bcc35ee805518341d005 | [] | no_license | jlingema/uGMTfirmware | acc5c98e85a7d628f150f08a1cb959dbad4fe797 | cf25041632b4ab88b844a2d6551dd5adc91ba9b6 | refs/heads/master | 2020-12-25T03:41:12.033549 | 2015-05-06T18:54:42 | 2015-05-06T18:54:42 | 32,987,956 | 0 | 0 | null | 2015-03-27T13:36:55 | 2015-03-27T13:36:54 | null | UTF-8 | Python | false | false | 1,489 | py | #!/usr/bin/env python
import urllib
import json
import argparse
import os
def get_addresses(lut_type):
'''
Gets the download URLs through the github-API.
'''
fobj = urllib.urlopen('https://api.github.com/repos/jlingema/uGMTScripts/contents/lut_tests/lut_configuration/data/{tp}'.format(tp=lut_type))
return [x['download_url'] for x in json.loads(fobj.read())]
def parse_options():
'''
Specify which LUTs to download and where to store
'''
desc = "LUT check-out tool"
parser = argparse.ArgumentParser(description=desc, formatter_class=argparse.ArgumentDefaultsHelpFormatter)
parser.add_argument('lut_type', type=str, default='binary', help='LUT-types you want to checkout.')
parser.add_argument('--outpath', type=str, dest='outpath', default='luts', help='Where files should be stored')
opts = parser.parse_args()
return opts
def main():
options = parse_options()
# check if directory exists, otherwise create.
if not os.path.exists(options.outpath):
os.makedirs(options.outpath)
# get the addresses for download
addresses = get_addresses(options.lut_type)
# check if sub-folder exists:
folder_name = options.outpath
if not os.path.exists(folder_name):
os.makedirs(folder_name)
# download the lut-files.
for add in addresses:
fname = add.split('/')[-1]
urllib.urlretrieve(add, os.path.join(folder_name, fname))
if __name__ == "__main__":
main()
| [
"joschka.lingemann@cern.ch"
] | joschka.lingemann@cern.ch |
3e9a13e8c5f476f83099bb0083b9dd0f5a08aac4 | b6a59f4b4ae064371e186658b5e67692026a51b0 | /Settings/scripts/startup/bl_ui/properties_particle.py | 0352c4e6e090be374b30889108641b4b2916f859 | [] | no_license | L0Lock/Blender-Stuff | 7fdb63cb91aa7f82b27ad56fb1138120f161c95d | 19e20f78e84856c760717ffec1783fc9b494273d | refs/heads/master | 2021-09-28T08:11:39.216097 | 2018-11-15T21:14:28 | 2018-11-15T21:14:28 | 109,046,228 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 53,486 | py | # ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8 compliant>
import bpy
from bpy.types import Panel, Menu
from rna_prop_ui import PropertyPanel
from bpy.app.translations import pgettext_iface as iface_
from .properties_physics_common import (
point_cache_ui,
effector_weights_ui,
basic_force_field_settings_ui,
basic_force_field_falloff_ui,
)
def particle_panel_enabled(context, psys):
if psys is None:
return True
phystype = psys.settings.physics_type
if psys.settings.type in {'EMITTER', 'REACTOR'} and phystype in {'NO', 'KEYED'}:
return True
else:
return (psys.point_cache.is_baked is False) and (not psys.is_edited) and (not context.particle_system_editable)
def particle_panel_poll(cls, context):
psys = context.particle_system
engine = context.scene.render.engine
settings = 0
if psys:
settings = psys.settings
elif isinstance(context.space_data.pin_id, bpy.types.ParticleSettings):
settings = context.space_data.pin_id
if not settings:
return False
return settings.is_fluid is False and (engine in cls.COMPAT_ENGINES)
def particle_get_settings(context):
if context.particle_system:
return context.particle_system.settings
elif isinstance(context.space_data.pin_id, bpy.types.ParticleSettings):
return context.space_data.pin_id
return None
class PARTICLE_MT_specials(Menu):
bl_label = "Particle Specials"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
def draw(self, context):
layout = self.layout
props = layout.operator("particle.copy_particle_systems", text="Copy Active to Selected Objects")
props.use_active = True
props.remove_target_particles = False
props = layout.operator("particle.copy_particle_systems", text="Copy All to Selected Objects")
props.use_active = False
props.remove_target_particles = True
layout.operator("particle.duplicate_particle_system")
class PARTICLE_MT_hair_dynamics_presets(Menu):
bl_label = "Hair Dynamics Presets"
preset_subdir = "hair_dynamics"
preset_operator = "script.execute_preset"
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
draw = Menu.draw_preset
class ParticleButtonsPanel:
bl_space_type = 'PROPERTIES'
bl_region_type = 'WINDOW'
bl_context = "particle"
@classmethod
def poll(cls, context):
return particle_panel_poll(cls, context)
def find_modifier(ob, psys):
for md in ob.modifiers:
if md.type == 'PARTICLE_SYSTEM':
if md.particle_system == psys:
return md
class PARTICLE_UL_particle_systems(bpy.types.UIList):
def draw_item(self, context, layout, data, item, icon, active_data, active_propname, index, flt_flag):
ob = data
psys = item
if self.layout_type in {'DEFAULT', 'COMPACT'}:
md = find_modifier(ob, psys)
layout.prop(psys, "name", text="", emboss=False, icon_value=icon)
if md:
layout.prop(md, "show_render", emboss=False, icon_only=True,
icon='RESTRICT_RENDER_OFF' if md.show_render else 'RESTRICT_RENDER_ON')
layout.prop(md, "show_viewport", emboss=False, icon_only=True,
icon='RESTRICT_VIEW_OFF' if md.show_viewport else 'RESTRICT_VIEW_ON')
elif self.layout_type == 'GRID':
layout.alignment = 'CENTER'
layout.label(text="", icon_value=icon)
class PARTICLE_PT_context_particles(ParticleButtonsPanel, Panel):
bl_label = ""
bl_options = {'HIDE_HEADER'}
COMPAT_ENGINES = {'BLENDER_RENDER', 'BLENDER_GAME'}
@classmethod
def poll(cls, context):
engine = context.scene.render.engine
return (context.particle_system or context.object or context.space_data.pin_id) and (engine in cls.COMPAT_ENGINES)
def draw(self, context):
layout = self.layout
if context.scene.render.engine == 'BLENDER_GAME':
layout.label("Not available in the Game Engine")
return
ob = context.object
psys = context.particle_system
part = 0
if ob:
row = layout.row()
row.template_list("PARTICLE_UL_particle_systems", "particle_systems", ob, "particle_systems",
ob.particle_systems, "active_index", rows=1)
col = row.column(align=True)
col.operator("object.particle_system_add", icon='ZOOMIN', text="")
col.operator("object.particle_system_remove", icon='ZOOMOUT', text="")
col.menu("PARTICLE_MT_specials", icon='DOWNARROW_HLT', text="")
if psys is None:
part = particle_get_settings(context)
layout.operator("object.particle_system_add", icon='ZOOMIN', text="New")
if part is None:
return
layout.template_ID(context.space_data, "pin_id")
if part.is_fluid:
layout.label(text="Settings used for fluid")
return
layout.prop(part, "type", text="Type")
elif not psys.settings:
split = layout.split(percentage=0.32)
col = split.column()
col.label(text="Settings:")
col = split.column()
col.template_ID(psys, "settings", new="particle.new")
else:
part = psys.settings
split = layout.split(percentage=0.32)
col = split.column()
if part.is_fluid is False:
col.label(text="Settings:")
col.label(text="Type:")
col = split.column()
if part.is_fluid is False:
row = col.row()
row.enabled = particle_panel_enabled(context, psys)
row.template_ID(psys, "settings", new="particle.new")
if part.is_fluid:
layout.label(text=iface_("%d fluid particles for this frame") % part.count, translate=False)
return
row = col.row()
row.enabled = particle_panel_enabled(context, psys)
row.prop(part, "type", text="")
row.prop(psys, "seed")
if part:
split = layout.split(percentage=0.65)
if part.type == 'HAIR':
if psys is not None and psys.is_edited:
split.operator("particle.edited_clear", text="Free Edit")
else:
row = split.row()
row.enabled = particle_panel_enabled(context, psys)
row.prop(part, "regrow_hair")
row.prop(part, "use_advanced_hair")
row = split.row()
row.enabled = particle_panel_enabled(context, psys)
row.prop(part, "hair_step")
if psys is not None and psys.is_edited:
if psys.is_global_hair:
row = layout.row(align=True)
row.operator("particle.connect_hair").all = False
row.operator("particle.connect_hair", text="Connect All").all = True
else:
row = layout.row(align=True)
row.operator("particle.disconnect_hair").all = False
row.operator("particle.disconnect_hair", text="Disconnect All").all = True
elif psys is not None and part.type == 'REACTOR':
split.enabled = particle_panel_enabled(context, psys)
split.prop(psys, "reactor_target_object")
split.prop(psys, "reactor_target_particle_system", text="Particle System")
class PARTICLE_PT_emission(ParticleButtonsPanel, Panel):
bl_label = "Emission"
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
psys = context.particle_system
settings = particle_get_settings(context)
if settings is None:
return False
if settings.is_fluid:
return False
if particle_panel_poll(PARTICLE_PT_emission, context):
return psys is None or not context.particle_system.point_cache.use_external
return False
def draw(self, context):
layout = self.layout
psys = context.particle_system
part = particle_get_settings(context)
layout.enabled = particle_panel_enabled(context, psys) and (psys is None or not psys.has_multiple_caches)
row = layout.row()
row.active = part.emit_from == 'VERT' or part.distribution != 'GRID'
row.prop(part, "count")
if part.type == 'HAIR':
row.prop(part, "hair_length")
if not part.use_advanced_hair:
row = layout.row()
row.prop(part, "use_modifier_stack")
return
if part.type != 'HAIR':
split = layout.split()
col = split.column(align=True)
col.prop(part, "frame_start")
col.prop(part, "frame_end")
col = split.column(align=True)
col.prop(part, "lifetime")
col.prop(part, "lifetime_random", slider=True)
layout.label(text="Emit From:")
layout.row().prop(part, "emit_from", expand=True)
row = layout.row()
if part.emit_from == 'VERT':
row.prop(part, "use_emit_random")
elif part.distribution == 'GRID':
row.prop(part, "invert_grid")
row.prop(part, "hexagonal_grid")
else:
row.prop(part, "use_emit_random")
row.prop(part, "use_even_distribution")
if part.emit_from == 'FACE' or part.emit_from == 'VOLUME':
layout.row().prop(part, "distribution", expand=True)
row = layout.row()
if part.distribution == 'JIT':
row.prop(part, "userjit", text="Particles/Face")
row.prop(part, "jitter_factor", text="Jittering Amount", slider=True)
elif part.distribution == 'GRID':
row.prop(part, "grid_resolution")
row.prop(part, "grid_random", text="Random", slider=True)
row = layout.row()
row.prop(part, "use_modifier_stack")
class PARTICLE_PT_hair_dynamics(ParticleButtonsPanel, Panel):
bl_label = "Hair Dynamics"
bl_options = {'DEFAULT_CLOSED'}
COMPAT_ENGINES = {'BLENDER_RENDER'}
@classmethod
def poll(cls, context):
psys = context.particle_system
engine = context.scene.render.engine
if psys is None:
return False
if psys.settings is None:
return False
return psys.settings.type == 'HAIR' and (engine in cls.COMPAT_ENGINES)
def draw_header(self, context):
psys = context.particle_system
self.layout.prop(psys, "use_hair_dynamics", text="")
def draw(self, context):
layout = self.layout
psys = context.particle_system
if not psys.cloth:
return
cloth_md = psys.cloth
cloth = cloth_md.settings
result = cloth_md.solver_result
layout.enabled = psys.use_hair_dynamics and psys.point_cache.is_baked is False
row = layout.row(align=True)
row.menu("PARTICLE_MT_hair_dynamics_presets", text=bpy.types.PARTICLE_MT_hair_dynamics_presets.bl_label)
row.operator("particle.hair_dynamics_preset_add", text="", icon='ZOOMIN')
row.operator("particle.hair_dynamics_preset_add", text="", icon='ZOOMOUT').remove_active = True
split = layout.column()
col = split.column()
col.label(text="Structure")
col.prop(cloth, "mass")
sub = col.column(align=True)
subsub = sub.row(align=True)
subsub.prop(cloth, "bending_stiffness", text="Stiffness")
subsub.prop(psys.settings, "bending_random", text="Random")
sub.prop(cloth, "bending_damping", text="Damping")
# XXX has no noticeable effect with stiff hair structure springs
#col.prop(cloth, "spring_damping", text="Damping")
split.separator()
col = split.column()
col.label(text="Volume")
col.prop(cloth, "air_damping", text="Air Drag")
col.prop(cloth, "internal_friction", slider=True)
sub = col.column(align=True)
sub.prop(cloth, "density_target", text="Density Target")
sub.prop(cloth, "density_strength", slider=True, text="Strength")
col.prop(cloth, "voxel_cell_size")
split.separator()
col = split.column()
col.label(text="Pinning")
col.prop(cloth, "pin_stiffness", text="Goal Strength")
split.separator()
col = split.column()
col.label(text="Quality:")
col.prop(cloth, "quality", text="Steps", slider=True)
row = col.row()
row.prop(psys.settings, "show_hair_grid", text="HairGrid")
if result:
box = layout.box()
if not result.status:
label = " "
icon = 'NONE'
elif result.status == {'SUCCESS'}:
label = "Success"
icon = 'NONE'
elif result.status - {'SUCCESS'} == {'NO_CONVERGENCE'}:
label = "No Convergence"
icon = 'ERROR'
else:
label = "ERROR"
icon = 'ERROR'
box.label(label, icon=icon)
box.label("Iterations: %d .. %d (avg. %d)" %
(result.min_iterations, result.max_iterations, result.avg_iterations))
box.label("Error: %.5f .. %.5f (avg. %.5f)" % (result.min_error, result.max_error, result.avg_error))
class PARTICLE_PT_cache(ParticleButtonsPanel, Panel):
    """Point-cache panel for dynamic particle/hair systems."""
    bl_label = "Cache"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        # Visible only when the system actually produces cacheable dynamics:
        # not a fluid-sim surrogate, not 'NO'/'KEYED' physics, and either an
        # emitter/reactor system or hair with dynamics (or an existing bake).
        psys = context.particle_system
        if psys is None or psys.settings is None:
            return False

        settings = psys.settings
        if settings.is_fluid:
            return False
        if settings.physics_type in {'NO', 'KEYED'}:
            return False
        if context.scene.render.engine not in cls.COMPAT_ENGINES:
            return False

        if settings.type in {'EMITTER', 'REACTOR'}:
            return True
        return (settings.type == 'HAIR' and
                (psys.use_hair_dynamics or psys.point_cache.is_baked))

    def draw(self, context):
        # Delegate to the shared point-cache UI; hair uses its own cache type.
        psys = context.particle_system
        cache_type = 'HAIR' if (psys.settings.type == 'HAIR') else 'PSYS'
        point_cache_ui(self, context, psys.point_cache, True, cache_type)
class PARTICLE_PT_velocity(ParticleButtonsPanel, Panel):
    """Panel for the initial velocity settings of a particle system."""
    bl_label = "Velocity"
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        # Hidden for boid physics, for simple (non-advanced) hair, and for
        # systems driven by an external point cache.
        if particle_panel_poll(PARTICLE_PT_velocity, context):
            psys = context.particle_system
            settings = particle_get_settings(context)

            if settings.type == 'HAIR' and not settings.use_advanced_hair:
                return False
            return settings.physics_type != 'BOIDS' and (psys is None or not psys.point_cache.use_external)
        else:
            return False

    def draw(self, context):
        """Draw emitter-geometry, emitter-object and misc velocity factors."""
        layout = self.layout

        psys = context.particle_system
        part = particle_get_settings(context)

        # Grey out the whole panel while editing is disallowed (e.g. baked).
        layout.enabled = particle_panel_enabled(context, psys)

        split = layout.split()

        col = split.column()
        col.label(text="Emitter Geometry:")
        col.prop(part, "normal_factor")
        sub = col.column(align=True)
        sub.prop(part, "tangent_factor")
        sub.prop(part, "tangent_phase", slider=True)

        col = split.column()
        col.label(text="Emitter Object:")
        col.prop(part, "object_align_factor", text="")

        layout.label(text="Other:")
        row = layout.row()
        # Particle-based emission exposes a different inherit factor.
        if part.emit_from == 'PARTICLE':
            row.prop(part, "particle_factor")
        else:
            row.prop(part, "object_factor", slider=True)
        row.prop(part, "factor_random")

        #if part.type=='REACTOR':
        #    sub.prop(part, "reactor_factor")
        #    sub.prop(part, "reaction_shape", slider=True)
class PARTICLE_PT_rotation(ParticleButtonsPanel, Panel):
    """Panel for initial orientation and angular velocity of particles."""
    bl_label = "Rotation"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        # Same visibility rules as the Velocity panel: no boids, no simple
        # hair, no externally cached systems.
        if particle_panel_poll(PARTICLE_PT_rotation, context):
            psys = context.particle_system
            settings = particle_get_settings(context)

            if settings.type == 'HAIR' and not settings.use_advanced_hair:
                return False
            return settings.physics_type != 'BOIDS' and (psys is None or not psys.point_cache.use_external)
        else:
            return False

    def draw_header(self, context):
        # Checkbox in the panel header that enables/disables rotations.
        psys = context.particle_system
        if psys:
            part = psys.settings
        else:
            part = context.space_data.pin_id

        self.layout.prop(part, "use_rotations", text="")

    def draw(self, context):
        """Draw orientation mode/phase, and angular velocity for non-hair."""
        layout = self.layout

        psys = context.particle_system
        if psys:
            part = psys.settings
        else:
            # Panel may be pinned to settings without an active system.
            part = context.space_data.pin_id

        # Everything is inactive unless rotations are enabled in the header.
        layout.enabled = particle_panel_enabled(context, psys) and part.use_rotations

        layout.label(text="Initial Orientation:")

        split = layout.split()

        col = split.column(align=True)
        col.prop(part, "rotation_mode", text="")
        col.prop(part, "rotation_factor_random", slider=True, text="Random")

        col = split.column(align=True)
        col.prop(part, "phase_factor", slider=True)
        col.prop(part, "phase_factor_random", text="Random", slider=True)

        if part.type != 'HAIR':
            layout.label(text="Angular Velocity:")

            split = layout.split()

            col = split.column(align=True)
            col.prop(part, "angular_velocity_mode", text="")
            sub = col.column(align=True)
            sub.active = part.angular_velocity_mode != 'NONE'
            sub.prop(part, "angular_velocity_factor", text="")

            col = split.column()
            col.prop(part, "use_dynamic_rotation")
class PARTICLE_PT_physics(ParticleButtonsPanel, Panel):
    """Main physics panel: size/mass plus per-physics-type settings
    (Newtonian/fluid integration, keyed timing, boid movement) and the
    keyed/boid/fluid target list.
    """
    bl_label = "Physics"
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        # Hidden for simple (non-advanced) hair and external point caches.
        if particle_panel_poll(PARTICLE_PT_physics, context):
            psys = context.particle_system
            settings = particle_get_settings(context)

            if settings.type == 'HAIR' and not settings.use_advanced_hair:
                return False
            return psys is None or not psys.point_cache.use_external
        else:
            return False

    def draw(self, context):
        layout = self.layout

        psys = context.particle_system
        part = particle_get_settings(context)

        layout.enabled = particle_panel_enabled(context, psys)

        # Physics type selector, then settings common to all types.
        layout.row().prop(part, "physics_type", expand=True)

        row = layout.row()
        col = row.column(align=True)
        col.prop(part, "particle_size")
        col.prop(part, "size_random", slider=True)

        if part.physics_type != 'NO':
            col = row.column(align=True)
            col.prop(part, "mass")
            col.prop(part, "use_multiply_size_mass", text="Multiply mass with size")

        if part.physics_type in {'NEWTON', 'FLUID'}:
            split = layout.split()

            col = split.column()
            col.label(text="Forces:")
            col.prop(part, "brownian_factor")
            col.prop(part, "drag_factor", slider=True)
            col.prop(part, "damping", slider=True)

            col = split.column()
            col.label(text="Integration:")
            col.prop(part, "integrator", text="")
            col.prop(part, "timestep")
            sub = col.row()
            sub.prop(part, "subframes")
            # Adaptive (Courant-based) subframes are only meaningful for fluid.
            supports_courant = part.physics_type == 'FLUID'
            subsub = sub.row()
            subsub.enabled = supports_courant
            subsub.prop(part, "use_adaptive_subframes", text="")
            if supports_courant and part.use_adaptive_subframes:
                col.prop(part, "courant_target", text="Threshold")

            row = layout.row()
            row.prop(part, "use_size_deflect")
            row.prop(part, "use_die_on_collision")

            layout.prop(part, "collision_group")

            if part.physics_type == 'FLUID':
                fluid = part.fluid

                split = layout.split()
                sub = split.row()
                sub.prop(fluid, "solver", expand=True)

                split = layout.split()

                col = split.column()
                col.label(text="Fluid Properties:")
                col.prop(fluid, "stiffness", text="Stiffness")
                col.prop(fluid, "linear_viscosity", text="Viscosity")
                col.prop(fluid, "buoyancy", text="Buoyancy", slider=True)

                col = split.column()
                col.label(text="Advanced:")

                # DDR-only advanced factors; each value pairs with a toggle
                # that switches the slider to factor-of-size mode.
                if fluid.solver == 'DDR':
                    sub = col.row()
                    sub.prop(fluid, "repulsion", slider=fluid.factor_repulsion)
                    sub.prop(fluid, "factor_repulsion", text="")

                    sub = col.row()
                    sub.prop(fluid, "stiff_viscosity", slider=fluid.factor_stiff_viscosity)
                    sub.prop(fluid, "factor_stiff_viscosity", text="")

                sub = col.row()
                sub.prop(fluid, "fluid_radius", slider=fluid.factor_radius)
                sub.prop(fluid, "factor_radius", text="")

                sub = col.row()
                sub.prop(fluid, "rest_density", slider=fluid.use_factor_density)
                sub.prop(fluid, "use_factor_density", text="")

                if fluid.solver == 'CLASSICAL':
                    # With the classical solver, it is possible to calculate the
                    # spacing between particles when the fluid is at rest. This
                    # makes it easier to set stable initial conditions.
                    particle_volume = part.mass / fluid.rest_density
                    spacing = pow(particle_volume, 1.0 / 3.0)
                    sub = col.row()
                    sub.label(text="Spacing: %g" % spacing)

                elif fluid.solver == 'DDR':
                    split = layout.split()

                    col = split.column()
                    col.label(text="Springs:")
                    col.prop(fluid, "spring_force", text="Force")
                    col.prop(fluid, "use_viscoelastic_springs")
                    sub = col.column(align=True)
                    sub.active = fluid.use_viscoelastic_springs
                    sub.prop(fluid, "yield_ratio", slider=True)
                    sub.prop(fluid, "plasticity", slider=True)

                    col = split.column()
                    col.label(text="Advanced:")
                    sub = col.row()
                    sub.prop(fluid, "rest_length", slider=fluid.factor_rest_length)
                    sub.prop(fluid, "factor_rest_length", text="")
                    col.label(text="")
                    sub = col.column()
                    sub.active = fluid.use_viscoelastic_springs
                    sub.prop(fluid, "use_initial_rest_length")
                    sub.prop(fluid, "spring_frames", text="Frames")

        elif part.physics_type == 'KEYED':
            split = layout.split()
            sub = split.column()

            row = layout.row()
            col = row.column()
            col.active = not psys.use_keyed_timing
            col.prop(part, "keyed_loops", text="Loops")
            if psys:
                row.prop(psys, "use_keyed_timing", text="Use Timing")

            layout.label(text="Keys:")

        elif part.physics_type == 'BOIDS':
            boids = part.boids

            row = layout.row()
            row.prop(boids, "use_flight")
            row.prop(boids, "use_land")
            row.prop(boids, "use_climb")

            split = layout.split()

            # Air movement limits (active only while flight is enabled).
            col = split.column(align=True)
            col.active = boids.use_flight
            col.prop(boids, "air_speed_max")
            col.prop(boids, "air_speed_min", slider=True)
            col.prop(boids, "air_acc_max", slider=True)
            col.prop(boids, "air_ave_max", slider=True)
            col.prop(boids, "air_personal_space")
            row = col.row(align=True)
            row.active = (boids.use_land or boids.use_climb) and boids.use_flight
            row.prop(boids, "land_smooth")

            # Land/climb movement limits.
            col = split.column(align=True)
            col.active = boids.use_land or boids.use_climb
            col.prop(boids, "land_speed_max")
            col.prop(boids, "land_jump_speed")
            col.prop(boids, "land_acc_max", slider=True)
            col.prop(boids, "land_ave_max", slider=True)
            col.prop(boids, "land_personal_space")
            col.prop(boids, "land_stick_force")

            layout.prop(part, "collision_group")

            split = layout.split()

            col = split.column(align=True)
            col.label(text="Battle:")
            col.prop(boids, "health")
            col.prop(boids, "strength")
            col.prop(boids, "aggression")
            col.prop(boids, "accuracy")
            col.prop(boids, "range")

            col = split.column()
            col.label(text="Misc:")
            col.prop(boids, "bank", slider=True)
            col.prop(boids, "pitch", slider=True)
            col.prop(boids, "height", slider=True)

        # Target list shared by keyed, boid and fluid physics.
        if psys and part.physics_type in {'KEYED', 'BOIDS', 'FLUID'}:
            if part.physics_type == 'BOIDS':
                layout.label(text="Relations:")
            elif part.physics_type == 'FLUID':
                layout.label(text="Fluid Interaction:")

            row = layout.row()
            row.template_list("UI_UL_list", "particle_targets", psys, "targets",
                              psys, "active_particle_target_index", rows=4)

            col = row.column()
            sub = col.row()
            subsub = sub.column(align=True)
            subsub.operator("particle.new_target", icon='ZOOMIN', text="")
            subsub.operator("particle.target_remove", icon='ZOOMOUT', text="")
            sub = col.row()
            subsub = sub.column(align=True)
            subsub.operator("particle.target_move_up", icon='TRIA_UP', text="")
            subsub.operator("particle.target_move_down", icon='TRIA_DOWN', text="")

            key = psys.active_particle_target
            if key:
                row = layout.row()
                if part.physics_type == 'KEYED':
                    col = row.column()
                    # doesn't work yet
                    #col.alert = key.valid
                    col.prop(key, "object", text="")
                    col.prop(key, "system", text="System")
                    col = row.column()
                    col.active = psys.use_keyed_timing
                    col.prop(key, "time")
                    col.prop(key, "duration")
                elif part.physics_type == 'BOIDS':
                    sub = row.row()
                    # doesn't work yet
                    #sub.alert = key.valid
                    sub.prop(key, "object", text="")
                    sub.prop(key, "system", text="System")

                    layout.row().prop(key, "alliance", expand=True)
                elif part.physics_type == 'FLUID':
                    sub = row.row()
                    # doesn't work yet
                    #sub.alert = key.valid
                    sub.prop(key, "object", text="")
                    sub.prop(key, "system", text="System")
class PARTICLE_PT_boidbrain(ParticleButtonsPanel, Panel):
    """Panel for editing the rule set ("brain") of a boid particle system."""
    bl_label = "Boid Brain"
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        # Only for boid physics that is not driven by an external cache.
        psys = context.particle_system
        settings = particle_get_settings(context)
        engine = context.scene.render.engine

        if settings is None:
            return False
        if psys is not None and psys.point_cache.use_external:
            return False
        return settings.physics_type == 'BOIDS' and engine in cls.COMPAT_ENGINES

    def draw(self, context):
        layout = self.layout

        boids = particle_get_settings(context).boids

        layout.enabled = particle_panel_enabled(context, context.particle_system)

        # Currently boids can only use the first state so these are commented out for now.
        #row = layout.row()
        #row.template_list("UI_UL_list", "particle_boids", boids, "states",
        #                  boids, "active_boid_state_index", compact="True")
        #col = row.row()
        #sub = col.row(align=True)
        #sub.operator("boid.state_add", icon='ZOOMIN', text="")
        #sub.operator("boid.state_del", icon='ZOOMOUT', text="")
        #sub = row.row(align=True)
        #sub.operator("boid.state_move_up", icon='TRIA_UP', text="")
        #sub.operator("boid.state_move_down", icon='TRIA_DOWN', text="")

        state = boids.active_boid_state

        #layout.prop(state, "name", text="State name")

        row = layout.row()
        row.prop(state, "ruleset_type")
        if state.ruleset_type == 'FUZZY':
            row.prop(state, "rule_fuzzy", slider=True)
        else:
            row.label(text="")

        # Rule list with add/remove and reorder buttons.
        row = layout.row()
        row.template_list("UI_UL_list", "particle_boids_rules", state,
                          "rules", state, "active_boid_rule_index", rows=4)

        col = row.column()
        sub = col.row()
        subsub = sub.column(align=True)
        subsub.operator_menu_enum("boid.rule_add", "type", icon='ZOOMIN', text="")
        subsub.operator("boid.rule_del", icon='ZOOMOUT', text="")
        sub = col.row()
        subsub = sub.column(align=True)
        subsub.operator("boid.rule_move_up", icon='TRIA_UP', text="")
        subsub.operator("boid.rule_move_down", icon='TRIA_DOWN', text="")

        rule = state.active_boid_rule

        if rule:
            row = layout.row()
            row.prop(rule, "name", text="")
            # somebody make nice icons for boids here please! -jahka
            row.prop(rule, "use_in_air", icon='TRIA_UP', text="")
            row.prop(rule, "use_on_land", icon='TRIA_DOWN', text="")

            # Settings specific to the selected rule type.
            row = layout.row()

            if rule.type == 'GOAL':
                row.prop(rule, "object")
                row = layout.row()
                row.prop(rule, "use_predict")
            elif rule.type == 'AVOID':
                row.prop(rule, "object")
                row = layout.row()
                row.prop(rule, "use_predict")
                row.prop(rule, "fear_factor")
            elif rule.type == 'FOLLOW_PATH':
                row.label(text="Not yet functional")
            elif rule.type == 'AVOID_COLLISION':
                row.prop(rule, "use_avoid")
                row.prop(rule, "use_avoid_collision")
                row.prop(rule, "look_ahead")
            elif rule.type == 'FOLLOW_LEADER':
                row.prop(rule, "object", text="")
                row.prop(rule, "distance")
                row = layout.row()
                row.prop(rule, "use_line")
                sub = row.row()
                sub.active = rule.line
                sub.prop(rule, "queue_count")
            elif rule.type == 'AVERAGE_SPEED':
                row.prop(rule, "speed", slider=True)
                row.prop(rule, "wander", slider=True)
                row.prop(rule, "level", slider=True)
            elif rule.type == 'FIGHT':
                row.prop(rule, "distance")
                row.prop(rule, "flee_distance")
class PARTICLE_PT_render(ParticleButtonsPanel, Panel):
    """Render panel: how particles are drawn at render time
    (halo/line/path/object/group/billboard) plus shared trail settings.
    """
    bl_label = "Render"
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        settings = particle_get_settings(context)
        engine = context.scene.render.engine
        if settings is None:
            return False
        return engine in cls.COMPAT_ENGINES

    def draw(self, context):
        layout = self.layout

        psys = context.particle_system
        part = particle_get_settings(context)

        if psys:
            row = layout.row()
            # Object/group duplis take their material from the dupli object,
            # so the material slot is greyed out.
            if part.render_type in {'OBJECT', 'GROUP'}:
                row.enabled = False
            row.prop(part, "material_slot", text="")
            row.prop(psys, "parent")

        split = layout.split()

        col = split.column()
        col.prop(part, "use_render_emitter")
        col.prop(part, "use_parent_particles")

        col = split.column()
        col.prop(part, "show_unborn")
        col.prop(part, "use_dead")

        layout.row().prop(part, "render_type", expand=True)

        split = layout.split()

        col = split.column()

        if part.render_type == 'LINE':
            col.prop(part, "line_length_tail")
            col.prop(part, "line_length_head")

            split.prop(part, "use_velocity_length")
        elif part.render_type == 'PATH':
            col.prop(part, "use_strand_primitive")
            sub = col.column()
            sub.active = (part.use_strand_primitive is False)
            sub.prop(part, "use_render_adaptive")
            sub = col.column()
            sub.active = part.use_render_adaptive or part.use_strand_primitive is True
            sub.prop(part, "adaptive_angle")
            sub = col.column()
            sub.active = (part.use_render_adaptive is True and part.use_strand_primitive is False)
            sub.prop(part, "adaptive_pixel")
            col.prop(part, "use_hair_bspline")
            col.prop(part, "render_step", text="Steps")

            col = split.column()
            col.label(text="Timing:")
            col.prop(part, "use_absolute_path_time")

            if part.type == 'HAIR' or psys.point_cache.is_baked:
                col.prop(part, "path_start", text="Start", slider=not part.use_absolute_path_time)
            else:
                col.prop(part, "trail_count")

            col.prop(part, "path_end", text="End", slider=not part.use_absolute_path_time)
            col.prop(part, "length_random", text="Random", slider=True)

            row = layout.row()
            col = row.column()

            # Child simplification is only available for strand-rendered
            # interpolated-children hair.
            if part.type == 'HAIR' and part.use_strand_primitive is True and part.child_type == 'INTERPOLATED':
                layout.prop(part, "use_simplify")
                if part.use_simplify is True:
                    row = layout.row()
                    row.prop(part, "simplify_refsize")
                    row.prop(part, "simplify_rate")
                    row.prop(part, "simplify_transition")

                    row = layout.row()
                    row.prop(part, "use_simplify_viewport")
                    sub = row.row()
                    sub.active = part.use_simplify_viewport is True
                    sub.prop(part, "simplify_viewport")
        elif part.render_type == 'OBJECT':
            col.prop(part, "dupli_object")
            sub = col.row()
            sub.prop(part, "use_global_dupli")
            sub.prop(part, "use_rotation_dupli")
            sub.prop(part, "use_scale_dupli")
        elif part.render_type == 'GROUP':
            col.prop(part, "dupli_group")
            split = layout.split()

            col = split.column()
            col.prop(part, "use_whole_group")
            sub = col.column()
            sub.active = (part.use_whole_group is False)
            sub.prop(part, "use_group_pick_random")
            sub.prop(part, "use_group_count")

            col = split.column()
            sub = col.column()
            sub.active = (part.use_whole_group is False)
            sub.prop(part, "use_global_dupli")
            sub.prop(part, "use_rotation_dupli")
            sub.prop(part, "use_scale_dupli")

            # Per-object weight list when "use count" is enabled.
            if part.use_group_count and not part.use_whole_group:
                row = layout.row()
                row.template_list("UI_UL_list", "particle_dupli_weights", part, "dupli_weights",
                                  part, "active_dupliweight_index")

                col = row.column()
                sub = col.row()
                subsub = sub.column(align=True)
                subsub.operator("particle.dupliob_copy", icon='ZOOMIN', text="")
                subsub.operator("particle.dupliob_remove", icon='ZOOMOUT', text="")
                subsub.operator("particle.dupliob_move_up", icon='TRIA_UP', text="")
                subsub.operator("particle.dupliob_move_down", icon='TRIA_DOWN', text="")

                weight = part.active_dupliweight
                if weight:
                    row = layout.row()
                    row.prop(weight, "count")
        elif part.render_type == 'BILLBOARD':
            ob = context.object

            col.label(text="Align:")

            row = layout.row()
            row.prop(part, "billboard_align", expand=True)
            row.prop(part, "lock_billboard", text="Lock")
            row = layout.row()
            row.prop(part, "billboard_object")

            row = layout.row()
            col = row.column(align=True)
            col.label(text="Tilt:")
            col.prop(part, "billboard_tilt", text="Angle", slider=True)
            col.prop(part, "billboard_tilt_random", text="Random", slider=True)
            col = row.column()
            col.prop(part, "billboard_offset")

            row = layout.row()
            col = row.column()
            col.prop(part, "billboard_size", text="Scale")
            if part.billboard_align == 'VEL':
                col = row.column(align=True)
                col.label("Velocity Scale:")
                col.prop(part, "billboard_velocity_head", text="Head")
                col.prop(part, "billboard_velocity_tail", text="Tail")

            if psys:
                col = layout.column()
                col.prop_search(psys, "billboard_normal_uv", ob.data, "uv_textures")
                col.prop_search(psys, "billboard_time_index_uv", ob.data, "uv_textures")

            split = layout.split(percentage=0.33)
            split.label(text="Split UVs:")
            split.prop(part, "billboard_uv_split", text="Number of splits")

            if psys:
                col = layout.column()
                col.active = part.billboard_uv_split > 1
                col.prop_search(psys, "billboard_split_uv", ob.data, "uv_textures")

            row = col.row()
            row.label(text="Animate:")
            row.prop(part, "billboard_animation", text="")
            row.label(text="Offset:")
            row.prop(part, "billboard_offset_split", text="")

        # Trail settings shared by halo, line and billboard rendering.
        if part.render_type == 'HALO' or part.render_type == 'LINE' or part.render_type == 'BILLBOARD':
            row = layout.row()
            col = row.column()
            col.prop(part, "trail_count")
            if part.trail_count > 1:
                col.prop(part, "use_absolute_path_time", text="Length in Frames")
                col = row.column()
                col.prop(part, "path_end", text="Length", slider=not part.use_absolute_path_time)
                col.prop(part, "length_random", text="Random", slider=True)
            else:
                col = row.column()
                col.label(text="")

        # Size is shown here for non-advanced hair rendered as objects/groups,
        # since the Physics panel (which normally shows it) is hidden then.
        if part.render_type in {'OBJECT', 'GROUP'} and not part.use_advanced_hair:
            row = layout.row(align=True)
            row.prop(part, "particle_size")
            row.prop(part, "size_random", slider=True)
class PARTICLE_PT_draw(ParticleButtonsPanel, Panel):
    """Viewport display panel: how particles are drawn in the 3D view."""
    bl_label = "Display"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        settings = particle_get_settings(context)
        engine = context.scene.render.engine
        if settings is None:
            return False
        return engine in cls.COMPAT_ENGINES

    def draw(self, context):
        layout = self.layout

        psys = context.particle_system
        part = particle_get_settings(context)

        row = layout.row()
        row.prop(part, "draw_method", expand=True)
        row.prop(part, "show_guide_hairs")

        # Nothing to configure when nothing is drawn.
        if part.draw_method == 'NONE' or (part.render_type == 'NONE' and part.draw_method == 'RENDER'):
            return

        # True when the viewport ends up drawing paths (directly or via the
        # 'RENDER' draw method with a path render type).
        path = (part.render_type == 'PATH' and part.draw_method == 'RENDER') or part.draw_method == 'PATH'

        row = layout.row()
        row.prop(part, "draw_percentage", slider=True)
        if part.draw_method != 'RENDER' or part.render_type == 'HALO':
            row.prop(part, "draw_size")
        else:
            row.label(text="")

        # Warn when only a fraction of particles is displayed but the
        # simulation is dynamic and not baked.
        if part.draw_percentage != 100 and psys is not None:
            if part.type == 'HAIR':
                if psys.use_hair_dynamics and psys.point_cache.is_baked is False:
                    layout.row().label(text="Display percentage makes dynamics inaccurate without baking!")
            else:
                phystype = part.physics_type
                if phystype != 'NO' and phystype != 'KEYED' and psys.point_cache.is_baked is False:
                    layout.row().label(text="Display percentage makes dynamics inaccurate without baking!")

        row = layout.row()
        col = row.column()
        col.prop(part, "show_size")
        col.prop(part, "show_velocity")
        col.prop(part, "show_number")
        if part.physics_type == 'BOIDS':
            col.prop(part, "show_health")

        col = row.column(align=True)
        col.label(text="Color:")
        col.prop(part, "draw_color", text="")
        sub = col.row(align=True)
        # Max color value only applies to velocity/acceleration coloring.
        sub.active = (part.draw_color in {'VELOCITY', 'ACCELERATION'})
        sub.prop(part, "color_maximum", text="Max")

        if path:
            col.prop(part, "draw_step")
class PARTICLE_PT_children(ParticleButtonsPanel, Panel):
    """Child-particle panel: counts, clumping, roughness and kink."""
    bl_label = "Children"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        return particle_panel_poll(cls, context)

    def draw(self, context):
        layout = self.layout

        psys = context.particle_system
        part = particle_get_settings(context)

        layout.row().prop(part, "child_type", expand=True)

        if part.child_type == 'NONE':
            return

        row = layout.row()

        col = row.column(align=True)
        col.prop(part, "child_nbr", text="Display")
        col.prop(part, "rendered_child_count", text="Render")

        if part.child_type == 'INTERPOLATED':
            col = row.column()
            if psys:
                col.prop(psys, "child_seed", text="Seed")
            col.prop(part, "virtual_parents", slider=True)
            col.prop(part, "create_long_hair_children")
        else:
            col = row.column(align=True)
            col.prop(part, "child_size", text="Size")
            col.prop(part, "child_size_random", text="Random")

        split = layout.split()

        col = split.column()

        col.label(text="Effects:")

        # Clump: either a curve mapping or factor/shape sliders.
        sub = col.column(align=True)
        sub.prop(part, "use_clump_curve")
        if part.use_clump_curve:
            sub.template_curve_mapping(part, "clump_curve")
        else:
            sub.prop(part, "clump_factor", slider=True)
            sub.prop(part, "clump_shape", slider=True)

        sub = col.column(align=True)
        sub.prop(part, "use_clump_noise")
        subsub = sub.column()
        subsub.enabled = part.use_clump_noise
        subsub.prop(part, "clump_noise_size")

        sub = col.column(align=True)
        sub.prop(part, "child_length", slider=True)
        sub.prop(part, "child_length_threshold", slider=True)

        if part.child_type == 'SIMPLE':
            sub = col.column(align=True)
            sub.prop(part, "child_radius", text="Radius")
            sub.prop(part, "child_roundness", text="Roundness", slider=True)
            if psys:
                sub.prop(psys, "child_seed", text="Seed")
        elif part.virtual_parents > 0.0:
            # Parting is incompatible with virtual parents; explain why.
            sub = col.column(align=True)
            sub.label(text="Parting not")
            sub.label(text="available with")
            sub.label(text="virtual parents")
        else:
            sub = col.column(align=True)
            sub.prop(part, "child_parting_factor", text="Parting", slider=True)
            sub.prop(part, "child_parting_min", text="Min")
            sub.prop(part, "child_parting_max", text="Max")

        col = split.column()

        # Roughness: either a curve mapping or the three manual components.
        col.prop(part, "use_roughness_curve")
        if part.use_roughness_curve:
            sub = col.column()
            sub.template_curve_mapping(part, "roughness_curve")
            sub.prop(part, "roughness_1", text="Roughness")
            sub.prop(part, "roughness_1_size", text="Size")
        else:
            col.label(text="Roughness:")

            sub = col.column(align=True)
            sub.prop(part, "roughness_1", text="Uniform")
            sub.prop(part, "roughness_1_size", text="Size")

            sub = col.column(align=True)
            # NOTE: "Endpoint" is passed positionally (as the text label) here.
            sub.prop(part, "roughness_endpoint", "Endpoint")
            sub.prop(part, "roughness_end_shape")

            sub = col.column(align=True)
            sub.prop(part, "roughness_2", text="Random")
            sub.prop(part, "roughness_2_size", text="Size")
            sub.prop(part, "roughness_2_threshold", slider=True)

        layout.row().label(text="Kink:")
        layout.row().prop(part, "kink", expand=True)

        split = layout.split()
        split.active = part.kink != 'NO'

        if part.kink == 'SPIRAL':
            col = split.column()
            sub = col.column(align=True)
            sub.prop(part, "kink_amplitude", text="Radius")
            sub.prop(part, "kink_amplitude_random", text="Random", slider=True)
            sub = col.column(align=True)
            sub.prop(part, "kink_axis")
            sub.prop(part, "kink_axis_random", text="Random", slider=True)
            col = split.column(align=True)
            col.prop(part, "kink_frequency", text="Frequency")
            col.prop(part, "kink_shape", text="Shape", slider=True)
            col.prop(part, "kink_extra_steps", text="Steps")
        else:
            col = split.column()
            sub = col.column(align=True)
            sub.prop(part, "kink_amplitude")
            sub.prop(part, "kink_amplitude_clump", text="Clump", slider=True)
            col.prop(part, "kink_flat", slider=True)
            col = split.column(align=True)
            col.prop(part, "kink_frequency")
            col.prop(part, "kink_shape", slider=True)
class PARTICLE_PT_field_weights(ParticleButtonsPanel, Panel):
    """Panel exposing effector-field weights for the particle system."""
    bl_label = "Field Weights"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        return particle_panel_poll(cls, context)

    def draw(self, context):
        settings = particle_get_settings(context)

        # Shared effector-weights UI used by several physics panels.
        effector_weights_ui(self, context, settings.effector_weights, 'PSYS')

        # Hair systems get extra toggles for how fields affect the strands.
        if settings.type == 'HAIR':
            layout = self.layout

            row = layout.row()
            row.prop(settings.effector_weights, "apply_to_hair_growing")
            row.prop(settings, "apply_effector_to_children")

            row = layout.row()
            row.prop(settings, "effect_hair", slider=True)
class PARTICLE_PT_force_fields(ParticleButtonsPanel, Panel):
    """Panel for the two force fields each particle can carry."""
    bl_label = "Force Field Settings"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    def draw(self, context):
        layout = self.layout
        settings = particle_get_settings(context)

        row = layout.row()
        row.prop(settings, "use_self_effect")
        row.prop(settings, "effector_amount", text="Amount")

        # Both per-particle force fields share the same layout, so emit them
        # from a loop instead of duplicating the section.
        fields = (settings.force_field_1, settings.force_field_2)
        for index, field in enumerate(fields, start=1):
            split = layout.split(percentage=0.2)
            split.label(text="Type %d:" % index)
            split.prop(field, "type", text="")

            basic_force_field_settings_ui(self, context, field)
            if field.type != 'NONE':
                layout.label(text="Falloff:")
            basic_force_field_falloff_ui(self, context, field)

            # Blank label acts as a spacer between the two sections only,
            # so it is not emitted after the last field.
            if field.type != 'NONE' and index == 1:
                layout.label(text="")
class PARTICLE_PT_vertexgroups(ParticleButtonsPanel, Panel):
    """Panel mapping emitter vertex groups to per-particle influences."""
    bl_label = "Vertex Groups"
    bl_options = {'DEFAULT_CLOSED'}
    COMPAT_ENGINES = {'BLENDER_RENDER'}

    @classmethod
    def poll(cls, context):
        if context.particle_system is None:
            return False
        return particle_panel_poll(cls, context)

    def draw(self, context):
        layout = self.layout

        ob = context.object
        psys = context.particle_system

        col = layout.column()

        # Every influence is a vertex-group search field plus an invert
        # toggle; generate the rows from a table instead of repeating them.
        influences = (
            ("density", "Density"),
            ("length", "Length"),
            ("clump", "Clump"),
            ("kink", "Kink"),
            ("roughness_1", "Roughness 1"),
            ("roughness_2", "Roughness 2"),
            ("roughness_end", "Roughness End"),
        )
        for suffix, label in influences:
            row = col.row(align=True)
            row.prop_search(psys, "vertex_group_" + suffix, ob, "vertex_groups", text=label)
            row.prop(psys, "invert_vertex_group_" + suffix, text="", toggle=True, icon='ARROW_LEFTRIGHT')

        # The remaining vertex-group influences (velocity, size, tangent,
        # rotation, field) don't work and are still waiting for a better
        # implementation, so they are intentionally not exposed here.
class PARTICLE_PT_custom_props(ParticleButtonsPanel, PropertyPanel, Panel):
    """Standard custom-properties (ID properties) panel for particle settings.

    The drawing is inherited from PropertyPanel; only the data path and
    property type are configured here.
    """
    COMPAT_ENGINES = {'BLENDER_RENDER'}
    _context_path = "particle_system.settings"
    _property_type = bpy.types.ParticleSettings
# All UI classes defined in this module, in registration order (panels are
# stacked in the properties editor in this sequence).
classes = (
    PARTICLE_MT_specials,
    PARTICLE_MT_hair_dynamics_presets,
    PARTICLE_UL_particle_systems,
    PARTICLE_PT_context_particles,
    PARTICLE_PT_emission,
    PARTICLE_PT_hair_dynamics,
    PARTICLE_PT_cache,
    PARTICLE_PT_velocity,
    PARTICLE_PT_rotation,
    PARTICLE_PT_physics,
    PARTICLE_PT_boidbrain,
    PARTICLE_PT_render,
    PARTICLE_PT_draw,
    PARTICLE_PT_children,
    PARTICLE_PT_field_weights,
    PARTICLE_PT_force_fields,
    PARTICLE_PT_vertexgroups,
    PARTICLE_PT_custom_props,
)
# Register the classes directly when the module is run on its own
# (only for live edit; normal registration happens via bl_ui).
if __name__ == "__main__":
    from bpy.utils import register_class
    for cls in classes:
        register_class(cls)
| [
"loic.dautry@gmail.com"
] | loic.dautry@gmail.com |
be5ec02c737544bccc2db5410763fde85dcb258f | 844871e54a514ac66871bac253164d3e3b13f4b8 | /app/tracks/schema.py | 458dca2a71b99da2f03457d38f3c6ce21ed0dcaf | [] | no_license | l225li/react-tracks | b469422df4a8d0c06620bb7501a2823d17f2921c | 91f0ec5b7e8e20501ae28f637fee33b724de0be4 | refs/heads/master | 2023-01-20T02:36:36.225660 | 2019-06-14T16:28:07 | 2019-06-14T16:28:07 | 187,041,017 | 0 | 0 | null | 2023-01-03T22:15:53 | 2019-05-16T14:11:12 | Python | UTF-8 | Python | false | false | 3,386 | py | import graphene
from graphene_django import DjangoObjectType
from .models import Track, Like
from users.schema import UserType
from graphql import GraphQLError
from django.db.models import Q
class TrackType(DjangoObjectType):
    """GraphQL type automatically derived from the Track model."""

    class Meta:
        model = Track
class LikeType(DjangoObjectType):
    """GraphQL type automatically derived from the Like model."""

    class Meta:
        model = Like
class Query(graphene.ObjectType):
    """Root query exposing all tracks (optionally filtered) and all likes."""

    tracks = graphene.List(TrackType, search=graphene.String())
    likes = graphene.List(LikeType)

    def resolve_tracks(self, info, search=None):
        """Return all tracks, or those whose title, description, URL or
        poster's username contains `search` (case-insensitive)."""
        if search:
            # Renamed from `filter`, which shadowed the builtin of the
            # same name inside this resolver.
            search_filter = (
                Q(title__icontains=search) |
                Q(description__icontains=search) |
                Q(url__icontains=search) |
                Q(posted_by__username__icontains=search)
            )
            return Track.objects.filter(search_filter)

        return Track.objects.all()

    def resolve_likes(self, info):
        """Return every Like row."""
        return Like.objects.all()
class CreateTrack(graphene.Mutation):
    """Mutation that creates a track owned by the logged-in user."""

    track = graphene.Field(TrackType)

    class Arguments:
        title = graphene.String()
        description = graphene.String()
        url = graphene.String()

    def mutate(self, info, **kwargs):
        """Persist a new Track from the given fields; requires auth."""
        current_user = info.context.user

        # Anonymous requests may not create tracks.
        if current_user.is_anonymous:
            raise GraphQLError("Log in to add a track.")

        new_track = Track(**kwargs, posted_by=current_user)
        new_track.save()
        return CreateTrack(new_track)
class UpdateTrack(graphene.Mutation):
    """Mutation that lets a track's owner edit its title, description or URL."""

    track = graphene.Field(TrackType)

    class Arguments:
        track_id = graphene.Int(required=True)
        title = graphene.String()
        description = graphene.String()
        url = graphene.String()

    def mutate(self, info, track_id, title=None, url=None, description=None):
        """Apply any supplied fields to the track and return it.

        Raises GraphQLError when the track does not exist or the requester
        is not its owner.
        """
        user = info.context.user

        # Surface a GraphQL error instead of letting Track.DoesNotExist
        # bubble up as a generic server error for unknown ids.
        try:
            track = Track.objects.get(id=track_id)
        except Track.DoesNotExist:
            raise GraphQLError('Cannot find track with given track id')

        if track.posted_by != user:
            raise GraphQLError('Not permitted to update this track.')

        # Only overwrite fields that were actually supplied (truthy);
        # empty strings are treated the same as "not supplied".
        if title:
            track.title = title
        if url:
            track.url = url
        if description:
            track.description = description

        track.save()
        return UpdateTrack(track=track)
class DeleteTrack(graphene.Mutation):
    """Mutation that lets a track's owner delete it; returns the deleted id."""

    track_id = graphene.Int()

    class Arguments:
        track_id = graphene.Int(required=True)

    def mutate(self, info, track_id):
        """Delete the track after verifying it exists and is owned by the user."""
        user = info.context.user

        # Surface a GraphQL error instead of letting Track.DoesNotExist
        # bubble up as a generic server error for unknown ids.
        try:
            track = Track.objects.get(id=track_id)
        except Track.DoesNotExist:
            raise GraphQLError('Cannot find track with given track id')

        if track.posted_by != user:
            raise GraphQLError('Not permitted to deleted this track.')

        track.delete()
        return DeleteTrack(track_id=track_id)
class CreateLike(graphene.Mutation):
    """Mutation: record that the authenticated user likes a track."""

    user = graphene.Field(UserType)
    track = graphene.Field(TrackType)

    class Arguments:
        track_id = graphene.Int(required=True)

    def mutate(self, info, track_id):
        """Create a Like row linking the caller to the track.

        Raises GraphQLError when the caller is anonymous or the track id
        does not exist.
        """
        user = info.context.user
        if user.is_anonymous:
            raise GraphQLError('Login to like tracks.')
        try:
            track = Track.objects.get(id=track_id)
        except Track.DoesNotExist:
            # The original ``if not track`` check was dead code:
            # ``objects.get`` raises instead of returning None.
            raise GraphQLError('Cannot find track with given track id')
        Like.objects.create(
            user=user,
            track=track
        )
        return CreateLike(user=user, track=track)
class Mutation(graphene.ObjectType):
    """Root mutation type: registers every track/like mutation field."""
    create_track = CreateTrack.Field()
    update_track = UpdateTrack.Field()
    delete_track = DeleteTrack.Field()
    create_like = CreateLike.Field()
| [
"vanessa.li104@gmail.com"
] | vanessa.li104@gmail.com |
3bd8e76e4fa14b88b06d97f5cf7c41d5e07f26a1 | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp-with-texts/COM21-HCXSTUPRXY-MIB.py | 9a57eb77f230af9d2f437dffe242ba17ea37211c | [
"LicenseRef-scancode-warranty-disclaimer",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 78,414 | py | #
# PySNMP MIB module COM21-HCXSTUPRXY-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/COM21-HCXSTUPRXY-MIB
# Produced by pysmi-0.3.4 at Wed May 1 12:26:22 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueRangeConstraint, ConstraintsUnion, ConstraintsIntersection, ValueSizeConstraint, SingleValueConstraint = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueRangeConstraint", "ConstraintsUnion", "ConstraintsIntersection", "ValueSizeConstraint", "SingleValueConstraint")
com21Hcx, com21, com21Stu, com21Traps = mibBuilder.importSymbols("COM21-HCX-MIB", "com21Hcx", "com21", "com21Stu", "com21Traps")
hcxAlmSeverity, hcxEventLogTime = mibBuilder.importSymbols("COM21-HCXALM-MIB", "hcxAlmSeverity", "hcxEventLogTime")
ModuleCompliance, NotificationGroup = mibBuilder.importSymbols("SNMPv2-CONF", "ModuleCompliance", "NotificationGroup")
Unsigned32, iso, MibIdentifier, NotificationType, ModuleIdentity, IpAddress, Counter32, TimeTicks, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Integer32, Counter64, Bits, ObjectIdentity = mibBuilder.importSymbols("SNMPv2-SMI", "Unsigned32", "iso", "MibIdentifier", "NotificationType", "ModuleIdentity", "IpAddress", "Counter32", "TimeTicks", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Integer32", "Counter64", "Bits", "ObjectIdentity")
TextualConvention, DisplayString, MacAddress = mibBuilder.importSymbols("SNMPv2-TC", "TextualConvention", "DisplayString", "MacAddress")
# Module identity for the Com21 ComPort proxy MIB (OID 1.3.6.1.4.1.1141.3.1).
# NOTE: this file is pysmi-generated output; edits here are lost on regeneration.
com21StuPrxy = ModuleIdentity((1, 3, 6, 1, 4, 1, 1141, 3, 1))
if mibBuilder.loadTexts: com21StuPrxy.setLastUpdated('9701080000Z')
if mibBuilder.loadTexts: com21StuPrxy.setOrganization('Com21, Inc.')
if mibBuilder.loadTexts: com21StuPrxy.setContactInfo(' Network Management Postal: Paul Gordon Com21, Inc. 750 Tasman Drive Milpitas, California 95035 USA Tel: +1 408 953 9100 Fax: +1 408 953 9299 E-mail: pgordon@com21.com')
if mibBuilder.loadTexts: com21StuPrxy.setDescription('This is the Com21 ComController ComPort Proxy MIB. COM21 Part# 005-0025-00')
# Subtree anchors for the MIB's object groups (OID 1.3.6.1.4.1.1141.3.x).
com21StuControlGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 2))
com21StuPhysicalGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 3))
com21StuAlarmGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 4))
com21StuEtherConfigGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 5))
com21StuEtherTypeGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 6))
com21StuFiltIpMultiGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 7))
com21StuEtherMacGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 8))
com21StuEtherStatsGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 9))
com21StuStatsGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 10))
com21StuQStatsGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 11))
com21StuAlarmSevGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 12))
com21StuCodeImageGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 13))
com21StuVoiceChanStatsGroup = MibIdentifier((1, 3, 6, 1, 4, 1, 1141, 3, 14))
class FrequencyKhz(Integer32):
    """Textual convention: downstream RF frequency in kHz (0..800000, i.e. up to 800 MHz)."""
    subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 800000)
class UpstrmFreqKhz(Integer32):
    """Textual convention: upstream RF frequency in kHz (5000..40000, i.e. 5-40 MHz)."""
    subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(5000, 40000)
class EpochTime(Integer32):
    """Textual convention: seconds since 1970 (per stuEpochTime's description), 0..2^31-1."""
    subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 2147483647)
class Offset(Integer32):
    """Textual convention: 14-bit offset value (0..16383); used for the ComPort transmit offset."""
    subtypeSpec = Integer32.subtypeSpec + ValueRangeConstraint(0, 16383)
class AlarmSeverity(Integer32):
    """Textual convention: alarm severity — clear(1), warning(2), minor(3), major(4), critical(5)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))
    namedValues = NamedValues(("clear", 1), ("warning", 2), ("minor", 3), ("major", 4), ("critical", 5))
class Com21RowStatus(Integer32):
    """Textual convention: table row status — active(1), create(2), destroy(3), deactive(4)."""
    subtypeSpec = Integer32.subtypeSpec + ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))
    namedValues = NamedValues(("active", 1), ("create", 2), ("destroy", 3), ("deactive", 4))
com21StuControlTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1), )
if mibBuilder.loadTexts: com21StuControlTable.setStatus('current')
if mibBuilder.loadTexts: com21StuControlTable.setDescription('.')
com21StuControlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuCtrlMacAddress"))
if mibBuilder.loadTexts: com21StuControlEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuControlEntry.setDescription('.')
stuCtrlMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCtrlMacAddress.setStatus('current')
if mibBuilder.loadTexts: stuCtrlMacAddress.setDescription(' Contains IEEE 802 medium access control address of the ComPort device.')
stuUserText = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(32, 32)).setFixedLength(32)).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuUserText.setStatus('current')
if mibBuilder.loadTexts: stuUserText.setDescription(" The ComPort User Text is a non-volatile string which is set by the management system with an arbitrary value. This value does not have to be unique. It's purpose is solely to assist the carrier personnel with identifying the ComPort device.")
stuSerialNumber = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuSerialNumber.setStatus('current')
if mibBuilder.loadTexts: stuSerialNumber.setDescription(' The Serial Number is a 10 character serial number used by COM21 to uniquely identify the device.')
stuBoardRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuBoardRevision.setStatus('current')
if mibBuilder.loadTexts: stuBoardRevision.setDescription(' This string defines the hardware revision of the ComPort board.')
stuUnitRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(10, 10)).setFixedLength(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuUnitRevision.setStatus('current')
if mibBuilder.loadTexts: stuUnitRevision.setDescription(' This string defines the hardware revision of the complete ComPort unit.')
stuTunerRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(10, 10)).setFixedLength(10)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuTunerRevision.setStatus('current')
if mibBuilder.loadTexts: stuTunerRevision.setDescription(' This string defines the hardware revision of the RF Tuner.')
stuModelName = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuModelName.setStatus('current')
if mibBuilder.loadTexts: stuModelName.setDescription(' This string defines the ComPort Model Type.')
stuUnitManufacturer = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 8), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(8, 8)).setFixedLength(8)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuUnitManufacturer.setStatus('current')
if mibBuilder.loadTexts: stuUnitManufacturer.setDescription(' This string defines the manufacturer of the ComPort. Initially COM21 and 3COM.')
stuDesKeySize = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("bit56key", 1), ("bit40key", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuDesKeySize.setStatus('current')
if mibBuilder.loadTexts: stuDesKeySize.setDescription(' DES encryption key size supported by the ComPort.')
stuMibRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 10), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(16, 16)).setFixedLength(16)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuMibRevision.setStatus('current')
if mibBuilder.loadTexts: stuMibRevision.setDescription(' Indicates the Mib Revision as display string of max. 8 bytes. That includes string terminator.')
stuEpochTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 11), EpochTime()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEpochTime.setStatus('current')
if mibBuilder.loadTexts: stuEpochTime.setDescription(' ComPort time in seconds since 1970.')
stuRestartAction = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("nil", 1), ("restart", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuRestartAction.setStatus('current')
if mibBuilder.loadTexts: stuRestartAction.setDescription(' The Restart Action is an active-value. The setting of this value is idempotent. The setting of this value to restart causes the STU to restart utilizing the present software load.')
stuPrevTestResult = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("success", 1), ("failure", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevTestResult.setStatus('current')
if mibBuilder.loadTexts: stuPrevTestResult.setDescription(' Result of previous test. Presently this will be warm or cold restart.')
stuPrevTestFailCode = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 14), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-32768, -1))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevTestFailCode.setStatus('current')
if mibBuilder.loadTexts: stuPrevTestFailCode.setDescription(' Failure result code. Only set if stuPrevTestResult is equal to failure.')
stuOperationState = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 15), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("operational", 1), ("deauthorized", 2), ("offline", 3), ("upstreamTest", 4), ("etherLoopback", 5), ("downloading", 6), ("failedRanging", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuOperationState.setStatus('current')
if mibBuilder.loadTexts: stuOperationState.setDescription(' The ComPort Operational State is controlled either by the stuAdminState or locally detected conditions. Note that downloading and failedRanging are ComController helded states.')
stuOperationStateChange = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 110)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuCtrlMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuOperationState"))
if mibBuilder.loadTexts: stuOperationStateChange.setStatus('current')
if mibBuilder.loadTexts: stuOperationStateChange.setDescription('.')
stuAimModuleId = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("none", 1), ("debugModule", 2), ("teleReturn", 3), ("telephony1", 4), ("telephony2", 5), ("wirelessEthernet", 6), ("atm", 7)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAimModuleId.setStatus('current')
if mibBuilder.loadTexts: stuAimModuleId.setDescription(' identifies the connected AIM module type. The maceEthernet entry indicates NO AIM inserted.')
stuMaceFail = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 119)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuCtrlMacAddress"))
if mibBuilder.loadTexts: stuMaceFail.setStatus('current')
if mibBuilder.loadTexts: stuMaceFail.setDescription(' This trap would be sent after ComPort is completely booted. That is, at least download is completed. Event will be detected during POST (Power On Self Test)')
stuUpstrmTestFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 17), UpstrmFreqKhz()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuUpstrmTestFreq.setStatus('current')
if mibBuilder.loadTexts: stuUpstrmTestFreq.setDescription(' Transmit Frequency used by ComPort for Upstream RF test. The default shall be 0 (an invalid freq).')
stuInbPresent = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("inbInstalled", 1), ("noInbSupport", 2), ("inbReady", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuInbPresent.setStatus('current')
if mibBuilder.loadTexts: stuInbPresent.setDescription(' Indicates if the INB is present on the ComPORT. This is a future item not supported by present ComPORT hardware.')
stuInbContToneEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 20), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuInbContToneEnable.setStatus('current')
if mibBuilder.loadTexts: stuInbContToneEnable.setDescription(' Controls the activation of a continuous pilot tone. This does not effect the standard tone sent before data (which is controlled by a hardware switch). This value is not stored in NVRAM. Default is disable.')
stuLastRestartCause = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 21), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6))).clone(namedValues=NamedValues(("unknown", 1), ("hcxDirected", 2), ("pingFail", 3), ("imageRefresh", 4), ("lof", 5), ("fault", 6)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuLastRestartCause.setStatus('current')
if mibBuilder.loadTexts: stuLastRestartCause.setDescription(' specifies the reason for the last ComPort reset.')
stuUpstrmPingCntrl = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 22), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuUpstrmPingCntrl.setStatus('current')
if mibBuilder.loadTexts: stuUpstrmPingCntrl.setDescription(' used to control the upstream ping. Default is enable.')
stuUpstrmTestTimeout = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 23), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 86400))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuUpstrmTestTimeout.setStatus('current')
if mibBuilder.loadTexts: stuUpstrmTestTimeout.setDescription(' timeout used by ComPort for Upstream RF test. If the ComController does not terminate the test in this period - defined in seconds - then the ComPort will automatically reset, thus terminating the test. 0 means no timeout. Default is 0.')
stuDnstrmAltFreq = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 24), FrequencyKhz()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuDnstrmAltFreq.setStatus('current')
if mibBuilder.loadTexts: stuDnstrmAltFreq.setDescription(' Defines an alternate downstream frequency for the ComPort. This value is non-voltile. Default is 0 (an invalid freq). IMPORTANT - this is a tbd feature not available in 2.3 release.')
stuAsicRevision = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 25), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAsicRevision.setStatus('current')
if mibBuilder.loadTexts: stuAsicRevision.setDescription(' defines the revision of the ASIC. Presently values 1-255 are supported.')
stuVoiceAimLpBk = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 26), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("nil", 1), ("stuLpBk", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuVoiceAimLpBk.setStatus('current')
if mibBuilder.loadTexts: stuVoiceAimLpBk.setDescription(' If a voiceAim Module is configured on the stu, setting the Aim Module to stuLpBk will configure the Aim Module to loopback from one voice port to the other. default: nil')
stuVoiceAimPort1Status = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 27), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("onHook", 1), ("offHook", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceAimPort1Status.setStatus('current')
if mibBuilder.loadTexts: stuVoiceAimPort1Status.setDescription(' Describes whether the AimPort is onhook or offhook. default: nil')
stuVoiceAimPort2Status = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 2, 1, 1, 28), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("onHook", 1), ("offHook", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceAimPort2Status.setStatus('current')
if mibBuilder.loadTexts: stuVoiceAimPort2Status.setDescription(' Describes whether the AimPort is onhook or offhook. default: nil')
com21StuPhysicalTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1), )
if mibBuilder.loadTexts: com21StuPhysicalTable.setStatus('current')
if mibBuilder.loadTexts: com21StuPhysicalTable.setDescription('.')
com21StuPhysicalEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuPhyMacAddress"))
if mibBuilder.loadTexts: com21StuPhysicalEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuPhysicalEntry.setDescription('.')
stuPhyMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPhyMacAddress.setStatus('current')
if mibBuilder.loadTexts: stuPhyMacAddress.setDescription(' Contains IEEE 802 medium access control address of the ComPort device.')
stuXmitFrequency = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 2), UpstrmFreqKhz()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuXmitFrequency.setStatus('current')
if mibBuilder.loadTexts: stuXmitFrequency.setDescription(' Contains the transmit frequency of the ComPort RF board in KHz. A change in this value shall result in the ComPort being hopped to a differing RX card. The Common Controller shall ensure the sanity of a change to this attribute and shall perform the necessary interface to the RX cards. The actual set of this attribute on the ComPort shall be performed by a GO_SET message.')
stuXmitFeqChange = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 111)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuPhyMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuXmitFrequency"))
if mibBuilder.loadTexts: stuXmitFeqChange.setStatus('current')
if mibBuilder.loadTexts: stuXmitFeqChange.setDescription(' This trap is generated whenever an ComPort is hopped. It will not be generated at initial acquisition.')
stuRecvFrequency = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 3), FrequencyKhz()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuRecvFrequency.setStatus('current')
if mibBuilder.loadTexts: stuRecvFrequency.setDescription(' Contains the receive frequency in KHz. This attribute will match the downstream frequency of the ComController. The ONLY time when this should be change is when the NMAPS wishes hop the ComPort to a different ComController.')
stuRecvRfSrEstimate = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuRecvRfSrEstimate.setStatus('current')
if mibBuilder.loadTexts: stuRecvRfSrEstimate.setDescription(' Received Signal S/R estimate in dB on Rf Downstream. This value is an approximation in dB. Received value should be divided by 10 to get a fixed point representation. e.g., value 185 means 18.5 dB.')
stuRecvRfSigLevel = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuRecvRfSigLevel.setStatus('current')
if mibBuilder.loadTexts: stuRecvRfSigLevel.setDescription(' Received Signal Level in dBmv on Rf Downstream.')
stuOutOfSpecRFCond = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 117)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuPhyMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuRecvRfSigLevel"))
if mibBuilder.loadTexts: stuOutOfSpecRFCond.setStatus('current')
if mibBuilder.loadTexts: stuOutOfSpecRFCond.setDescription(' This trap is generated whenever an ComPort is acquired with out of spec signal quality condition. The trap trap is issued after the GO-SET-DONE message.')
stuXmitOffset = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 6), Offset()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuXmitOffset.setStatus('current')
if mibBuilder.loadTexts: stuXmitOffset.setDescription(' Specifies the transmit offset of the ComPort.')
stuXmitDacVRef = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967295))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuXmitDacVRef.setStatus('current')
if mibBuilder.loadTexts: stuXmitDacVRef.setDescription(' Specifies the upstream RF DAC Voltage Reference. This value is set only via a GO_SET message under the control of the RX card.')
stuRecvFreqDrift = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 3, 1, 1, 8), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-250, 250))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuRecvFreqDrift.setStatus('current')
if mibBuilder.loadTexts: stuRecvFreqDrift.setDescription(' Frequency shift (+/- 250Khz) from Downstream Frequency.')
com21StuAlarmTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 4, 1), )
if mibBuilder.loadTexts: com21StuAlarmTable.setStatus('current')
if mibBuilder.loadTexts: com21StuAlarmTable.setDescription('.')
com21StuAlarmEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 4, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuAlmMacAddress"), (0, "COM21-HCXSTUPRXY-MIB", "stuAlmTime"))
if mibBuilder.loadTexts: com21StuAlarmEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuAlarmEntry.setDescription('.')
stuAlmMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 4, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAlmMacAddress.setStatus('current')
if mibBuilder.loadTexts: stuAlmMacAddress.setDescription(' Contains IEEE 802 medium access control address of the ComPort device.')
stuAlmTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 4, 1, 1, 2), EpochTime()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAlmTime.setStatus('current')
if mibBuilder.loadTexts: stuAlmTime.setDescription(' Specifies the time the alarm condition was detected.')
stuAlmTrapId = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 4, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(100, 200))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAlmTrapId.setStatus('current')
if mibBuilder.loadTexts: stuAlmTrapId.setDescription(' Specifies the trap number.')
stuAlmSeverity = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 4, 1, 1, 4), AlarmSeverity()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAlmSeverity.setStatus('current')
if mibBuilder.loadTexts: stuAlmSeverity.setDescription(' Specifies the alarm severity.')
com21StuAlarmSevTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1), )
if mibBuilder.loadTexts: com21StuAlarmSevTable.setStatus('current')
if mibBuilder.loadTexts: com21StuAlarmSevTable.setDescription('.')
com21StuAlarmSevEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuAlmSevMacAddress"))
if mibBuilder.loadTexts: com21StuAlarmSevEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuAlarmSevEntry.setDescription('.')
stuAlmSevMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAlmSevMacAddress.setStatus('current')
if mibBuilder.loadTexts: stuAlmSevMacAddress.setDescription(' Contains IEEE 802 medium access control address of the ComPort device.')
stuAlmSevCrcThres = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1, 2), AlarmSeverity()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAlmSevCrcThres.setStatus('current')
if mibBuilder.loadTexts: stuAlmSevCrcThres.setDescription(' Specifies the CRC Error Threshold alarm severity. Default Minor.')
stuAlmSevTeiThres = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1, 3), AlarmSeverity()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAlmSevTeiThres.setStatus('current')
if mibBuilder.loadTexts: stuAlmSevTeiThres.setDescription(' Specifies the TEI Threshold alarm severity. Default Minor.')
stuAlmSevHecThres = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1, 4), AlarmSeverity()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAlmSevHecThres.setStatus('current')
if mibBuilder.loadTexts: stuAlmSevHecThres.setDescription(' Specifies the HEC Error Threshold alarm severity. Default Minor.')
stuAlmSevEsThres = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1, 5), AlarmSeverity()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAlmSevEsThres.setStatus('current')
if mibBuilder.loadTexts: stuAlmSevEsThres.setDescription(' Specifies the Errored Seconds Threshold alarm severity. Default Minor.')
stuAlmSevFecThres = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 12, 1, 1, 6), AlarmSeverity()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAlmSevFecThres.setStatus('current')
if mibBuilder.loadTexts: stuAlmSevFecThres.setDescription(' Specifies the Corrected FEC Threshold alarm severity. Default Minor.')
com21StuEtherConfigTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1), )
if mibBuilder.loadTexts: com21StuEtherConfigTable.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherConfigTable.setDescription('.')
com21StuEtherConfigEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuEthMacAddress"))
if mibBuilder.loadTexts: com21StuEtherConfigEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherConfigEntry.setDescription('.')
stuEthMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuEthMacAddress.setStatus('current')
if mibBuilder.loadTexts: stuEthMacAddress.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
stuEtherFiltFlushAction = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("none", 1), ("flushMac", 2), ("flushEtherType", 3), ("flushIp", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherFiltFlushAction.setStatus('current')
if mibBuilder.loadTexts: stuEtherFiltFlushAction.setDescription(' Controls the flush operations.')
stuEtherForwArpOnly = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherForwArpOnly.setStatus('current')
if mibBuilder.loadTexts: stuEtherForwArpOnly.setDescription(' If enable state then only ARP broadcast packets are forwarded upstream. Default is disable.')
stuEtherMacFiltAge = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 4095))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherMacFiltAge.setStatus('current')
if mibBuilder.loadTexts: stuEtherMacFiltAge.setDescription(' Specifies the number of seconds that a learned MAC Address entry survives since last learning.')
stuEtherBCastRateEn = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherBCastRateEn.setStatus('current')
# --- ComPort (STU) Ethernet configuration entry: remaining columns ---
# NOTE: pysnmp-generated MIB code (COM21-HCXSTUPRXY-MIB).  The enclosing
# MibTable/MibTableRow objects for these columns are defined earlier in the
# file.  Each column is built as MibTableColumn(OID, syntax).setMaxAccess(...);
# the STATUS/DESCRIPTION clauses are attached only when the MIB builder was
# asked to load texts (mibBuilder.loadTexts).
if mibBuilder.loadTexts: stuEtherBCastRateEn.setDescription(' Enable rate control. Default is disabled.')
# Upstream broadcast rate limit, in packets per second (range 1..255).
stuEtherBCastRateCo = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherBCastRateCo.setStatus('current')
if mibBuilder.loadTexts: stuEtherBCastRateCo.setDescription(' Limit of maximum number of broadcast packets per second allowed upstream.')
# Sticky-bit control: disable(1) / enable(2); protects learned MAC entries
# from replacement for at least one aging period.
stuEtherStickyBitCtrl = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherStickyBitCtrl.setStatus('current')
if mibBuilder.loadTexts: stuEtherStickyBitCtrl.setDescription(' When the Sticky Bit control is enabled, a MAC table entry will not be replaced by another MAC address for at least one aging period. Default disable.')
# 802.1Q priority queuing / 802.1Q-format packet filtering toggle.
stuEther8021QEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 8), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEther8021QEnable.setStatus('current')
if mibBuilder.loadTexts: stuEther8021QEnable.setDescription(' Enables both 802.1Q priority queuing and the filter of 802.1Q format packets. Default disable.')
# Reject 802.2 non-SNAP encoded packets (needs Ethernet type filtering on).
stuEtherNonSnapRej = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherNonSnapRej.setStatus('current')
if mibBuilder.loadTexts: stuEtherNonSnapRej.setDescription(' This attribute controls the filtering of 802.2 non- SNAP encoded packets. Ethernet type filtering must be enabled to activate this feature. Default disable.')
# Forward IGMP multicast upstream; overrides other multicast filters for IGMP.
stuEtherIgmpEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 10), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherIgmpEnable.setStatus('current')
if mibBuilder.loadTexts: stuEtherIgmpEnable.setDescription(' This attribute controls the forwarding of IGMP multicast traffic upstream. This attribute will override any other multicast filtering in the case of IGMP packets. Default is enable.')
# Forward non-IP multicast upstream; overrides other multicast filters.
stuEtherNonIpMultiEn = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 11), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherNonIpMultiEn.setStatus('current')
if mibBuilder.loadTexts: stuEtherNonIpMultiEn.setDescription(' This attribute controls the forwarding of non-IP multicast traffic upstream. This attribute will override any other multicast filtering in the case of non-IP multicast packets. Default is enable.')
# BCMP-only mode: when enabled, all non-BCMP traffic is rejected in both
# directions (used by the Spanning Tree implementation).  disable(1)/enable(2),
# default disable.
stuEtherBcmpOnly = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherBcmpOnly.setStatus('current')
# Fixed typo in the user-visible DESCRIPTION text: "Spannong Tree" -> "Spanning Tree".
if mibBuilder.loadTexts: stuEtherBcmpOnly.setDescription(' When this attribute is enabled, all non-BCMP traffic will be rejected in both the upstream and downstream. This attribute is utilized in a Spanning Tree implementation. Default is disable.')
# IGMP snooping toggle for upstream multicast traffic: disable(1)/enable(2),
# default disable.  Read-write column of the STU Ethernet configuration entry.
stuEtherIgmpSnoopEnable = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 5, 1, 1, 13), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("disable", 1), ("enable", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherIgmpSnoopEnable.setStatus('current')
if mibBuilder.loadTexts: stuEtherIgmpSnoopEnable.setDescription(' This attribute controls the snooping of IGMP multicast traffic upstream. Default is disable.')
# --- com21StuEtherTypeTable (1.3.6.1.4.1.1141.3.6.1) ---
# Ethernet Type filter table: holds EtherType codes to be passed upstream,
# max 6 entries per ComPort.  Indexed by (stuFiltEtherMacAddr, stuFiltEtherType).
com21StuEtherTypeTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 6, 1), )
if mibBuilder.loadTexts: com21StuEtherTypeTable.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherTypeTable.setDescription('.')
com21StuFiltEthTypeEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 6, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuFiltEtherMacAddr"), (0, "COM21-HCXSTUPRXY-MIB", "stuFiltEtherType"))
if mibBuilder.loadTexts: com21StuFiltEthTypeEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuFiltEthTypeEntry.setDescription(' Holds Ethernet Types codes that are to be passed upstream. Maximum of 6 entries.')
# Index column 1: MAC address of the ComPort device (read-only).
stuFiltEtherMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 6, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuFiltEtherMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuFiltEtherMacAddr.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
# Index column 2: the EtherType code itself (1..65535).
stuFiltEtherType = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 6, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuFiltEtherType.setStatus('current')
if mibBuilder.loadTexts: stuFiltEtherType.setDescription(' Holds Ethernet Type codes that is to be passed upstream. Held on ComPort. ')
# Row-status column (Com21RowStatus, defined elsewhere): creates/deletes rows.
stuFiltEtherStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 6, 1, 1, 3), Com21RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: stuFiltEtherStatus.setStatus('current')
if mibBuilder.loadTexts: stuFiltEtherStatus.setDescription(' This attribute is used to create or delete Ethernet Type entry.')
# --- com21StuFiltIpMultiTable (1.3.6.1.4.1.1141.3.7.1) ---
# IP multicast address filter table: addresses allowed upstream, downstream,
# or both, max 16 entries.  Indexed by (stuFiltIpMacAddr, stuFiltIpMultiAddr).
com21StuFiltIpMultiTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 7, 1), )
if mibBuilder.loadTexts: com21StuFiltIpMultiTable.setStatus('current')
if mibBuilder.loadTexts: com21StuFiltIpMultiTable.setDescription('.')
com21StuFiltIpMultiEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 7, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuFiltIpMacAddr"), (0, "COM21-HCXSTUPRXY-MIB", "stuFiltIpMultiAddr"))
if mibBuilder.loadTexts: com21StuFiltIpMultiEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuFiltIpMultiEntry.setDescription(' Holds IP Multicast Addresses that are to be allowed in either upstream, or downstream, or both directions. Maximum of 16 entries.')
# Index column 1: MAC address of the ComPort device (read-only).
stuFiltIpMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 7, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuFiltIpMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuFiltIpMacAddr.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
# Index column 2: the multicast MAC address (valid range per the description).
stuFiltIpMultiAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 7, 1, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuFiltIpMultiAddr.setStatus('current')
if mibBuilder.loadTexts: stuFiltIpMultiAddr.setDescription(' Holds Multicast Address. Held on ComPort. Valid values are within the range of MAC addresses of 01.00.5E.00.00.00 to 01.00.5E.7F.FF.FF')
# Row-status column: creates/deletes rows.
stuFiltIpMultiStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 7, 1, 1, 3), Com21RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: stuFiltIpMultiStatus.setStatus('current')
if mibBuilder.loadTexts: stuFiltIpMultiStatus.setDescription(' Used to delete or create an entry.')
# Filter direction: upstreamOnly(1), downstreamOnly(2), bothDirections(3).
stuFiltIpDirectCntrl = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 7, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("upstreamOnly", 1), ("downstreamOnly", 2), ("bothDirections", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuFiltIpDirectCntrl.setStatus('current')
if mibBuilder.loadTexts: stuFiltIpDirectCntrl.setDescription(' Used to control the filter direction. Default is bothDirections.')
# --- com21StuEtherMacTable (1.3.6.1.4.1.1141.3.8.1) ---
# Per-MAC-address action table, max 8 entries.  Indexed by
# (stuEtherStuMac, stuEtherMacAddr).
com21StuEtherMacTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 8, 1), )
if mibBuilder.loadTexts: com21StuEtherMacTable.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherMacTable.setDescription('.')
com21StuEtherMacEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 8, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuEtherStuMac"), (0, "COM21-HCXSTUPRXY-MIB", "stuEtherMacAddr"))
if mibBuilder.loadTexts: com21StuEtherMacEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherMacEntry.setDescription(' Holds Ethernet MAC address that are to be processed as specified. Maximum of 8 entries.')
# Index column 1: MAC address of the ComPort device (read-only).
stuEtherStuMac = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 8, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuEtherStuMac.setStatus('current')
if mibBuilder.loadTexts: stuEtherStuMac.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
# Index column 2: the subject MAC address held on the ComPort.
stuEtherMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 8, 1, 1, 2), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuEtherMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuEtherMacAddr.setDescription(' Holds MAC address. Held on ComPort.')
# Action for packets from this MAC: learned(1), processor(2), upstream(3),
# reject(4).  Per the description, becomes read-only for learned addresses.
stuEtherMacType = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 8, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("learned", 1), ("processor", 2), ("upstream", 3), ("reject", 4)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherMacType.setStatus('current')
if mibBuilder.loadTexts: stuEtherMacType.setDescription(' Holds action to be performed upon packets of the specified MAC address. In the case of the learned MAC address, this entry becomes read only. Held on ComPort.')
# Row-status column: creates/deletes rows.
stuEtherMacStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 8, 1, 1, 4), Com21RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: stuEtherMacStatus.setStatus('current')
if mibBuilder.loadTexts: stuEtherMacStatus.setDescription(' Used to create or delete an entry.')
# --- com21StuEtherStatsTable (1.3.6.1.4.1.1141.3.9.1) ---
# Per-ComPort Ethernet statistics: "Curr*" columns cover the current 15-minute
# period, "Prev*" the previous one.  Indexed by stuEthStatsMacAddr.
com21StuEtherStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1), )
if mibBuilder.loadTexts: com21StuEtherStatsTable.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherStatsTable.setDescription('.')
com21StuEtherStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuEthStatsMacAddr"))
if mibBuilder.loadTexts: com21StuEtherStatsEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuEtherStatsEntry.setDescription('.')
# Index column: MAC address of the ComPort device (read-only).
stuEthStatsMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuEthStatsMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuEthStatsMacAddr.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
stuCurrEtherRunts = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherRunts.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherRunts.setDescription(' Contains number of runt packets received by the ComPort in the current 15minute period. This data is held on the ComPort.')
stuCurrEtherCollitns = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherCollitns.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherCollitns.setDescription(' Contains number of collisions detected by the ComPort in the current 15minute period. This data is held on the ComPort.')
stuCurrEtherFramErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherFramErrs.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherFramErrs.setDescription(' Contains number of Ethernet Packets with Framing Errors detected by the ComPort in the current 15minute period. This data is held on the ComPort.')
stuCurrEtherCrcErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherCrcErrs.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherCrcErrs.setDescription(' Contains number of Ethernet Packets with CRC Errors detected by the ComPort in the current 15minute period. This data is held on the ComPort.')
# Writable CRC-error threshold; 0 disables the threshold trap.
stuCurrEtherCrcThres = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 6), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuCurrEtherCrcThres.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherCrcThres.setDescription(' contains 15minute error level at which a threshold trap shall be generated. This is mapped to theComPort alarm severity entry stuThresholdLevel. Default is 0 - threshold disabled.')
# NOTE: trap definition (OID branch ...1141.4.112) interleaved between table
# columns by the MIB compiler; fired when stuCurrEtherCrcErrs crosses
# stuCurrEtherCrcThres.
stuEtherCrcThres = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 112)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuEthStatsMacAddr"), ("COM21-HCXSTUPRXY-MIB", "stuCurrEtherCrcErrs"), ("COM21-HCXSTUPRXY-MIB", "stuCurrEtherCrcThres"))
if mibBuilder.loadTexts: stuEtherCrcThres.setStatus('current')
if mibBuilder.loadTexts: stuEtherCrcThres.setDescription('.')
stuCurrEtherTxUnder = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherTxUnder.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherTxUnder.setDescription(' Contains number of times Ethernet TX FIFO Underflow occurred on transmit queue in the current 15minute period.')
stuCurrEtherRxOver = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherRxOver.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherRxOver.setDescription(' Contains number of times Ethernet RX FIFO Overflow occurred on a receive queue in the current 15minute period.')
stuCurrEtherDropFrms = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 9), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCurrEtherDropFrms.setStatus('current')
if mibBuilder.loadTexts: stuCurrEtherDropFrms.setDescription(' Contains number of receive frames dropped by MACE controller in the current 15minute period.')
# Previous-period counterparts of the counters above.
stuPrevEtherRunts = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherRunts.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherRunts.setDescription(' Contains number of runt packets received by the ComPort in the previous 15minute period. This data is held on the ComPort.')
stuPrevEtherCollitns = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherCollitns.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherCollitns.setDescription(' Contains number of collisions detected by the ComPort in the previous 15minute period. This data is held on the ComPort.')
stuPrevEtherFramErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherFramErrs.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherFramErrs.setDescription(' Contains number of Ethernet Packets with Framing Errors detected by the ComPort in the previous 15minute period. This data is held on the ComPort.')
stuPrevEtherCrcErrs = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherCrcErrs.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherCrcErrs.setDescription(' Contains number of Ethernet Packets with CRC Errors detected by the ComPort in the previous 15minute period. This data is held on the ComPort.')
stuPrevEtherTxUnder = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherTxUnder.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherTxUnder.setDescription(' Contains number of times Ethernet TX FIFO Underflow occurred on transmit queue in the previous 15minute period.')
stuPrevEtherRxOver = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherRxOver.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherRxOver.setDescription(' Contains number of times Ethernet RX FIFO Overflow occurred on a receive queue in the previous 15minute period.')
stuPrevEtherDropFrms = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 16), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuPrevEtherDropFrms.setStatus('current')
if mibBuilder.loadTexts: stuPrevEtherDropFrms.setDescription(' Contains number of receive frames dropped by MACE controller in the previous 15minute period.')
# Ethernet connection state: unint(1), is(2) (in service), oos(3) (out of
# service) -- per the named values; exact semantics defined by the device.
stuEtherConnState = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 17), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("unint", 1), ("is", 2), ("oos", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuEtherConnState.setStatus('current')
if mibBuilder.loadTexts: stuEtherConnState.setDescription(' Contains state of Ethernet connection.')
# Write clear(2) to reset all counters in this group (see description for
# the hcxStatsControl precondition).
stuEtherClearStats = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 9, 1, 1, 18), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("nil", 1), ("clear", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuEtherClearStats.setStatus('current')
if mibBuilder.loadTexts: stuEtherClearStats.setDescription(' This attribute is used to clear all Ethernet statistics counters in this group. This operation is only possible if the hcxStatsControl attribute is set to wrapCurr.')
# --- com21StuStatsTable (1.3.6.1.4.1.1141.3.10.1) ---
# Per-ComPort transport/ATM statistics with current/previous 15-minute
# buckets and per-counter threshold traps.  Indexed by stuStatsMacAddress.
com21StuStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1), )
if mibBuilder.loadTexts: com21StuStatsTable.setStatus('current')
if mibBuilder.loadTexts: com21StuStatsTable.setDescription('.')
com21StuStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuStatsMacAddress"))
if mibBuilder.loadTexts: com21StuStatsEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuStatsEntry.setDescription('.')
# Index column: MAC address of the ComPort device (read-only).
stuStatsMacAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsMacAddress.setStatus('current')
if mibBuilder.loadTexts: stuStatsMacAddress.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
stuStatsCurrAtmTei = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 2), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsCurrAtmTei.setStatus('current')
if mibBuilder.loadTexts: stuStatsCurrAtmTei.setDescription(' Number of cells received with transport errors - uncorrectable FEC or HEC errors - in the current 15minute period.')
# Threshold for transport-error trap; 0 disables.
stuAtmTeiThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 3), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAtmTeiThreshold.setStatus('current')
if mibBuilder.loadTexts: stuAtmTeiThreshold.setDescription(' contains 15minute transport error level at which a threshold trap shall be generated. Default is 0 - threshold disabled.')
# Trap (...1141.4.113): transport-error counter crossed its threshold.
stuAtmTeiThres = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 113)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuStatsMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuStatsCurrAtmTei"), ("COM21-HCXSTUPRXY-MIB", "stuAtmTeiThreshold"))
if mibBuilder.loadTexts: stuAtmTeiThres.setStatus('current')
if mibBuilder.loadTexts: stuAtmTeiThres.setDescription('.')
stuStatsCurrAtmHec = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsCurrAtmHec.setStatus('current')
if mibBuilder.loadTexts: stuStatsCurrAtmHec.setDescription(' Number of cells received with HEC or Encryption errors in the current 15minute period.')
# Threshold for HEC-error trap; 0 disables.
stuAtmHecThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 5), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAtmHecThreshold.setStatus('current')
if mibBuilder.loadTexts: stuAtmHecThreshold.setDescription(' contains 15minute HEC error level at which a threshold trap shall be generated. Default is 0 - threshold disabled.')
# Trap (...1141.4.114): HEC-error counter crossed its threshold.
stuAtmHecThres = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 114)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuStatsMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuStatsCurrAtmHec"), ("COM21-HCXSTUPRXY-MIB", "stuAtmHecThreshold"))
if mibBuilder.loadTexts: stuAtmHecThres.setStatus('current')
if mibBuilder.loadTexts: stuAtmHecThres.setDescription('.')
stuStatsCurrESMin = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsCurrESMin.setStatus('current')
if mibBuilder.loadTexts: stuStatsCurrESMin.setDescription(' Represents the number of errored seconds in the current 15 minute period.')
# Threshold for errored-seconds trap; 0 disables.
stuESMinThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 7), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuESMinThreshold.setStatus('current')
if mibBuilder.loadTexts: stuESMinThreshold.setDescription(' contains 15minute errored seconds level at which a threshold trap shall be generated. Default is 0 - threshold disabled.')
# Trap (...1141.4.115): errored-seconds counter crossed its threshold.
stuESMinThres = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 115)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuStatsMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuStatsCurrESMin"), ("COM21-HCXSTUPRXY-MIB", "stuESMinThreshold"))
if mibBuilder.loadTexts: stuESMinThres.setStatus('current')
if mibBuilder.loadTexts: stuESMinThres.setDescription('.')
stuStatsCurrFecCorrect = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsCurrFecCorrect.setStatus('current')
if mibBuilder.loadTexts: stuStatsCurrFecCorrect.setDescription(' Fecs corrected by Demodulator on Downstream in the current 15 minute period.')
# Threshold for FEC-corrected trap; 0 disables.
stuFecCorrectThreshold = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 9), Integer32()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuFecCorrectThreshold.setStatus('current')
if mibBuilder.loadTexts: stuFecCorrectThreshold.setDescription(' contains 15minute Fecs corrected level at which a threshold trap shall be generated. Default is 0 - threshold disabled.')
# Trap (...1141.4.116): FEC-corrected counter crossed its threshold.
stuFecCorrectThres = NotificationType((1, 3, 6, 1, 4, 1, 1141, 4, 116)).setObjects(("COM21-HCXSTUPRXY-MIB", "stuAlmSeverity"), ("COM21-HCXSTUPRXY-MIB", "stuAlmTime"), ("COM21-HCXSTUPRXY-MIB", "stuStatsMacAddress"), ("COM21-HCXSTUPRXY-MIB", "stuStatsCurrFecCorrect"), ("COM21-HCXSTUPRXY-MIB", "stuFecCorrectThreshold"))
if mibBuilder.loadTexts: stuFecCorrectThres.setStatus('current')
if mibBuilder.loadTexts: stuFecCorrectThres.setDescription('.')
stuStatsCurrUASMin = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 10), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsCurrUASMin.setStatus('current')
if mibBuilder.loadTexts: stuStatsCurrUASMin.setDescription(' Represents the number of unavailable seconds in the current 15 minute period.')
# Previous-period counterparts of the counters above (no thresholds/traps).
stuStatsPrevAtmTei = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 11), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsPrevAtmTei.setStatus('current')
if mibBuilder.loadTexts: stuStatsPrevAtmTei.setDescription(' Number of cells received with transport errors - HEC or uncorrectable FEC errors - in the previous 15minute period.')
stuStatsPrevAtmHec = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 12), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsPrevAtmHec.setStatus('current')
if mibBuilder.loadTexts: stuStatsPrevAtmHec.setDescription(' Number of cells received with HEC or Encryption errors in the previous 15minute period.')
stuStatsPrevESMin = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 13), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsPrevESMin.setStatus('current')
if mibBuilder.loadTexts: stuStatsPrevESMin.setDescription(' Represents the number of errored seconds in the previous 15 minute period.')
stuStatsPrevFecCorrect = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 14), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsPrevFecCorrect.setStatus('current')
if mibBuilder.loadTexts: stuStatsPrevFecCorrect.setDescription(' Fecs corrected by Demodulator on Downstream in the previous 15 minute period.')
stuStatsPrevUASMin = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 15), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuStatsPrevUASMin.setStatus('current')
if mibBuilder.loadTexts: stuStatsPrevUASMin.setDescription(' Represents the number of unavailable seconds in the previous 15 minute period.')
# Write clear(2) to reset all counters in this group (requires
# hcxStatsControl == wrapCurr per the description).
stuStatsClearStats = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 10, 1, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("nil", 1), ("clear", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuStatsClearStats.setStatus('current')
if mibBuilder.loadTexts: stuStatsClearStats.setDescription(' This attribute is used to clear the all ComPort statistics counters in this group. This operation is only possible if the hcxStatsControl attribute is set to wrapCurr.')
# --- com21StuQStatsTable (1.3.6.1.4.1.1141.3.11.1) ---
# Per-queue statistics, indexed by (stuAtmStuMacAddr, stuAtmStuQNo);
# current/previous 15-minute buckets plus a clear control.
com21StuQStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1), )
if mibBuilder.loadTexts: com21StuQStatsTable.setStatus('current')
if mibBuilder.loadTexts: com21StuQStatsTable.setDescription('.')
com21StuQStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuAtmStuMacAddr"), (0, "COM21-HCXSTUPRXY-MIB", "stuAtmStuQNo"))
if mibBuilder.loadTexts: com21StuQStatsEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuQStatsEntry.setDescription('.')
# Index column 1: MAC address of the ComPort device (read-only).
stuAtmStuMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStuMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuAtmStuMacAddr.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
# Index column 2: queue number (mapped from VCI number per the description).
stuAtmStuQNo = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStuQNo.setStatus('current')
if mibBuilder.loadTexts: stuAtmStuQNo.setDescription(' This index consists of a number representing a Mongoose queue. For initial phase this number shall equal the associated VCI number. i.e. Ethernet or Telephony VCIs. This VCI number shall be mapped to a Mongoose queue id by the ComController. For 3.0 there is only a single Ethernet queue - queue 5.')
stuAtmStatsCurrMinRx = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 3), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStatsCurrMinRx.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsCurrMinRx.setDescription(' Represents the number of cells or frames successfully received in the current 15 minute period.')
stuAtmStatsCurrMinRxDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 4), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStatsCurrMinRxDropped.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsCurrMinRxDropped.setDescription(' Represents the number of cells or frames dropped in the current 15 minute period.')
stuAtmStatsCurrMinCRCErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 5), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStatsCurrMinCRCErrors.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsCurrMinCRCErrors.setDescription(' Represents the number of cells or frames received with CRC errors in the current 15 minute period.')
# Previous-period counterparts of the three counters above.
stuAtmStatsPrevMinRx = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 6), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStatsPrevMinRx.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsPrevMinRx.setDescription(' Represents the number of cells or frames successfully received in the previous 15 minute period.')
stuAtmStatsPrevMinRxDropped = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 7), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStatsPrevMinRxDropped.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsPrevMinRxDropped.setDescription(' Represents the number of cells or frames dropped in the previous 15 minute period.')
stuAtmStatsPrevMinCRCErrors = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 8), Gauge32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuAtmStatsPrevMinCRCErrors.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsPrevMinCRCErrors.setDescription(' Represents the number of cells or frames received with CRC errors in the previous 15 minute period.')
# Write clear(2) to reset the queue counters (requires hcxStatsControl ==
# wrapCurr per the description).
stuAtmStatsClearStats = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 11, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("nil", 1), ("clear", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuAtmStatsClearStats.setStatus('current')
if mibBuilder.loadTexts: stuAtmStatsClearStats.setDescription(' This attribute is used to clear the ComPort queue statistics counters in this group. This operation is only possible if the hcxStatsControl attribute is set to wrapCurr.')
# --- com21StuCodeImageTable (1.3.6.1.4.1.1141.3.13.1) ---
# Firmware/code image inventory per ComPort, indexed by
# (stuCodeStuMacAddr, stuCodeImageIndex).  All columns read-only.
com21StuCodeImageTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1), )
if mibBuilder.loadTexts: com21StuCodeImageTable.setStatus('current')
if mibBuilder.loadTexts: com21StuCodeImageTable.setDescription('.')
com21StuCodeImageEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuCodeStuMacAddr"), (0, "COM21-HCXSTUPRXY-MIB", "stuCodeImageIndex"))
if mibBuilder.loadTexts: com21StuCodeImageEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuCodeImageEntry.setDescription('.')
# Index column 1: MAC address of the ComPort device.
stuCodeStuMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeStuMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuCodeStuMacAddr.setDescription(' Contains IEEE 802 medium access control address of the ComPort device. Held on ComController.')
# Index column 2: image selector - vxWorksImage(1), applicationStu(2),
# downloadImage(3).
stuCodeImageIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("vxWorksImage", 1), ("applicationStu", 2), ("downloadImage", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeImageIndex.setStatus('current')
if mibBuilder.loadTexts: stuCodeImageIndex.setDescription(' Index to receive information about various images; VxWorks - operating system. ApplicationStu - Image comprising Stu Application, DownloadImage - Image downloaded into DRAM.')
# Image type enum; the "reservedN" values pad unused code points 2,6,7,8,10.
stuCodeImageType = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11))).clone(namedValues=NamedValues(("stuBoot", 1), ("reserved1", 2), ("stuNewapp", 3), ("stuDnld", 4), ("updateApp", 5), ("reserved2", 6), ("reserved3", 7), ("reserved4", 8), ("updateBoth", 9), ("reserved5", 10), ("vxWorks", 11)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeImageType.setStatus('current')
if mibBuilder.loadTexts: stuCodeImageType.setDescription(' Code image identifier. vxWorks - boot code and OS, boot memory resident, stuBoot - backup application image, boot memory resident, stuNewapp - primary application image, app memory resident, stuDnld - downloaded application supplement (optional), updateApp - downloaded flash update, updates app flash, updateBoth - downloaded flash update, updates boot+app flash.')
# Fixed-length DisplayString columns: 40-char version, 80-char build dir,
# 12-char date, 9-char time.
stuCodeImageVersion = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 4), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(40, 40)).setFixedLength(40)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeImageVersion.setStatus('current')
if mibBuilder.loadTexts: stuCodeImageVersion.setDescription(' Version Description for the Code Image.')
stuCodeImageBuildDir = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(80, 80)).setFixedLength(80)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeImageBuildDir.setStatus('current')
if mibBuilder.loadTexts: stuCodeImageBuildDir.setDescription(' Directory name where image was built.')
stuCodeImageDate = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 6), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(12, 12)).setFixedLength(12)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeImageDate.setStatus('current')
if mibBuilder.loadTexts: stuCodeImageDate.setDescription(' Date when the image is built.')
stuCodeImageTime = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 13, 1, 1, 7), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(9, 9)).setFixedLength(9)).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuCodeImageTime.setStatus('current')
if mibBuilder.loadTexts: stuCodeImageTime.setDescription(' Time when the image is built.')
# --- com21StuVoiceChanStatsTable (1.3.6.1.4.1.1141.3.14.1) ---
# Per-voice-channel statistics, indexed by
# (stuVoiceChanStatsMacAddr, stuVoiceChanStatsNum).
# NOTE: the table continues past this chunk (the setStatus/setDescription
# lines for stuVoiceChanStatsCurrSetUpFailCnt and any further columns follow).
com21StuVoiceChanStatsTable = MibTable((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1), )
if mibBuilder.loadTexts: com21StuVoiceChanStatsTable.setStatus('current')
if mibBuilder.loadTexts: com21StuVoiceChanStatsTable.setDescription('.')
com21StuVoiceChanStatsEntry = MibTableRow((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1), ).setIndexNames((0, "COM21-HCXSTUPRXY-MIB", "stuVoiceChanStatsMacAddr"), (0, "COM21-HCXSTUPRXY-MIB", "stuVoiceChanStatsNum"))
if mibBuilder.loadTexts: com21StuVoiceChanStatsEntry.setStatus('current')
if mibBuilder.loadTexts: com21StuVoiceChanStatsEntry.setDescription('.')
# Index column 1: MAC address of the ComPort device.
stuVoiceChanStatsMacAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 1), MacAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsMacAddr.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsMacAddr.setDescription(' Contains IEEE 802 medium access control address of the ComPort device.')
# Index column 2: voice port number, 1 or 2.
stuVoiceChanStatsNum = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 2))).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsNum.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsNum.setDescription(' Defines the voice port on the Comport device used for Voice. ')
stuVoiceChanStatsCurrLostCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 3), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrLostCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrLostCellCnt.setDescription(" Defines the number of times the Comport's voice port received Cells out of sequence in the current 15-min period.")
stuVoiceChanStatsCurrDrpdCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 4), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrDrpdCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrDrpdCellCnt.setDescription(" Defines the number of times the Comport's voice port dropped cells due to Queue Full in the current 15-min period.")
stuVoiceChanStatsCurrCrc3ErrCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 5), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrCrc3ErrCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrCrc3ErrCnt.setDescription(" Defines the number of cells Comport's voice port received with CRC3 Errors in the current 15-min period.")
stuVoiceChanStatsCurrSetUpFailCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 6), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrSetUpFailCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrSetUpFailCnt.setDescription(" Defines the number of times the Comport's voice port had to drop the calls due to Call Setup Failure in the current 15-min period.")
stuVoiceChanStatsCurrTxVoiceCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 7), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrTxVoiceCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrTxVoiceCellCnt.setDescription(" Defines the number of Voice cells Comport's voice port Queued to Transmit in the current 15-min period.")
stuVoiceChanStatsCurrRxVoiceCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 8), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrRxVoiceCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrRxVoiceCellCnt.setDescription(" Defines the number of Voice cells the Comport's voice port received in the current 15-min period.")
stuVoiceChanStatsCurrTxOamCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 9), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrTxOamCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrTxOamCellCnt.setDescription(" Defines the number of OAM cells Comport's voice port Queued to Transmit in the current 15-min period.")
stuVoiceChanStatsCurrRxOamCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 10), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsCurrRxOamCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsCurrRxOamCellCnt.setDescription(" Defines the number of OAM cells the Comport's voice port received in the current 15-min period.")
stuVoiceChanStatsPrevLostCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 11), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevLostCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevLostCellCnt.setDescription(" Defines the number of times the Comport's voice port received Cells out of sequence in the previous 15-min period.")
stuVoiceChanStatsPrevDrpdCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 12), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevDrpdCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevDrpdCellCnt.setDescription(" Defines the number of times the Comport's voice port dropped cells due to Queue Full in the previous 15-min period.")
stuVoiceChanStatsPrevCrc3ErrCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 13), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevCrc3ErrCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevCrc3ErrCnt.setDescription(" Defines the number of cells Comport's voice port received with CRC3 Errors in the previous 15-min period.")
stuVoiceChanStatsPrevSetUpFailCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 14), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevSetUpFailCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevSetUpFailCnt.setDescription(" Defines the number of times the Comport's voice port had to drop the calls due to Call Setup Failure in the previous 15-min period.")
stuVoiceChanStatsPrevTxVoiceCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 15), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevTxVoiceCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevTxVoiceCellCnt.setDescription(" Defines the number of Voice cells Comport's voice port Tranmitted in the previous 15-min period")
stuVoiceChanStatsPrevRxVoiceCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 16), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevRxVoiceCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevRxVoiceCellCnt.setDescription(" Defines the number of times the Comport's voice port received Cells out of sequence in the previous 15-min period.")
stuVoiceChanStatsPrevTxOamCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 17), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevTxOamCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevTxOamCellCnt.setDescription(" Defines the number of times the Comport's voice port received Cells out of sequence in the previous 15-min period.")
stuVoiceChanStatsPrevRxOamCellCnt = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 18), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: stuVoiceChanStatsPrevRxOamCellCnt.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsPrevRxOamCellCnt.setDescription(" Defines the number of times the Comport's voice port received Cells out of sequence in the previous 15-min period.")
stuVoiceChanStatsClear = MibTableColumn((1, 3, 6, 1, 4, 1, 1141, 3, 14, 1, 1, 19), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("nil", 1), ("clear", 2)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: stuVoiceChanStatsClear.setStatus('current')
if mibBuilder.loadTexts: stuVoiceChanStatsClear.setDescription(' Provides the capability to clear the statistics. Clearing of statistics allowed only if hcxStatsControl attribute is set to wrapCurr.')
mibBuilder.exportSymbols("COM21-HCXSTUPRXY-MIB", stuStatsPrevESMin=stuStatsPrevESMin, stuCurrEtherDropFrms=stuCurrEtherDropFrms, com21StuQStatsTable=com21StuQStatsTable, stuStatsPrevUASMin=stuStatsPrevUASMin, stuUserText=stuUserText, stuXmitFeqChange=stuXmitFeqChange, com21StuEtherStatsTable=com21StuEtherStatsTable, stuAlmTrapId=stuAlmTrapId, stuStatsClearStats=stuStatsClearStats, stuMibRevision=stuMibRevision, com21StuVoiceChanStatsGroup=com21StuVoiceChanStatsGroup, stuPrevEtherRxOver=stuPrevEtherRxOver, com21StuEtherStatsEntry=com21StuEtherStatsEntry, stuEtherNonSnapRej=stuEtherNonSnapRej, stuStatsPrevAtmTei=stuStatsPrevAtmTei, stuPrevEtherFramErrs=stuPrevEtherFramErrs, stuTunerRevision=stuTunerRevision, stuEtherBCastRateEn=stuEtherBCastRateEn, stuEtherIgmpSnoopEnable=stuEtherIgmpSnoopEnable, com21StuStatsEntry=com21StuStatsEntry, com21StuEtherConfigEntry=com21StuEtherConfigEntry, stuRecvFrequency=stuRecvFrequency, stuCurrEtherRxOver=stuCurrEtherRxOver, stuAtmStatsPrevMinRx=stuAtmStatsPrevMinRx, com21StuCodeImageTable=com21StuCodeImageTable, stuEthMacAddress=stuEthMacAddress, stuRecvFreqDrift=stuRecvFreqDrift, stuEtherMacFiltAge=stuEtherMacFiltAge, stuEtherBcmpOnly=stuEtherBcmpOnly, stuXmitFrequency=stuXmitFrequency, stuVoiceAimPort2Status=stuVoiceAimPort2Status, stuFiltIpMacAddr=stuFiltIpMacAddr, com21StuEtherMacTable=com21StuEtherMacTable, stuRecvRfSrEstimate=stuRecvRfSrEstimate, stuEtherBCastRateCo=stuEtherBCastRateCo, stuAtmTeiThres=stuAtmTeiThres, com21StuAlarmSevTable=com21StuAlarmSevTable, stuPrevEtherDropFrms=stuPrevEtherDropFrms, stuVoiceChanStatsPrevDrpdCellCnt=stuVoiceChanStatsPrevDrpdCellCnt, stuAlmSevEsThres=stuAlmSevEsThres, stuCurrEtherFramErrs=stuCurrEtherFramErrs, stuVoiceChanStatsPrevRxOamCellCnt=stuVoiceChanStatsPrevRxOamCellCnt, com21StuFiltEthTypeEntry=com21StuFiltEthTypeEntry, stuCurrEtherCollitns=stuCurrEtherCollitns, stuAtmStatsCurrMinCRCErrors=stuAtmStatsCurrMinCRCErrors, stuEtherClearStats=stuEtherClearStats, 
stuFecCorrectThreshold=stuFecCorrectThreshold, stuFiltEtherMacAddr=stuFiltEtherMacAddr, stuFiltIpMultiAddr=stuFiltIpMultiAddr, stuOperationState=stuOperationState, stuCtrlMacAddress=stuCtrlMacAddress, com21StuControlGroup=com21StuControlGroup, stuDnstrmAltFreq=stuDnstrmAltFreq, com21StuEtherTypeGroup=com21StuEtherTypeGroup, stuVoiceAimLpBk=stuVoiceAimLpBk, com21StuAlarmGroup=com21StuAlarmGroup, stuCodeImageVersion=stuCodeImageVersion, stuAtmStatsCurrMinRxDropped=stuAtmStatsCurrMinRxDropped, UpstrmFreqKhz=UpstrmFreqKhz, stuCodeImageIndex=stuCodeImageIndex, com21StuControlTable=com21StuControlTable, stuAlmSevCrcThres=stuAlmSevCrcThres, stuVoiceChanStatsPrevRxVoiceCellCnt=stuVoiceChanStatsPrevRxVoiceCellCnt, stuFiltIpMultiStatus=stuFiltIpMultiStatus, stuRestartAction=stuRestartAction, stuESMinThreshold=stuESMinThreshold, stuUpstrmTestTimeout=stuUpstrmTestTimeout, stuModelName=stuModelName, stuFiltEtherStatus=stuFiltEtherStatus, com21StuQStatsGroup=com21StuQStatsGroup, stuEthStatsMacAddr=stuEthStatsMacAddr, stuAlmSevMacAddress=stuAlmSevMacAddress, FrequencyKhz=FrequencyKhz, stuPrevTestFailCode=stuPrevTestFailCode, stuStatsCurrAtmHec=stuStatsCurrAtmHec, stuCodeImageDate=stuCodeImageDate, stuAtmStatsCurrMinRx=stuAtmStatsCurrMinRx, stuAtmHecThres=stuAtmHecThres, stuEtherForwArpOnly=stuEtherForwArpOnly, com21StuVoiceChanStatsEntry=com21StuVoiceChanStatsEntry, com21StuControlEntry=com21StuControlEntry, stuEther8021QEnable=stuEther8021QEnable, stuCurrEtherCrcErrs=stuCurrEtherCrcErrs, stuVoiceChanStatsCurrTxOamCellCnt=stuVoiceChanStatsCurrTxOamCellCnt, stuPrevEtherRunts=stuPrevEtherRunts, stuCodeImageBuildDir=stuCodeImageBuildDir, stuStatsCurrESMin=stuStatsCurrESMin, stuXmitDacVRef=stuXmitDacVRef, stuAtmStatsPrevMinCRCErrors=stuAtmStatsPrevMinCRCErrors, stuESMinThres=stuESMinThres, stuVoiceAimPort1Status=stuVoiceAimPort1Status, com21StuStatsTable=com21StuStatsTable, stuPrevTestResult=stuPrevTestResult, stuAlmSeverity=stuAlmSeverity, 
com21StuPhysicalEntry=com21StuPhysicalEntry, Com21RowStatus=Com21RowStatus, stuMaceFail=stuMaceFail, stuAtmStuMacAddr=stuAtmStuMacAddr, com21StuEtherMacEntry=com21StuEtherMacEntry, stuAlmSevFecThres=stuAlmSevFecThres, stuXmitOffset=stuXmitOffset, stuEtherMacStatus=stuEtherMacStatus, PYSNMP_MODULE_ID=com21StuPrxy, com21StuPhysicalGroup=com21StuPhysicalGroup, com21StuPhysicalTable=com21StuPhysicalTable, stuEtherMacType=stuEtherMacType, stuFecCorrectThres=stuFecCorrectThres, stuSerialNumber=stuSerialNumber, EpochTime=EpochTime, stuDesKeySize=stuDesKeySize, stuVoiceChanStatsCurrRxVoiceCellCnt=stuVoiceChanStatsCurrRxVoiceCellCnt, stuBoardRevision=stuBoardRevision, stuUpstrmTestFreq=stuUpstrmTestFreq, stuAsicRevision=stuAsicRevision, stuAlmMacAddress=stuAlmMacAddress, com21StuFiltIpMultiTable=com21StuFiltIpMultiTable, stuVoiceChanStatsPrevLostCellCnt=stuVoiceChanStatsPrevLostCellCnt, stuAtmTeiThreshold=stuAtmTeiThreshold, stuLastRestartCause=stuLastRestartCause, stuEtherCrcThres=stuEtherCrcThres, stuEtherConnState=stuEtherConnState, stuStatsCurrAtmTei=stuStatsCurrAtmTei, com21StuVoiceChanStatsTable=com21StuVoiceChanStatsTable, stuEtherNonIpMultiEn=stuEtherNonIpMultiEn, stuAimModuleId=stuAimModuleId, stuFiltIpDirectCntrl=stuFiltIpDirectCntrl, stuCurrEtherRunts=stuCurrEtherRunts, stuVoiceChanStatsCurrDrpdCellCnt=stuVoiceChanStatsCurrDrpdCellCnt, stuVoiceChanStatsCurrLostCellCnt=stuVoiceChanStatsCurrLostCellCnt, com21StuEtherStatsGroup=com21StuEtherStatsGroup, stuInbPresent=stuInbPresent, com21StuEtherConfigGroup=com21StuEtherConfigGroup, stuAtmStuQNo=stuAtmStuQNo, stuRecvRfSigLevel=stuRecvRfSigLevel, stuEtherMacAddr=stuEtherMacAddr, com21StuQStatsEntry=com21StuQStatsEntry, stuCodeImageTime=stuCodeImageTime, com21StuEtherConfigTable=com21StuEtherConfigTable, stuPrevEtherCrcErrs=stuPrevEtherCrcErrs, stuAtmStatsPrevMinRxDropped=stuAtmStatsPrevMinRxDropped, stuStatsPrevAtmHec=stuStatsPrevAtmHec, stuPhyMacAddress=stuPhyMacAddress, stuCurrEtherCrcThres=stuCurrEtherCrcThres, 
com21StuAlarmTable=com21StuAlarmTable, com21StuEtherTypeTable=com21StuEtherTypeTable, com21StuPrxy=com21StuPrxy, stuUnitRevision=stuUnitRevision, Offset=Offset, com21StuAlarmSevGroup=com21StuAlarmSevGroup, stuAtmStatsClearStats=stuAtmStatsClearStats, stuVoiceChanStatsCurrRxOamCellCnt=stuVoiceChanStatsCurrRxOamCellCnt, stuVoiceChanStatsPrevTxOamCellCnt=stuVoiceChanStatsPrevTxOamCellCnt, stuVoiceChanStatsCurrSetUpFailCnt=stuVoiceChanStatsCurrSetUpFailCnt, stuOperationStateChange=stuOperationStateChange, stuVoiceChanStatsClear=stuVoiceChanStatsClear, stuVoiceChanStatsPrevSetUpFailCnt=stuVoiceChanStatsPrevSetUpFailCnt, stuStatsMacAddress=stuStatsMacAddress, stuEpochTime=stuEpochTime, stuVoiceChanStatsNum=stuVoiceChanStatsNum, stuCodeStuMacAddr=stuCodeStuMacAddr, stuVoiceChanStatsCurrCrc3ErrCnt=stuVoiceChanStatsCurrCrc3ErrCnt, stuPrevEtherCollitns=stuPrevEtherCollitns, com21StuFiltIpMultiEntry=com21StuFiltIpMultiEntry, stuUpstrmPingCntrl=stuUpstrmPingCntrl, stuEtherStickyBitCtrl=stuEtherStickyBitCtrl, stuInbContToneEnable=stuInbContToneEnable, com21StuStatsGroup=com21StuStatsGroup, stuOutOfSpecRFCond=stuOutOfSpecRFCond, stuPrevEtherTxUnder=stuPrevEtherTxUnder, com21StuCodeImageGroup=com21StuCodeImageGroup, stuAtmHecThreshold=stuAtmHecThreshold, stuStatsPrevFecCorrect=stuStatsPrevFecCorrect, stuVoiceChanStatsPrevTxVoiceCellCnt=stuVoiceChanStatsPrevTxVoiceCellCnt, stuAlmTime=stuAlmTime, stuVoiceChanStatsCurrTxVoiceCellCnt=stuVoiceChanStatsCurrTxVoiceCellCnt, com21StuFiltIpMultiGroup=com21StuFiltIpMultiGroup, AlarmSeverity=AlarmSeverity, com21StuAlarmEntry=com21StuAlarmEntry, com21StuCodeImageEntry=com21StuCodeImageEntry, stuUnitManufacturer=stuUnitManufacturer, stuEtherStuMac=stuEtherStuMac, stuCurrEtherTxUnder=stuCurrEtherTxUnder, stuVoiceChanStatsMacAddr=stuVoiceChanStatsMacAddr, stuVoiceChanStatsPrevCrc3ErrCnt=stuVoiceChanStatsPrevCrc3ErrCnt, com21StuEtherMacGroup=com21StuEtherMacGroup, stuAlmSevTeiThres=stuAlmSevTeiThres, stuAlmSevHecThres=stuAlmSevHecThres, 
stuStatsCurrFecCorrect=stuStatsCurrFecCorrect, stuStatsCurrUASMin=stuStatsCurrUASMin, stuEtherFiltFlushAction=stuEtherFiltFlushAction, com21StuAlarmSevEntry=com21StuAlarmSevEntry, stuEtherIgmpEnable=stuEtherIgmpEnable, stuCodeImageType=stuCodeImageType, stuFiltEtherType=stuFiltEtherType)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
ae0ca66c3b7e3f8ae0ae2cd46e59bce09d42710f | 68a9c2ee2c891a13089dfefe41c09296a4eac288 | /phoneBook/urls.py | 82a108110f19a69a6998131982a5ed3e628bfc17 | [] | no_license | nashirbekov/phoneBook | 74d853a7044d56ec4e5eaa348fb95d4a3d0bb875 | 4c2a14beff86250a9e690f48571c4864007da91b | refs/heads/main | 2023-03-12T01:24:15.302558 | 2021-03-01T20:11:04 | 2021-03-01T20:11:04 | 343,532,813 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 805 | py | """phoneBook URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/3.1/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path, include
urlpatterns = [
path('admin/', admin.site.urls),
path('phones/', include('phones.urls')),
]
| [
"nashirbekov35@gmail.com"
] | nashirbekov35@gmail.com |
d79aa7eb45cb64e7cbb40bd067f0333db6eb551b | 4f54f4a182dbacba4b513b8b5589f83feb458545 | /venv/Scripts/easy_install-script.py | fcd9faea2bc0344e7a240d6172ff3b45c8d13ccf | [] | no_license | xferrerc/RobotPractica1 | e77a48db11e3feedd7e10e3de258a50c8f6c1f92 | 26f8634439ca2a8caa7974406c578bc83fc40198 | refs/heads/master | 2020-03-22T21:34:13.585997 | 2018-07-12T11:56:49 | 2018-07-12T11:57:02 | 140,696,835 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 457 | py | #!C:\Users\Robot\PycharmProjects\RobotPractica1\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==39.1.0','console_scripts','easy_install'
__requires__ = 'setuptools==39.1.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('setuptools==39.1.0', 'console_scripts', 'easy_install')()
)
| [
"manu.sancho@gmail.com"
] | manu.sancho@gmail.com |
41f4d29ce42fff304d6a43c393cbb732c7658691 | 42406776e4588bc819f14c342c5f341a46794f23 | /gps/convert.py | 874398a82b7457f362629eab47917963fe229380 | [] | no_license | gantryyork/datasci | a0d55b645c3021db720fbf9ef32b9b53aecccf83 | 0707454ca25726e656921fb9c4b162a040d2a16c | refs/heads/master | 2020-09-23T04:28:09.375823 | 2020-04-21T12:33:24 | 2020-04-21T12:33:24 | 225,401,652 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 925 | py | import math
def nm(km):
return precision(km * 0.539957, 4)
def mi(km):
return precision(km * 0.621371, 4)
def dms(ddeg):
(r, d) = math.modf(math.fmod(ddeg, 360))
(s, m) = math.modf(math.fmod(r*60, 60))
return [int(d), int(m), int(s*60)]
def ddeg(d, m, s):
deg = d + (m/60) + (s/3600)
return precision(deg, 4)
def latitude(ddeg):
lat_mod = math.fmod(ddeg, 180)
lat = 0
if lat_mod < -90:
lat = -90 - (lat_mod + 90)
elif lat_mod >= 90:
lat = 90 - (lat_mod - 90)
else:
lat = lat_mod
return lat
def longitude(ddeg):
lon_mod = math.fmod(ddeg, 360)
lon = 0
if lon_mod < -180:
lon = 180 + (lon_mod + 180)
elif lon_mod >= 180:
lon = -180 + (lon_mod - 180)
else:
lon = lon_mod
return lon
def precision(num, numdec):
return math.trunc(num * 10 ** numdec + .5) / (10 ** numdec)
| [
"gantry.york@gmail.com"
] | gantry.york@gmail.com |
bb21f9ae3d063d9f922628645b370ffb2b47c8d6 | d35daf29588b97fc7d2e2920bb2f2a837bc012e5 | /ud953/vector.py | 9020199d70b141332d234baa7c73d46dc58c501f | [] | no_license | xBDL/udacity | a918db67feffc9484517c668d49220af886d861d | 5aeec20a8c14cdcb84e0ea459169117cf25f7460 | refs/heads/master | 2022-06-26T12:52:20.980140 | 2020-05-05T07:19:16 | 2020-05-05T07:19:16 | 258,355,024 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,792 | py | from decimal import Decimal, getcontext
from math import acos, sqrt
getcontext().prec = 30
class Vector(object):
def __init__(self, coordinates):
try:
if not coordinates:
raise ValueError
self.coordinates = tuple([Decimal(x) for x in coordinates])
self.dimension = len(coordinates)
except ValueError:
raise ValueError('The coordinates must be nonempty')
except TypeError:
raise TypeError('The coordinates must be an iterable')
def __str__(self):
return 'Vector: {}'.format(self.coordinates)
def __eq__(self, v):
return self.coordinates == v.coordinates
# Quiz1 (Plus, Minus, Scalar Multiply)
def plus(self, v):
return Vector([x + y for x,y in zip(self.coordinates,v.coordinates)])
def minus(self, v):
return Vector([x - y for x,y in zip(self.coordinates,v.coordinates)])
def scale(self, c):
return Vector([Decimal(c) * x for x in self.coordinates])
# Quiz2 (Magnitude, Direction)
def length(self):
return Decimal(sqrt(sum([x**2 for x in self.coordinates])))
def normalize(self):
length = self.length()
if length == 0:
print("Zero vector has no lengthalization")
else:
return self.scale(Decimal(1.0)/length)
# Quiz3 (Dot Product, Angle)
def dot_product(self, v):
return sum([x * y for x,y in zip(self.coordinates,v.coordinates)])
def angle(self, v):
vunit = v.normalize()
return acos(vunit.dot_product(self.normalize()))
# Quiz4 (Parallel, Orthogonal)
def is_parallel(self, v, tol=1e-9):
if self.length() < tol or v.length() < tol:
return True
else:
vunit = v.normalize()
return 0 < tol - abs(1 - abs(vunit.dot_product(self.normalize())))
def is_orthogonal(self, v, tol=1e-9):
return 0 < tol - abs(self.dot_product(v))
# Quiz5 (Projection)
def projection(self, b):
if b.length() == 0:
print("Cannot project onto zero vector")
else:
bunit = b.normalize()
return bunit.scale(self.dot_product(bunit))
def complement(self, b):
return self.minus(self.projection(b))
# Quiz6 (Cross Product)
def cross_product(self, v):
a1, a2, a3 = self.coordinates
b1, b2, b3 = v.coordinates
return Vector([a2 * b3 - a3 * b2,
a3 * b1 - a1 * b3,
a1 * b2 - a2 * b1])
def area_of_parallelogram(self, v):
cross = self.cross_product(v)
return cross.length()
def area_of_triangle(self, v):
return Decimal(0.5) * self.area_of_parallelogram(v)
| [
"xbdl@outlook.com"
] | xbdl@outlook.com |
c62007a470cd06d5e41738b79c7410e80d3d88ea | 7fc22a26689eb4b3c0433aba85cba1db677bb057 | /Untitled20.py | 34246e4035456792ad380e65ce0ab60f6fd50354 | [] | no_license | lovestudymen/Deraining | 3791079a84ae4dfd0c93fbc054785bbe104c5019 | 4e8c8cee3dab2f99c37d42349321ec707db93cc5 | refs/heads/master | 2023-04-06T16:16:28.951445 | 2021-04-06T06:34:02 | 2021-04-06T06:34:02 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,884 | py | {
"nbformat": 4,
"nbformat_minor": 0,
"metadata": {
"colab": {
"name": "Untitled19.ipynb",
"provenance": [],
"authorship_tag": "ABX9TyPY9FM3dpukgutjWM5SKwbl",
"include_colab_link": true
},
"kernelspec": {
"name": "python3",
"display_name": "Python 3"
}
},
"cells": [
{
"cell_type": "markdown",
"metadata": {
"id": "view-in-github",
"colab_type": "text"
},
"source": [
"<a href=\"https://colab.research.google.com/github/mountainway184/Deraining/blob/master/Untitled20.py\" target=\"_parent\"><img src=\"https://colab.research.google.com/assets/colab-badge.svg\" alt=\"Open In Colab\"/></a>"
]
},
{
"cell_type": "code",
"metadata": {
"id": "X0RpJiuOh0VK"
},
"source": [
"def FReLU(inputs,num_channels):\n",
" x = DepthwiseConv2D(kernel_size=3, strides=1, padding='same')(inputs)\n",
" return K.maximum(x,inputs)\n",
"\n",
"def Generator(input_size = (batch_size,None,None,img_channels)):\n",
" inputs = Input(batch_shape = (None,None,None,img_channels))\n",
"\n",
" conv1 = Conv2D(64, (3, 3), padding='same')(inputs)\n",
" conv1 = FReLU(conv1,64)\n",
" conv1 = Conv2D(64, (3, 3), padding='same')(conv1)\n",
" conv1 = FReLU(conv1,64)\n",
" pool1 = MaxPooling2D(pool_size=(2, 2))(conv1)\n",
"\n",
" conv2 = Conv2D(128, (3, 3),padding='same')(pool1)\n",
" conv2 = FReLU(conv2,128)\n",
" conv2 = Conv2D(128, (3, 3),padding='same')(conv2)\n",
" conv2 = FReLU(conv2,128)\n",
" pool2 = MaxPooling2D(pool_size=(2, 2))(conv2)\n",
"\n",
" conv3 = Conv2D(256, (3, 3),padding='same')(pool2)\n",
" conv3 = FReLU(conv3,256)\n",
" conv3 = Conv2D(256, (3, 3),padding='same')(conv3)\n",
" conv3 = FReLU(conv3,256)\n",
" pool3 = MaxPooling2D(pool_size=(2, 2))(conv3)\n",
"\n",
" conv4 = Conv2D(512, (3, 3),padding='same')(pool3)\n",
" conv4 = FReLU(conv4,512)\n",
" conv4 = Conv2D(512, (3, 3),padding='same')(conv4)\n",
" conv4 = FReLU(conv4,512)\n",
"\n",
" up5 = Conv2DTranspose(256, (2, 2), strides=(2, 2), padding='same')(conv4)\n",
" up5 = FReLU(up5,256)\n",
" up5 = concatenate([up5, conv3], axis=3)\n",
" \n",
" conv6 = Conv2D(256, (3, 3), padding='same')(up5)\n",
" conv6 = FReLU(conv6,256)\n",
" conv6 = Conv2D(256, (3, 3),padding='same')(conv6)\n",
" conv6 = FReLU(conv6,256)\n",
"\n",
" up7 = Conv2DTranspose(128, (2, 2), strides=(2, 2), padding='same')(conv6)\n",
" up7 = FReLU(up7,128)\n",
" up7 = concatenate([up7, conv2], axis=3) \n",
"\n",
" conv8 = Conv2D(128, (3, 3), padding='same')(up7)\n",
" conv8 = FReLU(conv8,128)\n",
" conv8 = Conv2D(128, (3, 3),padding='same')(conv8)\n",
" conv8 = FReLU(conv8,128) \n",
"\n",
" up9 = Conv2DTranspose(64, (2, 2), strides=(2, 2), padding='same')(conv8)\n",
" up9 = FReLU(up9,64)\n",
" up9 = concatenate([up9, conv1], axis=3) \n",
"\n",
" conv10 = Conv2D(64, (3, 3), padding='same')(up9)\n",
" conv10 = FReLU(conv10,64)\n",
" conv10 = Conv2D(64, (3, 3),padding='same')(conv10)\n",
" conv10 = FReLU(conv10,64) \n",
"\n",
" conv11 = Conv2D(3,(3, 3),activation = 'sigmoid', padding='same')(conv10)\n",
"\n",
" model = Model(inputs=[inputs], outputs=[conv11])\n",
" return model"
],
"execution_count": null,
"outputs": []
}
]
} | [
"60787846+mountainway184@users.noreply.github.com"
] | 60787846+mountainway184@users.noreply.github.com |
c1cae50d8c044ff6eb2486dc850b1339f83b4f78 | a7c18c15e18bfed2df7a866cae3f8c213de942d6 | /venv/bin/django-admin | cb6939098203b3692f2a96b1b62f5fc709f23406 | [] | no_license | mitun94/Djangobin | 358aa3c66aefb325640a781e0c87457a120daa36 | 32cd11c9f64c03f1d68a0805770c44f497ea9818 | refs/heads/master | 2020-03-26T04:15:22.020406 | 2018-08-13T15:59:32 | 2018-08-13T15:59:32 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 325 | #!/home/mitun/PycharmProjects/Django_Projects/django_project/venv/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(execute_from_command_line())
| [
"tohidulalam216@gmail.com"
] | tohidulalam216@gmail.com | |
e0ebfdf1d5c2c47b1b207d45d14b65460f34adea | 92e3a6424326bf0b83e4823c3abc2c9d1190cf5e | /scripts/icehouse/opt/stack/glance/glance/openstack/common/strutils.py | 7dc6616a75de15acd1537aa23ffefe37fa6e8007 | [
"Apache-2.0"
] | permissive | AnthonyEzeigbo/OpenStackInAction | d6c21cf972ce2b1f58a93a29973534ded965d1ea | ff28cc4ee3c1a8d3bbe477d9d6104d2c6e71bf2e | refs/heads/master | 2023-07-28T05:38:06.120723 | 2020-07-25T15:19:21 | 2020-07-25T15:19:21 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 7,508 | py | # Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
System-level utilities and helper functions.
"""
import re
import sys
import unicodedata
import six
from glance.openstack.common.gettextutils import _
# Used for looking up extensions of text
# to their 'multiplied' byte amount
BYTE_MULTIPLIERS = {
'': 1,
't': 1024 ** 4,
'g': 1024 ** 3,
'm': 1024 ** 2,
'k': 1024,
}
BYTE_REGEX = re.compile(r'(^-?\d+)(\D*)')
TRUE_STRINGS = ('1', 't', 'true', 'on', 'y', 'yes')
FALSE_STRINGS = ('0', 'f', 'false', 'off', 'n', 'no')
SLUGIFY_STRIP_RE = re.compile(r"[^\w\s-]")
SLUGIFY_HYPHENATE_RE = re.compile(r"[-\s]+")
def int_from_bool_as_string(subject):
"""Interpret a string as a boolean and return either 1 or 0.
Any string value in:
('True', 'true', 'On', 'on', '1')
is interpreted as a boolean True.
Useful for JSON-decoded stuff and config file parsing
"""
return bool_from_string(subject) and 1 or 0
def bool_from_string(subject, strict=False):
"""Interpret a string as a boolean.
A case-insensitive match is performed such that strings matching 't',
'true', 'on', 'y', 'yes', or '1' are considered True and, when
`strict=False`, anything else is considered False.
Useful for JSON-decoded stuff and config file parsing.
If `strict=True`, unrecognized values, including None, will raise a
ValueError which is useful when parsing values passed in from an API call.
Strings yielding False are 'f', 'false', 'off', 'n', 'no', or '0'.
"""
if not isinstance(subject, six.string_types):
subject = str(subject)
lowered = subject.strip().lower()
if lowered in TRUE_STRINGS:
return True
elif lowered in FALSE_STRINGS:
return False
elif strict:
acceptable = ', '.join(
"'%s'" % s for s in sorted(TRUE_STRINGS + FALSE_STRINGS))
msg = _("Unrecognized value '%(val)s', acceptable values are:"
" %(acceptable)s") % {'val': subject,
'acceptable': acceptable}
raise ValueError(msg)
else:
return False
def safe_decode(text, incoming=None, errors='strict'):
"""Decodes incoming str using `incoming` if they're not already unicode.
:param incoming: Text's current encoding
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: text or a unicode `incoming` encoded
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, six.string_types):
raise TypeError("%s can't be decoded" % type(text))
if isinstance(text, six.text_type):
return text
if not incoming:
incoming = (sys.stdin.encoding or
sys.getdefaultencoding())
try:
return text.decode(incoming, errors)
except UnicodeDecodeError:
# Note(flaper87) If we get here, it means that
# sys.stdin.encoding / sys.getdefaultencoding
# didn't return a suitable encoding to decode
# text. This happens mostly when global LANG
# var is not set correctly and there's no
# default encoding. In this case, most likely
# python will use ASCII or ANSI encoders as
# default encodings but they won't be capable
# of decoding non-ASCII characters.
#
# Also, UTF-8 is being used since it's an ASCII
# extension.
return text.decode('utf-8', errors)
def safe_encode(text, incoming=None,
encoding='utf-8', errors='strict'):
"""Encodes incoming str/unicode using `encoding`.
If incoming is not specified, text is expected to be encoded with
current python's default encoding. (`sys.getdefaultencoding`)
:param incoming: Text's current encoding
:param encoding: Expected encoding for text (Default UTF-8)
:param errors: Errors handling policy. See here for valid
values http://docs.python.org/2/library/codecs.html
:returns: text or a bytestring `encoding` encoded
representation of it.
:raises TypeError: If text is not an instance of str
"""
if not isinstance(text, six.string_types):
raise TypeError("%s can't be encoded" % type(text))
if not incoming:
incoming = (sys.stdin.encoding or
sys.getdefaultencoding())
if isinstance(text, six.text_type):
if six.PY3:
return text.encode(encoding, errors).decode(incoming)
else:
return text.encode(encoding, errors)
elif text and encoding != incoming:
# Decode text before encoding it with `encoding`
text = safe_decode(text, incoming, errors)
if six.PY3:
return text.encode(encoding, errors).decode(incoming)
else:
return text.encode(encoding, errors)
return text
def to_bytes(text, default=0):
    """Converts a string into an integer of bytes.
    Looks at the trailing unit of the text to decide how to turn the
    input into a byte count.
    Supports "B, K(B), M(B), G(B), and T(B)". (case insensitive)
    :param text: String input for bytes size conversion.
    :param default: Default return value when text is blank.
    :raises TypeError: on an unparseable string or an unknown unit.
    """
    found = BYTE_REGEX.search(text)
    if not found:
        # nothing numeric at all: blank input yields the default,
        # any other content is a format error
        if text:
            raise TypeError(_('Invalid string format: %s') % text)
        return default
    magnitude = int(found.group(1))
    unit = found.group(2)
    if not unit:
        # a bare number is already a byte count
        return magnitude
    multiplier = BYTE_MULTIPLIERS.get(unit.lower().replace('b', '', 1))
    if multiplier is None:
        raise TypeError(_('Unknown byte multiplier: %s') % unit)
    return magnitude * multiplier
def to_slug(value, incoming=None, errors="strict"):
    """Normalize string into a slug.
    Converts to lowercase, removes non-word characters, and converts
    spaces to hyphens. Inspired by Django's `slugify` filter.
    :param value: Text to slugify
    :param incoming: Text's current encoding
    :param errors: Errors handling policy. See here for valid
        values http://docs.python.org/2/library/codecs.html
    :returns: slugified unicode representation of `value`
    :raises TypeError: If text is not an instance of str
    """
    decoded = safe_decode(value, incoming, errors)
    # ASCII-fold: encodings here are always "ascii"/"ignore" and the
    # input type is known, so plain encode/decode is sufficient.
    folded = unicodedata.normalize("NFKD", decoded).encode(
        "ascii", "ignore").decode("ascii")
    stripped = SLUGIFY_STRIP_RE.sub("", folded).strip().lower()
    return SLUGIFY_HYPHENATE_RE.sub("-", stripped)
| [
"cody@uky.edu"
] | cody@uky.edu |
c02d354915709bfff78bdcf07d8d696ba1859875 | ea0fb59afa9c1d6579ab28b001ab47dc0096ca5d | /Node Status Monitor/Python v2.7/libs/common.py | 7707a2be087db6d7946c7ce9baff496697f2a0fd | [] | no_license | catchpoint/Community-Scripts | 98255eb8ba4c11ff0c77bf275a77471db7ec3a41 | e05716950e7f38416d20bf980a9fe3b660751ad9 | refs/heads/main | 2022-03-14T17:45:27.623625 | 2022-03-01T19:25:57 | 2022-03-04T16:36:07 | 122,093,072 | 2 | 1 | null | 2022-03-04T16:36:09 | 2018-02-19T16:56:28 | Python | UTF-8 | Python | false | false | 5,858 | py | import json
import logging
from configparser import ConfigParser
import os.path
from os import path
import application_constants
from api import get_catchpoint_token,fetch_catchpoint_node_deatils
from logger import info_logger,error_logger
# Config
# Read config.cfg from the ../config directory next to this module.
config = ConfigParser()
config.read(os.path.join(os.path.dirname(__file__), '../config', 'config.cfg'))
# Fields compared between runs; fall back to the application defaults when
# the option is missing or configured as an empty list.
if config.has_option('monitor','lookup_fields'):
    lookup_fields_for_comparison = json.loads(config.get('monitor','lookup_fields'))
    if len(lookup_fields_for_comparison) == 0 :
        lookup_fields_for_comparison = application_constants.default_lookup_fields
else:
    lookup_fields_for_comparison = application_constants.default_lookup_fields
# Catchpoint Credentials
client_key=config.get('auth','client_key')
client_secret=config.get('auth','client_secret')
# Catchpoint API URLs for token and node info
get_nodes_url=application_constants.nodes_url
token_url=application_constants.token_url
# Output files and location
current_session_data_file=application_constants.current_session_data_file
previous_session_data_file=application_constants.previous_session_data_file
current_working_directory = os.getcwd()
output_folder_path=application_constants.output_folder_path
raw_data_folder_path=application_constants.raw_data_folder_path
# NOTE(review): `path` below shadows the `from os import path` import at
# the top of the module - confirm nothing relies on os.path via that name.
path = os.path.join(current_working_directory, output_folder_path)
sub_path=os.path.join(path,raw_data_folder_path)
# function to write raw Node Data to file
def write_node_data_to_file(node_data, file):
    """Serialise `node_data` as JSON into <output>/<raw-data>/<file>.

    Creates the output folder hierarchy on demand.  Failures are logged
    rather than raised (best-effort persistence, as before).
    """
    try:
        node_dump_data = json.dumps(node_data)
        # os.makedirs creates both directory levels in one call when the
        # top-level output folder is missing.
        if not os.path.isdir(path):
            os.makedirs(sub_path)
        elif not os.path.isdir(sub_path):
            os.mkdir(sub_path)
        info_logger.info(" writing Node Details for " + file + " ")
        # Bug fix: 'with' closes the handle even when the write fails;
        # the original leaked the open file on a write error.
        with open(os.path.join(sub_path, file), 'w') as session_file:
            session_file.write(str(node_dump_data))
    except Exception as e:
        error_logger.exception(str(e))
# function to write Node Details result
def write_changes_in_node_result(node_data, comparator):
    """Write `node_data` as JSON into <output>/changes_in_<comparator>.json.

    Failures are logged instead of raised, matching the other writers.
    """
    info_logger.info(" writing result ")
    try:
        changed_data = json.dumps(node_data)
        if not os.path.isdir(path):
            os.mkdir(path)
        # Bug fix: the context manager guarantees the handle is closed
        # even if the write raises (the original leaked it on error).
        with open(os.path.join(path, "changes_in_" + comparator + ".json"), 'w') as result_file:
            result_file.write(str(changed_data))
    except Exception as e:
        error_logger.exception(str(e))
# function to read old Node Details
def read_previous_session_node_data():
    """Return the previous run's raw node JSON text.

    Returns None when the snapshot file does not exist or cannot be read
    (errors are logged); callers only truth-test the result.
    """
    try:
        info_logger.info(" reading previous session node details ")
        session_path = os.path.join(sub_path, current_session_data_file)
        if os.path.exists(session_path):
            # Bug fix: the original returned from inside the try without
            # ever closing the file handle.
            with open(session_path, 'r') as session_file:
                return session_file.read()
    except Exception as e:
        error_logger.exception(str(e))
# function to get value for set of keys from an nested object
def get_value_for_lookUp_fields(node, keys):
    """Walk `keys` into the nested dict `node` and return the value found.

    Returns '' when no key matches.
    NOTE(review): each key's membership is tested against the TOP-LEVEL
    node even after descending (the original's behaviour) - keys missing
    there are silently skipped; confirm before changing.
    """
    current = ''
    for key in keys:
        if key not in node:
            continue
        current = node[key] if current == '' else current[key]
    return current
# function to compare Node Details
def compare_node_status(previous_session_data,current_session_data,lookup_field):
    """Return the current-session nodes whose `lookup_field` changed.

    :param previous_session_data: JSON text of the previous run's nodes
    :param current_session_data: list of node dicts from the current run
    :param lookup_field: dotted field path, e.g. "status.name"
    :returns: list of changed/new node dicts (empty when nothing changed);
        None when an unexpected error was logged instead of raised
    """
    try:
        info_logger.info(" Comparing Node Details ")
        previous_session_node_details= json.loads(previous_session_data)
        def compare(current_session_data_dataObj):
            # Keep a node when its looked-up value differs from the
            # previous session's value for the same id.
            for node_index in range(0,len(previous_session_node_details)):
                if current_session_data_dataObj['id'] == previous_session_node_details[node_index]['id']:
                    keys=lookup_field.split('.')
                    previous_run_data=get_value_for_lookUp_fields(previous_session_node_details[node_index],keys)
                    current_run_data=get_value_for_lookUp_fields(current_session_data_dataObj,keys)
                    return previous_run_data!=current_run_data
            # id not present in the previous run -> treat as a change
            return True
        unique_result = list(filter(compare,current_session_data))
        if not unique_result:
            info_logger.info(" no change ")
        else:
            # persist the new snapshot only when something changed
            write_node_data_to_file(current_session_data,current_session_data_file)
        return unique_result
    except Exception as e:
        error_logger.exception(str(e))
# function to fectch current seesion data from catchpoint node api
def fetch_current_session_node_data():
    """Fetch node details from the Catchpoint API.

    Returns None when no auth token could be obtained.
    Bug fix: the original returned the local ``node_details`` on the
    invalid-token path without ever assigning it, raising
    UnboundLocalError instead of failing gracefully.
    """
    node_details = None
    token = get_catchpoint_token(token_url, client_key, client_secret)
    if token:
        node_details = fetch_catchpoint_node_deatils(get_nodes_url, token)
    else:
        info_logger.info(" invalid token")
    return node_details
# function to check changes in node details from current session in respect to previous session run
def check_node_field_changes():
    """Diff the current node snapshot against the previous run.

    First run (or an empty snapshot file): just persist the current data.
    Otherwise archive the previous snapshot and write one
    changes_in_<field>.json result per configured lookup field.
    """
    current_session_node_details = fetch_current_session_node_data()
    previous_session_node_details = read_previous_session_node_data()
    if previous_session_node_details:
        # keep a copy of the last run before it gets overwritten
        write_node_data_to_file(json.loads(previous_session_node_details),previous_session_data_file)
        if(os.stat(os.path.join(sub_path,current_session_data_file)).st_size == 0):
            write_node_data_to_file(current_session_node_details,current_session_data_file)
        else:
            for lookup_field in lookup_fields_for_comparison:
                status_result = compare_node_status(previous_session_node_details,current_session_node_details,lookup_field)
                write_changes_in_node_result(status_result,lookup_field)
    else:
        # nothing to compare against yet - just record the snapshot
        write_node_data_to_file(current_session_node_details,current_session_data_file)
| [
"github@michaelkozicki.com"
] | github@michaelkozicki.com |
c35bdda591a1ae160a5260f373355342cddc478f | f58918aab441e891290f1ca05ba5c3105b1e1499 | /rbac/admin.py | d1ea098433a2af02999e432af15b7b7a253b47fd | [] | no_license | silence-cho/Rbac | 3ff3f09aa548ea69b2050b4b845a34b19ded9179 | 45a09dfca81648ab0b1db67a426938b6a4eba085 | refs/heads/master | 2020-04-01T02:12:14.706410 | 2019-03-28T06:50:31 | 2019-03-28T06:50:31 | 152,768,990 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 368 | py | from django.contrib import admin
# Register your models here.
import models
from django.contrib.admin import ModelAdmin
class UserConfigure(ModelAdmin):
    """Admin options for the User model."""
    # columns shown in the admin change list
    list_display = ['name','role']
# Expose the RBAC models in the Django admin; User gets the customised
# change-list columns from UserConfigure.
admin.site.register(models.User,admin_class=UserConfigure)
admin.site.register(models.Role)
admin.site.register(models.Permission)
admin.site.register(models.PermissionGroup)
| [
"silence_cho@163.com"
] | silence_cho@163.com |
d83cf0765ba46b9ddc92052a9ecd0e3dd1eea18d | 4902dc4d15a1b80d28d060b7f47f05e2185a660e | /huffman.py | a29fa65f995b887bf1896b0a2b70a86ab17712f7 | [] | no_license | surprise3465/HeartSound-Compression | 0f998eedc344f0bbd85f9c25a9696efebe9677a2 | cf0b6dbfa326c6144a0fee5049d2c9887ad2e9ee | refs/heads/master | 2021-04-09T14:52:16.671378 | 2018-08-23T09:32:39 | 2018-08-23T09:32:39 | 125,540,470 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 6,039 | py | import numpy as np
import copy
class HuffNode(object):
    """
    Abstract Huffman-tree node.  Subclasses must implement:
    1. get_wieght() - return the node's weight
    2. isleaf()     - tell whether the node is a leaf
    (the 'wieght' spelling is kept because callers depend on it)
    """
    def get_wieght(self):
        raise NotImplementedError(
            "The Abstract Node Class doesn't define 'get_wieght'")
    def isleaf(self):
        raise NotImplementedError(
            "The Abstract Node Class doesn't define 'isleaf'")
class LeafNode(HuffNode):
    """
    Leaf node: stores one symbol and its frequency.
    """
    def __init__(
            self,
            value=0,
            freq=0,
    ):
        """
        Initialise a leaf with its symbol `value` and frequency `freq`.
        """
        super(LeafNode, self).__init__()
        # the stored symbol
        self.value = value
        # leaf weight == symbol frequency
        self.wieght = freq
    def isleaf(self):
        """
        Always True: this node is a leaf.
        """
        return True
    def get_wieght(self):
        """
        Return this node's weight (the symbol frequency).
        """
        return self.wieght
    def get_value(self):
        """
        Return the symbol stored in this leaf.
        """
        return self.value
class IntlNode(HuffNode):
    """
    Internal (non-leaf) node of the Huffman tree.
    """
    def __init__(self, left_child=None, right_child=None):
        """
        Build an internal node above `left_child` and `right_child`.
        """
        super(IntlNode, self).__init__()
        # weight is the sum of both subtrees' weights
        self.wieght = left_child.get_wieght() + right_child.get_wieght()
        # the two subtrees
        self.left_child = left_child
        self.right_child = right_child
    def isleaf(self):
        """
        Always False: this node is internal.
        """
        return False
    def get_wieght(self):
        """
        Return this node's weight (sum of the children's weights).
        """
        return self.wieght
    def get_left(self):
        """
        Return the left child.
        """
        return self.left_child
    def get_right(self):
        """
        Return the right child.
        """
        return self.right_child
class HuffTree(object):
    """
    Huffman tree wrapper around a root HuffNode.
    """
    def __init__(self, flag, value=0, freq=0, left_tree=None, right_tree=None):
        # flag == 0 builds a single-leaf tree from (value, freq);
        # any other flag merges the two given subtrees under a new
        # internal node.
        super(HuffTree, self).__init__()
        if flag == 0:
            self.root = LeafNode(value, freq)
        else:
            self.root = IntlNode(left_tree.get_root(), right_tree.get_root())
    def get_root(self):
        """
        Return the root node of this Huffman tree.
        """
        return self.root
    def get_wieght(self):
        """
        Return the weight of this tree's root.
        """
        return self.root.get_wieght()
    def traverse_huffman_tree(self, root, code, char_freq):
        """
        Recursively walk the tree, appending '0' on left edges and '1'
        on right edges; each leaf symbol's code is stored in char_freq.
        """
        if root.isleaf():
            char_freq[root.get_value()] = code
            return None
        else:
            self.traverse_huffman_tree(root.get_left(), code + '0', char_freq)
            self.traverse_huffman_tree(root.get_right(), code + '1', char_freq)
def buildHuffmanTree(list_hufftrees):
    """Merge a forest of Huffman trees into a single tree.

    Repeatedly combines the two lightest trees until one remains.  The
    stable sort (not a heap) is kept on purpose so tie-breaking - and
    therefore the generated codes - match the original implementation.
    """
    while len(list_hufftrees) > 1:
        # order the forest from lightest to heaviest
        list_hufftrees.sort(key=lambda tree: tree.get_wieght())
        # take out the two lightest trees
        lighter, heavier = list_hufftrees[0], list_hufftrees[1]
        remaining = list_hufftrees[2:]
        # merge them under a new internal node and put the result back
        remaining.append(HuffTree(1, 0, 0, lighter, heavier))
        list_hufftrees = remaining
    # the single survivor is the finished Huffman tree
    return list_hufftrees[0]
def compress(filedata):
    """Huffman-encode a 1-D array of symbols.

    Returns a (frequency_table, bitstring) pair; the frequency table is
    exactly what decompress() needs to rebuild the same tree.
    """
    # symbol -> occurrence count over the whole input
    char_freq = {}
    for symbol in filedata:
        char_freq[symbol] = char_freq.get(symbol, 0) + 1
    # one single-leaf tree per distinct symbol
    forest = [HuffTree(0, symbol, count, None, None)
              for symbol, count in char_freq.items()]
    # work on a copy so the returned frequency table stays counts-only
    codebook = copy.deepcopy(char_freq)
    tree = buildHuffmanTree(forest)
    tree.traverse_huffman_tree(tree.get_root(), '', codebook)
    # concatenate the per-symbol codes into one bit string
    strcode = ''.join(codebook[symbol] for symbol in filedata)
    return char_freq, strcode
def decompress(char_freq, strcode):
    """Decode a Huffman bit string back into a float numpy array.

    Rebuilds the tree from the same frequency table compress() returned.
    Note: traverse_huffman_tree fills the caller's char_freq dict with
    the per-symbol codes as a side effect (unchanged behaviour).

    Performance fix: the original consumed the bit string with
    ``strcode = strcode[1:]``, copying the remainder on every bit
    (quadratic); iterating the string is linear.
    """
    forest = [HuffTree(0, symbol, count, None, None)
              for symbol, count in char_freq.items()]
    tree = buildHuffmanTree(forest)
    tree.traverse_huffman_tree(tree.get_root(), '', char_freq)
    decoded = []
    node = tree.get_root()
    # walk the tree bit by bit: '1' -> right child, '0' -> left child;
    # every leaf reached emits one symbol and restarts from the root
    for bit in strcode:
        node = node.get_right() if bit == '1' else node.get_left()
        if node.isleaf():
            decoded.append(node.get_value())
            node = tree.get_root()
    return np.array(decoded, dtype=float)
| [
"spirit3465@sina.com"
] | spirit3465@sina.com |
f1242611192ff0c62762bd5d2b56d409622fa446 | 2a289759aed74103db31d23f983f8a94c0e44ab0 | /FunixLesson/Codelearn/HamPython.py | 494250379981fa9bfb5a826dcc4b3472521ee8e4 | [] | no_license | pandare130691/FunixGit | a79401c97b3a8715aeba726b9cbb3b4bcd10f9be | 40cc353c4c6b51dabd041a7401ba21016f542fe9 | refs/heads/main | 2023-02-12T03:15:10.697902 | 2021-01-15T17:04:11 | 2021-01-15T17:04:11 | 324,597,475 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,189 | py | #1
def sum_of_list(lst):
    """Return the sum of the elements of lst (0 for an empty list)."""
    return sum(lst)
# Read a count followed by that many integers, then print their sum.
lst = []
n = int(input())
for i in range(n):
    lst.append(int(input()))
print(sum_of_list(lst))
#2
def max3(a, b, c):
    """Return the largest of the three arguments."""
    return max(a, b, c)
# Read three integers and print the largest.
a = int(input())
b = int(input())
c = int(input())
print(max3(a, b, c))
#3
def show(input):
    """Print the given string and its upper/lowercase letter counts.

    Bug fix: the original looped over the module-global ``s`` instead of
    its own parameter, so it miscounted (or crashed with NameError) for
    any other argument.  The parameter keeps its original name ``input``
    (even though it shadows the builtin) so existing calls still work.
    """
    nUpper = 0
    nLower = 0
    for ch in input:
        if ch.isupper():
            nUpper += 1
        if ch.islower():
            nLower += 1
    print("Given string:", input)
    print("Number of uppercase letters:", nUpper)
    print("Number of lowercase letters:", nLower)
# Read a line and report its upper/lowercase letter counts.
s = str(input())
show(s)
#4
def get_unique_values(lst):
    """Return lst's distinct elements, preserving first-seen order."""
    unique = []
    for item in lst:
        if item not in unique:
            unique.append(item)
    return unique
# Read a count followed by that many integers, then print the distinct
# values in first-seen order.
n = int(input())
lst = []
for i in range(n):
    lst.append(int(input()))
print(get_unique_values(lst))
#5
def is_prime(n):
    """Return True when n is a prime number.

    Bug fix: the original reported 1 - and every negative number - as
    prime; it also trial-divided by every i < n instead of stopping at
    sqrt(n).
    """
    if n < 2:
        return False
    for divisor in range(2, int(n ** 0.5) + 1):
        if n % divisor == 0:
            return False
    return True
# Read an integer and print whether it is prime.
n = int(input())
print(is_prime(n))
| [
"phanxuandung09h5@gmail.com"
] | phanxuandung09h5@gmail.com |
79d3ca7185cfabf9fb13e19308253dc419c828d9 | 2f954ab39ec135fed31325596063fdf0d553613d | /src/conanfile-h195.py | 93b7b66160198112f77c6299dccfb531320a92ae | [
"LicenseRef-scancode-proprietary-license",
"Apache-2.0"
] | permissive | Esri/palladio | 7e2946b2f3e188895ec0258a336d3b44e32b2146 | 8dc99d5cd3dfee79d7c1cc6b17ba2087c7cbe0e0 | refs/heads/main | 2023-08-28T21:41:11.831215 | 2023-07-13T08:51:59 | 2023-07-13T08:51:59 | 30,315,957 | 104 | 22 | Apache-2.0 | 2023-07-13T08:52:01 | 2015-02-04T19:11:20 | C++ | UTF-8 | Python | false | false | 783 | py | import os
from conans import ConanFile
class PalladioConan(ConanFile):
    """Conan recipe declaring Palladio's build-time dependencies."""

    settings = "os", "compiler", "build_type", "arch"
    generators = "cmake"

    def requirements(self):
        """Pin catch2, Houdini and (optionally) the CityEngine SDK."""
        env = os.environ
        self.requires("catch2/2.13.7")
        # An explicit Houdini version from the environment wins over the
        # default floating range.
        houdini_version = env.get("PLD_CONAN_HOUDINI_VERSION")
        if houdini_version is not None:
            self.requires("houdini/{}@sidefx/stable".format(houdini_version))
        else:
            self.requires("houdini/[>19.5.0 <20.0.0]@sidefx/stable")
        # CESDK can be skipped entirely; otherwise take the pinned
        # version from the environment or fall back to the default.
        if "PLD_CONAN_SKIP_CESDK" not in env:
            cesdk_version = env.get("PLD_CONAN_CESDK_VERSION", "3.0.8905")
            self.requires("cesdk/{}@esri-rd-zurich/stable".format(cesdk_version))
| [
"chr11115@esri.com"
] | chr11115@esri.com |
e39954b99947b29743c56e5a794eefbe6f1ae82f | 29d6216a5ca58b68776ff5d1534b30549f07bb1a | /enrich-ssl.py | 16a7c0b0587de6ecc37374841dec0d2676e0bbf8 | [] | no_license | cudeso/digital-footprint-light | 9e958343cdaba3519c0634cb8d81e0f4d21d51db | 060d8bc7d855f379af249937cb8825f6f2cf3a0f | refs/heads/master | 2022-08-07T09:15:35.705161 | 2022-07-25T11:21:38 | 2022-07-25T11:21:38 | 217,532,379 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,749 | py | import os
# Standard library
import json
import random
import re
import socket
import time
import warnings
from datetime import datetime
from time import sleep

# Third-party
import requests
from ipwhois import IPWhois
basepath = os.path.dirname(os.path.realpath(__file__)) + '/'
log_suspicious = basepath + 'suspicious_domains.log'
log_suspicious_json = basepath + 'suspicious_domains.json'
def get_webpage_title(request):
    """Return the <title> text of an HTTP response, or '' on any failure."""
    try:
        body = request.text.strip()
        found = re.search('<title>(.*?)</title>', body, re.IGNORECASE)
        return found.group(1) if found is not None else ""
    except Exception:
        # no body / unexpected response object -> empty title
        return ""
def get_ASN_Infos(ipaddr):
    """
    Get Autonomous System Number information linked to an IP address.
    :param ipaddr: ip address of the website linked to the certificate common name
    :return: tuple (asn, asn_cidr, asn_country_code, asn_description,
        asn_abuse_email); all empty strings when the RDAP lookup fails
    """
    try:
        warnings.filterwarnings("ignore")
        obj = IPWhois(ipaddr)
        results = obj.lookup_rdap(depth=1)
        asn = results['asn']
        asn_cidr = results['asn_cidr']
        asn_country_code = results['asn_country_code']
        asn_description = results['asn_description']
        # Bug fix: initialise the abuse contact before scanning.  The
        # original left it unbound when no RDAP entity carried the
        # 'abuse' role, and the resulting NameError made the outer
        # handler discard the whole (otherwise valid) ASN result.
        asn_abuse_email = ""
        try:
            for entity in results['objects'].values():
                if 'abuse' in entity['roles']:
                    asn_abuse_email = entity['contact']['email'][0]['value']
                    break
        except Exception:
            asn_abuse_email = ""
        return asn, asn_cidr, asn_country_code, asn_description, asn_abuse_email
    except Exception:
        # lookup failed entirely - return empty placeholders
        return "", "", "", "", ""
def enrich(domain, useragent):
    """Fetch https://<domain> and return an enrichment dict.

    Collects HTTP status, Server / Last-Modified headers, page title,
    resolved IP and ASN details.  Any failure to reach the site yields
    {"enriched_status": "Unable to contact site"}.
    """
    headers = {'user-agent': useragent}
    proxy = {}
    url = "https://{}".format(domain)
    try:
        req = requests.get(url, headers=headers, proxies=proxy, timeout=5)
        status_code = req.status_code
        response_text = req.text
        response_headers = req.headers
        # headers are optional - default to empty strings when absent
        try:
            response_server = response_headers["Server"]
        except Exception as e:
            response_server = ""
        try:
            response_last_modified = response_headers["Last-Modified"]
        except Exception as e:
            response_last_modified = ""
        page_title = get_webpage_title(req)
        ipaddr = socket.gethostbyname(domain)
        asn, asn_cidr, asn_country_code, asn_description, asn_abuse_email = get_ASN_Infos(ipaddr)
        now = time.strftime("%Y-%m-%d %H:%M:%S")
        result = { "enriched_status": "ok",
                "enriched_timestamp": now,
                "enriched_domain": domain,
                "status_code": status_code,
                "response_server": response_server,
                "response_last_modified": response_last_modified,
                "page_title": page_title,
                "ipaddr": ipaddr,
                "asn": asn,
                "asn_cidr": asn_cidr,
                "asn_country_code": asn_country_code,
                "asn_description": asn_description,
                "asn_abuse_email": asn_abuse_email
                }
        return result
    except Exception as ex:
        return { "enriched_status": "Unable to contact site" }
def main(ua):
    """Enrich every unique domain from the suspicious-domains JSON log.

    Reads one JSON object per line, enriches each X509LogEntry domain
    once (wildcards are rewritten to www.), and dumps the combined
    records to certificate_stream_for_elk.json, one JSON doc per line.

    :param ua: list of user-agent strings; one is picked at random per line
    """
    tested_domains = {}
    with open(log_suspicious_json, 'r') as reader:
        data = True
        while data:
            data = reader.readline()
            useragent = random.choice(ua)
            if data:
                data_json = json.loads(data)
                if data_json["update_type"] == "X509LogEntry":
                    all_domains = data_json["leaf_cert"]["all_domains"]
                    if len(all_domains) > 0:
                        for domain in all_domains:
                            test_domain = domain
                            # wildcard certs can't be fetched directly
                            if "*." in domain:
                                test_domain = domain.replace("*.","www.")
                            # skip domains already enriched this run
                            do_enrichment = True
                            for el in tested_domains:
                                if el == test_domain:
                                    do_enrichment = False
                                    break
                            if do_enrichment:
                                update_json = { "timestamp": data_json["timestamp"],
                                    "match": data_json["match"],
                                    "current_domain": data_json["current_domain"],
                                    "update_type": data_json["update_type"],
                                    "not_before": data_json["not_before"],
                                    "not_after": data_json["not_after"],
                                    "leaf_cert": data_json["leaf_cert"],
                                    "enriched": enrich(test_domain, useragent)
                                }
                                tested_domains[test_domain] = update_json
    # NOTE(review): redundant - the 'with' block above already closed it
    reader.close()
    with open("certificate_stream_for_elk.json", "w") as writer:
        for el in tested_domains:
            json.dump(tested_domains[el], writer)
            writer.write("\n")
if __name__ == '__main__':
    # Load the user-agent rotation list; fall back to a single
    # hard-coded Firefox UA when the file is missing.
    try:
        ua = open('useragent_list.txt').read().splitlines()
    except:
        # NOTE(review): bare except also swallows KeyboardInterrupt etc.
        ua = ['Mozilla/5.0 (Windows NT 6.2; WOW64; rv:55.0) Gecko/20100101 Firefox/55.0']
    main(ua)
| [
"noreply@github.com"
] | cudeso.noreply@github.com |
d69b743b1add535c69e1d83e6680e19ee5e67bc3 | 5a33d2a369d5cba8348ead5f3f9f0a57029db5ce | /SecPy.py | 598a9955ee3ec4b0735953984dea142d896bcbee | [] | no_license | HelianMartins/SecPy | 220453cd917b109ae493c6db61c3a7308825f08a | a71995d87ea7e81c7a98d91e2b79d495a4edc580 | refs/heads/master | 2022-06-24T20:04:20.294540 | 2020-05-10T19:40:04 | 2020-05-10T19:40:04 | 262,855,264 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 19,219 | py | # altor : Helian Martins
import banelib
import os
def secpy():
    """Interactive console menu for the banelib scan / DoS helpers.

    Rewrite of the original menu: both submenus are table driven instead
    of one if-block per option, the ASCII banner is drawn by a helper
    instead of being duplicated ~40 times, and the Python 2
    ``raw_input`` call that crashed the "Icmp storm" option on Python 3
    (and prompted for the target twice) is removed.
    """

    def clear():
        # best-effort terminal clear ('or None' mirrors the original no-op)
        os.system('clear') or None

    def banner(footer=" ====================================="):
        # clear the screen and draw the ASCII-art header
        clear()
        print(" =====================================")
        print(" ####    ######    ####   #####   #   # ")
        print(" #    #  #        #    #  #    #   # #  ")
        print("  ####   #####   #        #####     #   ")
        print("      #  #       #        #         #   ")
        print(" #    #  #       #    #   #         #   ")
        print("  ####   ######   ####    #         #   ")
        print(footer)

    def run(label, action, prompt):
        # one attack/scan screen: show the label, read the target, fire
        banner()
        print(" " + label)
        target = input(prompt)
        action(target)

    # option number -> (menu label, callable taking the target)
    scans = {
        1: ("SQL-Injection", banelib.sqlieb),
        2: ("XSS", banelib.xss),
        3: ("FI", banelib.fi),
        4: ("Injecao de codigo PHP", banelib.getinject),
        5: ("Injecao de comandos", banelib.execlink),
    }
    attacks = {
        1: ("Hulk ataque", lambda t: banelib.hulk(t, threads=1000)),
        2: ("Hulk ataque com proxies http", lambda t: banelib.proxhulk(t, threads=1000)),
        3: ("Slowloris ataque", lambda t: banelib.slowloris(t, p=80, threads=50)),
        4: ("Xerxes ataque", lambda t: banelib.xerxes(t, p=443, threads=500)),
        5: ("Http flood", lambda t: banelib.httpflood(t, p=80, threads=1000)),
        6: ("Http inundacao com proxies", lambda t: banelib.lulzer(t, p=80, threads=1000)),
        7: ("Tcp flood", lambda t: banelib.tcpflood(t, threads=1000)),
        8: ("Udp flood", lambda t: banelib.udp(t, p=80)),
        9: ("Doser", lambda t: banelib.doser(t, threads=500)),
        10: ("Prox doser", lambda t: banelib.proxdoser(t, threads=500)),
        11: ("Torshammer", lambda t: banelib.torshammer(t, p=80, threads=1000)),
        12: ("Slow read", lambda t: banelib.slowread(t, p=80, threads=1000)),
        13: ("Apache killer", lambda t: banelib.apachekiller(t, p=80, threads=500)),
        14: ("Gold eneye", lambda t: banelib.goldeneye(t, p=80, threads=1000)),
        15: ("Medusa", lambda t: banelib.medusa(t, p=80, threads=1000)),
        16: ("Icmp", lambda t: banelib.icmp(t, p=80, threads=100)),
        17: ("Syn flood", lambda t: banelib.synflood(t, p=80, threads=100)),
        # fixed: this entry used Python 2's raw_input() and prompted twice
        18: ("Icmp storm", lambda t: banelib.icmpstorm(t, p=80, threads=100)),
        19: ("Land", lambda t: banelib.land(t, p=80, threads=100)),
        20: ("Udp storm", lambda t: banelib.udpstorm(t, p=80, threads=100)),
        21: ("Black nurse", lambda t: banelib.blacknurse(t, p=80, threads=100)),
    }

    def vuln_menu():
        banner()
        print(" Vulnerabilities ")
        for num in sorted(scans):
            print("%s: %d" % (scans[num][0], num))
        print("volta: 6")
        print("sair: 7")
        choice = int(input("valor: "))
        if choice in scans:
            label, scan = scans[choice]
            run(label, scan, "link ")
        elif choice == 6:
            secpy()
        elif choice == 7:
            print("saio")

    def dos_menu():
        banner()
        print(" Dos/DDos ")
        print("________________________________________")
        for num in sorted(attacks):
            print(("%s:" % attacks[num][0]).ljust(35) + "[%2d]" % num)
            print("________________________________________")
        print("voltar:".ljust(35) + "[22]")
        print("________________________________________")
        print("sair:".ljust(35) + "[23]")
        print("________________________________________")
        choice = int(input(" valor: "))
        if choice in attacks:
            label, attack = attacks[choice]
            run(label, attack, "Host ip ")
        elif choice == 22:
            secpy()
        elif choice == 23:
            print("saio")

    # top-level menu (single pass, like the original)
    banner(" ============Helian Martins===========")
    print(" scan Vulnerabilities:              [ 1]")
    print(" Dos/DDos:                          [ 2]")
    print(" sair:                              [ 3]")
    choice = int(input("valor: "))
    if choice == 1:
        vuln_menu()
    elif choice == 2:
        dos_menu()
    elif choice == 3:
        print("saio")
secpy()
| [
"noreply@github.com"
] | HelianMartins.noreply@github.com |
ea94c660c332c7831fda74aa5659f4c874998483 | ac713c0c573c7980c50be791ff789bca5dc35917 | /gte460/wsgi.py | 342080188645b398c6f80f961a5a924a370f3be8 | [] | no_license | aristodemos/gte460 | 0f6c64c6db6f3c4c11f84ec5868b1f8cc7495166 | b31ee2da719c1ff3e02bba1ac0138b2c53b31c54 | refs/heads/master | 2020-06-06T13:18:12.483779 | 2014-06-10T18:00:55 | 2014-06-10T18:00:55 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 387 | py | """
WSGI config for gte460 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.6/howto/deployment/wsgi/
"""
import os
# Point Django at the project's settings module before the WSGI application
# object is created (an already-set environment value takes precedence).
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "gte460.settings")
from django.core.wsgi import get_wsgi_application
# Module-level WSGI callable that WSGI servers (gunicorn, mod_wsgi, ...) load.
application = get_wsgi_application()
| [
"a.rpiko@gmail.com"
] | a.rpiko@gmail.com |
4a509ced8a8371d22676ed9916b2c6281c81013e | 48093bb65c5683475d7e7db0625025150fb03f97 | /Q-learning maze/Q_Learning.py | 81698b9e555d72f53d0ea4704a1c2c08bef03921 | [] | no_license | HITLB17/basic_RL | ee682ba319e857f4dd24f8785106f544febd64ae | 0c497e109d3fee347339fbc453a2a37848752556 | refs/heads/master | 2020-05-31T21:45:36.316266 | 2019-08-04T08:25:47 | 2019-08-04T08:25:47 | 190,505,669 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,154 | py | """"
most of this code copied from morvan
this part is a Q-learning algorithm for finding a optimal path in a maze
Q(s,a) = Q(s,a) + learning_rate * [reward + gamma* max_a'{Q'(s',a')} - Q(s,a)]
"""
import numpy as np
import pandas as pd
class QLearningTable:
    """Tabular Q-learning agent.

    Maintains a DataFrame of state/action values and updates it with
    Q(s,a) <- Q(s,a) + lr * [reward + gamma * max_a' Q(s',a') - Q(s,a)].
    """

    def __init__(self, actions, learning_rate = 0.01, reward_decay = 0.9, e_greedy = 0.9):
        self.actions = actions  # list of available actions (used as Q-table columns)
        self.lr = learning_rate  # step size of each Q-value update
        self.gamma = reward_decay  # discount factor for future rewards
        self.epsilon = e_greedy  # probability of acting greedily (epsilon-greedy policy)
        self.q_table = pd.DataFrame(columns=self.actions, dtype=np.float64)  # initially empty

    def choose_action(self, observation):
        """Return an action for *observation* under the epsilon-greedy policy."""
        self.check_state_exist(observation)
        if np.random.uniform() < self.epsilon:
            # Greedy branch. Shuffle the row first so that ties between
            # equally valued actions are broken at random instead of always
            # favouring the first column.
            state_action = self.q_table.loc[observation, :]
            state_action = state_action.reindex(np.random.permutation(state_action.index))
            action = state_action.idxmax()
        else:
            # Exploration branch: uniform random action.
            action = np.random.choice(self.actions)
        return action

    def learn(self, s, a, reward, s_):
        """Update Q(s, a) from one observed transition (s, a, reward, s_)."""
        self.check_state_exist(s_)
        q_predict = self.q_table.loc[s, a]
        if s_ != 'terminal':
            # Bootstrapped target: immediate reward plus discounted best
            # value of the successor state.
            q_target = reward + self.gamma * self.q_table.loc[s_, :].max()
        else:
            q_target = reward  # next state is terminal: no future reward
        self.q_table.loc[s, a] += self.lr * (q_target - q_predict)

    def check_state_exist(self, state):
        """Add *state* to the Q-table with all-zero values if it is unseen."""
        if state not in self.q_table.index:
            # Enlarge via .loc assignment instead of DataFrame.append, which
            # was deprecated in pandas 1.4 and removed in pandas 2.0.
            self.q_table.loc[state] = [0.0] * len(self.actions)
"17b904042@stu.hit.edu.cn"
] | 17b904042@stu.hit.edu.cn |
9666a9a73114d3023bc7e7166da3745bb5daedc3 | 43973d97eb15d48a1af2e53ce8538e76a6a40c1a | /.virtualenvs/mysite-virtualenv/bin/django-admin | b5ee6cd0db1dd5f382e69eec2e22b8c3bc94e901 | [] | no_license | satyamsammi/fcon | 5693ea7be7804c6e26323088a00a12e802432222 | 8c2478e4b7479cd0d34fa110c79b0c24c9ecb4b4 | refs/heads/master | 2021-01-17T08:07:26.579950 | 2016-07-26T11:40:52 | 2016-07-26T11:40:52 | 62,948,130 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 305 | #!/home/fcon/.virtualenvs/mysite-virtualenv/bin/python3.4
# -*- coding: utf-8 -*-
import re
import sys
from django.core.management import execute_from_command_line
if __name__ == '__main__':
    # Strip the "-script.pyw"/".exe" suffix that setuptools entry-point
    # wrappers append to argv[0] on Windows, then hand control to Django's
    # command-line dispatcher and propagate its exit status.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(execute_from_command_line())
| [
"fcon@fff5562e8198"
] | fcon@fff5562e8198 | |
d1b1803bbb5a573a95d3ae340beec996d30a8766 | 2f95c4762e0d57e97da6c291228761e3a8c8e938 | /vvdatalab_nifi_flow_generator/models/processors/creations/create_processor_routeonattribute.py | 5ae53f7a28bfd6d46636621e0c7284f4f00594f4 | [] | no_license | felipelobasrocha/nifi_flow_generator | 9964050a6834fb15434f2f8ece16b1d26afe192c | 6dea7c27250e3447e5febf6a6e55625f413ec62c | refs/heads/master | 2020-08-24T22:00:43.115585 | 2019-10-22T21:40:34 | 2019-10-22T21:40:34 | 216,914,965 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 825 | py | from nipyapi import canvas, nifi
from .create_processor import CreateProcessor
class CreateProcessorRouteOnAttribute(CreateProcessor):
    """Factory for a NiFi ``RouteOnAttribute`` processor.

    Specialises :class:`CreateProcessor` by resolving the RouteOnAttribute
    processor type on the canvas and copying the relevant entries of
    *processor_config* into the processor's property map.
    """

    # Resolved nipyapi processor type; populated in __init__.
    type = None

    def __init__(self, process_group, processor_name, processor_location, processor_config):
        super().__init__(process_group, processor_name, processor_location, processor_config)
        self.type = canvas.get_processor_type('RouteOnAttribute')
        props = processor_config.get("properties")
        self.config.properties = {
            "Routing Strategy": props.get("routeonattribute.routing_strategy", ""),
            "Insert": props.get("routeonattribute.routeon_insert", ""),
        }

    def create(self):
        """Create the processor on the canvas via the base-class helper."""
        return super().create(self.type)
"felipelobas@gmail.com"
] | felipelobas@gmail.com |
d1414f16cce1d099928c9824e36401395b01d041 | 4305b79e7e23221f3f58060da282e3a4c1115516 | /ex17.py | 38cc56a0321d56835ca1c33395ac8ddea80579f6 | [] | no_license | Cathryne/Python | da533636431513bba82b75a25efd67f02d4712e8 | b520f270205264e8856dba0923b5aeeb1d813a25 | refs/heads/master | 2020-06-04T21:56:27.481723 | 2015-03-01T16:56:19 | 2015-03-01T16:56:19 | 23,039,947 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 684 | py | # Exercise 17: More Files
# http://learnpythonthehardway.org/book/ex17.html
from sys import argv
from os.path import exists
script, from_file, to_file = argv
print "Copying from %s to %s..." % (from_file, to_file)
# sequential combination of file opening and content reading
in_data = open(from_file).read()
print "The input file is %d bytes long." % len(in_data) # built-in function, also for number of items in sequence
print "Does the output file exist? %r" % exists(to_file) # function of Posix module
raw_input("Ready! Press RETURN to continue or CTRL+C to abort. ")
# Another function sequence: opening and writing
open(to_file, 'w').write(in_data)
print "OK, all done!"
| [
"katrinleinweber@MAC.local"
] | katrinleinweber@MAC.local |
b69f5c9d38a5d66f3802e83800a79da6780808d5 | 32d9d4d2804996d2c0b3283727594d30770797f0 | /app.py | 77c8eb199a777011a8a31c2e4cea973f5f2b4fb0 | [
"MIT"
] | permissive | ianforrest11/dash | 55a269e15336b13d8925aa4e56d11b177b25c489 | 7e10e5ebf5d5ce6ba8d77e50990d36848b1e8e8c | refs/heads/master | 2022-12-02T07:32:11.181695 | 2019-08-29T16:01:09 | 2019-08-29T16:01:09 | 204,800,986 | 0 | 0 | MIT | 2022-11-22T04:15:13 | 2019-08-27T22:29:06 | Jupyter Notebook | UTF-8 | Python | false | false | 1,699 | py | import dash
import dash_bootstrap_components as dbc
"""
https://github.com/facultyai/dash-bootstrap-components
dash-bootstrap-components provides Bootstrap components.
Plotly Dash is great! However, creating the initial layout can require a lot
of boilerplate. dash-bootstrap-components reduces this boilerplate by providing
standard layouts and high-level components.
A good way to start customising the stylesheet is to use an alternative
pre-compiled theme. Bootswatch is a great place to find new themes. Links to
CDNs for each of the Bootswatch styles are also included , and can be used
with the external_stylesheets argument of the Dash constructor:
app = dash.Dash(__name__, external_stylesheets=[dbc.themes.CERULEAN])
Go to https://bootswatch.com/ to preview these Bootswatch themes:
dbc.themes.BOOTSTRAP
dbc.themes.CERULEAN
dbc.themes.COSMO
dbc.themes.CYBORG
dbc.themes.DARKLY
dbc.themes.FLATLY
dbc.themes.JOURNAL
dbc.themes.LITERA
dbc.themes.LUMEN
dbc.themes.LUX
dbc.themes.MATERIA
dbc.themes.MINTY
dbc.themes.PULSE
dbc.themes.SANDSTONE
dbc.themes.SIMPLEX
dbc.themes.SKETCHY
dbc.themes.SLATE
dbc.themes.SOLAR
dbc.themes.SPACELAB
dbc.themes.SUPERHERO
dbc.themes.UNITED
dbc.themes.YETI
"""
# Stylesheets loaded from CDNs: the Bootstrap/Bootswatch theme plus
# Font Awesome 5 (the latter supplies the social-media icons).
external_stylesheets = [
    dbc.themes.BOOTSTRAP, # Bootswatch theme
    'https://use.fontawesome.com/releases/v5.9.0/css/all.css', # for social media icons
]
# Responsive viewport meta tag so the layout scales on mobile devices.
meta_tags=[
    {'name': 'viewport', 'content': 'width=device-width, initial-scale=1'}
]
app = dash.Dash(__name__, external_stylesheets=external_stylesheets, meta_tags=meta_tags)
# Don't raise for callbacks that reference component ids missing from the
# initial layout. NOTE(review): typically needed when pages/layouts are
# assembled dynamically -- confirm that is the case elsewhere in this app.
app.config.suppress_callback_exceptions = True
app.title = 'Components of 2018-19 NBA Salaries' # appears in browser title bar
server = app.server | [
"ianforrest11@gmail.com"
] | ianforrest11@gmail.com |
fae5bcfb182d6c5898546cad55b36967f10e7dfa | 37feae2d674f5d492691d8dacde5f5d2a7bc763b | /lr/apps.py | 16ec0bfbd8af261119993adadfa2b4be6d199abd | [] | no_license | HansWan/LoveRelay | 6097b59a3d7c6a24f0567c552a313a3fb325a6ce | 58f553f81e499d07fabfaf3f86109cb46fca21c6 | refs/heads/master | 2021-09-21T00:58:19.108066 | 2018-08-18T06:56:55 | 2018-08-18T06:56:55 | 118,108,611 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 84 | py | from django.apps import AppConfig
class LrConfig(AppConfig):
    """Django application configuration for the ``lr`` app."""

    name = 'lr'
| [
"thelastchoice@vip.sina.com"
] | thelastchoice@vip.sina.com |
c167087c5de0258595432b19c6eab24944bf5cc3 | 36126f91a2d5903483b84ba2d8be77e160803058 | /examples/python/deterministic_paths.py | 3f8d8e0ab3a502deaaed71a38504c425db042893 | [
"Apache-2.0"
] | permissive | open-risk/transitionMatrix | 9962bb2656eb637ba56afc3adecf42bbe68f9593 | d05e75cbc251f01842dd8c5ce225894b988f4d99 | refs/heads/master | 2023-03-05T08:01:20.816425 | 2023-02-22T20:46:38 | 2023-02-22T20:46:38 | 110,365,127 | 73 | 29 | Apache-2.0 | 2022-12-08T11:37:12 | 2017-11-11T17:25:08 | Python | UTF-8 | Python | false | false | 1,885 | py | # encoding: utf-8
# (c) 2017-2022 Open Risk, all rights reserved
#
# TransitionMatrix is licensed under the Apache 2.0 license a copy of which is included
# in the source distribution of TransitionMatrix. This is notwithstanding any licenses of
# third-party software included in this distribution. You may not use this file except in
# compliance with the License.
#
# Unless required by applicable law or agreed to in writing, software distributed under
# the License is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND,
# either express or implied. See the License for the specific language governing permissions and
# limitations under the License.
"""
Create deterministic transitions
"""
import pandas as pd
import transitionMatrix as tm
from transitionMatrix.estimators import cohort_estimator as es
from transitionMatrix.generators import dataset_generators
from transitionMatrix.utils.converters import datetime_to_float, to_compact
# Each inner list is one deterministic path: a sequence of (time, state)
# transition events for a single entity.
sequences = [[(0.0, 0), (0.5, 1), (1.0, 2)],
             [(0.0, 1), (0.3, 0), (0.8, 1)],
             [(0.0, 2), (0.2, 1), (0.7, 2)]]
# Number of identical copies of each path to generate.
replication_count = 10
# State space of three labelled states (codes '0'/'1'/'2' -> A/B/C).
definition = [('0', "A"), ('1', "B"), ('2', "C")]
myState = tm.StateSpace(definition)
# myState = tm.StateSpace(definition)
input_data = dataset_generators.deterministic(sequences, replication_count)
print(input_data)
# Order observations per entity and over time before cohorting.
sorted_data = input_data.sort_values(['ID', 'Time'], ascending=[True, True])
# Bucket the timestamps into 100 cohorts for the cohort estimator.
cohort_data, cohort_bounds = tm.utils.bin_timestamps(sorted_data, cohorts=100)
print(80*'=')
print(cohort_data)
# Estimate per-cohort transition matrices with Goodman confidence intervals.
myEstimator = es.CohortEstimator(states=myState, cohort_bounds=cohort_bounds, ci={'method': 'goodman', 'alpha': 0.05})
result = myEstimator.fit(cohort_data, labels={'Time': 'Time', 'State': 'State', 'ID': 'ID'})
# Average matrix across cohorts; report counts and the matrix itself.
myMatrix = tm.TransitionMatrix(myEstimator.average_matrix)
myEstimator.print(select='Counts')
myMatrix.print_matrix(accuracy=3) | [
"openrisk@outlook.com"
] | openrisk@outlook.com |
f6ba1d6a3fdd7328ee6a1ca2d8f7286f9c4224d6 | 7ade08349d2181eec4956b6f224c72f59341d833 | /Caesar Shift/Caesar Shift.py | 59bd98cfb5607942f3de949424ce3ad25b92d60d | [] | no_license | MadhavMenon1007/Smaller-Projects | 72d0cfba12f93eeef71f5ac7c201268450b453c1 | 773aa6a101891ad7592c5bed1fb056798b51f8cf | refs/heads/main | 2023-06-12T04:33:55.204795 | 2021-07-07T06:02:03 | 2021-07-07T06:02:03 | 374,316,624 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 653 | py | def caesar_shift(phrase, key):
list_of_chars = [i for i in phrase]
encrypted_list = []
encrypted_message = ""
for i in list_of_chars:
if i == " ":
list_of_chars.remove(i)
encrypted_list.append(chr(ord(i)+key))
return encrypted_message.join(encrypted_list)
def decode_caesar_shift(cipher_text, key):
    """Invert a Caesar shift: move every character back by *key* code points.

    Spaces are dropped, mirroring the encoder. The original implementation
    removed spaces from the list it was iterating over, which skipped the
    character following each space and shifted the space itself; filtering
    up front fixes both defects.
    """
    return "".join(chr(ord(char) - key) for char in cipher_text if char != " ")
| [
"noreply@github.com"
] | MadhavMenon1007.noreply@github.com |
ddc36023cd9ece79efda06105a15c8d2ae25987e | c003d5341804370e7311284c1ddc2ef2d9ab175e | /train.py | 24d618f87dd772c7603cd166b9ba9825806efe97 | [] | no_license | SamirYousuf/AutoEncoder | b09062bb171d85d8c4b8d5bd380f5b247cffcb4c | b2483a21473fd7d506b18615a94328246488597d | refs/heads/master | 2020-04-06T23:02:04.203570 | 2018-11-16T11:10:28 | 2018-11-16T11:10:28 | 157,855,614 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,982 | py | # This is the main training script that we should be able to run to grade
# your model training for the assignment.
# You can create whatever additional modules and helper scripts you need,
# as long as all the training functionality can be reached from this script.
# Add/update whatever imports you need.
from keras import Model
from keras.layers import Input,Conv2D,MaxPooling2D,UpSampling2D, Dense, Activation, Flatten, Dropout
from keras.callbacks import ModelCheckpoint
from argparse import ArgumentParser
import mycoco
# If you do option A, you may want to place your code here. You can
# update the arguments as you need.
def optA():
    """Option A: query COCO categories, balance them, and train a small CNN.

    Reads the module-level ``args`` namespace populated in ``__main__``.
    NOTE(review): despite the Encoder/Decoder comments below, the network
    ends in Flatten -> Dense(10) -> Dense(1, sigmoid) trained with binary
    cross-entropy, i.e. it behaves as a binary image classifier rather than
    an autoencoder -- confirm this is the intended assignment variant.
    """
    mycoco.setmode('train')
    ids = mycoco.query(args.categories, exclusive=False)
    # Cap every category at the same number of instances: either the
    # user-supplied maximum or the size of the smallest category.
    if args.maxinstances:
        x = args.maxinstances
    else:
        x = len(min(ids, key=len))
    list1 = []
    for i in range(len(ids)):
        list1.append(ids[i][:x])
    print("Maximum number of instances are :" , str(x))
    # Batched image iterator over the truncated id lists; [0, 1] are the
    # labels for the two category groups (see mycoco.iter_images).
    imgiter = mycoco.iter_images(list1, [0,1], batch=100)
    input_img = Input(shape=(200,200,3))
    # Encoder Layers
    # NOTE(review): `x` is reused below as the running layer tensor,
    # shadowing the instance count computed above (safe here, but confusing).
    x = Conv2D(8, (3, 3), activation='relu')(input_img)
    x = MaxPooling2D((2, 2), padding='same')(x)
    x = Dropout(0.5)(x)
    x = Conv2D(8, (3, 3), activation='relu')(x)
    x = MaxPooling2D((2, 2), padding='same')(x)
    x = Conv2D(16, (3, 3), activation='relu')(x)
    # Decoder Layers
    x = Conv2D(16, (3, 3), activation='relu')(x)
    x = UpSampling2D((2, 2))(x)
    x = Conv2D(8, (3, 3), activation='relu')(x)
    x = UpSampling2D((2, 2))(x)
    x = Conv2D(1, (3, 3), activation='relu')(x)
    x = Flatten()(x)
    x = Dense(10)(x)
    decode = Dense(1, activation="sigmoid")(x)
    model = Model(input_img, decode)
    model.compile(loss="binary_crossentropy", optimizer="adam", metrics=["accuracy"])
    # Checkpoint the best model (by training accuracy) to scratch storage.
    filepath="/scratch/gusmohyo/checkfile.hdf5"
    checkpoint = ModelCheckpoint(filepath, monitor='acc', verbose=1, save_best_only=True, mode='max')
    callbacks_list = [checkpoint]
    model.fit_generator(imgiter, steps_per_epoch=10, epochs=30, callbacks=callbacks_list, verbose=0)
    model.save(args.modelfile)
    print("Option A is implemented!")
# If you do option B, you may want to place your code here. You can
# update the arguments as you need.
def optB():
    """Option B entry point -- placeholder, not implemented in this submission."""
    mycoco.setmode('train')
    print("Option B not implemented!")
# Modify this as needed.
if __name__ == "__main__":
    # Command-line interface: choose assignment option A/B, optional per-
    # category instance cap, checkpoint directory, output model file and
    # two or more COCO category labels.
    parser = ArgumentParser("Train a model.")
    # Add your own options as flags HERE as necessary (and some will be necessary!).
    # You shouldn't touch the arguments below.
    parser.add_argument('-P', '--option', type=str,
                        help="Either A or B, based on the version of the assignment you want to run. (REQUIRED)",
                        required=True)
    parser.add_argument('-m', '--maxinstances', type=int,
                        help="The maximum number of instances to be processed per category. (optional)",
                        required=False)
    parser.add_argument('checkpointdir', type=str,
                        help="directory for storing checkpointed models and other metadata (recommended to create a directory under /scratch/)")
    parser.add_argument('modelfile', type=str, help="output model file")
    parser.add_argument('categories', metavar='cat', type=str, nargs='+',
                        help='two or more COCO category labels')
    args = parser.parse_args()
    # Echo the run configuration before doing any work.
    print("Output model in " + args.modelfile)
    print("Working directory at " + args.checkpointdir)
    print("Maximum instances is " + str(args.maxinstances))
    # The task needs at least two categories to discriminate between.
    if len(args.categories) < 2:
        print("Too few categories (<2).")
        exit(0)
    print("The queried COCO categories are:")
    for c in args.categories:
        print("\t" + c)
    print("Executing option " + args.option)
    # Dispatch to the selected assignment variant.
    if args.option == 'A':
        optA()
    elif args.option == 'B':
        optB()
    else:
        print("Option does not exist.")
        exit(0)
| [
"noreply@github.com"
] | SamirYousuf.noreply@github.com |
fc70be99402b8f210c571deaa997b0a968e80c8a | 639df091214a02e6b3e069c173048120d53c25d0 | /PS6_EncryptionApplyCoder.py | 572eb75e4e2aa57f279418b88a6b0c39aca540cb | [] | no_license | VamsikrishnaNallabothu/MITx-My-Python-Work | 98b5c0a041688f1d278583fc2a2f6238f46889b6 | 34b8f8c7ed387528c7c145e254eb224160a9220c | refs/heads/master | 2016-09-06T13:58:23.396134 | 2015-08-14T22:25:19 | 2015-08-14T22:25:19 | 40,645,450 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 500 | py | #EncryptionApplyCoder.py
def applyCoder(text, coder):
    """
    Applies the coder to the text. Returns the encoded text.
    text: string
    coder: dict with mappings of characters to shifted characters
    returns: text after mapping coder chars to original text
    (characters without an entry in coder are passed through unchanged)
    """
    # Translate every character through the coder, falling back to the
    # character itself, and join once: linear time instead of the quadratic
    # repeated string concatenation and range(len(...)) indexing of the
    # original loop.
    return ''.join(coder.get(char, char) for char in text)
| [
"vamsikrishna.nallabothu@sjsu.edu"
] | vamsikrishna.nallabothu@sjsu.edu |
2faa8732c420dc76b27e2eda26ec8fea1a5ba3ab | 9947d1e328a3262a35a61385dc537c3dc557ab7d | /TensorFlow-Poems-master/model.py | bbc6b813c1e22dc56e257aabd4f1d6187cc34ca8 | [] | no_license | nuass/lzh | d0a7c74a3295523d1fe15eeaa73997fc04469f06 | 3cb1cf1e448b88ade226d113a7da4eab7bbb5c09 | refs/heads/master | 2021-02-06T06:10:32.772831 | 2019-06-10T08:54:49 | 2019-06-10T08:54:49 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,693 | py | # -*- coding: utf-8 -*-
import tensorflow as tf
import numpy as np
def rnn_model(model, input_data, output_data, vocab_size, rnn_size=128, num_layers=2, batch_size=64,
              learning_rate=0.01):
    """
    construct rnn seq2seq model.
    :param model: model class -- one of 'rnn', 'gru' or 'lstm'
    :param input_data: input data placeholder
    :param output_data: output data placeholder (None when building the inference graph)
    :param vocab_size: vocabulary size
    :param rnn_size: size of a single RNN unit (hidden-state width)
    :param num_layers: number of stacked RNN layers
    :param batch_size: batch size used for the training-time initial state
    :param learning_rate: learning rate for the Adam optimizer
    :return: dict of named graph endpoints ('initial_state', 'last_state',
        plus 'train_op'/'total_loss'/'loss'/'output' when training, or
        'prediction' when output_data is None)
    """
    end_points = {}
    # Select the recurrent cell class by name.
    if model == 'rnn':
        cell_fun = tf.contrib.rnn.BasicRNNCell
    elif model == 'gru':
        cell_fun = tf.contrib.rnn.GRUCell
    elif model == 'lstm':
        cell_fun = tf.contrib.rnn.BasicLSTMCell
    # NOTE(review): an unrecognised `model` string leaves cell_fun undefined
    # and raises NameError on the next line; an explicit error would be clearer.
    cell = cell_fun(rnn_size, state_is_tuple=True)
    cell = tf.contrib.rnn.MultiRNNCell([cell] * num_layers, state_is_tuple=True)
    # Training uses a full batch; generation runs one sequence at a time.
    if output_data is not None:
        initial_state = cell.zero_state(batch_size, tf.float32)
    else:
        initial_state = cell.zero_state(1, tf.float32)
    # Keep the (potentially large) embedding matrix on the CPU.
    with tf.device("/cpu:0"):
        embedding = tf.get_variable('embedding', initializer=tf.random_uniform(
            [vocab_size + 1, rnn_size], -1.0, 1.0))
        inputs = tf.nn.embedding_lookup(embedding, input_data)
    # [batch_size, ?, rnn_size] = [64, ?, 128]
    outputs, last_state = tf.nn.dynamic_rnn(cell, inputs, initial_state=initial_state)
    output = tf.reshape(outputs, [-1, rnn_size])
    # Output projection from the hidden state to vocabulary logits.
    weights = tf.Variable(tf.truncated_normal([rnn_size, vocab_size + 1]))
    bias = tf.Variable(tf.zeros(shape=[vocab_size + 1]))
    logits = tf.nn.bias_add(tf.matmul(output, weights), bias=bias)
    # [?, vocab_size+1]
    if output_data is not None:
        # output_data must be one-hot encode
        labels = tf.one_hot(tf.reshape(output_data, [-1]), depth=vocab_size + 1)
        # should be [?, vocab_size+1]
        loss = tf.nn.softmax_cross_entropy_with_logits(labels=labels, logits=logits)
        # loss shape should be [?, vocab_size+1]
        total_loss = tf.reduce_mean(loss)
        train_op = tf.train.AdamOptimizer(learning_rate).minimize(total_loss)
        end_points['initial_state'] = initial_state
        end_points['output'] = output
        end_points['train_op'] = train_op
        end_points['total_loss'] = total_loss
        end_points['loss'] = loss
        end_points['last_state'] = last_state
    else:
        # Inference: expose softmax probabilities over the vocabulary.
        prediction = tf.nn.softmax(logits)
        end_points['initial_state'] = initial_state
        end_points['last_state'] = last_state
        end_points['prediction'] = prediction
    return end_points
| [
"1581627402@qq.com"
] | 1581627402@qq.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.