blob_id stringlengths 40 40 | directory_id stringlengths 40 40 | path stringlengths 3 281 | content_id stringlengths 40 40 | detected_licenses listlengths 0 57 | license_type stringclasses 2 values | repo_name stringlengths 6 116 | snapshot_id stringlengths 40 40 | revision_id stringlengths 40 40 | branch_name stringclasses 313 values | visit_date timestamp[us] | revision_date timestamp[us] | committer_date timestamp[us] | github_id int64 18.2k 668M ⌀ | star_events_count int64 0 102k | fork_events_count int64 0 38.2k | gha_license_id stringclasses 17 values | gha_event_created_at timestamp[us] | gha_created_at timestamp[us] | gha_language stringclasses 107 values | src_encoding stringclasses 20 values | language stringclasses 1 value | is_vendor bool 2 classes | is_generated bool 2 classes | length_bytes int64 4 6.02M | extension stringclasses 78 values | content stringlengths 2 6.02M | authors listlengths 1 1 | author stringlengths 0 175 |
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
2dab3c4572f599087e9a031e18e2bfaff929b373 | 3258b071acd981bdab785c6de0de89db3e40ac97 | /useradd.py | da4638eecca129fa3beeb7ebb91c0bd6e9b01992 | [] | no_license | urQlikIp/gittest | 449695ed42b8600fd75dd8c0a273c70740e63675 | e3b42a95d5adf36be5bb4605dd90411b829fb6a4 | refs/heads/master | 2023-04-12T04:22:43.571939 | 2020-08-28T03:04:19 | 2020-08-28T03:04:19 | 363,427,870 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 789 | py | import time,http.client
from common.Login_Background import Log
class user_add:
    """Submits a hard-coded customer-creation request to the TinyShop backend."""

    def add(self):
        """Log in via the background Login helper, POST the customer form, print the reply."""
        session_cookie = Log().login()
        endpoint = '/TinyShop_v1.7/index.php?con=customer&act=customer_save'
        # Form-encoded body; field values intentionally identical to the original.
        body = ('name=哈哈la&password=123456&repassword=123456&email=1lahjdycbbr2%40tiny.com&'
                'real_name=121&sex=0&birthday=&province=210000&city=210600&county=210681&'
                'addr=&phone=&mobile=&point=0')
        headers = {'Content-type': 'application/x-www-form-urlencoded', 'Cookie': session_cookie}
        conn = http.client.HTTPConnection('192.168.0.188')
        conn.request('POST', endpoint, body.encode(), headers)
        response = conn.getresponse().read()
        print(response.decode())
if __name__ == '__main__':
s = user_add().add()
print(s) | [
"niexie19960405@126.com"
] | niexie19960405@126.com |
5e4e4b99d01da7a4993b8896434ad8b652c976f1 | 616f369bdd502396594e009781b45bcbf8968946 | /SwordToOffer/PrintList.py | 74aeeeade26632b7c9df584971f56f5f4945c751 | [] | no_license | ryan623555007/interview-python | f75b1f03cd067b879e3bf122711b9e380f129b2b | cedb03a43dc2e9cdd8a5077a7802d04caafe55b9 | refs/heads/master | 2020-03-23T22:05:57.051982 | 2018-07-27T21:30:16 | 2018-07-27T21:30:16 | 142,152,782 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 399 | py | class ListNode:
def __init__(self, x=None):
self.val = x
self.next = None
class Solution:
    def printList(self, listNode):
        """Return the values of a singly linked list in reverse order.

        Accepts the head node (or None) and returns a list; an empty/None
        list yields []. Uses a single uniform loop instead of the original
        special-casing of the last node and `extend([x])` per element.
        """
        result = []
        while listNode is not None:
            result.append(listNode.val)
            listNode = listNode.next
        return result[::-1]
| [
"623555007@qq.com"
] | 623555007@qq.com |
b351ca90b3b3097863f1e9c4d1db09cc08919070 | d4ffc8822d1a3efd2d513636851e6258ae43d84d | /Line_task.py | ab8982c08f911b9d90988e70a66490b88116400e | [
"MIT"
] | permissive | joshuaLei/ROV | 05c9cd5517b21293f2b29f4a925c5151c68ca1fc | 9af22e66bc3a3d855fb1fc3902e1deef1f4b989a | refs/heads/master | 2020-04-27T06:43:55.323086 | 2019-05-22T09:25:36 | 2019-05-22T09:25:36 | 174,116,370 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,804 | py | import cv2 as cv
import time
import random
import math
#from video import Video
class ImageTool(object):
    # Interactive measurement tool: grabs video frames, lets the user draw a
    # reference line of known real-world length plus target lines with the
    # mouse, then estimates each target's real length by proportion.
    def __init__(self):
        #self.video1 = cv.VideoCapture(1)
        #self.video2 = Video(port=4777)
        # Current display frame and the untouched source frame it came from.
        self.frame = None
        self.pause = False
        # 0 = idle, 1 = dragging reference line, 2 = dragging target line.
        self.mode = 0
        self.srcframe = None
        self.ref_scale_rate = 1
        # Endpoints of the yellow reference line (pixels).
        self.ref_pos_start = (0, 0)
        self.ref_pos_end = (0, 0)
        # Parallel lists of start/end points for each green target line.
        self.tmp_pos_start = []
        self.tmp_pos_end = []
        # Inclusive color range used for contour detection.
        self.detect_color_from = (0, 0, 0)
        self.detect_color_to = (255, 255, 255)
        # Per-channel tolerance applied around a double-clicked pixel.
        self.e1 = 60
        self.e2 = 60
        self.e3 = 60
        # Pixel lengths of the reference and target lines.
        self.ref = 0
        self.tmp = 0
        # Real-world length of the reference line (units per the competition spec).
        self.ref_real = 47.2#25
        self.tmp_real = 0
        self.ref_val = 6
    # NOTE(review): the triple-quoted block below is dead code preserved as a
    # docstring; capture() actually reads from self.video2 (commented out in
    # __init__) — confirm which video source is intended.
    def capture(self):
        '''
        success, self.frame = self.video2.read()
        self.frame = cv.flip(self.frame, 3)
        return self.frame
        '''
        video = self.video2
        if not video.frame_available():
            return None
        cap = video.frame()
        frame = cv.resize(cap, (800, 600))
        self.frame = frame
        self.srcframe = cap
        return self.frame
    # Fallback capture path using a local webcam (self.video1).
    def debug(self):
        success, self.frame = self.video1.read()
        self.frame = cv.flip(self.frame, 3)
        return self.frame
    # OpenCV mouse callback. While paused: right-drag draws the reference
    # line, left-drag appends target lines. While live: left double-click
    # samples a pixel to set the detection color range, right double-click
    # resets the range.
    def on_mouse_frame(self, event, x, y, flags, param):
        if self.pause:
            if event == cv.EVENT_RBUTTONDOWN:
                self.mode = 1
                self.ref_pos_start = (x, y)
                # Starting a new reference discards previous target lines.
                self.tmp_pos_start = []
                self.tmp_pos_end = []
            if event == cv.EVENT_RBUTTONUP:
                self.mode = 0
            if event == cv.EVENT_LBUTTONDOWN:
                self.mode = 2
                self.tmp_pos_start.append((x, y))
            if event == cv.EVENT_LBUTTONUP:
                self.mode = 0
                self.tmp_pos_end.append((x, y))
            if event == cv.EVENT_MOUSEMOVE:
                if self.mode == 1:
                    # Redraw rubber-band reference line on a fresh copy.
                    image = self.srcframe.copy()
                    self.ref_pos_end = (x, y)
                    cv.line(image, self.ref_pos_start, self.ref_pos_end, (0, 255, 255), 1, cv.LINE_AA)
                    self.frame = image
                if self.mode == 2:
                    # Redraw reference plus all committed and in-progress target lines.
                    image = self.srcframe.copy()
                    cv.line(image, self.ref_pos_start, self.ref_pos_end, (0, 255, 255), 1, cv.LINE_AA)
                    cv.line(image, self.tmp_pos_start[-1], (x, y), (0, 255, 0), 1, cv.LINE_AA)
                    for i in range(len(self.tmp_pos_end)):
                        cv.line(image, self.tmp_pos_start[i], self.tmp_pos_end[i], (0, 255, 0), 1, cv.LINE_AA)
                    self.frame = image
        else:
            if event == cv.EVENT_LBUTTONDBLCLK:
                #temp = cv.cvtColor(self.frame, cv.COLOR_BGR2HSV)
                # NOTE(review): pixel is read from the BGR frame although the
                # print label and reset range suggest HSV was intended — confirm.
                h, s, v = self.frame[y, x]
                h = int(h)
                s = int(s)
                v = int(v)
                print('b, g, r',h,s,v)
                self.detect_color_from = (max(h - self.e1, 0), max(s - self.e2, 0), max(v - self.e3, 0))
                self.detect_color_to = (min(h + self.e1, 255), min(s + self.e2, 255), min(v + self.e3, 255))
            if event == cv.EVENT_RBUTTONDBLCLK:
                self.detect_color_from = (0, 0, 0)
                self.detect_color_to = (180, 255, 255)
    # Compute pixel lengths (Euclidean) of the reference line and the most
    # recently drawn target line; results stored on self.ref / self.tmp.
    def calculation_length(self, Rp_start, Rp_end, Tp_start, Tp_end):
        #get ref point x, y
        xRS = Rp_start[0]
        yRS = Rp_start[1]
        xRE = Rp_end[0]
        yRE = Rp_end[1]
        ref_line = math.sqrt(math.pow((xRS - xRE), 2) + math.pow((yRS - yRE), 2))
        self.ref = ref_line
        print('ref_line:', ref_line)
        #get tmp point x, y
        xTS = Tp_start[-1][0]
        yTS = Tp_start[-1][1]
        xTE = Tp_end[-1][0]
        yTE = Tp_end[-1][1]
        tmp_line = math.sqrt(math.pow((xTS - xTE), 2) + math.pow((yTS - yTE), 2))
        self.tmp = tmp_line
        print('tmp_line', tmp_line)
    # Scale the target's pixel length by (real ref length / pixel ref length).
    def calculation_result(self, ref, tmp):
        print('ref_real', self.ref_real)
        self.tmp_real = ((self.ref_real * tmp)/ref)
        print("result", self.tmp_real)
if __name__ == "__main__":
    # Event loop: show live video with contour overlay; SPACE pauses for
    # measuring, 's' saves a snapshot, 'c' computes the length, ESC quits.
    tool = ImageTool()
    cv.namedWindow("frame")
    cv.setMouseCallback("frame", tool.on_mouse_frame)
    #video = Video(port=4777)
    i = 0
    while True:
        if not tool.pause:
            #frame = tool.capture()
            frame = tool.debug()
            #if not video.frame_available():
            #continue
            #frame = video.frame()
            frame = cv.resize(frame, (800, 600))
            tool.frame = frame
            # Highlight everything inside the current detection color range.
            hsv = cv.cvtColor(frame, cv.COLOR_BGR2HSV)
            mask = cv.inRange(hsv, tool.detect_color_from, tool.detect_color_to)
            # NOTE(review): three-value unpack matches OpenCV 3.x findContours;
            # OpenCV 4.x returns two values — confirm the pinned version.
            _, contours, _ = cv.findContours(mask, cv.RETR_EXTERNAL, cv.CHAIN_APPROX_SIMPLE)
            cv.drawContours(frame, contours, -1, (0, 255, 0), 2)
        else:
            # While paused the mouse callback owns tool.frame.
            frame = tool.frame
        cv.imshow("frame", frame)
        key = cv.waitKey(1)
        if key == 27:
            break
        if key == 32:
            tool.pause = not tool.pause
            #frame = tool.capture()
            tool.srcframe= frame.copy()
        if key == ord('s'):
            # Timestamp + random suffix avoids filename collisions.
            file = "photos/IMG_%s_%d.jpg" % (time.strftime("%Y%m%d_%H%M%S", time.localtime()), random.randint(1, 1000))
            cv.imwrite(file, tool.frame)
        if key == ord('c'):
            tool.calculation_length(tool.ref_pos_start, tool.ref_pos_end, tool.tmp_pos_start, tool.tmp_pos_end)
            tool.calculation_result(tool.ref, tool.tmp)
            #print('tmp start:', tool.tmp_pos_start)
            #print('tmp end:', tool.tmp_pos_end)
    #tool.video1.release()
cv.destroyAllWindows() | [
"joshualei06@gmail.com"
] | joshualei06@gmail.com |
037e50418658ee008115d34ac0001eac2e505af5 | c4c9e81e9af2db426ed4cbfb3523664f927e5d5a | /0x16-rotate_2d_matrix/0-rotate_2d_matrix.py | f7d1b9cfb812aef67ce2cb1094d59a869f290153 | [] | no_license | Eddyszh/holbertonschool-interview | 3d4ffcca64464ce5c416d8f4bf2f83364fc14787 | b4ee82a46215757d2a1d5b69184f893f6c48f6a5 | refs/heads/main | 2023-04-21T09:26:16.485898 | 2021-05-27T22:20:00 | 2021-05-27T22:20:00 | 319,394,819 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 416 | py | #!/usr/bin/python3
"""
Module rotate 2d matrix
"""
def rotate_2d_matrix(matrix):
    """Rotate the n x n matrix 90 degrees clockwise, in place."""
    size = len(matrix)
    # Transpose in place: swap each element above the main diagonal with its
    # mirror below it.
    for row in range(size):
        for col in range(row + 1, size):
            matrix[row][col], matrix[col][row] = matrix[col][row], matrix[row][col]
    # Mirror every row horizontally to complete the clockwise rotation.
    for row in matrix:
        row.reverse()
| [
"eddyszh@gmail.com"
] | eddyszh@gmail.com |
fa43c1fe25521c71355b2ab08a11fffd84cbf13f | be5ca49e9fe765b24c284e70f0f03094b280f294 | /tests/__init__.py | 0dbda16f325bb8e4c2fdf51dd24bd705147891f8 | [
"Apache-2.0"
] | permissive | salesking/salesking_python_sdk | 584e37ed39057854b44172869ca1a1c88fb20f91 | 0d5a95c5ee4e16a85562ceaf67bb11b55e47ee4c | refs/heads/master | 2021-01-10T21:50:06.639171 | 2015-02-01T18:40:53 | 2015-02-01T18:40:53 | 7,263,808 | 0 | 0 | null | 2013-06-18T14:06:51 | 2012-12-20T20:28:20 | Python | UTF-8 | Python | false | false | 864 | py | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import tempfile
import unittest
import logging
## logging
# Map command-line level names to the logging module's numeric levels.
LEVELS = {'debug': logging.DEBUG,
          'info': logging.INFO,
          'warning': logging.WARNING,
          'error': logging.ERROR,
          'critical': logging.CRITICAL}
class MyLogHandler(logging.Handler):
    """Logging handler that silently discards every record."""

    def emit(self, record):
        # Deliberately a no-op: installed so log calls have a handler
        # without producing any output.
        return
# Path hack.
sys.path.insert(0, os.path.abspath('..'))
sys.path.insert(1, os.path.abspath('../lib/'))
#print sys.path
import salesking
# More hacks
sys.path.append('.')
from salesking.tests import *
if len(sys.argv) > 1:
level_name = sys.argv[1]
level = LEVELS.get(level_name, logging.NOTSET)
logging.basicConfig(level=level)
if __name__ == '__main__':
unittest.main() | [
"frank.bieniek@produktlaunch.de"
] | frank.bieniek@produktlaunch.de |
b7588d77781e80e9850aab062925583a8dbd38b0 | 3104a6b3218dc99638e8f541669988a16fe778b7 | /data.py | 3e6fc1753790f7a32761e5a8e8009973afe80613 | [] | no_license | faaizuddin/Basic-Data-Preprocessing | ab5ae7e4b2626576afc44b23000333682f79ec17 | 5f232ea436ec015e345421d0a701b10ae0280743 | refs/heads/master | 2021-06-29T15:13:56.828395 | 2020-12-12T11:59:48 | 2020-12-12T11:59:48 | 182,529,592 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,327 | py | #creating column names for airq402 data
# Column headers for the UF airq402 airline-fare dataset (whitespace-delimited).
nc=["City1","City2","Average Fair","Distance","Avg Weekly Passengers","Market Leading Airline","Market Share","Average Fair","Low Price Airline","Market Share","Price"] #header names
# NOTE(review): `pd` is assumed to be pandas imported outside this visible
# chunk — confirm.
airq402=pd.read_csv("http://www.stat.ufl.edu/~winner/data/airq402.dat", names=nc, sep="\s+", header=None)
print(airq402)
# NOTE(review): pd.DataFrame(df) does not copy by default, so mutating
# dframe below may also mutate airq402 — confirm intended.
dframe=pd.DataFrame(airq402)
def handle_non_numeric_data(dframes):
columns=dframes.columns.values
for column in columns:
text_digit_values={}
def convert_to_int(val):
return text_digit_values[val]
if dframes[column].dtype != np.int64 and dframes[column].dtype != np.float64:
column_content=dframes[column].values.tolist()
unique_elements=set(column_content)
x=0
for unique in unique_elements:
if unique not in text_digit_values:
text_digit_values[unique]=x
x+=1
dframes[column]=list(map(convert_to_int,dframes[column]))
return dframes
dframe=handle_non_numeric_data(dframe)
print(dframe.head())
# finding rows with null values in all three datasets
# NOTE(review): dframe was built from airq402 without a copy, so the in-place
# encoding above may already have altered airq402 here — verify.
null_data = airq402[airq402.isnull().any(axis=1)]
print(null_data)
| [
"noreply@github.com"
] | noreply@github.com |
f8c70c1da41cfea53b6d1f02569fd71e0439f618 | 35e00d1996515ccf3151067ff28ff3357078f0b6 | /samples/generated_samples/pubsub_v1_generated_schema_service_validate_message_async.py | add86c6fa8941035f2205bc30efda5abdc9894e2 | [
"Apache-2.0"
] | permissive | googleapis/python-pubsub | 5bb18674307bd89236a61c0d7c5079f10e19467e | 1b9724324c58d27bcee42020b751cda58d80fddb | refs/heads/main | 2023-09-03T13:14:22.894233 | 2023-08-28T13:18:36 | 2023-08-28T13:18:36 | 226,992,581 | 321 | 195 | Apache-2.0 | 2023-09-10T23:29:10 | 2019-12-10T00:09:52 | Python | UTF-8 | Python | false | false | 1,876 | py | # -*- coding: utf-8 -*-
# Copyright 2023 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for ValidateMessage
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-pubsub
# [START pubsub_v1_generated_SchemaService_ValidateMessage_async]
# This snippet has been automatically generated and should be regarded as a
# code template only.
# It will require modifications to work:
# - It may require correct/in-range values for request initialization.
# - It may require specifying regional endpoints when creating the service
# client as shown in:
# https://googleapis.dev/python/google-api-core/latest/client_options.html
from google import pubsub_v1
async def sample_validate_message():
    """Validate a message against a schema via the Pub/Sub SchemaService (generated sample)."""
    # Create a client
    client = pubsub_v1.SchemaServiceAsyncClient()
    # Initialize request argument(s)
    request = pubsub_v1.ValidateMessageRequest(
        name="name_value",
        parent="parent_value",
    )
    # Make the request
    response = await client.validate_message(request=request)
    # Handle the response
    print(response)
# [END pubsub_v1_generated_SchemaService_ValidateMessage_async]
| [
"noreply@github.com"
] | noreply@github.com |
27a647aa0325fc6672f392661ec6acbcdbc62d55 | 63802a7c72a77489f0acb415550069627c198166 | /EmployerPanel/forms.py | 9d487dd70bf6327e827925920c93d5725ee161fb | [] | no_license | amit1992321/Placementcellautomation | 36e24944296d2efaf11aa286e5c20381d882dc82 | ac9dc8f78d03c65e8ba4723875f95b024bcbd421 | refs/heads/master | 2020-06-08T22:18:22.483068 | 2019-06-23T07:24:57 | 2019-06-23T07:24:57 | 193,316,348 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,090 | py | from django import forms
import datetime
from . import models
from django.forms import formset_factory
class EmployerForm(forms.ModelForm):
    # ModelForm exposing the contact/profile fields of an Employer.
    class Meta:
        model = models.Employer
        fields = ['user', 'mobile', 'alternate_mobile', 'company_details']
class JobPostForm(forms.ModelForm):
    # ModelForm used by an employer to publish a job opening.
    class Meta:
        model = models.JobPost
        fields = ['employer',
                  'job_Profile',
                  'job_vacancy',
                  'job_description',
                  'job_ctc',
                  'interview_date',
                  ]
class CompanyDetailForm(forms.ModelForm):
    # ModelForm for company registration details with a company selector.
    # NOTE(review): label/placeholder "Username" on the company field looks
    # copy-pasted from SignUpForm — confirm the intended label.
    # NOTE(review): the choices queryset runs once at class definition
    # (import time), so CompanyDetails rows added later will not appear in
    # the dropdown until the process restarts — confirm this is acceptable.
    company = forms.CharField(max_length=120, label="Username", widget=forms.Select(
        attrs={
            'placeholder': 'Username'
        },
        choices=models.CompanyDetails.objects.all().values_list('id', 'company_name')
    ))
    class Meta:
        model = models.CompanyDetails
        fields = ['industry_type',
                  'company_name',
                  'company_address',
                  'registration_no',
                  'company'
                  ]
class SignUpForm(forms.Form):
    # Registration form collecting credentials and contact details.
    username = forms.CharField(max_length=120, label="Username", widget=forms.TextInput(
        attrs={
            'placeholder': 'Username'
        }))
    first_name = forms.CharField(max_length=200, label="First's Name",
                                 widget=forms.TextInput(
                                     attrs={
                                         'placeholder': 'First Name'
                                     }))
    last_name = forms.CharField(max_length=200)
    mobile = forms.CharField(max_length=10)
    alternate_mobile = forms.CharField(max_length=10)
    email = forms.EmailField(max_length=200, required=False)
    password = forms.CharField(max_length=200)
    repassword = forms.CharField(max_length=200)
    def clean(self):
        # Cross-field check: the two password entries must match.
        # NOTE(review): if either password field itself failed validation the
        # key is absent from cleaned_data and this raises KeyError — consider
        # .get(); also older Django expected clean() to return cleaned_data.
        # import pdb; pdb.set_trace()
        if self.cleaned_data['password'] != self.cleaned_data['repassword']:
            raise forms.ValidationError(('Password mismatch'), code='invalid')
| [
"noreply@github.com"
] | noreply@github.com |
6f1fe808bbdf4766f29cd182eb50f6b82f93229a | f6f9facdef2ba878951239f0ff207c3dbee4aad7 | /NewTest2.py | c38555e6f1dbec702a564b502fb61228684e70f5 | [] | no_license | ssmaximilian/Maps2 | bd9a3b458a6e24fe3d11f04a6e041e3ca99a82ce | 419ba51a66fdc33651c515b4e23499ed6d6ca65a | refs/heads/master | 2022-11-17T06:31:52.717753 | 2020-07-19T16:39:59 | 2020-07-19T16:39:59 | 280,901,091 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 731 | py | import pandas as pd
import numpy as np
from IPython.display import display
import sqlite3
from sqlalchemy import create_engine
import string
from statsmodels.stats.multicomp import pairwise_tukeyhsd
from scipy import stats
import chart_studio as py
from plotly.tools import FigureFactory as FF
from plotly.graph_objs import Bar, Scatter, Marker, Layout, Choropleth, Histogram
display(pd.read_csv('complaints2020.csv', nrows=9).head())
db_conn = create_engine('sqlite:///databases/complaints.db')
chunks = 5
query = pd.read_sql_query('SELECT Product, Company, COUNT(*) as `Complaints`'
'FROM data '
'GROUP BY Product '
'ORDER BY `Complaints` DESC', db_conn) | [
"noreply@github.com"
] | noreply@github.com |
1bab715b0c564a7a2941200a68f23a04ab4bfd58 | be2c022b270522fe24475b794d53a3fd973a5de1 | /영동/05_11049_행렬 곱셈 순서.py | 9a26a4594789aceefcc502611d23e25d9aedf66e | [] | no_license | zeroistfilm/week04 | ea4a358be0931fe28202b7ce543ed246536a1c50 | fdb5985e2d899c8b1a60cb81d660937304fa5bcb | refs/heads/main | 2023-02-09T09:35:27.795180 | 2021-01-07T02:29:28 | 2021-01-07T02:29:28 | 325,717,500 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | # https://www.acmicpc.net/problem/11049
import sys
#sys.stdin = open("input.txt", "r")
N = int(sys.stdin.readline())
M = [0 for i in range(N+1)]
for i in range(N):
a,b = map(int, sys.stdin.readline().split())
M[i]=a
M[i+1] = b
Matrix = [[0 for i in range(N)] for i in range(N)]
for i in range(1,N):
r = 0
c = i
for _ in range(N,i,-1):
tmp=[]
for k in range(r,c):
tmp.append(Matrix[r][k]+Matrix[k+1][c]+(M[r]*M[k+1]*M[c+1]))
Matrix[r][c]=min(tmp)
r += 1
c += 1
print(Matrix[0][-1])
| [
"zeroistfilm@naver.com"
] | zeroistfilm@naver.com |
05ce1b3fbd1318701f6cf0e64b26c7245bdb8cee | e4697742d4b745b5ce56537ccb7f852a481b2bb7 | /venv/bin/python-config | 931e68da668c19880b05905d5be9f5f5326f7a71 | [] | no_license | AbrarQuazi/Project-Midas-Stock-Prediction-Web-App | fde1b6b76b05af988a77b5dd27552931ac103aec | 45331718659b32e190432c7ff622f0a303da9484 | refs/heads/master | 2022-12-20T04:53:35.258381 | 2017-09-01T02:50:44 | 2017-09-01T02:50:44 | 98,698,506 | 0 | 0 | null | 2022-06-21T21:13:53 | 2017-07-29T00:50:37 | Python | UTF-8 | Python | false | false | 2,362 | #!/Users/abrarquazi/Desktop/stock_web_app/venv/bin/python
import sys
import getopt
import sysconfig
# Options accepted by this python-config clone; newer interpreters expose
# additional query flags.
valid_opts = ['prefix', 'exec-prefix', 'includes', 'libs', 'cflags',
              'ldflags', 'help']
if sys.version_info >= (3, 2):
    valid_opts.insert(-1, 'extension-suffix')
    valid_opts.append('abiflags')
if sys.version_info >= (3, 3):
    valid_opts.append('configdir')
def exit_with_usage(code=1):
    """Write a usage line listing every supported option to stderr, then exit."""
    options = '|'.join('--' + opt for opt in valid_opts)
    sys.stderr.write("Usage: {0} [{1}]\n".format(sys.argv[0], options))
    sys.exit(code)
try:
    opts, args = getopt.getopt(sys.argv[1:], '', valid_opts)
except getopt.error:
    exit_with_usage()
if not opts:
    exit_with_usage()
# Interpreter version string and a shorthand for config-var lookups.
pyver = sysconfig.get_config_var('VERSION')
getvar = sysconfig.get_config_var
opt_flags = [flag for (flag, val) in opts]
if '--help' in opt_flags:
    exit_with_usage(code=0)
# Print one answer per requested flag, in the order given on the command line.
for opt in opt_flags:
    if opt == '--prefix':
        print(sysconfig.get_config_var('prefix'))
    elif opt == '--exec-prefix':
        print(sysconfig.get_config_var('exec_prefix'))
    elif opt in ('--includes', '--cflags'):
        # Both flags emit the header search paths; --cflags adds compile flags.
        flags = ['-I' + sysconfig.get_path('include'),
                 '-I' + sysconfig.get_path('platinclude')]
        if opt == '--cflags':
            flags.extend(getvar('CFLAGS').split())
        print(' '.join(flags))
    elif opt in ('--libs', '--ldflags'):
        abiflags = getattr(sys, 'abiflags', '')
        libs = ['-lpython' + pyver + abiflags]
        libs += getvar('LIBS').split()
        libs += getvar('SYSLIBS').split()
        # add the prefix/lib/pythonX.Y/config dir, but only if there is no
        # shared library in prefix/lib/.
        if opt == '--ldflags':
            if not getvar('Py_ENABLE_SHARED'):
                libs.insert(0, '-L' + getvar('LIBPL'))
            if not getvar('PYTHONFRAMEWORK'):
                libs.extend(getvar('LINKFORSHARED').split())
        print(' '.join(libs))
    elif opt == '--extension-suffix':
        # Older interpreters expose the suffix under the legacy 'SO' key.
        ext_suffix = sysconfig.get_config_var('EXT_SUFFIX')
        if ext_suffix is None:
            ext_suffix = sysconfig.get_config_var('SO')
        print(ext_suffix)
    elif opt == '--abiflags':
        if not getattr(sys, 'abiflags', None):
            exit_with_usage()
        print(sys.abiflags)
    elif opt == '--configdir':
        print(sysconfig.get_config_var('LIBPL'))
| [
"aquazi1109@gmail.com"
] | aquazi1109@gmail.com | |
af2aab80153959b6e9323af29d3835ba863fc16d | 3d1f5b69d9db15744b9f927634be002716419ddf | /b105_data_parallel.py | 92524a90aa03e37ca21f2dd558370bdc1bdd6b19 | [] | no_license | yexiaoqi/pytorch_learn | 5e87a3e85cbbbecc79fe75893a4a774cd35dbad7 | bf4c76de44d763580d4584d86387fb43763a4456 | refs/heads/master | 2023-02-07T07:30:06.863002 | 2020-12-31T07:51:10 | 2020-12-31T07:51:10 | 322,521,073 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,201 | py | import torch
import torch.nn as nn
from torch.utils.data import Dataset,DataLoader
# Toy problem dimensions for the DataParallel demo.
input_size=5
output_size=2
batch_size=30
data_size=100
# Prefer the first CUDA device when one is present, else fall back to CPU.
device=torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class RandomDataset(Dataset):
    """Dataset of `length` random feature vectors, each of width `size`."""

    def __init__(self, size, length):
        # Pre-generate the whole dataset as a single (length, size) tensor.
        self.data = torch.randn(length, size)
        self.len = length

    def __getitem__(self, index):
        return self.data[index]

    def __len__(self):
        return self.len
# Batches of random vectors for the demo loop below.
rand_loader=DataLoader(dataset=RandomDataset(input_size,data_size),batch_size=batch_size,shuffle=True)
class Model(nn.Module):
    """Single linear layer that logs tensor sizes on every forward pass."""

    def __init__(self, input_size, output_size):
        super().__init__()
        self.fc = nn.Linear(input_size, output_size)

    def forward(self, input):
        # The print shows how DataParallel splits each batch per replica.
        result = self.fc(input)
        print("\tIn Model:input size", input.size(), "output size", result.size())
        return result
model = Model(input_size, output_size)
# Wrap in DataParallel so each batch is split across the available GPUs.
if torch.cuda.device_count() > 1:
    print("Let's use", torch.cuda.device_count(), "GPUs!")
    # Bug fix: the original assigned the wrapper to the typo name `modeL`
    # (capital L), so the DataParallel wrapper was created and immediately
    # discarded and the plain model ran single-device.
    model = nn.DataParallel(model)
model.to(device)
# One pass over the random data; the per-replica prints come from Model.forward.
for data in rand_loader:
    input = data.to(device)
    output = model(input)
    print("Outside: input size", input.size(), "output_size", output.size())
"angel7ge7@163.com"
] | angel7ge7@163.com |
4c502425af4a0552d4ca6a8d65160670dab7b1f4 | a44cf7658e9c8f7805972a43caeb549d1edb86b8 | /vp/migrations/0094_auto_20191021_1923.py | 4f08b67e91648f2b377acf221cfd3b0646ebfdd4 | [] | no_license | almazim/vagonpodarkov | 11c36a51996c53909a762b2776239bce9a87932f | 37a724f7eb7dde62558ec226df3e9b829ae84fa0 | refs/heads/master | 2020-11-29T23:38:27.657192 | 2019-12-26T10:03:18 | 2019-12-26T10:03:18 | 230,240,476 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,699 | py | # Generated by Django 2.2.6 on 2019-10-21 19:23
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: introduces the TypeBox lookup model, links Classic and
    # Premium gifts to it, and tweaks admin verbose names.
    # NOTE(review): default='' on an integer FK column is suspicious — confirm
    # the migration applies cleanly on a populated database.
    dependencies = [
        ('vp', '0093_remove_sweets_sweet'),
    ]
    operations = [
        migrations.CreateModel(
            name='TypeBox',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('title', models.CharField(db_index=True, max_length=150, verbose_name='Название')),
            ],
            options={
                'verbose_name': 'Типы упаковок',
                'verbose_name_plural': 'Тип упаковки',
            },
        ),
        migrations.AlterModelOptions(
            name='classicimg',
            options={'verbose_name': 'Название | Изображение', 'verbose_name_plural': 'Изображения подарков серии "Классика"'},
        ),
        migrations.AlterModelOptions(
            name='premiumimg',
            options={'verbose_name': 'Название | Изображение', 'verbose_name_plural': 'Изображения подарков серии "Премиум"'},
        ),
        migrations.AddField(
            model_name='classic',
            name='typebox',
            field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, to='vp.TypeBox'),
        ),
        migrations.AddField(
            model_name='premium',
            name='typebox',
            field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, to='vp.TypeBox'),
        ),
    ]
| [
"alimzagrick@gmail.com"
] | alimzagrick@gmail.com |
9a160c81fd980b2b5ae5b90944c631d5206f7566 | 5cd660b97c88b76f26ed5f349ebbec105567ba0e | /apps/sync/migrations/0006_auto_20180928_0813.py | 064716445484660e3687ca5509ca1c4be8c6096d | [] | no_license | JaredAvila/Django-Project | 4b42f6e9cc804cc9a8522d6f03098a6dd868af88 | bed080a72c320018999ca4f749084c32779d864a | refs/heads/master | 2020-03-30T05:12:56.678091 | 2018-10-29T22:59:56 | 2018-10-29T22:59:56 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 566 | py | # -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2018-09-28 15:13
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated: makes Post.postRecipe nullable and adds a related_name.
    # NOTE(review): related_name 'reicpePosts' is a typo for 'recipePosts';
    # leaving it as-is since applied migrations should not be edited.
    dependencies = [
        ('sync', '0005_post_postrecipe'),
    ]
    operations = [
        migrations.AlterField(
            model_name='post',
            name='postRecipe',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, related_name='reicpePosts', to='sync.Recipe'),
        ),
    ]
| [
"jared.c.avila@gmail.com"
] | jared.c.avila@gmail.com |
4e5e6edbef06d2952e8fc4d14209df3765306d1b | 600bed5ab71bdf29b6ce7f14bcfb996455e0356d | /contested_shots.py | 91f538ef264fb98480c0d62f0d86004501effbe7 | [] | no_license | shardul17/Shot-Quality | 3519de5144c511327aa2cdaeb8768c8a43f96423 | 28265eb6147588441af59d2e4629cbb607f28e70 | refs/heads/master | 2020-07-09T05:21:53.461514 | 2019-08-23T02:52:31 | 2019-08-23T02:52:31 | 203,892,097 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 6,813 | py | from selenium import webdriver
from pandas import *
import pandas
import numpy as np
import matplotlib.pyplot as plt
from sqlalchemy import *
import pickle
# Scrape NBA advanced player stats with Selenium and build a name -> stat map.
path_to_chromedriver = 'chromedriver.exe' # Path to access a chrome driver
browser = webdriver.Chrome(executable_path=path_to_chromedriver)
url = 'https://stats.nba.com/players/advanced/?sort=EFG_PCT&dir=-1&CF=FGA*G*300:GP*G*50'
#url = 'https://stats.nba.com/leaders/'
browser.get(url)
# NOTE(review): clicks the first <option> of a dropdown — presumably the
# page-size selector so the whole table loads; confirm the XPath still matches.
browser.find_element_by_xpath('/html/body/main/div[2]/div/div[2]/div/div/nba-stat-table/div[1]/div/div/select/option[1]').click()
#
# browser.find_element_by_xpath('/html/body/main/div[2]/div/div[2]/div/div/nba-stat-table/div[3]/div/div/select/option[1]').click()
table = browser.find_element_by_class_name('nba-stat-table__overflow')
player_ids = []
player_names = []
player_stats = []
count = 1
# The table text alternates rank / name / stat-row lines; `count` cycles 1-3
# to route each line into the matching list. Line 0 is the header and skipped.
for line_id, lines in enumerate(table.text.split('\n')):
    #print(lines)
    if line_id == 0:
        pass
        #column_names = lines.split(' ')[1:]
    elif line_id > 0:
        if count == 1:
            player_ids.append(lines)
            count += 1
        # if line_id % 3 == 2:
        #     player_names.append(lines)
        elif count == 2:
            player_names.append(lines)
            count += 1
        elif count == 3:
            player_stats.append( [i for i in lines.split(' ')] )
            count = 1
# NOTE(review): master_dict is loaded but only referenced by the dead
# commented/stringified code below — confirm it is still needed.
master_dict = pickle.load(open("plus10_included.p", "rb"))
column_names = ['TEAM', 'AGE', 'GP', 'G', 'FREQ', 'FGM', 'FGA', 'FG%', 'EFG%',
         '2FG FREQ', '2FGM', '2FGA', '2FG%', '3FG FREQ', '3PM', '3PA', '3P%']
efg_perc = {}
# NOTE(review): stat-row index 15 is assumed to hold the eFG-related value;
# the index-16 variant is left commented — verify against the live table.
for i in range(len(player_names)):
    efg_perc[player_names[i]] = player_stats[i][15]
    #efg_perc[player_names[i]] = player_stats[i][16]
"""
for i in range(len(player_names)):
if player_names[i] not in master_dict:
master_dict[player_names[i]] = {'0-2 Feet (Very Tight)': {'Layups': {}, 'Mid-Range': {}, '3pt Range': {}},
'2-4 Feet (Tight)': {'Layups': {}, 'Mid-Range': {}, '3pt Range': {}},
'4-6 Feet (Open)': {'Layups': {}, 'Mid-Range': {}, '3pt Range': {}},
'6+ Feet (Wide Open)': {'Layups': {}, 'Mid-Range': {}, '3pt Range': {}}
}
# if player_names[i] not in master_dict:
# master_dict[player_names[i]] = {'6+ Feet (Wide Open)': {}}
# else:
# master_dict[player_names[i]]['6+ Feet (Wide Open)'] = {}
for j in range(len(column_names)):
curr_range = '0-2 Feet (Very Tight)'
if column_names[j] in ['2FG FREQ', '2FGM', '2FGA', '2FG%']:
if column_names[j] == '2FG FREQ':
if '2FG FREQ' in master_dict[player_names[i]][curr_range]['Mid-Range']:
freq = str(float(player_stats[i][j][:-1]) - float(master_dict[player_names[i]][curr_range]['Mid-Range']['2FG FREQ'][:-1])) + '%'
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = freq
else:
freq = str(float(player_stats[i][j][:-1]))
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = freq
elif column_names[j] == '2FGM':
if '2FGM' in master_dict[player_names[i]][curr_range]['Mid-Range']:
fgm = str(int(player_stats[i][j]) - int(master_dict[player_names[i]][curr_range]['Mid-Range'][column_names[j]]))
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = fgm
else:
fgm = str(int(player_stats[i][j]))
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = fgm
elif column_names[j] == '2FGA':
if '2FGA' in master_dict[player_names[i]][curr_range]['Mid-Range']:
fga = str(int(player_stats[i][j]) - int(master_dict[player_names[i]][curr_range]['Mid-Range'][column_names[j]]))
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = fga
else:
fga = str(int(player_stats[i][j]))
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = fga
elif column_names[j] == '2FG%':
if int(master_dict[player_names[i]][curr_range]['Layups']['2FGA']) != 0:
ha = float(master_dict[player_names[i]][curr_range]['Layups']['2FGM'])/float(master_dict[player_names[i]][curr_range]['Layups']['2FGA'])
ha = round(ha,1)
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = str(ha) + "%"
else:
master_dict[player_names[i]][curr_range]['Layups'][column_names[j]] = '-'
print(master_dict)
pickle.dump(master_dict, open("plus10_included.p", "wb"))
"""
print(efg_perc)
# Cache the scraped map; note the file handle from open() is never closed here.
pickle.dump(efg_perc, open("efg_perc.p", "wb"))
"""
db = pandas.DataFrame({'player': player_names,
'team': [i[0] for i in player_stats],
'age': [i[1] for i in player_stats],
'gp': [i[2] for i in player_stats],
'g': [i[3] for i in player_stats],
'freq': [i[4] for i in player_stats],
'fgm': [i[5] for i in player_stats],
'fga': [i[6] for i in player_stats],
'fg%': [i[7] for i in player_stats],
'efg%': [i[8] for i in player_stats],
'2fg freq': [i[9] for i in player_stats],
'2fgm': [i[10] for i in player_stats],
'2fga': [i[11] for i in player_stats],
'2fg%': [i[12] for i in player_stats],
'3fg freq': [i[13] for i in player_stats],
'3pm': [i[14] for i in player_stats],
'3pa': [i[15] for i in player_stats],
'3p%': [i[16] for i in player_stats],
#'blk': [i[17] for i in player_stats],
#'tov': [i[18] for i in player_stats],
#'eff': [i[19] for i in player_stats]
}
)
db = db[['player',
'team',
'age',
'gp',
'g',
'freq',
'fgm',
'fga',
'fg%',
'efg%',
'2fg freq',
'2fgm',
'2fga',
'2fg%',
'3fg freq',
'3pm',
'3pa',
'3p%'
]
]
"""
| [
"noreply@github.com"
] | noreply@github.com |
ebb234d79ad5026002b45235c64c948c60818cae | f1f53fc9fdcc627492eff0a5dec96d3f72c6bcd1 | /app.py | b77d88c3b43de568526c20cbed6e343a96436062 | [] | no_license | corneliussteven/Camera-RaspberryPi | 3f1b00fdaa29a56890c30f5de3b97ba88215ab78 | 883c1773975e6e676416ecc827c149d1cd4315b3 | refs/heads/master | 2020-04-11T05:36:33.774618 | 2016-09-13T06:48:53 | 2016-09-13T06:48:53 | 68,079,792 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,249 | py | #!flask/bin/python
from flask import Flask, jsonify, abort
from flask import make_response
from flask import request
from flask import url_for
from flask.ext.httpauth import HTTPBasicAuth
from flask import Blueprint
# HTTP Basic Auth helper and the Flask application instance.
auth = HTTPBasicAuth()
app = Flask(__name__)
def make_public_task(task):
    """Return a copy of *task* with its internal 'id' replaced by a public 'uri'."""
    public = {}
    for key in task:
        if key != 'id':
            public[key] = task[key]
        else:
            # Expose a self-describing URL instead of the raw database id.
            public['uri'] = url_for('get_task', task_id=task['id'], _external=True)
    return public
@auth.get_password
def get_password(username):
    """Return the password for a known user; None rejects authentication."""
    if username != 'miguel':
        return None
    return 'python'
@auth.error_handler
def unauthorized():
    """flask-httpauth failure hook: respond with a JSON error body.

    Uses 403 rather than 401 -- presumably to stop browsers popping the
    basic-auth dialog; confirm before changing.
    """
    return make_response(jsonify({'error': 'Unauthorized access'}), 403)
@app.route('/todo/api/v1.0/tasks', methods=['GET'])
@auth.login_required
def get_tasks():
    """Return the full task list as JSON (requires basic auth).

    NOTE(review): the module-level ``tasks`` list is never initialised in
    this file -- this raises NameError unless it is defined elsewhere.
    """
    return jsonify({'tasks': tasks})
@app.route('/upload', methods=['GET', 'POST'])
def upload():
    """Photo-upload form (GET) / store an uploaded photo (POST).

    NOTE(review): UploadSet, IMAGES, Photo, g, flash and render_template
    are not imported in this module, so this view fails with NameError as
    written -- it appears to be copied from a Flask-Uploads example.
    """
    photos = UploadSet('1.jpg', IMAGES)
    if request.method == 'POST' and 'photo' in request.files:
        filename = photos.save(request.files['photo'])
        rec = Photo(filename=filename, user=g.user.id)
        rec.store()
        flash("Photo saved.")
        return redirect(url_for('show', id=rec.id))
    return render_template('upload.html')
@app.route('/photo/<id>')
def show(id):
    """Display a stored photo by id (404 when unknown).

    NOTE(review): ``photos`` is a local variable of upload(), not a
    module-level object, and Photo/render_template are not imported --
    this view cannot work as written; confirm intended wiring.
    """
    photo = Photo.load(id)
    if photo is None:
        abort(404)
    url = photos.url(photo.filename)
    return render_template('show.html', url=url, photo=photo)
@app.route('/todo/api/v1.0/tasks', methods=['POST'])
def create_task():
    """Create a task from a JSON body containing 'title' and 'description'.

    Returns the created task with HTTP 201; aborts with 400 when the body
    is missing or lacks a required field.  Appends to the module-level
    ``tasks`` list (assumed to be defined elsewhere -- see get_tasks note).
    """
    if not request.json or 'title' not in request.json or 'description' not in request.json:
        abort(400)
    task = {
        # Also works for an empty task list (the original assumed at
        # least one existing task and crashed with IndexError).
        'id': (tasks[-1]['id'] + 1) if tasks else 1,
        'title': request.json['title'],
        'description': request.json['description'],
        'done': False
    }
    tasks.append(task)
    return jsonify({'task': task}), 201
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods=['PUT'])
def update_task(task_id):
    """Partially update the task with *task_id* from the JSON body.

    Any of 'title', 'description' and 'done' may be supplied; each is
    type-checked (this module targets Python 2 -- hence ``unicode``).
    Aborts 404 for an unknown id and 400 for a bad body.
    """
    task = [task for task in tasks if task['id'] == task_id]
    if len(task) == 0:
        abort(404)
    if not request.json:
        abort(400)
    # Consistent identity checks: the original mixed '!=' and 'is not',
    # which behave the same for type objects but read inconsistently.
    if 'title' in request.json and type(request.json['title']) is not unicode:
        abort(400)
    if 'description' in request.json and type(request.json['description']) is not unicode:
        abort(400)
    if 'done' in request.json and type(request.json['done']) is not bool:
        abort(400)
    task[0]['title'] = request.json.get('title', task[0]['title'])
    task[0]['description'] = request.json.get('description', task[0]['description'])
    task[0]['done'] = request.json.get('done', task[0]['done'])
    return jsonify({'task': task[0]})
@app.route('/todo/api/v1.0/tasks/<int:task_id>', methods=['DELETE'])
def delete_task(task_id):
    """Delete the task with *task_id*; aborts 404 when no such task exists."""
    matches = [t for t in tasks if t['id'] == task_id]
    if not matches:
        abort(404)
    tasks.remove(matches[0])
    return jsonify({'result': True})
@app.errorhandler(404)
def not_found(error):
    """Return JSON (not Flask's default HTML page) for 404s, matching the API."""
    return make_response(jsonify({'error': 'Not found'}), 404)
@app.errorhandler(400)
def salah(error1):
    # 'salah' is Indonesian for 'wrong'; the message string is kept verbatim
    # because it is part of the API's observable output.
    return make_response(jsonify({'error': 'Cobaa ah cobaa'}), 400)
if __name__ == '__main__':
    # Development server only: debug=True must not be used in production.
    app.run(debug=True)
| [
"cornelius steven"
] | cornelius steven |
8120afa2fa7f150d257ef6033deff9fcab1f8f81 | 7085f2fa4a519bf5cc831313f45b8ca9cb619e73 | /main.py | 8d03510c11a869f1ff3a6e95899abc106e9709e7 | [] | no_license | andrewbates09/maxit | f3916560d365862c9f05ad5872cc069b64f92231 | 3f488907b9aae92cf5f24f051e879e30ac6c9a14 | refs/heads/master | 2016-09-06T14:57:10.072112 | 2013-12-04T19:43:06 | 2013-12-04T19:43:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 981 | py | #!/usr/bin/python3
import playmaxit
# import testmaxit
# import curses
def controlMaxit():
    """Top-level menu loop for MAXIT; returns when the player picks Exit."""
    while True:
        playmaxit.clearScreen()
        print('Welcome to the Most Awesome Xciting Inside Terminal game (MAXIT)!\n\n'
              'Options\n'
              '\t1. Play Maxit\n'
              '\t2. Exit Maxit\n')
        try:
            # int() raises ValueError on non-numeric input; keeping the game
            # calls inside the try preserves the original handler's scope.
            choice = int(input('Enter your choice: '))
            if choice == 1:
                playmaxit.mainMaxit()
            elif choice == 2:
                print('\nThanks for playing MAXIT!\n')
                return
        except ValueError:
            input('Please enter a valid choice. (press enter to continue) ')
controlMaxit()
| [
"andrewbates09@gmail.com"
] | andrewbates09@gmail.com |
23f2b61b669a1d020a56fe2145e8e5b58625908e | 0e89b2c2f0f281b110ae283de6471600bee6d0a5 | /style-token-master/StyleToken.py | d756ac548fd2626e1efa898fdf2440f56dc4c854 | [] | no_license | vmwaregit/sublime_common_packages | 556f0165eb83d930fe9ac98faef98f433d2269e3 | 06ca7204c5967fe1f8381fd99356249a5f6f07b5 | refs/heads/master | 2021-01-19T08:05:03.020537 | 2013-07-13T13:20:11 | 2013-07-13T13:20:11 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,297 | py | import sublime, sublime_plugin
REGION_NAME = 'StyleTokenListener'
MAX_STYLES = 10
cs_settings = sublime.load_settings('StyleToken.sublime-settings')
styles = [cs_settings.get('styletoken_style'+ str(i+1), 'invalid') for i in range(MAX_STYLES)]
class TokenStyleCommand(sublime_plugin.TextCommand):
    """Highlight all occurrences of the current selection with the style
    slot given by *style_index* (index wrapped into range via rollover())."""
    def run(self, edit, style_index):
        color_selection(self.view, styles[rollover(style_index)])
class TokenStyleGoCommand(sublime_plugin.TextCommand):
    """Jump the caret to the next highlighted region after the cursor.

    With the default style_index of -1 every style slot is searched and
    the regions are sorted by position; otherwise only the chosen slot is
    used.  When no region follows the caret, wraps to the first region.
    """
    def run(self, edit, style_index=-1):
        currentRegions = []
        if style_index < 0:
            for style in styles:
                currentRegions = currentRegions + self.view.get_regions(REGION_NAME + style)
            currentRegions = sorted(currentRegions, key=lambda region: region.begin())
        else:
            currentRegions = currentRegions + self.view.get_regions(REGION_NAME + styles[rollover(style_index)])
        pos = self.view.sel()[0].end()
        # NOTE(review): when no regions are highlighted the wrap-around line
        # below raises IndexError on currentRegions[0] -- confirm intended.
        for region in currentRegions:
            if region.begin() > pos:
                move_selection(self.view, region)
                return
        move_selection(self.view, currentRegions[0])
class TokenStyleClearCommand(sublime_plugin.TextCommand):
    """Erase highlight regions: all style slots by default (style_index < 0),
    or just the one slot selected by *style_index*."""
    def run(self, edit, style_index=-1):
        if style_index < 0:
            for style in styles: self.view.erase_regions(REGION_NAME + style)
        else: self.view.erase_regions(REGION_NAME + styles[rollover(style_index)])
class StyleTokenListener(sublime_plugin.EventListener):
    """No-op event listener kept as a hook point for future behaviour."""

    def on_modified(self, view):
        """Called after every buffer edit; intentionally does nothing."""
        pass

    def on_activated(self, view):
        """Called when a view gains focus; intentionally does nothing."""
        pass

    def on_load(self, view):
        """Called when a file finishes loading; intentionally does nothing."""
        pass
def color_selection(view, color):
    """Highlight every literal occurrence of the first selection in *color*.

    New matches are appended to the regions already stored under that
    color's key, so repeated invocations accumulate highlights.
    """
    currentSelection = view.sel()[0]
    if currentSelection.size() > 0:
        currentRegions = view.get_regions(REGION_NAME + color)
        currentRegions.extend(view.find_all(view.substr(currentSelection), sublime.LITERAL))
        view.add_regions(REGION_NAME + color, currentRegions, color, sublime.DRAW_EMPTY)
def move_selection(view, region):
    """Collapse the caret to the start of *region* and scroll it into view."""
    view.sel().clear()
    view.sel().add(sublime.Region(region.begin(), region.begin()))
    view.show(region)
def rollover(style_index):
    """Map *style_index* onto a valid index into the module-level ``styles``.

    Key bindings may pass indexes beyond the number of configured styles.
    Modulo wraps any non-negative index into range; the original
    single-subtraction version only handled one wrap-around
    (indexes < 2 * len(styles)).
    """
    return style_index % len(styles)
"ee6662@gmail.com"
] | ee6662@gmail.com |
fa8d95911c385ecc64e8c6de19551c72cc6d6c37 | d40ee0fed27e784c56ce7257ddc91c10bc34b7c0 | /node_modules/bson-ext/build/config.gypi | f525d312e28569aded5ce63fb7cfae8529a26f4d | [
"Apache-2.0"
] | permissive | arizona2014/Knowledgebase | 3360f2efd11009515138a8558162eb73fce054db | 25bdb002180d564d69b8960c24cece92c0f497f6 | refs/heads/master | 2021-01-10T13:55:03.000203 | 2016-04-20T15:06:58 | 2016-04-20T15:06:58 | 53,052,849 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,784 | gypi | # Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"asan": 0,
"host_arch": "x64",
"icu_data_file": "icudt56l.dat",
"icu_data_in": "../../deps/icu/source/data/in\\icudt56l.dat",
"icu_endianness": "l",
"icu_gyp_path": "tools/icu/icu-generic.gyp",
"icu_locales": "en,root",
"icu_path": "deps\\icu",
"icu_small": "true",
"icu_ver_major": "56",
"node_byteorder": "little",
"node_install_npm": "true",
"node_prefix": "/usr/local",
"node_release_urlbase": "",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_use_dtrace": "false",
"node_use_etw": "true",
"node_use_lttng": "false",
"node_use_openssl": "true",
"node_use_perfctr": "true",
"openssl_fips": "",
"openssl_no_asm": 0,
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_enable_i18n_support": 1,
"v8_no_strict_aliasing": 1,
"v8_optimized_debug": 0,
"v8_random_seed": 0,
"v8_use_snapshot": "true",
"want_separate_host_toolset": 0,
"nodedir": "C:\\Users\\Arizona\\.node-gyp\\4.4.0",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"access": "",
"also": "",
"always_auth": "",
"bin_links": "true",
"browser": "",
"ca": "",
"cache": "C:\\Users\\Arizona\\AppData\\Roaming\\npm-cache",
"cache_lock_retries": "10",
"cache_lock_stale": "60000",
"cache_lock_wait": "10000",
"cache_max": "Infinity",
"cache_min": "10",
"cafile": "",
"cert": "",
"color": "true",
"depth": "Infinity",
"description": "true",
"dev": "",
"dry_run": "",
"editor": "notepad.exe",
"engine_strict": "",
"fetch_retries": "2",
"fetch_retry_factor": "10",
"fetch_retry_maxtimeout": "60000",
"fetch_retry_mintimeout": "10000",
"force": "",
"git": "git",
"git_tag_version": "true",
"global": "",
"globalconfig": "C:\\Program Files\\nodejs\\etc\\npmrc",
"globalignorefile": "C:\\Program Files\\nodejs\\etc\\npmignore",
"group": "",
"heading": "npm",
"https_proxy": "",
"if_present": "",
"ignore_scripts": "",
"init_author_email": "",
"init_author_name": "",
"init_author_url": "",
"init_license": "ISC",
"init_module": "C:\\Users\\Arizona\\.npm-init.js",
"init_version": "1.0.0",
"json": "",
"key": "",
"link": "",
"local_address": "",
"long": "",
"message": "%s",
"node_version": "4.4.0",
"npat": "",
"onload_script": "",
"only": "",
"optional": "true",
"parseable": "",
"prefix": "C:\\Program Files\\nodejs",
"production": "",
"progress": "true",
"proprietary_attribs": "true",
"rebuild_bundle": "true",
"registry": "https://registry.npmjs.org/",
"rollback": "true",
"save": "",
"save_bundle": "",
"save_dev": "",
"save_exact": "",
"save_optional": "",
"save_prefix": "^",
"scope": "",
"searchexclude": "",
"searchopts": "",
"searchsort": "name",
"shell": "C:\\WINDOWS\\system32\\cmd.exe",
"shrinkwrap": "true",
"sign_git_tag": "",
"strict_ssl": "true",
"tag": "latest",
"tag_version_prefix": "v",
"tmp": "C:\\Users\\Arizona\\AppData\\Local\\Temp",
"umask": "0000",
"unicode": "true",
"unsafe_perm": "true",
"usage": "",
"user": "",
"userconfig": "C:\\Users\\Arizona\\.npmrc",
"user_agent": "npm/3.4.1 node/v4.4.0 win32 x64",
"version": "",
"versions": "",
"viewer": "browser"
}
}
| [
"andy.lisac@gmail.com"
] | andy.lisac@gmail.com |
f96729b38a64cf05d84ab0e508ac4cb889ce989b | ce348e6f43e0eeb83a171f73dc924b95c121fe7f | /backend/sharedstory_24977/wsgi.py | 58a5c6babe8473b4c0580a501312c526f0f7ed5c | [] | no_license | crowdbotics-apps/sharedstory-24977 | ec19e35d67d0119dac3d30521e2aef050f60fa8c | 2dbd37503ab449981acbfb86f98d54580b4d6a92 | refs/heads/master | 2023-03-23T15:31:00.793794 | 2021-03-11T16:01:22 | 2021-03-11T16:01:22 | 346,757,867 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 411 | py | """
WSGI config for sharedstory_24977 project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'sharedstory_24977.settings')
application = get_wsgi_application()
| [
"team@crowdbotics.com"
] | team@crowdbotics.com |
115fbb012b26456b279ce1b38eb085e54238ee4e | a15f8d9f509cdee635ccc822adf13908af47de51 | /pytagged/_mode.py | 75e86983d3dc7e32f1f9c8453951fd63d370cd5d | [
"MIT"
] | permissive | ntn9995/pytagged | 97eb0323b229c2b281f186dc26dabeed44d50c30 | bee423751ec63519abed1810af3326d5fa9770d3 | refs/heads/master | 2022-11-23T22:09:39.774184 | 2020-07-23T09:18:41 | 2020-07-23T09:18:41 | 279,422,157 | 0 | 0 | MIT | 2020-07-16T08:56:11 | 2020-07-13T22:10:50 | Python | UTF-8 | Python | false | false | 135 | py | from enum import Enum
class Mode(Enum):
    """Enumeration of the PyTagged CLI run modes.

    Member values are part of the public interface and kept stable.
    """

    DEFAULT = 0
    PRINTONLY = 1
    BENCHMARK = 2
"ngocnguyen9995@gmail.com"
] | ngocnguyen9995@gmail.com |
1ea20151d1db0d31a7b702522a6280ebbe9f68b6 | 2f519f09a296616846089f2ae405adeb8877fb5d | /mnist_seq.py | e80f100bd383f4b037b1badb0862c4fe233cc984 | [] | no_license | GMADHURIDSP/Convolutional-Neural-Networks | c025912433c5f991e59b0f14c5f6fdb1a04bfb35 | d753215f958fce2b098d4f993854358dc2ba3c91 | refs/heads/master | 2022-11-07T14:05:49.189633 | 2020-06-18T15:18:03 | 2020-06-18T15:18:03 | 273,272,851 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,431 | py | from keras.datasets import mnist
from keras.models import Sequential
from keras.layers import Dense
from keras.layers import Dropout
from keras.layers import Flatten
from keras.layers.convolutional import Conv2D
from keras.layers.convolutional import MaxPooling2D
from keras.utils import np_utils
(x_train, y_train), (x_test, y_test) = mnist.load_data()
x_train = x_train.reshape((x_train.shape[0], 28, 28, 1)).astype('float32')
x_test = x_test.reshape((x_test.shape[0], 28, 28, 1)).astype('float32')
x_train = x_train / 255
x_test = x_test / 255
y_train = np_utils.to_categorical(y_train)
y_test = np_utils.to_categorical(y_test)
num_classes = y_test.shape[1]
def baseline_model():
    """Build and compile a small CNN for 10-class MNIST classification.

    Architecture: conv(32 filters, 5x5, relu) -> max-pool -> dropout(0.2)
    -> flatten -> dense(128, relu) -> softmax over the module-level
    ``num_classes`` (derived from the one-hot labels above).
    """
    model = Sequential()
    model.add(Conv2D(32, (5, 5), input_shape=(28, 28, 1), activation='relu'))
    model.add(MaxPooling2D())
    model.add(Dropout(0.2))
    model.add(Flatten())
    model.add(Dense(128, activation='relu'))
    model.add(Dense(num_classes, activation='softmax'))
    model.compile(loss='categorical_crossentropy', optimizer='adam', metrics=['accuracy'])
    return model
model = baseline_model()
model.fit(x_train, y_train, validation_data=(x_test, y_test), epochs=10, batch_size=100)
scores = model.evaluate(x_test, y_test, verbose=0)
print("CNN Error: %.2f%%" % (100-scores[1]*100))
print("CNN EFFICIENCY: %2.f%%" % (scores[1]*100))
| [
"noreply@github.com"
] | noreply@github.com |
1f4ddfa1c8bc8ae0575ee67ac34d8226efa92e7e | e1efc8e0b0e4629dea61504fbc816c0527691bd9 | /3.jvm/24-静态分派.py | 4057e69948dec7c7341531bc1d10fa9e78285067 | [] | no_license | xiongmengmeng/xmind-technology | 2bb67a0bf92cfd660cac01f8ab3a2454423ccba5 | e2fdb6987ef805a65f0a4feb52d84383853f4b77 | refs/heads/main | 2023-07-31T07:10:29.868120 | 2021-09-11T08:18:17 | 2021-09-11T08:18:17 | 307,636,242 | 3 | 1 | null | null | null | null | UTF-8 | Python | false | false | 3,314 | py | import os,sys
parentdir = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0,parentdir)
import xmind
xmind_name="jvm"
w = xmind.load(os.path.dirname(os.path.abspath(__file__))+"\\"+xmind_name+".xmind")
s2=w.createSheet()
s2.setTitle("静态分派")
r2=s2.getRootTopic()
r2.setTitle("静态分派")
content={
'Java具备面向对象的3个基本特征':[
'继承',
'封装(get/set)',
{'多态':[
'继承,重写(Override),向上转型(Human h=new Man())三大必要条件',
'方法重载:同一个方法名,参数或者类型不同。(Overload)',
'方法重写:父类与子类有同样的方法名和参数,这叫方法覆盖。(Override)'
]}
],
'任务':[
'不等同于【方法执行】,该阶段唯一任务是确定【被调用方法版本】,不涉及方法内部具体运行过程'
],
'五条字节码指令':[
{'invokestatic':[
'调用静态方法'
]},
{'invokespecial':[
'调用实例构造器<init>()方法、私有方法和父类中的方法'
]},
{'invokevirtual':[
'调用所有的虚方法'
]},
{'invokeinterface':[
'调用接口方法,在运行时确定一个实现该接口的对象'
]},
{'invokedynamic':[
'运行时动态解析出调用点限定符所引用的方法,然后再执行该方法'
]}
],
'解析':[
{'定义':[
'静态过程',
'编译期间确定',
'把【符号引用】转变为【直接引用】,确定唯一的【方法调用版本】',
'如能被invokestatic和invokespecial指令调用的方法'
]},
{'分类':[
{'静态方法':[
'与类型直接关联,不能通过【重写】出现别的版本,适合类加载阶段进行解析'
]},
{'私有方法':[
'外部不可被访问,不能通过【继承】出现别的版本,适合类加载阶段进行解析'
]},
'实例构造器',
'父类方法',
{'被final修饰的方法(invokevirtual指令调用)':[
'【无法被覆盖】,没有其他版本的可能'
]}
]},
],
'静态分派':[
{'定义':[
'依赖【静态类型】决定【方法执行版本】',
'发生在【编译阶段】,不由虚拟机来执行的',
{'典型表现':[
'方法重载'
]}
]},
{'重载':[
'通过【参数的静态类型】而不是实际类型作为判定依据的',
'静态类型是在【编译期可知】',
'实际类型在运行期才可确认'
]},
{'重载时目标方法选择(字面量没有显示的静态类型时)':[
'1.char>int>long>float>double的顺序转型进行匹配',
'2.一次自动装箱,封装类型java.lang.Character',
'3.java.lang.Serializable,是java.lang.Character类实现的一个接口,自动装箱之后还是找不到装箱类,会找装箱类所实现的接口类型',
'4.Object,如果有多个父类,那将在继承关系中从下往上开始搜索',
'5.变长参数的重载优先级是最低的'
]}
],
}
# Build the xmind topic tree from the content dict under the root topic
xmind.build(content,r2)
# Save the workbook back to the same .xmind file
xmind.save(w,os.path.dirname(os.path.abspath(__file__))+"\\"+xmind_name+".xmind")
"xiongmengmeng@qipeipu.com"
] | xiongmengmeng@qipeipu.com |
899d61119110a0fdcdba377335475e797ba20c50 | 34b1ab46a70fe81143874a40d6493c0254f1e5c9 | /python/flask_test_1.py | f8dd69799b9f664d6d21726091262f35e8b32f44 | [] | no_license | yama1968/Spikes | 5f974a20812dbd88f789cabf7720826d358f8e76 | 498b0cacfc23627ecee743f012a6fda6451cda7f | refs/heads/master | 2021-06-06T00:33:33.637745 | 2020-11-14T18:49:25 | 2020-11-14T18:49:25 | 29,531,065 | 2 | 0 | null | 2020-11-12T21:13:21 | 2015-01-20T13:29:35 | Jupyter Notebook | UTF-8 | Python | false | false | 232 | py | # -*- coding: utf-8 -*-
"""
Éditeur de Spyder
Ceci est un script temporaire.
"""
from flask import Flask
app = Flask(__name__)
@app.route("/")
def hello():
return "Hello World!"
if __name__ == "__main__":
app.run()
| [
"ymartel@gmail.com"
] | ymartel@gmail.com |
80092859c0ed3666cab7996e9c378ca9e0e5a49a | 7fcaf22d5880692598a38cad6f682795cbc13bf8 | /knowlege.py | 7135215a9601832aef3a8bd925a18afe7c2687cb | [] | no_license | xiaolong-liang/paralleldao | f0e6b57a519ffed604397a8e39d9598f02af8029 | 22ea10434746efedf4998c3b3e3b2489c6b55b13 | refs/heads/main | 2023-04-17T03:57:08.187886 | 2021-04-27T11:11:04 | 2021-04-27T11:11:04 | 359,088,908 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 227 | py | class Knowlege(object):
    def ask(self, stmt):
        """Query the knowledge store with *stmt*.

        Base implementation is a deliberate no-op returning None;
        subclasses are expected to override it.
        """
        pass
    def update(self, stmt):
        """Record *stmt* in the knowledge store.

        Base implementation is a deliberate no-op returning None;
        subclasses are expected to override it.
        """
        pass
class Belief(Knowlege):
    # Marker subclass of Knowlege; inherits the no-op ask()/update().
    pass
class Intent(Knowlege):
    # Marker subclass of Knowlege; inherits the no-op ask()/update().
    pass
class Governance(Knowlege):
    # Marker subclass of Knowlege; inherits the no-op ask()/update().
    pass
| [
"dreammakerlxl@gmail.com"
] | dreammakerlxl@gmail.com |
948b79a6176688f080bc49d0afa3edff51db7ac5 | eeddbee377f4ae39513fdc9e2fc111b378d15fb0 | /MD5sig/deleteRe.py | 441b4d55e329ccc07f5d0cda1b7fb817939be7e3 | [] | no_license | Viwilla/MD5sig | af9811957ea8f421a047d75a0c5707da8ccbef99 | 3598b4e42f893d878d98c752093a4f87cf8e61b4 | refs/heads/master | 2021-01-10T15:23:18.364356 | 2015-12-03T05:29:33 | 2015-12-03T05:29:33 | 47,310,132 | 1 | 1 | null | null | null | null | GB18030 | Python | false | false | 844 | py | import os,sys,os.path
#import MySQLdb
reload(sys)
sys.setdefaultencoding('utf-8')
if __name__=="__main__":
md5total = []
md5 = []
count = 0
count1 =0
counttotal= 0
file1 = open ("MD5Sig.txt","r")
file2 = open("MD5Sig2.txt","a+")
lines = file1.readlines()
for line in lines:
counttotal = counttotal +1
md5 = line[0:32]
if md5 in md5total:
print md5
count1 = count1+1
# data = splitlines(True)
# del line
else:
md5total.append(md5.lower())
file2.write(line)
count = count +1
file1.close()
file2.close()
# with open("1.txt",'w') as fout:
# fout.writelines(data[1:])
print "End"
print "共有%d条信息,%d条重复,有效%d条"%(counttotal,count1,count) | [
"954116843@qq.com"
] | 954116843@qq.com |
cb90ab6fc8b20634eb92c304e768e063589475be | 887198d9dd6ec287f5c2fff96fb4c32295ab15a5 | /app.py | 00928a9e6756afad85cac26c606d61892fb57e89 | [] | no_license | thejoby/dogDoor | 28c9b695f8c2dac0493aa3a28cbee609a9da705f | e588a80ae208fc704f5da245ece99b6f0a3974ab | refs/heads/master | 2021-01-04T12:59:42.633377 | 2020-02-16T04:54:53 | 2020-02-16T04:54:53 | 240,561,092 | 0 | 0 | null | 2020-02-15T08:01:44 | 2020-02-14T17:15:48 | Python | UTF-8 | Python | false | false | 1,259 | py | from flask import Flask, request
import time
import datetime
import RPi.GPIO as GPIO
import sys
app = Flask(__name__)
@app.route('/')
def index():
    """Health-check endpoint: report that the server is up, with a timestamp."""
    now = datetime.datetime.now()
    return 'Flask app is running at: %s' % (now)
@app.route('/open', methods=['GET','POST'])
def open():
    """Swing the dog door to the open position.

    GET explains usage; POST drives the servo on BCM pin 18.  (The view
    name shadows the builtin open() inside this module -- kept because
    the endpoint name is part of the app's interface.)
    """
    if request.method == 'GET':
        return "POST to this endpoint instead"
    # Only GET/POST are routed, so this is the POST path.
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(18, GPIO.OUT)
    p = GPIO.PWM(18, 100)  # pin 18, 100 Hz PWM carrier
    try:
        p.start(17.5)    # duty cycle for the 'open' end of travel -- confirm on hardware
        time.sleep(1.2)  # give the servo time to finish travelling
    finally:
        # Always stop PWM and release the pin, even if interrupted mid-move
        # (the original leaked the GPIO state on any exception).
        p.stop()
        GPIO.cleanup()
    return 'Dog Door Open!'
@app.route('/close', methods=['GET','POST'])
def close():
    """Swing the dog door to the closed position.

    GET explains usage; POST drives the servo on BCM pin 18.  The duty
    cycle describes the proportion of on time within each PWM period.
    """
    if request.method == 'GET':
        return "POST to this endpoint instead"
    # Only GET/POST are routed, so this is the POST path.
    GPIO.setmode(GPIO.BCM)
    GPIO.setup(18, GPIO.OUT)
    p = GPIO.PWM(18, 100)  # pin 18, 100 Hz PWM carrier
    try:
        p.start(7)      # duty cycle for the 'closed' end of travel -- confirm on hardware
        time.sleep(1)   # give the servo time to finish travelling
    finally:
        # Always stop PWM and release the pin, even if interrupted mid-move
        # (the original leaked the GPIO state on any exception).
        p.stop()
        GPIO.cleanup()
    return 'Dog Door Closed!'
if __name__ == '__main__':
    # 0.0.0.0 exposes the server on the LAN; debug=True is development-only.
    app.run(debug=True, host='0.0.0.0')
| [
"thejoby@gmail.com"
] | thejoby@gmail.com |
a1d28dddbbd1f06c8bf3597513d5a740a98a5dcd | 786c0d355e04c885c196d421b74b53fa98ca2d0e | /test.py | f27358e2c62b03540f773611db597677ec2541ff | [] | no_license | underwatersaloon/bot.match | 9b071b3668797d61350d32290c12fe511d3e0a81 | caf2e4c3c2d60cc738cee788bc80aca69311a8a5 | refs/heads/master | 2020-04-24T17:54:17.547743 | 2019-02-27T03:01:44 | 2019-02-27T03:01:44 | 172,162,639 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 404 | py | from ship_test import ship
def prShip(lst):
    """Print one summary line per ship-like object in *lst*.

    Each element must expose ``captain``, ``index`` and ``subject``.
    """
    template = 'capID : {}, boat Index : {}, boat name : {}'
    for boat in lst:
        print(template.format(boat.captain, boat.index, boat.subject))
def main():
    """Create ten demo ships with sequential captain ids and print the fleet.

    NOTE(review): ``crewIds`` is computed but never used -- dead code or a
    feature stub; confirm before deleting.
    """
    capIds=[i+1000 for i in range(10)]
    capNames=['captain no.' + str(capIds[i]) for i in range(10)]
    crewIds=[i+10 for i in range(40)]
    for i in range(10):
        ship(capIds[i],capNames[i])
    prShip(ship.sList)
| [
"m123oo@naver.com"
] | m123oo@naver.com |
ce699701faac3603c92fb933fe420ff5f4c0f5ed | 374dc7e7d3244a16b0ea5e616db81738bc6ad7ff | /Blog-Website/webapps/grumblr/views.py | 9ecf18c6c9b45eef1460ad74a80bd2618a8bbe2c | [] | no_license | calvinlee619/Projects | 018660c3631e133359aab10622d906ef9a17bf32 | d5f3ba6e3aa16b6c920d9a523a22526a365f1a1c | refs/heads/master | 2021-01-12T18:17:31.960738 | 2016-08-23T15:57:04 | 2016-08-23T15:57:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 9,024 | py | from mimetypes import guess_type
from django.contrib.auth.tokens import default_token_generator
from django.http import Http404, HttpResponse
from django.shortcuts import render, redirect,get_object_or_404
from django.core.exceptions import ObjectDoesNotExist
from django.core.urlresolvers import reverse
# Decrator to use built-in authentication system
from django.contrib.auth.decorators import login_required
from django.contrib.auth.models import User
from django.contrib.auth import login, authenticate
from smtplib import SMTP
from django.core.mail import send_mail
from grumblr.forms import *
# Create your views here.
@login_required
def home(request):
    """Render the global-stream page for the logged-in user.

    ``global_stream_flag`` presumably lets the template distinguish this
    view from follower_stream(), which renders the same template without
    the flag -- confirm against GlobalStream.html.
    """
    global_stream_flag = True
    return render(request,'GlobalStream.html',{'global_stream_flag':global_stream_flag})
@login_required()
def get_blogs(request):
    """Return every blog post as JSON, newest first.

    ``max_entry`` is the highest LogEntry id so far; clients pass it back
    to get_changes() to poll for deltas.
    """
    max_entry = LogEntry.get_max_id()
    blogs = Blog.objects.all().order_by('time').reverse()
    context = {"max_entry":max_entry,"blogs":blogs}
    return render(request,"blogs.json",context,content_type='application/json')
@login_required()
def get_changes(request,max_id=-1):
    """Return only the blog posts changed since LogEntry id *max_id* (JSON)."""
    max_entry = LogEntry.get_max_id()
    blogs = Blog.get_changes(max_id).order_by('time').reverse()
    context = {"max_entry":max_entry,"blogs":blogs}
    return render(request,"blogs.json",context,content_type='application/json')
@login_required()
def follower_stream(request):
    """Render the follower-stream page (same template as the global stream)."""
    return render(request,'GlobalStream.html')
@login_required()
def get_follower_blogs(request):
    """JSON feed of posts by the users this user follows, newest first."""
    max_entry=LogEntry.get_max_id()
    followers = UserInfo.get_Followers(user=request.user).all()
    blogs = Blog.objects.filter(user__in=followers).order_by('time').reverse()
    context = {"max_entry":max_entry,"blogs":blogs}
    return render(request,"blogs.json",context,content_type="application/json")
@login_required()
def get_follower_changes(request,max_id=-1):
    """JSON feed of followed users' posts changed since LogEntry id *max_id*."""
    max_entry = LogEntry.get_max_id()
    followers = UserInfo.get_Followers(user=request.user).all()
    blogs = Blog.objects.filter(logentry__gt=max_id,user__in=followers).order_by('time').reverse()
    context = {"max_entry":max_entry,"blogs":blogs}
    return render(request,"blogs.json",context,content_type="application/json")
# Allow login-user to follow other users
@login_required()
def follow(request,id):
    """Add the user with primary key *id* to the current user's follow list.

    NOTE(review): User.objects.get raises DoesNotExist (500) for an
    unknown id -- confirm whether a 404 is wanted here instead.
    """
    user = User.objects.get(id=id)
    user_info = UserInfo.objects.get(user=request.user)
    user_info.followers.add(user)
    user_info.save()
    return redirect(reverse('other_profile',kwargs={'id':user.id}))
# allow user to unfollow other user
@login_required()
def unfollow(request,id):
    """Remove the user with primary key *id* from the follow list, then
    redirect back to that user's profile page."""
    follower = User.objects.get(id = id)
    user_info = UserInfo.objects.get(user=request.user)
    user_info.followers.remove(follower)
    return redirect(reverse("other_profile",kwargs={'id':follower.id}))
# user's profile page
@login_required
def profile(request):
    """Render the logged-in user's own profile page.

    ``flag`` presumably switches the template to owner mode (edit controls
    instead of a follow button) -- confirm against Profile.html.
    """
    user_info = UserInfo.objects.get(user = request.user)
    flag= True
    return render(request,'Profile.html',{'user_info':user_info,'flag':flag})
# allow login user to browse other user's profile
@login_required
def other_profile(request, id):
    """Show another user's profile page.

    ``flag_follow`` tells the template whether the viewer already follows
    this user.  Unknown ids fall back to the global stream with an error
    message; viewing yourself redirects to your own profile view.
    """
    errors = []
    try:
        user = User.objects.get(id=id)
        user_info = UserInfo.objects.get(user=user)
        owner_info = UserInfo.objects.get(user=request.user)
        if len(owner_info.followers.filter(id=id))>0:
            flag_follow = True
        else:
            flag_follow = False
    except ObjectDoesNotExist:
        errors.append('The user does not exist')
    if errors:
        return render(request,'GlobalStream.html',{'errors':errors})
    else:
        if user==request.user:
            return redirect(reverse('profile'))
        else:
            context = {'user_info':user_info,'flag_follow':flag_follow}
            return render(request,'Profile.html',context)
@login_required()
def get_profile_blogs(request,id):
    """JSON feed of all posts by the user with primary key *id*, newest first.

    max_entry is -1 here because profile pages are not polled for deltas.
    """
    user = get_object_or_404(User,id=id)
    blogs = Blog.objects.filter(user=user).order_by('time').reverse()
    context = {"max_entry":-1,"blogs":blogs}
    return render(request,"blogs.json",context,content_type="application/json")
@login_required
def post_blog(request):
    """Create a new blog post from the POSTed 'message' field.

    Also records an 'add' LogEntry so polling clients pick up the post.
    """
    form = BlogForm(request.POST)
    if not form.is_valid():
        return HttpResponse("Form is not valid")
    try:
        user_info = UserInfo.objects.get(user = request.user)
        text = form.cleaned_data["message"]
        new_message = Blog(text=text,user=request.user,\
            time = datetime.now(), user_info=user_info)
        new_message.save()
        log_entry = LogEntry(blog = new_message,op='add')
        log_entry.save()
    except ObjectDoesNotExist:
        raise Http404
    return HttpResponse("")
@login_required()
def add_comment(request,id):
    """Attach a comment (from the POSTed 'message') to blog post *id*.

    Records an 'edit' LogEntry so polling clients refresh the post, and
    returns the new comment rendered as JSON.
    """
    form = BlogForm(request.POST)
    if not form.is_valid():
        return HttpResponse("Message is not valid")
    try:
        blog = Blog.objects.get(id=id)
        user = request.user
        user_info = UserInfo.objects.get(user=user)
        text = form.cleaned_data['message']
        new_comment = Comment(blog=blog,user_info=user_info,\
            time=datetime.now(),text=text)
        new_comment.save()
        log_entry = LogEntry(blog=blog,op='edit')
        log_entry.save()
    except ObjectDoesNotExist:
        return HttpResponse("The blog or user doesnot exist")
    context = {"comment":new_comment}
    return render(request,"comment.json",context,content_type="application/json")
# Allow user to edit his or her profile
@login_required()
def edit_profile(request):
    """Show (GET) or apply (POST) edits to the logged-in user's profile.

    The email lives on auth.User while the remaining fields live on
    UserInfo, so both objects are saved on success.
    """
    context = {}
    userinfo_to_edit = get_object_or_404(UserInfo, user=request.user)
    if request.method == 'GET':
        form = UserForm(instance=userinfo_to_edit)
        return render(request, 'edit_profile.html', {'form': form, 'userinfo_to_edit': userinfo_to_edit})
    # Bug fix: uploaded files must come from the public request.FILES
    # mapping, not the private (lazily created) request._files attribute.
    form = UserForm(request.POST, request.FILES, instance=userinfo_to_edit)
    context['form'] = form
    if not form.is_valid():
        return render(request, 'edit_profile.html', context)
    user = userinfo_to_edit.user
    user.email = request.POST['email']
    user.save()
    form.save()
    return redirect(reverse('profile'))
@login_required()
def edit_password(request):
    """Let the logged-in user change their password.

    Re-authenticates and logs the user back in afterwards -- presumably
    to keep the session valid after set_password(); confirm against the
    project's Django version.
    """
    user = request.user
    if request.method == "GET":
        form = ResetPasswordForm()
        return render(request,"ResetPassword.html",{'form':form})
    form = ResetPasswordForm(request.POST)
    if not form.is_valid():
        return render(request,"ResetPassword.html",{'form':form})
    password = form.cleaned_data["password1"]
    user.set_password(password)
    user.save()
    user= authenticate(username = user.username,
                       password = form.cleaned_data["password1"])
    login(request,user)
    return redirect(reverse("profile"))
@login_required()
def get_photo(request,id):
    """Stream the profile picture for the user with primary key *id*.

    404s when the user has no UserInfo row.  (The model field is spelled
    'pitcture' in this project; kept as-is to match the model.)
    """
    user = User.objects.get(id=id)
    user_info = get_object_or_404(UserInfo,user = user)
    # Bug fix: guess_type() returns a (mimetype, encoding) tuple; only the
    # mimetype belongs in the Content-Type header.  Fall back to a generic
    # binary type when the extension is unknown (mimetype is None).
    content_type, _ = guess_type(user_info.pitcture.name)
    return HttpResponse(user_info.pitcture, content_type=content_type or 'application/octet-stream')
def send_password_confirm(request):
    """Email a one-time password-reset link to the given username's address.

    The link embeds a default_token_generator token plus the username and
    points at the confirm view.
    NOTE(review): ``args={token,username}`` builds a *set*, whose iteration
    order is unspecified -- the URL arguments may come out swapped; a tuple
    (token, username) looks intended.  Confirm against the URLconf.
    """
    context = {}
    if request.method == "GET":
        form = PasswordChangeForm()
        return render(request,"PasswordChange.html",{'form':form})
    form = PasswordChangeForm(request.POST)
    if not form.is_valid():
        return render(request,"PasswordChange.html",{'form':form})
    username = form.cleaned_data['username']
    user = User.objects.get(username = username)
    token = default_token_generator.make_token(user)
    email_body = """ This email is comfirmation for your password change.Please click the link below to
    change your password.
     http://%s%s
    """ % (request.get_host(),reverse('confirm',args={token,username}))
    send_mail(subject = "Password Change",
              message = email_body,
              from_email = "kangw@andrew.cmu.edu",
              recipient_list = [user.email])
    return render(request,"PasswordChangeEmailConfirm.html",context)
def comfirm(request,username,token):
    """Validate the emailed reset token and let the user set a new password.

    404s when the token does not verify for *username*.  (The function
    name keeps the project's historical spelling; the URLconf references
    it, so it must not be renamed here.)
    """
    user = get_object_or_404(User,username=username)
    if not default_token_generator.check_token(user,token):
        raise Http404
    if request.method == "GET":
        form = ResetPasswordForm()
        return render(request,"ResetPassword.html",{'form':form})
    form = ResetPasswordForm(request.POST)
    if not form.is_valid():
        return render(request,"ResetPassword.html",{'form':form})
    password = form.cleaned_data["password1"]
    user.set_password(password)
    user.save()
    return render(request,"ResetPasswordSuccess.html",{'form':form})
def register(request):
    """Register a new account: show the form (GET) or create the user (POST).

    On success, creates the auth.User plus its UserInfo row, logs the new
    user in, and redirects to their profile.
    """
    context = {}
    # Display the registration form if this is a GET request.
    if request.method =="GET":
        context['form'] = RegisterForm()
        return render(request,'Register.html',context)
    form = RegisterForm(request.POST)
    context['form'] = form
    # Re-render with validation errors on bad input.
    if not form.is_valid():
        return render(request,"Register.html",context)
    # Create a new user from the validated data.
    new_user = User.objects.create_user(username = form.cleaned_data['username'],\
        password = form.cleaned_data['password1'],\
        email=form.cleaned_data['email'])
    new_user.save()
    user_info = UserInfo(first_name = form.cleaned_data['first_name'],\
        last_name = form.cleaned_data['last_name'],\
        user = new_user)
    user_info.save()
    # Log the new user in and send them to their profile page.
    new_user = authenticate(username = form.cleaned_data['username'],\
        password = form.cleaned_data['password1'])
    login(request,new_user)
    return redirect(reverse("profile"))
| [
"kangw@andrew.cmu.edu"
] | kangw@andrew.cmu.edu |
8ae3347d2a89108558a061fa27bf203aa8ebfd4b | 1b0e6de5485339a3b5dfff461d6239a3ac57a982 | /SpecialistAI/health/form_utilities.py | f74dafd220b5d4cb3e3ad02d7d50c11a4073ffc4 | [] | no_license | moemursi/DJango-AI-Medical-assistant | fd48b5e68033d71fc880f58903b89ceadca5d0d9 | bb5108cddb81e5909a172b29ba9628b524a04d60 | refs/heads/master | 2022-12-22T10:41:15.323896 | 2020-07-16T12:54:50 | 2020-07-16T12:54:50 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,141 | py | # Class which is responsible for everything regarding forms,
# i.e. a module of helper functions for sanitising phone numbers,
# validating email addresses, checking field validity,
# and logging admin change/addition actions.
import re
from django.core import validators
from django.core.exceptions import ValidationError
from django.contrib.admin import models
from django.contrib.contenttypes.models import ContentType
from django.utils.text import get_text_list
# Phone-number sanitiser used by the forms.
def sanitize_phone(number):
    """Strip everything except digits and '.' from a phone string.

    Falsy input (None, '') yields None.  The character class keeps '.'
    characters, mirroring the original regex exactly.
    """
    if not number:
        return None
    return re.sub(r'[^\d.]+', '', number)
# Normalise falsy form values to None.
def none_if_invalid(item):
    """Return *item* unchanged when it is truthy, otherwise None."""
    return item or None
# Boolean wrapper around Django's email validation.
def email_is_valid(email):
    """Return True when *email* passes Django's built-in email validation."""
    try:
        # validate_email() returns nothing on success and raises on failure.
        validators.validate_email(email)
        return True
    except ValidationError:
        # Invalid address: report False rather than propagating the error.
        return False
# Build the human-readable admin-log message for a set of changed fields,
# e.g. ['name', 'email'] -> 'Changed name and email.'
# Used by change() below.
def get_change_message(fields):
    return 'Changed %s.' % get_text_list(fields, 'and')
# Function responsible for changing an object and logging it.
### Log that something was changed ###
def change(request, obj, message_or_fields):
    """Record a CHANGE entry in Django's admin log for *obj*.

    *message_or_fields* may be either a ready-made message string or a
    sequence of modified field names (turned into "Changed x and y.").
    """
    if isinstance(message_or_fields, str):
        # Caller supplied a custom message verbatim.
        message = message_or_fields
    else:
        # Caller supplied field names; build the standard message.
        message = get_change_message(message_or_fields)
    # The LogEntry keys the record to the acting user, the object's
    # content type and primary key, and a repr snapshot taken now.
    models.LogEntry.objects.log_action(
        user_id=request.user.pk,
        content_type_id=ContentType.objects.get_for_model(obj).pk,
        object_id=obj.pk,
        object_repr=repr(obj),
        action_flag=models.CHANGE,
        change_message=message
    )
# Function responsible for addition, I.e. specifying that something was added.
### Log that something was added ###
def addition(request, obj):
    """Record an ADDITION entry in Django's admin log for *obj*.

    Keys the entry to the acting user, the object's content type and
    primary key, and a repr snapshot taken at creation time.
    """
    models.LogEntry.objects.log_action(
        user_id=request.user.pk,
        content_type_id=ContentType.objects.get_for_model(obj).pk,
        object_id=obj.pk,
        object_repr=repr(obj),
        action_flag=models.ADDITION
    )
### Log that something was deleted ###
def deletion(request, obj, object_repr=None):
    """Record a DELETION entry in Django's admin log for *obj*.

    :param object_repr: optional repr snapshot taken before the object
        was deleted; falls back to ``repr(obj)``.
    """
    models.LogEntry.objects.log_action(
        # Use .pk (not .id) for consistency with change()/addition();
        # .pk is Django's canonical primary-key alias.
        user_id=request.user.pk,
        content_type_id=ContentType.objects.get_for_model(obj).pk,
        object_id=obj.pk,
        object_repr=object_repr or repr(obj),
        action_flag=models.DELETION
    )
| [
"bulmer.neville@hotmail.co.uk"
] | bulmer.neville@hotmail.co.uk |
b5c4683595506c36f2bbe83636e6d3b5240a13b2 | 9404f0134c580cb18df4de6e434f4a6f83d1cd33 | /tfidf/tokenizer.py | 6473eec7812e1d55b8e80165689a1a11509e934c | [] | no_license | xyh97/JingDong-Dialog-Challenge | 77a1c7fee0bed551d39eef79b79049ddac1f74fc | 95c137e09606c39b5f8dd36c25f5baf178835ef0 | refs/heads/master | 2020-04-03T01:53:10.980316 | 2018-10-27T08:23:01 | 2018-10-27T08:23:01 | 154,941,380 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 637 | py | # -*- coding: utf-8 -*-
import jieba
from tfidf.util import read_lines
_STOP_WORDS_FP = 'stop_words.txt'
class Tokenizer:
    """Thin wrapper around jieba's search-mode segmentation that can
    filter out stop words loaded from _STOP_WORDS_FP."""

    def __init__(self):
        # Keep only non-empty lines from the stop-word file.
        self.stop_words = [word for word in read_lines(_STOP_WORDS_FP) if word]

    def cut_for_search(self, sentence, filter_stop_word=True):
        """Segment *sentence*; drop stop words unless filtering is disabled."""
        pieces = jieba.cut_for_search(sentence)
        if not filter_stop_word:
            return list(pieces)
        return [piece for piece in pieces if piece not in self.stop_words]
if __name__ == '__main__':
    # Manual smoke test: segment a sample sentence and print the tokens.
    tokenizer = Tokenizer()
    text = '清华大学离世界一流大学有一条街的距离'
    print('origin:', text)
    print('cutted:', tokenizer.cut_for_search(sentence=text))
| [
"xiongyh@zju.edu.cn"
] | xiongyh@zju.edu.cn |
d6be6ba7b30d8faaec1895bb04e32de8fc239c7e | 3f507c5cd0386f3e4a7310f2675ae5ebb9c81426 | /manage_site/migrations/0004_auto_20180715_1404.py | 51e7e200d4dd7fdd9f57082b7c0adb32f20d5445 | [
"Apache-2.0"
] | permissive | Chocomilk-Leo/FITA | aa6af2b34ca032558393f06bb5a4dbacc464e4e6 | f2b04c9e40da11ed35732e9f69fc24ac5c31bc70 | refs/heads/master | 2020-04-15T01:39:16.363443 | 2018-10-30T04:43:02 | 2018-10-30T04:43:02 | 164,285,474 | 1 | 0 | Apache-2.0 | 2019-01-06T07:33:23 | 2019-01-06T07:33:23 | null | UTF-8 | Python | false | false | 452 | py | # Generated by Django 2.0.6 on 2018-07-15 14:04
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated by Django's makemigrations: alters the `img` field of
    # IndexCourse (max_length=200, dated upload path). Avoid hand-editing
    # beyond conflict resolution.
    dependencies = [
        ('manage_site', '0003_auto_20180715_1338'),
    ]
    operations = [
        migrations.AlterField(
            model_name='indexcourse',
            name='img',
            field=models.ImageField(max_length=200, upload_to='manage_site/%Y/%m', verbose_name='课程LOGO'),
        ),
    ]
| [
"1227085585@qq.com"
] | 1227085585@qq.com |
0dfab621a2eb3a0f601ca535af21b6bf1120739f | 5c1de66937151f56c739ed8b9ea4309c28e333c6 | /maxfw/core/api.py | e513d0746ecf471d9e4205ae99b3073ac6200b3a | [
"Apache-2.0"
] | permissive | cclauss/MAX-Framework | 40b0259da1aa84a4550ac897afb595142353bf85 | 2bc9c090058557a5ce863bbf19a18c7c47a78695 | refs/heads/master | 2020-04-09T12:21:36.033643 | 2018-11-29T01:08:07 | 2018-11-29T01:08:07 | 160,346,052 | 0 | 0 | null | 2018-12-04T11:20:13 | 2018-12-04T11:20:13 | null | UTF-8 | Python | false | false | 977 | py | from .app import MAX_API
from flask_restplus import Resource, Model, fields, reqparse
from werkzeug.datastructures import FileStorage
# Swagger/RESTplus model describing the payload returned by /metadata.
METADATA_SCHEMA = MAX_API.model('ModelMetadata', {
    'id': fields.String(required=True, description='Model identifier'),
    'name': fields.String(required=True, description='Model name'),
    'description': fields.String(required=True, description='Model description'),
    'license': fields.String(required=False, description='Model license')
})
class MAXAPI(Resource):
    """Common base class for all MAX framework REST resources."""
    pass
class MetadataAPI(MAXAPI):
    # Abstract endpoint: concrete model wrappers override get() to
    # return the model's metadata (see METADATA_SCHEMA above).
    def get(self):
        """To be implemented"""
        raise NotImplementedError()
class PredictAPI(MAXAPI):
    # Abstract endpoint: concrete model wrappers override post() to run
    # inference on the submitted input.
    def post(self):
        """To be implemented"""
        raise NotImplementedError()
# class FileRequestParser(object):
# def __init__(self):
# self.parser = reqparse.RequestParser()
# self.parser.add_argument('file', type=FileStorage, location='files', required=True)
| [
"djalova@us.ibm.com"
] | djalova@us.ibm.com |
8152f5de1e216e50d57f2ee029225b5144c4beb2 | ed2be337ce4b8a3c772862fce99ec99416784a62 | /play/models.py | c889f87afcafa52f0ca12af45ece8a4485629983 | [
"MIT"
] | permissive | fraferra/PlayPaloAltoServer | e5ecc7557a02b2b14750e929f656a121984a560f | a7128d363efd6059007df2c9da77f7bd033f7987 | refs/heads/master | 2020-05-20T05:30:19.020450 | 2014-07-08T02:34:14 | 2014-07-08T02:34:14 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,899 | py | from django.db import models
from datetime import date
from django import forms
from django.contrib.auth.models import User,UserManager
from django.utils import timezone
from django.db.models.signals import post_save
from django.utils.translation import ugettext as _
from utils import *
from social_auth.models import UserSocialAuth
import constants
from django.core.exceptions import *
import charity.models
import shop.models
# Create your models here.
import requests
import datetime
#from social_auth.backends.pipeline.user import update_user_details
class Player(models.Model):
    """Game profile attached to a Django auth User."""
    user=models.ForeignKey(User)
    # NOTE(review): forms.BooleanField is a *form* field, not a model field,
    # so this attribute is never persisted to the database -- confirm intent.
    custom_auth = forms.BooleanField(initial=False)
    token=models.CharField(max_length=100, null=True, default=None)
    score=models.DecimalField(max_digits=4, decimal_places=0, null=True, default=20)
    experience=models.DecimalField(max_digits=5, decimal_places=0, null=True, default=0)
    level=models.DecimalField(max_digits=4, decimal_places=0, null=True, default=0)
    picture_url=models.CharField(max_length=400, null=True, default='/static/img/avatar-1.png')
    facebook_pic=models.BooleanField(default=True)
    def __unicode__(self): # Python 3: def __str__(self):
        return unicode(self.user) or u''
# Signal handler: auto-create a Player profile whenever a new User is saved.
def create_user_profile(sender, instance, created, **kwargs):
    if created:
        Player.objects.create(user=instance)
post_save.connect(create_user_profile, sender=User)
class CouponHistory(models.Model):
    """Record of a coupon a player redeemed at a shop."""
    title=models.CharField(max_length=100, null=True)
    #coupon=models.ForeignKey(Coupon, related_name='coupon')
    shop=models.CharField(max_length=100, null=True)
    player=models.ForeignKey(Player)
    #shop=models.ForeignKey(Shop, related_name='created')
class EventHistory(models.Model):
    """Record of an event a player completed and the points earned."""
    date=models.DateTimeField( null=True)
    title=models.CharField(max_length=100, null=True)
    #event_done=models.ForeignKey(Event, related_name='created')
    organization=models.CharField(max_length=100, null=True)
    player=models.ForeignKey(Player)
    points=models.DecimalField(max_digits=4, decimal_places=0)
    event_type=models.CharField(max_length=50,choices=constants.TYPE, default=None, null=True)
    #organization=models.ForeignKey(Organization, related_name='organization')
class Idea(models.Model):
    """Player-submitted idea with its point/experience reward."""
    title=models.CharField(max_length=100, null=True)
    author=models.CharField(max_length=100, null=True)
    description=models.TextField(max_length=500, null=True)
    points=models.DecimalField(max_digits=4, decimal_places=0)
    experience=models.DecimalField(max_digits=5, decimal_places=0, null=True, default=0)
class Comment(models.Model):
    """A player's comment on a charity event."""
    comment=models.TextField(max_length=500, null=True)
    commenter=models.ForeignKey(Player)
    event=models.ForeignKey('charity.Event')
    # Callable default: timestamp evaluated at row-creation time.
    date=models.DateTimeField( null=True, default=datetime.datetime.now)
class Feed(models.Model):
    """Activity-feed entry: a player's participation in an event."""
    player=models.ForeignKey(Player)
    event=models.ForeignKey('charity.Event')
    likes= models.DecimalField(max_digits=4, decimal_places=0, default=0)
    date=models.DateTimeField( null=True, default=datetime.datetime.now)
class CommentFeed(models.Model):
    """A player's comment attached to a feed entry."""
    comment=models.TextField(max_length=500, null=True)
    commenter=models.ForeignKey(Player)
    feed=models.ForeignKey(Feed)
    date=models.DateTimeField( null=True, default=datetime.datetime.now)
class Badge(models.Model):
    """Badge awarded to a player; `icon` is a Font Awesome class name."""
    player=models.ForeignKey(Player)
    title=models.CharField(max_length=100, null=True, default='Beginner!')
    icon=models.CharField(max_length=50,choices=constants.ICON, default='fa-thumbs-o-up')
'''
def assign_badge(sender, instance, created, **kwargs):
if created:
badge=Badge.objects.create(player=instance.player)
type_event=['Animals', 'Food','Art', 'Shopping', 'Elders', 'Environment']
for tt in type_event:
post_save.connect(assign_badge, sender=EventHistory) ''' | [
"fraferra@cisco.com"
] | fraferra@cisco.com |
aa7ae0c68281ac9ce0ab5270f0a8c3357b63d971 | 3f287644cedc068e11d0d770e58a66771a721b97 | /TrainingExtensions/tensorflow/src/python/aimet_tensorflow/defs.py | eb9d2e516eddb45adcd051bf5cb9ba5f3c9dacf4 | [
"BSD-3-Clause"
] | permissive | Rohan-Chaudhury/aimet | a1c3d1f6b688a45bc333398cf23bba3d8cffad55 | 1c38cac8cc0fd32dca40ce5e39940805d29f7a4a | refs/heads/develop | 2023-04-03T08:57:13.982404 | 2021-04-01T22:35:01 | 2021-04-01T22:35:01 | 332,067,268 | 0 | 0 | NOASSERTION | 2021-04-01T22:35:02 | 2021-01-22T21:32:36 | Python | UTF-8 | Python | false | false | 7,393 | py | # /usr/bin/env python3.5
# -*- mode: python -*-
# =============================================================================
# @@-COPYRIGHT-START-@@
#
# Copyright (c) 2018-2020, Qualcomm Innovation Center, Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# 3. Neither the name of the copyright holder nor the names of its contributors
# may be used to endorse or promote products derived from this software
# without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE
# ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE
# LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR
# CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF
# SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS
# INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN
# CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE)
# ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED OF THE
# POSSIBILITY OF SUCH DAMAGE.
#
# SPDX-License-Identifier: BSD-3-Clause
#
# @@-COPYRIGHT-END-@@
# =============================================================================
""" Common type definitions that are used across aimet """
from enum import Enum
from typing import List, Optional, Union
import tensorflow as tf
from aimet_common.defs import GreedySelectionParameters
class ModuleCompRatioPair:
    """
    Pair of tf.Operation and a compression-ratio

    :ivar module: Module of type tf.Operation
    :ivar comp_ratio: Compression ratio. Compression ratio is the ratio of cost of compressed model
                      to cost of the original model.
    """
    def __init__(self, module: tf.Operation, comp_ratio: float):
        self.module = module
        self.comp_ratio = comp_ratio

    def __repr__(self):
        # Aid debugging/logging of comp-ratio selections.
        return '{}(module={!r}, comp_ratio={!r})'.format(
            type(self).__name__, self.module, self.comp_ratio)
class SpatialSvdParameters:
    """ Configuration parameters for spatial svd compression """

    class ManualModeParams:
        """ Parameters used when comp-ratios are specified by hand """

        def __init__(self, list_of_module_comp_ratio_pairs: List[ModuleCompRatioPair]):
            """
            :param list_of_module_comp_ratio_pairs: List of (module, comp-ratio) pairs
            """
            self.list_of_module_comp_ratio_pairs = list_of_module_comp_ratio_pairs

    class AutoModeParams:
        """ Parameters used when comp-ratios are selected automatically """

        def __init__(self, greedy_select_params: GreedySelectionParameters,
                     modules_to_ignore: Optional[List[tf.Operation]] = None):
            """
            :param greedy_select_params: Params for greedy comp-ratio selection algorithm
            :param modules_to_ignore: List of modules to ignore (None indicates nothing to ignore)
            """
            self.greedy_params = greedy_select_params
            if modules_to_ignore is None:
                self.modules_to_ignore = []
            else:
                self.modules_to_ignore = modules_to_ignore

    class Mode(Enum):
        """ Mode enumeration """

        manual = 1
        """ Manual mode """

        auto = 2
        """ Auto mode """

    def __init__(self, input_op_names: List[str], output_op_names: List[str], mode: Mode,
                 params: Union[ManualModeParams, AutoModeParams], multiplicity=1):
        """
        :param input_op_names: list of input op names to the model
        :param output_op_names: List of output op names of the model
        :param mode: Either auto mode or manual mode
        :param params: Parameters for the mode selected
        :param multiplicity: The multiplicity to which ranks/input channels will get rounded. Default: 1
        """
        self.input_op_names = input_op_names
        self.output_op_names = output_op_names
        self.multiplicity = multiplicity
        self.mode = mode
        self.mode_params = params
class ChannelPruningParameters:
    """ Configuration parameters for channel pruning compression """

    class ManualModeParams:
        """ Manual mode: caller supplies a comp-ratio per module """

        def __init__(self, list_of_module_comp_ratio_pairs: List[ModuleCompRatioPair]):
            """
            :param list_of_module_comp_ratio_pairs: List of (module, comp-ratio) pairs
            """
            self.list_of_module_comp_ratio_pairs = list_of_module_comp_ratio_pairs

    class AutoModeParams:
        """ Auto mode: comp-ratios chosen by the greedy selection algorithm """

        def __init__(self, greedy_select_params: GreedySelectionParameters,
                     modules_to_ignore: Optional[List[tf.Operation]] = None):
            """
            :param greedy_select_params: Params for greedy comp-ratio selection algorithm
            :param modules_to_ignore: List of modules to ignore (None indicates nothing to ignore)
            """
            self.greedy_params = greedy_select_params
            if modules_to_ignore is None:
                self.modules_to_ignore = []
            else:
                self.modules_to_ignore = modules_to_ignore

    class Mode(Enum):
        """ Mode enumeration """

        manual = 1
        """ Manual mode: User specifies comp-ratio per layer """

        auto = 2
        """ Auto mode: aimet computes optimal comp-ratio per layer """

    def __init__(self, input_op_names: List[str], output_op_names: List[str], data_set: tf.data.Dataset,
                 batch_size: int, num_reconstruction_samples: int, allow_custom_downsample_ops: bool, mode: Mode,
                 params: Union[ManualModeParams, AutoModeParams], multiplicity=1):
        # pylint: disable=too-many-arguments
        """
        :param input_op_names: list of input op names to the model
        :param output_op_names: List of output op names of the model
        :param data_set: data set
        :param batch_size: batch size
        :param num_reconstruction_samples: number of samples to be used for reconstruction
        :param allow_custom_downsample_ops: If set to True, DownSampleLayer and UpSampleLayer will be added as required
        :param mode: indicates whether the mode is manual or auto
        :param params: ManualModeParams or AutoModeParams, depending on the value of mode
        :param multiplicity: The multiplicity to which ranks/input channels will get rounded. Default: 1
        """
        self.input_op_names = input_op_names
        self.output_op_names = output_op_names
        self.data_set = data_set
        self.batch_size = batch_size
        self.num_reconstruction_samples = num_reconstruction_samples
        self.allow_custom_downsample_ops = allow_custom_downsample_ops
        self.mode = mode
        self.multiplicity = multiplicity
        self.mode_params = params
| [
"quic_bharathr@quicinc.com"
] | quic_bharathr@quicinc.com |
18a82c0623cf3f9a43a6f936f07e388b4f2de0f4 | 9e4fe0016f8d322f1911df4c4fcfc07d090d4ef3 | /lab7/codingbat/warmup-2/5.py | c003f9559abfe1a21ddf9a3315526fea109d8ffa | [] | no_license | WooWooNursat/Python | e84055fed32aeee2d32dcd49dad6fe10ae02bb33 | b37e6ceb46b4fe068c9e989c1cde3c1cf8ad42f9 | refs/heads/master | 2021-04-04T03:43:48.143976 | 2020-04-17T05:58:17 | 2020-04-17T05:58:17 | 248,422,200 | 0 | 1 | null | null | null | null | UTF-8 | Python | false | false | 199 | py | def last2(str):
  # Fewer than two characters: no trailing pair exists.
  if len(str) < 2:
    return 0
  # The two-character substring at the very end of the string.
  last2 = str[len(str)-2:]
  count = 0
  # Scan every 2-char window except the final one (which is last2 itself).
  for i in range(len(str)-2):
    sub = str[i:i+2]
    if sub == last2:
      count = count + 1
  return count
| [
"noreply@github.com"
] | noreply@github.com |
1694e7e6f39b764c68f5192514499aab84206a67 | 7e133bf590000ad6bc06c18c77f174373f05af3b | /questao11.py | 674ba232f746316bfb7905dd3e1ae36545a0a493 | [] | no_license | LucasFerreira06/Exercicio1---Prova-2BIM | a716ed669b89b822fc431b0c25ed7a4f2c397168 | 6c9a97aa2fbf333b165df84b00a2a9a1836d2166 | refs/heads/master | 2020-12-05T03:11:33.581597 | 2016-09-04T01:52:05 | 2016-09-04T01:52:05 | 66,965,708 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 223 | py | Numero1 = int(input("Digite um número:"))
Numero2 = int(input("Digite um número:"))
Soma = 0
# Print and sum every integer strictly between the two inputs.
for Numero1 in range(Numero1 + 1,Numero2):
    print(Numero1)
    Soma = Soma + Numero1
print("A soma dos números é:",Soma)
| [
"lucasnatacao@gmail.com"
] | lucasnatacao@gmail.com |
fec6a3aa31a220c668b93a5b34d034e735fbae41 | 233087c1eb99e1d13f80de6f43d2cc3264aa9ca6 | /polyaxon_cli/cli/version.py | e1a7f0433468d235fe651db2f75bb5fd16ca9f7f | [
"MIT"
] | permissive | DXist/polyaxon-cli | e33cd3b3633df5b21b9eb3cc48d7a6affed8e4ec | 0b01512548f9faea77fb60cb7c6bd327e0638b13 | refs/heads/master | 2020-07-08T07:02:43.248549 | 2019-08-15T16:00:05 | 2019-08-15T16:04:31 | 203,601,306 | 0 | 0 | MIT | 2019-08-21T14:27:56 | 2019-08-21T14:27:56 | null | UTF-8 | Python | false | false | 5,988 | py | # -*- coding: utf-8 -*-
from __future__ import absolute_import, division, print_function
import sys
import click
from polyaxon_deploy.operators.pip import PipOperator
from polyaxon_cli import pkg
from polyaxon_cli.client import PolyaxonClient
from polyaxon_cli.client.exceptions import (
AuthorizationError,
PolyaxonHTTPError,
PolyaxonShouldExitError
)
from polyaxon_cli.logger import clean_outputs, logger
from polyaxon_cli.managers.auth import AuthConfigManager
from polyaxon_cli.managers.cli import CliConfigManager
from polyaxon_cli.utils import indentation
from polyaxon_cli.utils.formatting import Printer, dict_tabulate
from polyaxon_client.exceptions import PolyaxonClientException
PROJECT_CLI_NAME = "polyaxon-cli"
def pip_upgrade(project_name=PROJECT_CLI_NAME):
    """Upgrade *project_name* via `pip install --upgrade`, streaming output."""
    PipOperator.execute(['install', '--upgrade', project_name], stream=True)
    # NOTE(review): the message hard-codes "polyaxon-cli" even when another
    # project_name is passed -- confirm whether that is intended.
    click.echo('polyaxon-cli upgraded.')
def session_expired():
    """Purge cached auth/CLI config, inform the user, and exit with code 1."""
    AuthConfigManager.purge()
    CliConfigManager.purge()
    click.echo('Session has expired, please try again.')
    sys.exit(1)
def get_version(package):
    """Return the installed version of *package*.

    Logs an error and implicitly returns None when the distribution
    is not installed.
    """
    import pkg_resources
    try:
        return pkg_resources.get_distribution(package).version
    except pkg_resources.DistributionNotFound:
        logger.error('`%s` is not installed', package)
def get_current_version():
    """Return the version string of this polyaxon-cli package."""
    return pkg.VERSION
def get_server_version():
    """Fetch the CLI version constraints published by the Polyaxon server.

    Exits the process on authorization failure or client errors.
    """
    try:
        return PolyaxonClient().version.get_cli_version()
    except AuthorizationError:
        session_expired()
        sys.exit(1)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        Printer.print_error('Could not get cli version.')
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
def get_log_handler():
    """Fetch the log handler configuration from the Polyaxon server.

    Exits the process on authorization failure or client errors.
    """
    try:
        return PolyaxonClient().version.get_log_handler()
    except AuthorizationError:
        session_expired()
        sys.exit(1)
    except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
        # The original message said "cli version" -- a copy/paste error
        # from get_server_version().
        Printer.print_error('Could not get log handler.')
        Printer.print_error('Error message `{}`.'.format(e))
        sys.exit(1)
def check_cli_version():
    """Check if the current cli version satisfies the server requirements.

    Prompts for (and optionally runs) an upgrade when the installed CLI is
    older than the server's minimum; prints advisory messages when it is
    merely behind the latest, or ahead of it.
    """
    if not CliConfigManager.should_check():
        return

    from distutils.version import LooseVersion  # pylint:disable=import-error

    server_version = get_server_version()
    current_version = get_current_version()
    CliConfigManager.reset(current_version=current_version,
                           min_version=server_version.min_version)

    if LooseVersion(current_version) < LooseVersion(server_version.min_version):
        click.echo("""Your version of CLI ({}) is no longer compatible with server.""".format(
            current_version))
        if click.confirm("Do you want to upgrade to "
                         "version {} now?".format(server_version.latest_version)):
            pip_upgrade()
            sys.exit(0)
        else:
            # Typo fix: was "Your can manually run:".
            indentation.puts("You can manually run:")
            with indentation.indent(4):
                indentation.puts("pip install -U polyaxon-cli")
            indentation.puts(
                "to upgrade to the latest version `{}`".format(server_version.latest_version))

            sys.exit(0)
    elif LooseVersion(current_version) < LooseVersion(server_version.latest_version):
        indentation.puts("New version of CLI ({}) is now available. To upgrade run:".format(
            server_version.latest_version
        ))
        with indentation.indent(4):
            indentation.puts("pip install -U polyaxon-cli")
    elif LooseVersion(current_version) > LooseVersion(server_version.latest_version):
        # Typo fix: was "You version of CLI".
        indentation.puts("Your version of CLI ({}) is ahead of the latest version "
                         "supported by Polyaxon Platform ({}) on your cluster, "
                         "and might be incompatible.".format(current_version,
                                                             server_version.latest_version))
@click.command()
@click.option('--cli', is_flag=True, default=False, help='Version of the Polyaxon cli.')
@click.option('--platform', is_flag=True, default=False, help='Version of the Polyaxon platform.')
@clean_outputs
def version(cli, platform):
    """Print the current version of the cli and platform."""
    version_client = PolyaxonClient().version
    # With no flags given, default to showing the cli version.
    cli = cli or not any([cli, platform])
    if cli:
        try:
            server_version = version_client.get_cli_version()
        except AuthorizationError:
            session_expired()
            sys.exit(1)
        except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
            Printer.print_error('Could not get cli version.')
            Printer.print_error('Error message `{}`.'.format(e))
            sys.exit(1)
        cli_version = get_current_version()
        Printer.print_header('Current cli version: {}.'.format(cli_version))
        Printer.print_header('Supported cli versions:')
        dict_tabulate(server_version.to_dict())

    if platform:
        try:
            platform_version = version_client.get_platform_version()
        except AuthorizationError:
            session_expired()
            sys.exit(1)
        except (PolyaxonHTTPError, PolyaxonShouldExitError, PolyaxonClientException) as e:
            Printer.print_error('Could not get platform version.')
            Printer.print_error('Error message `{}`.'.format(e))
            sys.exit(1)
        chart_version = version_client.get_chart_version()
        Printer.print_header('Current platform version: {}.'.format(chart_version.version))
        Printer.print_header('Supported platform versions:')
        dict_tabulate(platform_version.to_dict())
@click.command()
@clean_outputs
def upgrade():
    """Install/Upgrade polyaxon-cli."""
    try:
        pip_upgrade(PROJECT_CLI_NAME)
    except Exception as e:
        # Best-effort: an upgrade failure is logged, never crashes the CLI.
        logger.error(e)
| [
"mouradmourafiq@gmail.com"
] | mouradmourafiq@gmail.com |
505fdfd9abfb20f8f910c8177095e34659135c1c | a5e60d6bdc5ffa932e9d2638ee56e88ed4c07fc7 | /df_websockets/__init__.py | 3bbf7505c0fc1eea26fb8bed553558cc70657926 | [
"LicenseRef-scancode-cecill-b-en"
] | permissive | webclinic017/df_websockets | c5d82afe90e475731e61186fa8df39897c64252e | f0d072c4d6f2ddfa8bf17dd408236553fc154731 | refs/heads/master | 2023-04-16T05:45:42.293330 | 2021-04-16T08:50:42 | 2021-04-16T08:50:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,238 | py | # ##############################################################################
# This file is part of df_websockets #
# #
# Copyright (C) 2020 Matthieu Gallet <github@19pouces.net> #
# All Rights Reserved #
# #
# You may use, distribute and modify this code under the #
# terms of the (BSD-like) CeCILL-B license. #
# #
# You should have received a copy of the CeCILL-B license with #
# this file. If not, please visit: #
# https://cecill.info/licences/Licence_CeCILL-B_V1-en.txt (English) #
# or https://cecill.info/licences/Licence_CeCILL-B_V1-fr.txt (French) #
# #
# ##############################################################################
# Package version string (PEP 440); read by packaging tooling.
__version__ = "0.10.9"
| [
"github@19pouces.net"
] | github@19pouces.net |
0968061f1cd254b767175a9dfbd30f5d783bcc15 | 92a9ac5f4e66e37a6b07c1ed8cfa02966b66ed0e | /project.py | 6319f17892be739a28c0d19da5636980c267bf2f | [
"CC-BY-2.0"
] | permissive | dakshvarshneya/itemCatalog | 6898c53b9dfec596384eef05c3803f4a68e0fcd2 | 056bfa10896a35929eb8dff29911730e06df09ac | refs/heads/master | 2021-01-20T08:38:21.734878 | 2018-10-01T09:06:31 | 2018-10-01T09:06:31 | 101,568,599 | 0 | 1 | null | 2018-10-01T09:06:32 | 2017-08-27T17:26:37 | Python | UTF-8 | Python | false | false | 17,664 | py | from flask import Flask, render_template
from flask import url_for, request, redirect, flash, jsonify, make_response
from flask import session as login_session
from sqlalchemy import create_engine, asc, desc
from sqlalchemy.orm import sessionmaker
from database_setup import *
from oauth2client.client import flow_from_clientsecrets
from oauth2client.client import FlowExchangeError
import os
import random
import string
import datetime
import json
import httplib2
import requests
# Import login_required from login_decorator.py
from login_decorator import login_required
# Flask application instance.
app = Flask(__name__)
# OAuth2 client id, loaded from the Google-issued client_secrets.json.
CLIENT_ID = json.loads(
    open('client_secrets.json', 'r').read())['web']['client_id']
APPLICATION_NAME = "My Project"
# Connect to the SQLite catalog database.
engine = create_engine('sqlite:///catalog.db')
Base.metadata.bind = engine
# Module-level SQLAlchemy session shared by all request handlers.
DBSession = sessionmaker(bind=engine)
session = DBSession()
# Login Routing
# Login - Create anti-forgery state token
@app.route('/login')
def showLogin():
    """Render the login page with a fresh anti-forgery ``state`` token."""
    # The state token guards against CSRF, so it must be unpredictable:
    # use SystemRandom (os.urandom-backed) instead of the default,
    # predictable Mersenne Twister PRNG.
    secure_random = random.SystemRandom()
    state = ''.join(
        secure_random.choice(string.ascii_uppercase + string.digits) for x in range(32))
    login_session['state'] = state
    return render_template('login.html', STATE=state)
# GConnect
@app.route('/gconnect', methods=['POST'])
def gconnect():
    """Complete the Google OAuth2 sign-in handshake.

    Validates the anti-forgery state token, exchanges the one-time code
    for credentials, verifies the access token against the intended user
    and this app's client id, stores user info in the login session
    (creating a User row on first login) and returns a welcome snippet.
    """
    # Validate state token
    if request.args.get('state') != login_session['state']:
        response = make_response(json.dumps('Invalid state parameter.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Obtain authorization code, now compatible with Python3
    code = request.data.decode('utf-8')
    try:
        # Upgrade the authorization code into a credentials object
        oauth_flow = flow_from_clientsecrets('client_secrets.json', scope='')
        oauth_flow.redirect_uri = 'postmessage'
        credentials = oauth_flow.step2_exchange(code)
    except FlowExchangeError:
        response = make_response(
            json.dumps('Failed to upgrade the authorization code.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Check that the access token is valid.
    access_token = credentials.access_token
    url = ('https://www.googleapis.com/oauth2/v1/tokeninfo?access_token=%s'
           % access_token)
    # Submit request, parse response - Python3 compatible
    h = httplib2.Http()
    response = h.request(url, 'GET')[1]
    str_response = response.decode('utf-8')
    result = json.loads(str_response)
    # If there was an error in the access token info, abort.
    if result.get('error') is not None:
        response = make_response(json.dumps(result.get('error')), 500)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is used for the intended user.
    gplus_id = credentials.id_token['sub']
    if result['user_id'] != gplus_id:
        response = make_response(
            json.dumps("Token's user ID doesn't match given user ID."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Verify that the access token is valid for this app.
    if result['issued_to'] != CLIENT_ID:
        response = make_response(
            json.dumps("Token's client ID does not match app's."), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Short-circuit if this user is already logged in to this session.
    stored_access_token = login_session.get('access_token')
    stored_gplus_id = login_session.get('gplus_id')
    if stored_access_token is not None and gplus_id == stored_gplus_id:
        response = make_response(json.dumps('''Current user is
        already connected.'''),
                                 200)
        response.headers['Content-Type'] = 'application/json'
        return response
    # Store the access token in the session for later use.
    login_session['access_token'] = access_token
    login_session['gplus_id'] = gplus_id
    # Get user info
    userinfo_url = "https://www.googleapis.com/oauth2/v1/userinfo"
    params = {'access_token': access_token, 'alt': 'json'}
    answer = requests.get(userinfo_url, params=params)
    data = answer.json()
    login_session['username'] = data['name']
    login_session['picture'] = data['picture']
    login_session['email'] = data['email']
    # see if user exists, if it doesn't make a new one
    user_id = getUserID(login_session['email'])
    if not user_id:
        user_id = createUser(login_session)
    login_session['user_id'] = user_id
    # NOTE(review): username/picture come from Google's userinfo payload and
    # are interpolated into HTML unescaped -- looks XSS-prone; confirm
    # whether escaping is needed before rendering this snippet.
    output = ''
    output += '<h1>Welcome, '
    output += login_session['username']
    output += '!</h1>'
    output += '<img src="'
    output += login_session['picture']
    output += ' " style = "width: 300px; height: 300px;border-radius: 150px;-webkit-border-radius: 150px;-moz-border-radius: 150px;"> '
    flash("you are now logged in as %s" % login_session['username'])
    return output
# User Helper Functions
def createUser(login_session):
    """Persist a new User from the OAuth session data and return its id."""
    newUser = User(name=login_session['username'], email=login_session[
        'email'], picture=login_session['picture'])
    session.add(newUser)
    session.commit()
    # Re-query by email to obtain the freshly assigned primary key.
    user = session.query(User).filter_by(email=login_session['email']).one()
    return user.id
def getUserInfo(user_id):
    """Return the User row with primary key *user_id* (raises if absent)."""
    user = session.query(User).filter_by(id=user_id).one()
    return user
def getUserID(email):
    """Return the id of the user with *email*, or None if the lookup fails."""
    try:
        user = session.query(User).filter_by(email=email).one()
        return user.id
    except Exception:
        # .one() raises NoResultFound / MultipleResultsFound; the previous
        # bare ``except:`` also trapped SystemExit and KeyboardInterrupt.
        return None
# DISCONNECT - Revoke a current user's token and reset their login_session
@app.route('/gdisconnect')
def gdisconnect():
    """Revoke the stored Google access token and clear the login session."""
    # Only disconnect a connected user.
    access_token = login_session.get('access_token')
    if access_token is None:
        response = make_response(
            json.dumps('Current user not connected.'), 401)
        response.headers['Content-Type'] = 'application/json'
        return response
    url = 'https://accounts.google.com/o/oauth2/revoke?token=%s' % access_token
    h = httplib2.Http()
    result = h.request(url, 'GET')[0]
    if result['status'] == '200':
        # Revoke succeeded: drop all identity data from the session.
        del login_session['access_token']
        del login_session['gplus_id']
        del login_session['username']
        del login_session['email']
        del login_session['picture']
        response = redirect(url_for('showCatalog'))
        flash("You are now logged out.")
        return response
    else:
        # Token was already invalid or the revoke call failed.
        # BUG FIX: the 400 status was previously passed to json.dumps()
        # instead of make_response(), so clients received HTTP 200.
        response = make_response(
            json.dumps('Failed to revoke token for given user.'), 400)
        response.headers['Content-Type'] = 'application/json'
        return response
# Flask Routing
# Homepage
@app.route('/')
@app.route('/catalog/')
def showCatalog():
    """Homepage: every category plus the five most recently added items."""
    all_categories = session.query(Category).order_by(asc(Category.name))
    latest_items = session.query(Items).order_by(desc(Items.date)).limit(5)
    return render_template('catalog.html',
                           categories=all_categories,
                           items=latest_items)
# Category Items
@app.route('/catalog/<path:category_name>/items/')
def showCategory(category_name):
    """List one category's items; its creator gets the editable view."""
    categories = session.query(Category).order_by(asc(Category.name))
    category = session.query(Category).filter_by(name=category_name).one()
    items = session.query(Items).filter_by(category=category).order_by(asc(Items.name)).all()
    # (stray debug ``print items`` removed)
    count = session.query(Items).filter_by(category=category).count()
    creator = getUserInfo(category.user_id)
    if 'username' not in login_session or creator.id != login_session['user_id']:
        return render_template('public_items.html',
                               category=category.name,
                               categories=categories,
                               items=items,
                               count=count)
    else:
        user = getUserInfo(login_session['user_id'])
        return render_template('items.html',
                               category=category.name,
                               categories=categories,
                               items=items,
                               count=count,
                               user=user)
# Display a Specific Item
@app.route('/catalog/<path:category_name>/<path:item_name>/')
def showItem(category_name, item_name):
    """Show one item; its creator gets the view with edit/delete controls."""
    item = session.query(Items).filter_by(name=item_name).one()
    creator = getUserInfo(item.user_id)
    categories = session.query(Category).order_by(asc(Category.name))
    is_owner = ('username' in login_session and
                creator.id == login_session['user_id'])
    template = 'itemdetail.html' if is_owner else 'public_itemdetail.html'
    return render_template(template,
                           item=item,
                           category=category_name,
                           categories=categories,
                           creator=creator)
# Add a category
@app.route('/catalog/addcategory', methods=['GET', 'POST'])
@login_required
def addCategory():
    """Create a new category owned by the logged-in user."""
    if request.method == 'POST':
        newCategory = Category(
            name=request.form['name'],
            user_id=login_session['user_id'])
        # (stray debug ``print newCategory`` removed)
        session.add(newCategory)
        session.commit()
        flash('New Category Added!')
        return redirect(url_for('showCatalog'))
    else:
        return render_template('addcategory.html')
# Edit a category
@app.route('/catalog/<path:category_name>/edit', methods=['GET', 'POST'])
@login_required
def editCategory(category_name):
    """Rename a category; only its creator may edit it."""
    # The same row used to be queried twice into two variables; reuse it.
    editedCategory = session.query(Category).filter_by(name=category_name).one()
    # See if the logged in user is the owner of item
    creator = getUserInfo(editedCategory.user_id)
    # If logged in user != item owner redirect them
    if creator.id != login_session['user_id']:
        flash("""You cannot edit this Category.
            This Category belongs to %s""" % creator.name)
        return redirect(url_for('showCatalog'))
    # POST methods
    if request.method == 'POST':
        if request.form['name']:
            editedCategory.name = request.form['name']
        session.add(editedCategory)
        session.commit()
        flash('Category Successfully Edited!')
        return redirect(url_for('showCatalog'))
    else:
        return render_template('editcategory.html',
                               categories=editedCategory,
                               category=editedCategory)
# Delete a category
@app.route('/catalog/<path:category_name>/delete', methods=['GET', 'POST'])
@login_required
def deleteCategory(category_name):
    """Delete a category; only its creator may do so."""
    categoryToDelete = session.query(Category).filter_by(name=category_name).one()
    # Only the category's creator may delete it (unused ``user`` removed).
    creator = getUserInfo(categoryToDelete.user_id)
    if creator.id != login_session['user_id']:
        flash("You cannot delete this Category. This Category belongs to %s" % creator.name)
        return redirect(url_for('showCatalog'))
    if request.method == 'POST':
        session.delete(categoryToDelete)
        session.commit()
        flash('Category Successfully Deleted! '+categoryToDelete.name)
        return redirect(url_for('showCatalog'))
    else:
        return render_template('deletecategory.html',
                               category=categoryToDelete)
# Add an item
@app.route('/catalog/add', methods=['GET', 'POST'])
@login_required
def addItem():
    """Create a new item owned by the logged-in user."""
    categories = session.query(Category).all()
    if request.method != 'POST':
        return render_template('additem.html',
                               categories=categories)
    form = request.form
    newItem = Items(
        name=form['name'],
        description=form['description'],
        picture=form['picture'],
        category=session.query(Category).filter_by(name=form['category']).one(),
        date=datetime.datetime.now(),
        user_id=login_session['user_id'])
    session.add(newItem)
    session.commit()
    flash('New Item Added!')
    return redirect(url_for('showCatalog'))
# Edit an item
@app.route('''/catalog/<path:category_name>/<path:item_name>/edit''',
           methods=['GET', 'POST'])
@login_required
def editItem(category_name, item_name):
    """Update an item's fields; only its creator may edit it."""
    editedItem = session.query(Items).filter_by(name=item_name).one()
    categories = session.query(Category).all()
    # Only the item's creator may edit it (unused ``user`` removed).
    creator = getUserInfo(editedItem.user_id)
    if creator.id != login_session['user_id']:
        flash("""You cannot edit this item.
            This item belongs to %s""" % creator.name)
        return redirect(url_for('showCatalog'))
    # POST methods
    if request.method == 'POST':
        # Update only the fields the form actually filled in.
        if request.form['name']:
            editedItem.name = request.form['name']
        if request.form['description']:
            editedItem.description = request.form['description']
        if request.form['picture']:
            editedItem.picture = request.form['picture']
        if request.form['category']:
            category = session.query(Category).filter_by(name=request.form['category']).one()
            editedItem.category = category
        # Stamp the edit time (local formerly named ``time``, which
        # shadowed the stdlib module name).
        editedItem.date = datetime.datetime.now()
        session.add(editedItem)
        session.commit()
        flash('Item Successfully Edited!')
        return redirect(url_for('showCategory',
                                category_name=editedItem.category.name))
    else:
        return render_template('edititem.html',
                               item=editedItem,
                               categories=categories)
# Delete an item
@app.route('''/catalog/<path:category_name>/<path:item_name>/delete''',
           methods=['GET', 'POST'])
@login_required
def deleteItem(category_name, item_name):
    """Delete an item; only its creator may do so."""
    itemToDelete = session.query(Items).filter_by(name=item_name).one()
    category = session.query(Category).filter_by(name=category_name).one()
    # Only the item's creator may delete it (unused ``user`` and
    # ``categories`` locals removed).
    creator = getUserInfo(itemToDelete.user_id)
    if creator.id != login_session['user_id']:
        flash("""You cannot delete this item.
           This item belongs to %s""" % creator.name)
        return redirect(url_for('showCatalog'))
    if request.method == 'POST':
        session.delete(itemToDelete)
        session.commit()
        flash('Item Successfully Deleted! '+itemToDelete.name)
        return redirect(url_for('showCategory',
                                category_name=category.name))
    else:
        return render_template('deleteitem.html',
                               item=itemToDelete)
# JSON
@app.route('/catalog/JSON')
def allItemsJSON():
    """Whole catalog as JSON: every category with its items nested."""
    categories = session.query(Category).all()
    category_dict = [c.serialize for c in categories]
    # Iterate the serialized entries directly instead of indexing with
    # range(len(...)).
    for entry in category_dict:
        items = [i.serialize for i in session.query(Items)
                 .filter_by(category_id=entry["id"]).all()]
        if items:
            entry["Item"] = items
    return jsonify(Category=category_dict)
@app.route('/catalog/categories/JSON')
def categoriesJSON():
    """All categories serialized as JSON."""
    serialized = [c.serialize for c in session.query(Category).all()]
    return jsonify(categories=serialized)
@app.route('/catalog/items/JSON')
def itemsJSON():
    """All items serialized as JSON."""
    serialized = [i.serialize for i in session.query(Items).all()]
    return jsonify(items=serialized)
@app.route('/catalog/<path:category_name>/items/JSON')
def categoryItemsJSON(category_name):
    """The items of one category serialized as JSON."""
    category = session.query(Category).filter_by(name=category_name).one()
    members = session.query(Items).filter_by(category=category).all()
    return jsonify(items=[member.serialize for member in members])
@app.route('/catalog/<path:category_name>/<path:item_name>/JSON')
def ItemJSON(category_name, item_name):
    """A single item serialized as JSON."""
    category = session.query(Category).filter_by(name=category_name).one()
    found = session.query(Items).filter_by(name=item_name,
                                           category=category).one()
    return jsonify(item=[found.serialize])
# url_for static path processor
# remove when deployed
@app.context_processor
def override_url_for():
    """Expose the cache-busting url_for variant to all templates."""
    return {'url_for': dated_url_for}
def dated_url_for(endpoint, **values):
    """url_for wrapper that appends a file-mtime query arg to static URLs."""
    if endpoint != 'static':
        return url_for(endpoint, **values)
    filename = values.get('filename', None)
    if filename:
        file_path = os.path.join(app.root_path,
                                 endpoint, filename)
        values['q'] = int(os.stat(file_path).st_mtime)
    return url_for(endpoint, **values)
# Always at end of file !Important!
if __name__ == '__main__':
    # NOTE(review): development-only settings -- hard-coded secret key,
    # debug mode enabled, and binding to all interfaces. Do not deploy
    # as-is.
    app.secret_key = 'DEV_SECRET_KEY'
    app.debug = True
    app.run(host='0.0.0.0', port=5000)
| [
"dakshvarshneya5@gmail.com"
] | dakshvarshneya5@gmail.com |
6a2b4d88152101f141b37064f7b8551c398301f3 | 513c664ac7e7b7aa34750c8fd21ca0cb2f9df309 | /Legacy Code/Display_weather.py | b4496dbae4d0a4eec4a169a5a0d1c5e2fbc345c0 | [] | no_license | BirchPrat/Raspi-Infodisplay | 656474533def8d7394b1e164ffbf658fdfc2f14e | de8f88752c6d59a63d8c0c2dc4a1b127344d7d50 | refs/heads/master | 2023-08-29T06:46:44.618313 | 2021-10-13T09:49:35 | 2021-10-13T09:49:35 | 327,003,999 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 10,522 | py | # Weather Display##
###################
# Importing necessary python libraries
import time
import json
import subprocess
import requests
from random import choice
import sys
sys.path.insert(1, '/home/pi/Desktop/PythonCode/Api_cred/')
import cred
import digitalio
import board
#Import for the temperature reader
from gpiozero import OutputDevice
import Adafruit_DHT
# Import Python Imaging Library
from PIL import Image, ImageDraw, ImageFont
import adafruit_rgb_display.st7789 as st7789
##### Temperature Reader Setup
# Drive the DHT22's supply from a GPIO pin so the sensor can be
# power-cycled in software.
GPIO_PIN = 21
power = OutputDevice(GPIO_PIN)
power.on()
# Data input pin of the DHT22 sensor.
DHT_SENSOR = Adafruit_DHT.DHT22
DHT_PIN = 20
# temperature fetcher function
def tempfetcher():
    """Read the DHT22 and return [temperature C, humidity %], 2-dp rounded."""
    humidity, temperature = Adafruit_DHT.read_retry(DHT_SENSOR, DHT_PIN)
    return [round(temperature, 2), round(humidity, 2)]
#### Pihole Setup
# REST endpoint of the local pi-hole admin API (10-minute buckets).
api_url = 'http://localhost/admin/api.php?overTimeData10mins'
def get_piholedatv1():
    """Return [total DNS queries, blocked %, blocked count] from pi-hole.

    Sums the whole over-time window reported by the API.
    """
    # BUG FIX: on a KeyError (or failed request) the totals were left
    # unbound and the final return raised NameError; initialise safe
    # defaults first.
    DNSQUERIES, ADSBLOCKED, ADS = 0, 0, 0
    try:
        r = requests.get(api_url)
        data = json.loads(r.text)
        DNSQUERIES = sum(data['domains_over_time'].values())
        ADS = sum(data['ads_over_time'].values())
        ADSBLOCKED = round((ADS/DNSQUERIES)*100, 2)
    except KeyError:
        pass
    return [DNSQUERIES, ADSBLOCKED, ADS]
def dayquerrycalc(dnsdic):
    """Sum the values of *dnsdic* whose epoch-second keys fall on today.

    Keys are epoch timestamps as strings, values are per-slot counts.
    NOTE(review): "today" uses local time while the keys are converted
    with gmtime(); near midnight these can disagree -- confirm which
    timezone pi-hole's timestamps use.
    """
    today = int(time.strftime('%d'))
    return sum(int(value) for key, value in dnsdic.items()
               if int(time.strftime("%d", time.gmtime(int(key)))) == today)
def get_piholedatv2():
    """Return [today's DNS queries, blocked %, blocked count] from pi-hole."""
    # BUG FIX: on a KeyError the totals were left unbound and the final
    # return raised NameError; initialise safe defaults first.
    DNSQUERIES, ADSBLOCKED, ADS = 0, 0, 0
    try:
        r = requests.get(api_url)
        data = json.loads(r.text)
        DNSQUERIES = dayquerrycalc(data['domains_over_time'])
        ADS = dayquerrycalc(data['ads_over_time'])
        try:
            ADSBLOCKED = round((ADS/DNSQUERIES)*100, 2)
        except ZeroDivisionError:
            ADSBLOCKED = 0
    except KeyError:
        pass
    return [DNSQUERIES, ADSBLOCKED, ADS]
#### System Stats Setup
# Shell scripts for system monitoring:
def get_systemstats():
    """Return [SoC temperature in deg C as a string] read via sysfs.

    (Dead commented-out CPU-load and memory probes removed.)
    """
    cmd = "cat /sys/class/thermal/thermal_zone0/temp | awk '{printf \"%.1f\", $(NF-0) / 1000}'" # pylint: disable=line-too-long
    Temp = subprocess.check_output(cmd, shell=True).decode("utf-8")
    return [Temp]
# Uptime
def get_uptime():
    """Return system uptime in days, read from /proc/uptime."""
    with open('/proc/uptime', 'r') as fh:
        seconds = float(fh.readline().split()[0])
    return seconds / 60 / 60 / 24
#### Weather API Setup
# OpenWeatherMap API key, loaded from the local cred module.
api_key = cred.weather_key
# Api fetch function
def get_weather(api_key, location):
    """Query OpenWeatherMap for *location*'s current weather (metric)."""
    endpoint = ("https://api.openweathermap.org/data/2.5/weather"
                f"?q={location}&units=metric&appid={api_key}")
    return requests.get(endpoint).json()
# Weather output function
def weatherout():
    """Fetch Cologne's weather; return its fields, or a 1-item error list."""
    try:
        weather = get_weather(api_key, "Cologne")
        currenttemp = weather['main']['temp']
        humidity = weather['main']['humidity']
        windspeed = weather['wind']['speed']
        clouds = weather['clouds']['all']
        pressure = weather['main']['pressure']
        city = weather['name']
        return [city, currenttemp, humidity, windspeed, pressure, clouds]
    # BUG FIX: ``except KeyError or OSError`` only caught KeyError --
    # the ``or`` evaluates the classes and keeps the first (truthy) one.
    except (KeyError, OSError):
        problem = "Api Failed"
        return [problem]
# food suggest function
# Candidate meals for the button-B random dinner suggestion.
foodlist = ["Hamburger", "Geschnetzeltes", "Bolognese",
            "Korean Chicken", "Bulgogi", "Wraps",
            "Curry", "Lasagne", "Flammkuchen",
            "Pizza", "Gefüllte Paprika", "Gemüsesuppe"]
def foodchoice(foodlist):
    """Pick a random meal from *foodlist*, returned title-cased."""
    return choice(foodlist).title()
##### Display Setup
# Configuration for CS and DC pins (these are FeatherWing defaults on M0/M4):
cs_pin = digitalio.DigitalInOut(board.CE0)
dc_pin = digitalio.DigitalInOut(board.D25)
reset_pin = None
# Config for display baudrate (default max is 24mhz):
BAUDRATE = 64000000
# Setup SPI bus using hardware SPI:
spi = board.SPI()
# Create the ST7789 display:
disp = st7789.ST7789(spi, cs=cs_pin, dc=dc_pin, rst=reset_pin, baudrate=BAUDRATE,
                     width=135, height=240, x_offset=53, y_offset=40)
# Create blank image for drawing.
# Make sure to create image with mode 'RGB' for full color.
height = disp.width   # we swap height/width to rotate it to landscape!
width = disp.height
image = Image.new('RGB', (width, height))
rotation = 90
# Get drawing object to draw on image.
draw = ImageDraw.Draw(image)
# Draw a black filled box to clear the image.
draw.rectangle((0, 0, width, height), outline=0, fill=(0, 0, 0))
disp.image(image, rotation)
# Draw some shapes.
# First define some constants to allow easy resizing of shapes.
padding = -2
top = padding
bottom = height-padding
# Move left to right keeping track of the current x position for drawing shapes.
x = 0
# Alternatively load a TTF font. Make sure the .ttf font file is in the
# same directory as the python script!
# Some other nice fonts to try: http://www.dafont.com/bitmap.php
font = ImageFont.truetype('/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf', 22)
font2 = ImageFont.truetype('/usr/share/fonts/truetype/dejavu/DejaVuSans.ttf', 45)
# Turn on the backlight
backlight = digitalio.DigitalInOut(board.D22)
backlight.switch_to_output()
backlight.value = True
# Add buttons as inputs
buttonA = digitalio.DigitalInOut(board.D23)
buttonA.switch_to_input()
buttonB = digitalio.DigitalInOut(board.D24)
buttonB.switch_to_input()
##### Main Loop Running Constantly
# Behaviour: sleep the panel overnight, show weather while button A is
# pressed, an easter egg + meal suggestion for button B, otherwise the
# default room/system stats screen.
while True:
    # Draw a black filled box to clear the image.
    draw.rectangle((0, 0, width, height), outline=0, fill=0)
    y = top
    timeofday = time.strftime('%H')
    date = time.strftime('%d')
    if timeofday == '00': #display sleeping for 6 hours from 0-6 am
        backlight.value = False
        hoursasleep = 6
        sleepy = hoursasleep*60*60
        time.sleep(sleepy)
        backlight.value = True
    elif not buttonA.value: # just button A pressed
        weather = weatherout()
        if len(weather) > 1:
            # weather = [city, currenttemp, humidity, windspeed, pressure, clouds]
            draw.text((x, y), "Stadt: {}".format(str(weather[0])), font=font, fill="#D85930")
            y += font.getsize(str(weather[0]))[1]
            draw.text((x, y), "Außen Temp: {}C".format(str(weather[1])), font=font, fill="#E06565")
            y += font.getsize(str(weather[1]))[1]
            draw.text((x, y), "H2O Luft: {}%".format(str(weather[2])), font=font, fill="#145FA7")
            y += font.getsize(str(weather[2]))[1]
            draw.text((x, y), "Windgesch: {}kmh".format(str(weather[3])), font=font, fill="#3EB8C2")
            y += font.getsize(str(weather[3]))[1]
            draw.text((x, y), "Luftdruck: {}hPa".format(str(weather[4])), font=font, fill="#37AEC3")
            y += font.getsize(str(weather[4]))[1]
            draw.text((x, y), "Wolkendichte: {}%".format(str(weather[5])), font=font, fill="#B2BBA7")
            y += font.getsize(str(weather[5]))[1]
            # Display image.
            disp.image(image, rotation)
            time.sleep(30)
        else:
            # API failure: show the error string briefly.
            draw.text((x, y), "{}".format(str(weather[0])), font=font, fill="#D85930")
            y += font.getsize(str(weather[0]))[1]
            # Display image.
            disp.image(image, rotation)
            time.sleep(5)
    elif not buttonB.value: # just button B pressed
        msg = "I <3 Batzi"
        draw.text((x, y), msg, font=font2, fill="#ff0000")
        y += font2.getsize(str(msg))[1]
        #uptime = round(get_uptime(), 2)
        #draw.text((x, y), f"Uptime in Days: {uptime}", font=font, fill="#B2BBA7")
        food = foodchoice(foodlist)
        draw.text((x, y), f"{food}?", font=font, fill="#B2BBA7")
        # Display image.
        disp.image(image, rotation)
        time.sleep(5)
    else:
        # Default screen: room climate, CPU temperature, pi-hole stats.
        roomtemp = tempfetcher()
        sysstats = get_systemstats()
        pystats = get_piholedatv2()
        draw.text((x, y), "{}".format(time.strftime('%b %d %H:%M:%S')), font=font, fill="#D61A46")
        y += font.getsize(str(roomtemp[0]))[1]
        draw.text((x, y), "Innen Temp: {}C".format(str(roomtemp[0])), font=font, fill="#FC600A")
        y += font.getsize(str(roomtemp[0]))[1]
        draw.text((x, y), "H2O Luft: {}%".format(str(roomtemp[1])), font=font, fill="#347B98")
        y += font.getsize(str(roomtemp[1]))[1]
        draw.text((x, y), "CPU Temp: {}C".format(str(sysstats[0])), font=font, fill="#FB9902")
        y += font.getsize(str(sysstats[0]))[1]
        draw.text((x, y), "DNS Queries: {}".format(str(pystats[0])), font=font, fill="#9BD770")
        y += font.getsize(str(pystats[0]))[1]
        draw.text((x, y), "Ads Blocked: {}%".format(str(pystats[1])), font=font, fill="#66B032")
        y += font.getsize(str(pystats[1]))[1]
        """
        draw.text((x, y), "{}".format(time.strftime('%b %d %Y %H:%M:%S')), font=font, fill="#FF0000")
        y += font.getsize(str(roomtemp[0]))[1]
        draw.text((x, y), "Innen Temp: {}C".format(str(roomtemp[0])), font=font, fill="#FF7F00")
        y += font.getsize(str(roomtemp[0]))[1]
        draw.text((x, y), "H2O Luft: {}%".format(str(roomtemp[1])), font=font, fill="#FFFF00")
        y += font.getsize(str(roomtemp[1]))[1]
        draw.text((x, y), "CPU Temp: {}C".format(str(sysstats[0])), font=font, fill="#00cc1a")
        y += font.getsize(str(sysstats[0]))[1]
        draw.text((x, y), "DNS Queries: {}".format(str(pystats[0])), font=font, fill="#0000FF")
        y += font.getsize(str(pystats[0]))[1]
        draw.text((x, y), "Ads Blocked: {}%".format(str(pystats[1])), font=font, fill="#7700cc")
        y += font.getsize(str(pystats[1]))[1]
        """
        # Display image.
        disp.image(image, rotation)
        time.sleep(1)
| [
"birchprat@gmail.com"
] | birchprat@gmail.com |
33ca4b9212fc1ec9f22bed0a9e440b4ae4f4f446 | 8338a6e2ae38b81baa0616eab9327b80a8a24b74 | /onlinelda/json_handler/jsonify.py | eaae24af162baeeb65916fef21622fc12354226a | [] | no_license | LeandroOrdonez/Topicalizer | 0b95f7b05d3e608e2f71e0ead44db591f0339299 | 62407827030d1e107ba454a0d4c1363da993d939 | refs/heads/master | 2021-01-19T14:56:28.275086 | 2014-09-27T03:04:32 | 2014-09-27T03:04:32 | 11,018,828 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,548 | py | #!/usr/bin/python
# jsonify.py: Generates a JSON file from the per-document topic
# distribution fitted by onlineldavb.py
#
# Copyright (C) 2013 Leandro Ordonez
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import numpy as np
import sys, nltk, re
import MySQLdb as mysql
#from xml.sax.saxutils import escape
"""
Generates a JSON file from the per-document topic distribution fitted by
onlineldavb.py
"""
def run():
    """Read the per-document-topic CSV and write the JSON hierarchy.

    The input path comes from argv[1] (output swaps .csv for .json) or
    falls back to the default outcome files.
    """
    if len(sys.argv) > 1:
        csv_path = sys.argv[1]
        json_path = sys.argv[1][:(sys.argv[1].index('.csv'))] + '.json'
    else:
        csv_path = '../../outcome/per-document-topics.csv'
        json_path = '../../outcome/per-document-topics.json'
    pdc = np.loadtxt(csv_path, dtype={'names': \
    ('Operation ID', 'Operation Name', 'Topic', 'Topic Probability', 'Terms', 'Service URI'),\
    'formats': ('i4', 'S100', 'i4', 'f4', 'S400', 'S400')}, delimiter=',', skiprows=1)
    # sorted by topic, then by descending topic probability
    pcd = sorted(pdc, key=lambda x: (x[2], (1-x[3])))
    # document -> [(category label, weight)] for topics with prob > 0.15;
    # repr() replaces the Python-2-only backtick syntax.
    doc_categories = dict()
    for doc in pdc:
        if int(doc[3]*1000) > 150:
            key = (doc[0], doc[1], doc[5])
            if key not in doc_categories:
                doc_categories[key] = [('Category-'+repr(doc[2]), int(doc[3]*1000))]
            else:
                doc_categories[key].append(('Category-'+repr(doc[2]), int(doc[3]*1000)))
    # category label -> up to 31 strongest documents
    category_docs = dict()
    for category in pcd:
        if int(category[3]*1000) > 150:
            key = 'Category-'+repr(category[2])
            if key not in category_docs:
                category_docs[key] = [(category[0], category[1], category[5])]
            else:
                if len(category_docs[key]) <= 30:
                    category_docs[key].append((category[0], category[1], category[5]))
    json_output = '' + \
    """{
    "name": "categories",
    "children": [
    %s
    ]
    }""" % jsonify_categories(category_docs, doc_categories)
    # BUG-HARDENING: use a context manager so the file is closed even if
    # the write fails (was open/write/close).
    with open(json_path, 'w') as pdc_json_file:
        pdc_json_file.write(json_output)
def jsonify_categories(category_docs, doc_categories):
    """Render every category (and its documents) as comma-separated JSON
    objects; returns '' for an empty mapping."""
    # str.join replaces repeated concatenation plus trailing-comma strip.
    parts = []
    for category in category_docs:
        parts.append(
            '{\n "name": "' + category +
            '", \n "id": "' + category +
            '", \n "children": [\n %s\n]\n}' % jsonify_documents(
                category, category_docs[category], doc_categories))
    return ','.join(parts)
def jsonify_documents(category, docs, doc_categories):
    """Render one category's documents (with their per-category weights)
    as comma-separated JSON objects, pulling each operation's docs from
    the service_registry MySQL database."""
    # Connecting to MySQL database:
    db = mysql.connect(host='localhost', user='root', passwd='',
                       db='service_registry',
                       unix_socket='/opt/lampp/var/mysql/mysql.sock')
    # BUG-HARDENING: the connection was never closed; ensure it is.
    try:
        cursor = db.cursor()
        result = ''
        for doc in docs:
            # Retrieve the operation documentation (parameterised query
            # instead of string interpolation).
            cursor.execute('SELECT SOAP_OPERATION.OPERATIONDOCUMENTATION '
                           'FROM SOAP_OPERATION WHERE SOAP_OPERATION.ID=%s',
                           (int(doc[0]),))
            db_results = cursor.fetchall()
            op_doc = nltk.clean_html(db_results[0][0])
            op_doc = re.sub('"', "'", re.sub("\t|\n", " ", op_doc))
            # repr() replaces the Python-2-only backtick syntax.
            result += \
                '{\n "name": "' + doc[1] + \
                '", \n "id": "Operation-' + category[(category.index('-')+1):] + '.' + repr(doc[0]) + \
                '", \n "service_uri": "' + doc[2] + \
                '", \n "operation_doc": "' + op_doc + \
                '", \n "children": [\n %s\n]\n},' % jsonify_cat_per_doc(doc_categories[doc])
    finally:
        db.close()
    return result[:-1]
def jsonify_cat_per_doc(categories):
    """Render (category name, weight) pairs as comma-separated JSON
    objects; returns '' for an empty sequence."""
    # str.join replaces manual concatenation plus trailing-comma strip;
    # %-formatting replaces the Python-2-only backtick repr syntax.
    return ','.join('{"name": "%s", "size": %d}' % (name, size)
                    for name, size in categories)
# Script entry point.
if __name__ == '__main__':
    run()
| [
"leandro.ordonez.ante@gmail.com"
] | leandro.ordonez.ante@gmail.com |
3d73e4bb4071a04e2289b417ddc0a5cbbc0b927b | 1293877cc3fde664f0b3b56a67f83ceff27e91f6 | /Leopold_Olivia/test.py | 8257fbdae1cefe37223d671bacbb8efd672207ba | [] | no_license | obalderes/pd7-miniproject1 | dbf5bd1d83100bcbcab9c9829880f5f3d8569180 | 7f89f376cd8ce804f39e73b065ab4df176e7ca75 | refs/heads/master | 2021-01-20T23:03:45.774203 | 2012-10-21T23:28:52 | 2012-10-21T23:28:52 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 206 | py | import mongo
def testing123():
    """Smoke-test the mongo helpers: create a story, then append a line."""
    mongo.newStory("Jets")
    mongo.addLine("Jets", "The Jets lost today...again.")
# Run the smoke test when executed directly.
if __name__=="__main__":
    testing123()
| [
"leopoldsg94@gmail.com"
] | leopoldsg94@gmail.com |
638ad0a44f682b5e0f02d9d651d4f490a4ac7d49 | a8c0664b02ff552620bcee32846ef27686559361 | /validation/model.euler/euler-rayleigh.py | 2b912c8e2b276622c1c38b165adc70962292cdea | [] | no_license | ZhaoMY93/flowdyn | 6d8f22b71ab5a733bcb465ae52dc3c3216750c0b | 60625cc089fc17b7647a36ab98525c189a744f14 | refs/heads/master | 2023-06-20T23:42:48.673591 | 2021-06-05T13:21:12 | 2021-06-05T13:21:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,470 | py | # -*- coding: utf-8 -*-
"""
test integration methods
"""
import time
import cProfile
from pylab import *
import numpy as np
from flowdyn.mesh import *
from flowdyn.field import *
from flowdyn.xnum import *
from flowdyn.integration import *
import flowdyn.modelphy.euler as euler
import flowdyn.modeldisc as modeldisc
#import flowdyn.solution.euler_riemann as sol
# ---- case setup: 1D duct of 50 cells --------------------------------------
nx = 50
length=10.
meshsim = unimesh(ncell=nx, length=length)
# Euler model with a constant source term on the third equation.
model = euler.model(source=[None, None, lambda x,q:.1])
# Subsonic inlet (total pressure/temperature) and subsonic outlet (static p).
bcL = { 'type': 'insub', 'ptot': 1.4, 'rttot': 1. }
bcR = { 'type': 'outsub', 'p': 1. }
rhs = modeldisc.fvm(model, meshsim, muscl(minmod), 
                    bcL=bcL, bcR=bcR)
solver = rk2(meshsim, rhs)
# computation
#
endtime = 500.
cfl     = .5
finit = rhs.fdata_fromprim([ 1., 0., 1. ]) # rho, u, p
fsol = solver.solve(finit, cfl, [endtime])
solver.show_perf()
# Isentropic Mach number from the ptot/p ratio, and the RMS error of the
# computed Mach field against it.
mach_th = np.sqrt(((bcL['ptot']/bcR['p'])**(1./3.5)-1.)/.2)
error = np.sqrt(np.sum((fsol[-1].phydata('mach')-mach_th)**2)/nx)/mach_th
print ("theoretical Mach : {:3.3f}\nerror : {:.2}".format(mach_th, error*100))
# NOTE(review): ``vars`` shadows the builtin of the same name.
vars = ['density', 'pressure', 'mach']
fig, axs = subplots(1, len(vars), figsize=(6*len(vars),6))
fig.suptitle('flow in straight duct')
# Figure / Plot: one panel per variable.
for name, ax in zip(vars, axs):
    ax.set_xlim(0., length)
    ax.set_ylabel(name)
    ax.grid(linestyle='--', color='0.5')
    #finit.plot(name, 'k-.')
    fsol[0].plot(name, 'o', axes=ax)
    #fig.savefig(name+'.png', bbox_inches='tight')
fig.tight_layout()
show()
| [
"jeremie.gressier@isae.fr"
] | jeremie.gressier@isae.fr |
1be13eebadb30837a50498bf56c567f3ae17a166 | 4f00c6a08db5755b294bd519b9377866f5ff6c19 | /src/tests/google/appengine/api/xmpp/xmpp_service_stub.py | 8071acee686aa1637f430255e904c727b3a3af37 | [
"Apache-2.0",
"LicenseRef-scancode-unknown-license-reference"
] | permissive | cooljeanius/cauliflowervest | 02035a8455b1dde469ebfd0b202c02456820a679 | a9bc209b610a927083bf16274d8451c6c45227bf | refs/heads/main | 2022-12-24T15:28:30.616604 | 2020-09-25T23:55:15 | 2020-09-25T23:55:15 | 303,812,548 | 1 | 0 | Apache-2.0 | 2023-09-04T16:48:46 | 2020-10-13T19:46:58 | Python | UTF-8 | Python | false | false | 5,161 | py | #!/usr/bin/env python
#
# Copyright 2007 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Stub version of the XMPP API, writes messages to logs."""
import logging
import os
from google.appengine.api import apiproxy_stub
from google.appengine.api import app_identity
from google.appengine.api import xmpp
from google.appengine.api.xmpp import xmpp_service_pb
class XmppServiceStub(apiproxy_stub.APIProxyStub):
  """Python only xmpp service stub.

  This stub does not use an XMPP network. It prints messages to the console
  instead of sending any stanzas.
  """

  def __init__(self, log=logging.info, service_name='xmpp'):
    """Initializer.

    Args:
      log: A logger, used for dependency injection.
      service_name: Service name expected for all calls.
    """
    super(XmppServiceStub, self).__init__(service_name)
    self.log = log

  def _Dynamic_GetPresence(self, request, response):
    """Implementation of XmppService::GetPresence.

    Returns online if the first character of the JID comes before 'm' in the
    alphabet, otherwise returns offline.

    Args:
      request: A PresenceRequest.
      response: A PresenceResponse.
    """
    jid = request.jid()
    self._GetFrom(request.from_jid())
    if jid[0] < 'm':
      response.set_is_available(True)
    else:
      response.set_is_available(False)

  def _Dynamic_SendMessage(self, request, response):
    """Implementation of XmppService::SendMessage.

    Args:
      request: An XmppMessageRequest.
      response: An XmppMessageResponse .
    """
    from_jid = self._GetFrom(request.from_jid())
    self.log('Sending an XMPP Message:')
    self.log(' From:')
    self.log(' ' + from_jid)
    self.log(' Body:')
    self.log(' ' + request.body())
    self.log(' Type:')
    self.log(' ' + request.type())
    self.log(' Raw Xml:')
    self.log(' ' + str(request.raw_xml()))
    self.log(' To JIDs:')
    for jid in request.jid_list():
      self.log(' ' + jid)
    for jid in request.jid_list():
      response.add_status(xmpp_service_pb.XmppMessageResponse.NO_ERROR)

  def _Dynamic_SendInvite(self, request, response):
    """Implementation of XmppService::SendInvite.

    Args:
      request: An XmppInviteRequest.
      response: An XmppInviteResponse .
    """
    from_jid = self._GetFrom(request.from_jid())
    self.log('Sending an XMPP Invite:')
    self.log(' From:')
    self.log(' ' + from_jid)
    self.log(' To: ' + request.jid())

  def _Dynamic_SendPresence(self, request, response):
    """Implementation of XmppService::SendPresence.

    Args:
      request: An XmppSendPresenceRequest.
      response: An XmppSendPresenceResponse .
    """
    from_jid = self._GetFrom(request.from_jid())
    self.log('Sending an XMPP Presence:')
    self.log(' From:')
    self.log(' ' + from_jid)
    self.log(' To: ' + request.jid())
    if request.type():
      self.log(' Type: ' + request.type())
    if request.show():
      self.log(' Show: ' + request.show())
    if request.status():
      self.log(' Status: ' + request.status())

  def _GetFrom(self, requested):
    """Validates that the from JID is valid.

    Args:
      requested: The requested from JID.

    Returns:
      string, The from JID.

    Raises:
      xmpp.InvalidJidError if the requested JID is invalid.
    """
    appid = app_identity.get_application_id()
    # FIX: compare to None with ``is``, not ``==`` (PEP 8 / E711).
    if requested is None or requested == '':
      return appid + '@appspot.com/bot'
    node, domain, resource = ('', '', '')
    at = requested.find('@')
    if at == -1:
      self.log('Invalid From JID: No \'@\' character found. JID: %s', requested)
      raise xmpp.InvalidJidError()
    node = requested[:at]
    rest = requested[at+1:]
    if rest.find('@') > -1:
      self.log('Invalid From JID: Second \'@\' character found. JID: %s',
               requested)
      raise xmpp.InvalidJidError()
    slash = rest.find('/')
    if slash == -1:
      domain = rest
      resource = 'bot'
    else:
      domain = rest[:slash]
      resource = rest[slash+1:]
    if resource.find('/') > -1:
      self.log('Invalid From JID: Second \'/\' character found. JID: %s',
               requested)
      raise xmpp.InvalidJidError()
    if domain == 'appspot.com' and node == appid:
      return node + '@' + domain + '/' + resource
    elif domain == appid + '.appspotchat.com':
      return node + '@' + domain + '/' + resource
    self.log('Invalid From JID: Must be appid@appspot.com[/resource] or '
             'node@appid.appspotchat.com[/resource]. JID: %s', requested)
    raise xmpp.InvalidJidError()
| [
"egall@gwmail.gwu.edu"
] | egall@gwmail.gwu.edu |
6d4956a0ac9bb6cca1d3e068b2f69a2948fa6cc7 | 79b51123340f8fc97066c1647f536cb16e3308ec | /main/migrations/0005_auto_20180516_2301.py | cf1c6f0fade9f0751f2d2eb6f32650eba076e751 | [] | no_license | ox1230/capl | 1edb9bd130cba0133d0106e411545d79a7fd881f | 7f81937795699381b1df26e524a5e52d909ce462 | refs/heads/master | 2021-07-12T09:01:18.888306 | 2018-11-01T16:30:44 | 2018-11-01T16:30:44 | 132,298,555 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 562 | py | # Generated by Django 2.0.4 on 2018-05-16 14:01
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated migration (Django 2.0): relax two field definitions.

    - ``Category.maxi`` becomes a nullable positive integer.
    - ``History.name`` becomes an optional, blankable text field.
    """

    dependencies = [
        ('main', '0004_auto_20180516_2300'),
    ]

    operations = [
        # Allow Category.maxi to be NULL in the database.
        migrations.AlterField(
            model_name='category',
            name='maxi',
            field=models.PositiveIntegerField(null=True),
        ),
        # Allow History.name to be empty/NULL with '' as the default.
        migrations.AlterField(
            model_name='history',
            name='name',
            field=models.TextField(blank=True, default='', null=True),
        ),
    ]
| [
"ox11223300@hanmail.net"
] | ox11223300@hanmail.net |
cee4a664fcc22aa71cfe586182065adf4a6ac247 | 0510d945b39e504fd9533d7aca630a46cea1d724 | /Francisco_Carvalho/loginSystem.py | aa0f9867236503e0cf56a561f7fd7cb550a80b53 | [] | no_license | HackerSchool/HS_Recrutamento_Python | b6d7c099aa6f5bc2325be5e3b0a8fa0e4d439b4a | 5d77d4f0e59da8d69edf02f03d9dd888e83fc33a | refs/heads/main | 2023-08-28T10:55:31.567770 | 2021-11-09T11:03:54 | 2021-11-09T11:03:54 | 425,946,545 | 1 | 1 | null | 2021-11-08T18:36:36 | 2021-11-08T18:15:41 | null | UTF-8 | Python | false | false | 1,211 | py | class LoginSystem:
def __init__(self):
self.currentUsername = ""
self.loggedIn = False
self.users = dict()
def logOut(self):
if(self.loggedIn):
self.currentUsername = ""
self.loggedIn = False
else:
raise Exception("Not logged in")
def checkLogin(self,name, passowrd):
if name in self.users:
if self.users[name] == passowrd:
return True
return False
def doRegisterUser(self,user,password):
if user not in self.users:
self.users[user] = password
else:
raise Exception("User \"{}\" already exists".format(user))
def doLogin(self,name,password):
if self.loggedIn:
# ? - Should this be an exception?
print("Already loged in")
else:
if self.checkLogin(name, password):
self.currentUsername = name;
self.loggedIn = True
print("Hello "+self.currentUsername)
else:
print("Invalid username or password")
def changePassword(self, pwd):
# abstraction
self.users[self.currentUsername] = pwd | [
"francisco.parreira.fonseca@gmail.com"
] | francisco.parreira.fonseca@gmail.com |
d555bbcd2c811c9d1ec8cb0c3f6cdd62d05c4a97 | 951e36478d75044ba1895da343bc3930de4bd37c | /cone_side_code/source/various_tests/die.py | e85e8d8a424f99e56255f3d65c1d4fe828d10a69 | [] | no_license | orion2166/ConeDroneApp | 76c8f01b371a11da26e57a57869f5f91af49967f | af7b920e59b6501714e4560cf40f107148b26354 | refs/heads/master | 2023-01-23T15:49:49.115007 | 2020-12-03T12:58:44 | 2020-12-03T12:58:44 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | import signal
import sys
import RPi.GPIO as GPIO
from smbus2 import SMBus #using smbus2 by Karl-Petter Lindegaard (I2C messaging)
import time
import sensor
import schedule
import indicator
GPIO.setwarnings(False)
indicator.clearLEDS()
indicator.lowerFlag() | [
"h38383838@tamu.edu"
] | h38383838@tamu.edu |
2f50c5d8fbaf7359990e0e5264f56327e41de7cc | 0e2a58dce33bb412f19d019b98168d68af9bdeec | /model.py | 9d1eb91c29159a77b3e6e6ab01503bc436ef9099 | [] | no_license | Naveenprabaharan/Salary_Prediction | 0ea2810a177b7c0d3de8f4044970f35d51efa820 | c36cf19545667c4e330cb08bb273c45afa74b06a | refs/heads/master | 2023-08-23T16:16:10.834688 | 2021-10-23T15:39:28 | 2021-10-23T15:39:28 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from sklearn.model_selection import train_test_split
from sklearn.linear_model import LinearRegression
import pickle
# MODEL TRAINING:
# dataset = pd.read_csv('__demo\Salary_Data.csv')
# X = dataset.iloc[:, :-1].values
# y = dataset.iloc[:, -1].values
# X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 1/3, random_state = 0)
# regressor = LinearRegression()
# regressor.fit(X_train, y_train)
# X_test = input("enter year:")
# y_pred = regressor.predict([[X_test]])
# print(y_pred)
# MODEL DEPLOYEMENT:
def salaryPrediction(hrs):
    """Predict a salary for *hrs* years of experience.

    Loads the pickled linear-regression model from disk on every call
    and evaluates it on the single input value.

    Args:
        hrs: years of experience (the model's single feature).

    Returns:
        The result of ``model.predict`` on ``[[hrs]]``.
    """
    # Context manager closes the model file (the original left the
    # handle from open() dangling).
    with open('__demo/reg_model.p', 'rb') as fh:
        model = pickle.load(fh)
    # NOTE(review): pickle.load can execute arbitrary code; only load
    # model files you produced yourself.
    return model.predict([[hrs]])
| [
"you@example.com"
] | you@example.com |
7bf946dbafdf97b8a37a69fc222646baade45db5 | 2452d22a7aacf49d473dc414f6cc750af9857f8b | /src/facio/run.py | c7d8ffa7980c7db33fab17f031b7edfaa05c931a | [
"BSD-2-Clause"
] | permissive | krak3n/Facio | ce507edb0df5e161453ff04c402febeecd48e98a | 65ec0d078b133f0de77bfb7bc1c037acda63c193 | refs/heads/master | 2020-05-19T11:16:24.545548 | 2013-08-19T08:26:19 | 2013-08-19T08:26:19 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,332 | py | # -*- coding: utf-8 -*-
"""
.. module:: facio.run
:synopsis: Runs the Facio template process.
"""
import os
from facio.base import BaseFacio
from facio.config import (HOOKS_FILE_NAME,
Settings,
CommandLineInterface,
ConfigurationFile)
from facio.hooks import Hook
from facio.template import Template
from facio.state import state
class Run(BaseFacio):

    def run(self):
        """Drive the full Facio template pipeline end to end."""
        # Gather settings from the command line and the config file.
        cli = CommandLineInterface()
        cli.start()
        file_config = ConfigurationFile().read()
        settings = Settings(cli, file_config)

        state.update_context_variables(settings.get_variables())

        # Prepare the template copy, honouring the ignore globs.
        template = Template(settings.get_template_path())
        template.update_copy_ignore_globs(settings.copy_ignore_globs())
        template.update_render_ignore_globs(settings.render_ignore_globs())
        template.copy()

        # Load template-shipped hooks and run them around rename/render.
        hooks = Hook()
        hooks.load(os.path.join(
            state.get_project_root(),
            HOOKS_FILE_NAME))

        if hooks.has_before():
            hooks.run_before()

        template.rename()
        template.render()

        if hooks.has_after():
            hooks.run_after()

        self.success('Done')
| [
"hello@chris.reeves.io"
] | hello@chris.reeves.io |
f7b4ee0dbde59d9bfb30f2f86c1500ec367ab637 | b557c7e9554e2d18f92b9b5f069839998c288737 | /tests_diff_between_times.py | ac47236d75f40655ef35303190e3177f567f4f50 | [] | no_license | davidone/misc | a50ed21ef61c40605ce21ea796167eda4f6da009 | 43a82a70f4378ad1cf1b91ba6b74a94c17ed4626 | refs/heads/master | 2021-09-27T22:55:24.120783 | 2021-09-15T13:20:50 | 2021-09-15T13:20:50 | 18,870,704 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 454 | py | import pytest
import diff_between_times
def test_validate_date():
    """Check validate_date round-trip, its failure mode, and dateDiff."""
    good = '2021-09-15 00:00:00'
    assert diff_between_times.validate_date(None, None, good) == good
    with pytest.raises(Exception):
        # A truncated timestamp must be rejected.
        diff_between_times.validate_date(None, None, '2021-09-15 00:')
    # One minute apart -> a difference of 1.
    assert diff_between_times.dateDiff('2021-09-15 00:00:00',
                                       '2021-09-15 00:01:00') == 1
| [
"noreply@github.com"
] | noreply@github.com |
f9cf49601f28234a3dc2bbac71ed62fd49b12ede | 9dc77d85384f8a888e7d1699c2e519cafe7f8491 | /person/views.py | 69ece19fecbf722c83d4f0a363231e59cfc2219f | [] | no_license | ChaseKnowlden/2021SpringGroup1 | e5e93ebaf477ba87f7742f89cb5d990a720c74fa | fb820e5e852386310866894c7a6f289dff225ce1 | refs/heads/master | 2023-05-27T06:31:32.194596 | 2021-05-30T15:02:19 | 2021-05-30T15:02:19 | 375,468,835 | 0 | 0 | null | 2021-06-09T19:37:28 | 2021-06-09T19:32:07 | null | UTF-8 | Python | false | false | 1,920 | py | from django.shortcuts import render
from django.http import HttpResponse
# Create your views here.
from .models import Person
import requests
import json
numberPerson = 10
PERSON_API = 'https://randomuser.me/api/?results='+str(numberPerson)
def person(response,id):
    # NOTE(review): both parameters are ignored and the saved record is
    # hard-coded -- this looks like scaffolding/demo code.  The
    # attributes set here (name/age/content/isStudent) do not match the
    # fields populated elsewhere in this module (title/firstname/...);
    # confirm against the Person model.
    person = Person()
    person.name = "bilal"
    person.age = 12
    person.content = "Icerik"
    person.isStudent = True
    person.save()
    return HttpResponse("<h1>person:</h1>")
def createPerson(req):
    """Create and save a Person from request query-string parameters.

    NOTE(review): a state-changing endpoint driven by GET parameters is
    unconventional (GET should be safe/idempotent, and this bypasses
    Django's POST CSRF protection); any missing key raises
    MultiValueDictKeyError (a 500) -- confirm this is intended.
    """
    print(req.GET["title"])  # debug output
    person = Person()
    person.title = req.GET["title"]
    person.firstname = req.GET["firstname"]
    person.lastname = req.GET["lastname"]
    person.location = req.GET["location"]
    person.email = req.GET["email"]
    person.age = req.GET["age"]
    person.phone = req.GET["phone"]
    person.imageUrl = req.GET["imageUrl"]
    person.save()
    return HttpResponse(person)
def getFirst(req):
    """Render the first Person row (or None if the table is empty)."""
    return HttpResponse(Person.objects.first())
def getLast(req):
    """Render the last Person row (or None if the table is empty)."""
    return HttpResponse(Person.objects.last())
def savePeople(req):
    """Fetch random users from PERSON_API and persist them as Person rows.

    Returns the raw upstream response body so the caller can inspect
    what was imported.
    """
    res = requests.get(PERSON_API)
    # Parse the body as JSON directly.  The original decoded the bytes
    # and replaced every single quote with a double quote -- that
    # corrupts valid JSON whenever a value contains an apostrophe
    # (e.g. "O'Brien" or many street names).
    data = res.json()
    print(json.dumps(data, indent=2))  # debug: pretty-print the payload
    for value in data["results"]:
        person = Person()
        person.title = value["name"]["title"]
        person.firstname = value["name"]["first"]
        person.lastname = value["name"]["last"]
        person.location = value["location"]["street"]["name"]
        person.email = value["email"]
        person.age = value["dob"]["age"]
        person.phone = value["phone"]
        person.imageUrl = value["picture"]["medium"]
        person.save()
        print(person)  # debug
    return HttpResponse(res.content)
"4teko7@gmail.com"
] | 4teko7@gmail.com |
e407f871a1483c7da6a8d86f78c3ebf7f0942aeb | d2e822f47779dff3cec81e1c9e71f381a6ceb0f0 | /COURS/python/TP3/TP3 Ex2.py | 66b3a89b0c21ad0ebdee99bde9ba26dd50bd92d8 | [] | no_license | BrandidiCH4O4/Code-des-cours | fe019ba211785ade42d54fc1238d4944bb178373 | ac9eb6bf9042f2ceaca5e8e5a6de49848395bffb | refs/heads/main | 2023-01-21T02:03:51.412532 | 2020-12-02T13:41:19 | 2020-12-02T13:41:19 | 309,106,429 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 832 | py | """Ex 2-- Travail avec des chaînes"""
#######################################################################################
#1:
phrase=input("Entrer une phrase: ")
lettre1=input("Entrer une première lettre: ")
lettre2=input("Entrer une seconde lettre: ")
x=0
y=0
for ch in phrase:
if ch==lettre1:
x=x+1
elif ch==lettre2:
y=y+1
print("Il y a ",x, "lettres ", lettre1, "et ",y, "lettres", lettre2, "dans la phrase.")
#######################################################################################
#2:
ph=input("Entrer un phrase: ")
lettre=input("Entrer une première lettre: ")
lettre3=input("Entrer une seconde lettre: ")
ph2=''
for c in ph:
if c==lettre:
ph2=ph2+lettre3
elif c==lettre3:
ph2=ph2+lettre
else:
ph2=ph2+c
print(ph2)
| [
"noreply@github.com"
] | noreply@github.com |
0547381d2254684900dcf79141d5b76f75c00912 | 65fce73a1e6a36718238cdef09a17493b19532a0 | /8/swagger_client/__init__.py | d6ffda23006c1131e1217a75ca10767be0046ebb | [
"Apache-2.0"
] | permissive | apitore/apitore-sdk-python | eb419589609efb86bd279cd1733c2a03cdc03680 | c0814c5635ddd09e9a20fcb155b62122bee41d33 | refs/heads/master | 2020-03-21T10:06:34.557781 | 2018-06-23T21:26:27 | 2018-06-23T21:26:27 | 138,434,217 | 3 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,044 | py | # coding: utf-8
# flake8: noqa
"""
Word2Vec APIs
Word2Vec.<BR />[Endpoint] https://api.apitore.com/api/8 # noqa: E501
OpenAPI spec version: 1.0.2
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
# import apis into sdk package
from swagger_client.api.word_2_vec_controller_api import Word2VecControllerApi
# import ApiClient
from swagger_client.api_client import ApiClient
from swagger_client.configuration import Configuration
# import models into sdk package
from swagger_client.models.analogy_response_entity import AnalogyResponseEntity
from swagger_client.models.distance_entity import DistanceEntity
from swagger_client.models.distance_response_entity import DistanceResponseEntity
from swagger_client.models.similarity_response_entity import SimilarityResponseEntity
from swagger_client.models.vector_distance_response_entity import VectorDistanceResponseEntity
from swagger_client.models.word_vector_response_entity import WordVectorResponseEntity
| [
"keigoht@gmail.com"
] | keigoht@gmail.com |
9ab304a7e81f8015e16701fc650f207c2421c33b | d5e5c7e2fab03ecdb074435ea7a0d249e0da1514 | /sso_auth/authentication.py | 476c94c36bcb3001d0ade4be15ab04fd317cf7a9 | [] | no_license | th3abhi97/sso_auth | 9be8dc116ae7eb57004cf9577a8d3a15953782e5 | ad6cd34ef82a1cb3a9318ee875c0260c4808a7a4 | refs/heads/master | 2022-01-09T04:13:27.700722 | 2017-10-16T22:35:42 | 2017-10-16T22:35:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 258 | py | from rest_framework_sso import claims
def create_authorization_payload(session_token, user, **kwargs):
    """Build the claim set placed in a rest_framework_sso authorization token.

    NOTE(review): USER_ID is filled with ``user.username`` rather than a
    primary key -- confirm the consuming services expect the username here.
    Extra keyword arguments are accepted but ignored.
    """
    return {
        claims.TOKEN: claims.TOKEN_AUTHORIZATION,
        claims.SESSION_ID: session_token.pk,
        claims.USER_ID: user.username,
    }
| [
"abhi3@uw.edu"
] | abhi3@uw.edu |
143f92c1451594cde84b625fb24f6ce60b314439 | e7495daa51d48087da2cb1fb81e917d4923645e0 | /src/control_app (stable)/venv/bin/f2py | 01341ca47ad253e7725511c52003d92451b51ecf | [] | no_license | sebastiandlc/UCF-Duct-Inspection-Bot | a200a103b463cc40f1a31e5ff447bbd3a894d56d | 26499cccf3be393c7ff523a821ecbf84ca9941a6 | refs/heads/main | 2023-08-11T05:15:04.861545 | 2021-10-01T20:35:05 | 2021-10-01T20:35:05 | 412,609,020 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 450 | #!/home/spiderbotgold/SeniorDesignSpiderBot/catkin_ws/src/robot_gui_bridge/gui/venv/bin/python3
# EASY-INSTALL-ENTRY-SCRIPT: 'numpy==1.17.2','console_scripts','f2py'
__requires__ = 'numpy==1.17.2'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(
load_entry_point('numpy==1.17.2', 'console_scripts', 'f2py')()
)
| [
"sebastian.dlc@outlook.com"
] | sebastian.dlc@outlook.com | |
6736e09edf00b418607e71443127f44af7ec6cea | 85a9ffeccb64f6159adbd164ff98edf4ac315e33 | /pysnmp/DXS3326GSR-SWL3MGMT-MIB.py | b6967e401b97f5dc7cbce020ce7e6f82e3f21569 | [
"Apache-2.0"
] | permissive | agustinhenze/mibs.snmplabs.com | 5d7d5d4da84424c5f5a1ed2752f5043ae00019fb | 1fc5c07860542b89212f4c8ab807057d9a9206c7 | refs/heads/master | 2020-12-26T12:41:41.132395 | 2019-08-16T15:51:41 | 2019-08-16T15:53:57 | 237,512,469 | 0 | 0 | Apache-2.0 | 2020-01-31T20:41:36 | 2020-01-31T20:41:35 | null | UTF-8 | Python | false | false | 43,774 | py | #
# PySNMP MIB module DXS3326GSR-SWL3MGMT-MIB (http://snmplabs.com/pysmi)
# ASN.1 source file:///Users/davwang4/Dev/mibs.snmplabs.com/asn1/DXS3326GSR-SWL3MGMT-MIB
# Produced by pysmi-0.3.4 at Mon Apr 29 18:40:25 2019
# On host DAVWANG4-M-1475 platform Darwin version 18.5.0 by user davwang4
# Using Python version 3.7.3 (default, Mar 27 2019, 09:23:15)
#
OctetString, Integer, ObjectIdentifier = mibBuilder.importSymbols("ASN1", "OctetString", "Integer", "ObjectIdentifier")
NamedValues, = mibBuilder.importSymbols("ASN1-ENUMERATION", "NamedValues")
ValueSizeConstraint, ConstraintsIntersection, ValueRangeConstraint, SingleValueConstraint, ConstraintsUnion = mibBuilder.importSymbols("ASN1-REFINEMENT", "ValueSizeConstraint", "ConstraintsIntersection", "ValueRangeConstraint", "SingleValueConstraint", "ConstraintsUnion")
InterfaceIndex, = mibBuilder.importSymbols("IF-MIB", "InterfaceIndex")
DesignatedRouterPriority, RouterID, Status, TOSType, HelloRange, Metric, UpToMaxAge, AreaID, PositiveInteger = mibBuilder.importSymbols("OSPF-MIB", "DesignatedRouterPriority", "RouterID", "Status", "TOSType", "HelloRange", "Metric", "UpToMaxAge", "AreaID", "PositiveInteger")
NotificationGroup, ModuleCompliance = mibBuilder.importSymbols("SNMPv2-CONF", "NotificationGroup", "ModuleCompliance")
ModuleIdentity, iso, MibScalar, MibTable, MibTableRow, MibTableColumn, Gauge32, Integer32, ObjectIdentity, Counter32, MibIdentifier, Unsigned32, Bits, NotificationType, Counter64, IpAddress, TimeTicks = mibBuilder.importSymbols("SNMPv2-SMI", "ModuleIdentity", "iso", "MibScalar", "MibTable", "MibTableRow", "MibTableColumn", "Gauge32", "Integer32", "ObjectIdentity", "Counter32", "MibIdentifier", "Unsigned32", "Bits", "NotificationType", "Counter64", "IpAddress", "TimeTicks")
TruthValue, PhysAddress, TextualConvention, RowStatus, DisplayString = mibBuilder.importSymbols("SNMPv2-TC", "TruthValue", "PhysAddress", "TextualConvention", "RowStatus", "DisplayString")
dxs3326GSR, = mibBuilder.importSymbols("SW-PROJECTX-SRPRIMGMT-MIB", "dxs3326GSR")
# Module identity for the DXS-3326GSR layer-3 management MIB subtree
# (enterprise OID 1.3.6.1.4.1.171.11.59.7.3).
swL3MgmtMIB = ModuleIdentity((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3))
if mibBuilder.loadTexts: swL3MgmtMIB.setLastUpdated('0007150000Z')
if mibBuilder.loadTexts: swL3MgmtMIB.setOrganization(' ')
class NodeAddress(OctetString):
    # Fixed-length textual convention: exactly 6 octets (MAC-address sized).
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(6, 6)
    fixedLength = 6
class NetAddress(OctetString):
    """Textual convention: a fixed-length, four-octet network address."""
    fixedLength = 4
    subtypeSpec = OctetString.subtypeSpec + ValueSizeConstraint(4, 4)
# --- OID subtree anchors for the major management groups of this MIB ---
swL3DevMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1))
swL3IpMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2))
swL3RelayMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3))
swL3IpCtrlMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1))
swL3IpFdbMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2))
swL3RelayBootpMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1))
swL3RelayDnsMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2))
swL3DevCtrl = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1))
# Read-write scalars toggling each routing/multicast protocol on the device.
# All share the enumeration other(1)/disabled(2)/enabled(3).
swL3DevCtrlRIPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlRIPState.setStatus('current')
swL3DevCtrlOSPFState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlOSPFState.setStatus('current')
swL3DevCtrlDVMRPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlDVMRPState.setStatus('current')
swL3DevCtrlPIMState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlPIMState.setStatus('current')
swL3DevCtrlVRRPState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 1, 1, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3DevCtrlVRRPState.setStatus('current')
# --- IP interface control table, indexed by interface IP address ---
swL3IpCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1), )
if mibBuilder.loadTexts: swL3IpCtrlTable.setStatus('current')
swL3IpCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpCtrlIpAddr"))
if mibBuilder.loadTexts: swL3IpCtrlEntry.setStatus('current')
# Index column: no max-access set (not directly accessible, per SMIv2 style).
swL3IpCtrlIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 1), IpAddress())
if mibBuilder.loadTexts: swL3IpCtrlIpAddr.setStatus('current')
swL3IpCtrlIfIndex = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpCtrlIfIndex.setStatus('current')
swL3IpCtrlInterfaceName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 3), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 12))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlInterfaceName.setStatus('current')
swL3IpCtrlIpSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlIpSubnetMask.setStatus('current')
swL3IpCtrlVlanName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 5), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 32))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlVlanName.setStatus('current')
# Address-assignment mode: other(1)/manual(2)/bootp(3)/dhcp(4).
swL3IpCtrlMode = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4))).clone(namedValues=NamedValues(("other", 1), ("manual", 2), ("bootp", 3), ("dhcp", 4)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlMode.setStatus('current')
swL3IpCtrlSecondary = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 7), TruthValue()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlSecondary.setStatus('current')
# RowStatus column used to create/destroy interface entries.
swL3IpCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 8), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpCtrlState.setStatus('current')
swL3IpCtrlOperState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 1, 1, 1, 9), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("up", 2), ("down", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpCtrlOperState.setStatus('current')
# --- Read-only IP forwarding-database table, indexed by IP address ---
swL3IpFdbInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1), )
if mibBuilder.loadTexts: swL3IpFdbInfoTable.setStatus('current')
swL3IpFdbInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpFdbInfoIpAddr"))
if mibBuilder.loadTexts: swL3IpFdbInfoEntry.setStatus('current')
swL3IpFdbInfoIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoIpAddr.setStatus('current')
swL3IpFdbInfoIpSubnetMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoIpSubnetMask.setStatus('current')
swL3IpFdbInfoPort = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoPort.setStatus('current')
# Entry origin: other(1)/static(2)/dynamic(3).
swL3IpFdbInfoType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 2, 1, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("static", 2), ("dynamic", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpFdbInfoType.setStatus('current')
# ARP cache aging time (read-write scalar, 0..65535).
swL3IpArpAgingTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3IpArpAgingTime.setStatus('current')
# --- Static route table, indexed by destination, mask and primary/backup flag ---
swL3IpStaticRouteTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5), )
if mibBuilder.loadTexts: swL3IpStaticRouteTable.setStatus('current')
swL3IpStaticRouteEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpStaticRouteDest"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpStaticRouteMask"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3IpStaticRouteBkupState"))
if mibBuilder.loadTexts: swL3IpStaticRouteEntry.setStatus('current')
swL3IpStaticRouteDest = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpStaticRouteDest.setStatus('current')
swL3IpStaticRouteMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpStaticRouteMask.setStatus('current')
swL3IpStaticRouteBkupState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2))).clone(namedValues=NamedValues(("primary", 1), ("backup", 2)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3IpStaticRouteBkupState.setStatus('current')
swL3IpStaticRouteNextHop = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 4), IpAddress()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpStaticRouteNextHop.setStatus('current')
swL3IpStaticRouteMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 65535))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpStaticRouteMetric.setStatus('current')
# Row validity: other(1)/invalid(2)/valid(3); not a standard RowStatus.
swL3IpStaticRouteStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 2, 5, 1, 6), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("valid", 3)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3IpStaticRouteStatus.setStatus('current')
# --- BOOTP/DHCP relay: global switches plus the per-interface server table ---
swL3RelayBootpState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpState.setStatus('current')
swL3RelayBootpHopCount = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpHopCount.setStatus('current')
swL3RelayBootpTimeThreshold = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 65535))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpTimeThreshold.setStatus('current')
# Relay-server table indexed by (interface name, server IP).
swL3RelayBootpCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4), )
if mibBuilder.loadTexts: swL3RelayBootpCtrlTable.setStatus('current')
swL3RelayBootpCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayBootpCtrlInterfaceName"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayBootpCtrlServer"))
if mibBuilder.loadTexts: swL3RelayBootpCtrlEntry.setStatus('current')
swL3RelayBootpCtrlInterfaceName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(0, 12))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayBootpCtrlInterfaceName.setStatus('current')
swL3RelayBootpCtrlServer = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayBootpCtrlServer.setStatus('current')
swL3RelayBootpCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 1, 4, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("valid", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayBootpCtrlState.setStatus('current')
# --- DNS relay: global switches, server addresses, and static-host table ---
swL3RelayDnsState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsState.setStatus('current')
swL3RelayDnsPrimaryServer = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 2), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsPrimaryServer.setStatus('current')
swL3RelayDnsSecondaryServer = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 3), IpAddress()).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsSecondaryServer.setStatus('current')
swL3RelayDnsCacheState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsCacheState.setStatus('current')
swL3RelayDnsStaticTableState = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 5), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("disabled", 2), ("enabled", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsStaticTableState.setStatus('current')
# Static DNS entries indexed by (domain name, IP address).
swL3RelayDnsCtrlTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6), )
if mibBuilder.loadTexts: swL3RelayDnsCtrlTable.setStatus('current')
swL3RelayDnsCtrlEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayDnsCtrlDomainName"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RelayDnsCtrlIpAddr"))
if mibBuilder.loadTexts: swL3RelayDnsCtrlEntry.setStatus('current')
swL3RelayDnsCtrlDomainName = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 32))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayDnsCtrlDomainName.setStatus('current')
swL3RelayDnsCtrlIpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1, 2), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RelayDnsCtrlIpAddr.setStatus('current')
swL3RelayDnsCtrlState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 3, 2, 6, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("invalid", 2), ("valid", 3)))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RelayDnsCtrlState.setStatus('current')
# --- MD5 authentication key table (key id 1..255 -> key string) ---
swL3Md5Table = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4), )
if mibBuilder.loadTexts: swL3Md5Table.setStatus('current')
swL3Md5Entry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3Md5KeyId"))
if mibBuilder.loadTexts: swL3Md5Entry.setStatus('current')
swL3Md5KeyId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3Md5KeyId.setStatus('current')
swL3Md5Key = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 16))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3Md5Key.setStatus('current')
# RowStatus column for creating/deleting key rows.
swL3Md5RowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 4, 1, 3), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3Md5RowStatus.setStatus('current')
# --- Route redistribution table, indexed by (source protocol, destination protocol) ---
swL3RouteRedistriTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5), )
if mibBuilder.loadTexts: swL3RouteRedistriTable.setStatus('current')
swL3RouteRedistriEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RouteRedistriSrcProtocol"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3RouteRedistriDstProtocol"))
if mibBuilder.loadTexts: swL3RouteRedistriEntry.setStatus('current')
# Source: other(1)/rip(2)/ospf(3)/static(4)/local(5); destination: other/rip/ospf.
swL3RouteRedistriSrcProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 1), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5))).clone(namedValues=NamedValues(("other", 1), ("rip", 2), ("ospf", 3), ("static", 4), ("local", 5)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RouteRedistriSrcProtocol.setStatus('current')
swL3RouteRedistriDstProtocol = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 2), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("other", 1), ("rip", 2), ("ospf", 3)))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RouteRedistriDstProtocol.setStatus('current')
swL3RouteRedistriType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 3), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7, 8, 9, 10))).clone(namedValues=NamedValues(("other", 1), ("all", 2), ("type-1", 3), ("type-2", 4), ("internal", 5), ("external", 6), ("inter-E1", 7), ("inter-E2", 8), ("extType1", 9), ("extType2", 10)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3RouteRedistriType.setStatus('current')
swL3RouteRedistriMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 16777214))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3RouteRedistriMetric.setStatus('current')
swL3RouteRedistriRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 5, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3RouteRedistriRowStatus.setStatus('current')
# --- OSPF host table, indexed by (host IP, TOS); mirrors OSPF-MIB ospfHostTable ---
swL3OspfHostTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6), )
if mibBuilder.loadTexts: swL3OspfHostTable.setStatus('current')
swL3OspfHostEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3OspfHostIpAddress"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3OspfHostTOS"))
if mibBuilder.loadTexts: swL3OspfHostEntry.setStatus('current')
swL3OspfHostIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3OspfHostIpAddress.setStatus('current')
# TOSType, Metric and AreaID textual conventions are imported from OSPF-MIB.
swL3OspfHostTOS = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 2), TOSType()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3OspfHostTOS.setStatus('current')
swL3OspfHostMetric = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 3), Metric()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3OspfHostMetric.setStatus('current')
swL3OspfHostAreaID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 4), AreaID()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3OspfHostAreaID.setStatus('current')
swL3OspfHostStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 6, 1, 5), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3OspfHostStatus.setStatus('current')
# --- OSPF virtual-interface table, indexed by (transit area id, neighbor router id);
# modelled on OSPF-MIB ospfVirtIfTable with added auth key-id column ---
swL3ospfVirtIfTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7), )
if mibBuilder.loadTexts: swL3ospfVirtIfTable.setStatus('current')
swL3ospfVirtIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfVirtIfAreaId"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfVirtIfNeighbor"))
if mibBuilder.loadTexts: swL3ospfVirtIfEntry.setStatus('current')
swL3ospfVirtIfAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 1), AreaID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfAreaId.setStatus('current')
swL3ospfVirtIfNeighbor = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 2), RouterID()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfNeighbor.setStatus('current')
# Timer columns carry the OSPF defaults via .clone(): delay 1s, retransmit 5s,
# hello 10s, router-dead 60s.
swL3ospfVirtIfTransitDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 3), UpToMaxAge().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfTransitDelay.setStatus('current')
swL3ospfVirtIfRetransInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 4), UpToMaxAge().clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfRetransInterval.setStatus('current')
swL3ospfVirtIfHelloInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 5), HelloRange().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfHelloInterval.setStatus('current')
swL3ospfVirtIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 6), PositiveInteger().clone(60)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfRtrDeadInterval.setStatus('current')
# Virtual links only report down(1) or pointToPoint(4); defaults to 'down'.
swL3ospfVirtIfState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 7), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 4))).clone(namedValues=NamedValues(("down", 1), ("pointToPoint", 4))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfState.setStatus('current')
swL3ospfVirtIfEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 8), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfVirtIfEvents.setStatus('current')
swL3ospfVirtIfAuthType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 9), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfAuthType.setStatus('current')
# Auth key defaults to eight zero octets.
swL3ospfVirtIfAuthKey = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 10), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256)).clone(hexValue="0000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfAuthKey.setStatus('current')
swL3ospfVirtIfAuthKeyID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 11), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfAuthKeyID.setStatus('current')
swL3ospfVirtIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 7, 1, 12), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfVirtIfStatus.setStatus('current')
# --- OSPF interface table, indexed by (interface IP, address-less ifIndex);
# modelled on OSPF-MIB ospfIfTable with added auth key / key-id columns ---
swL3ospfIfTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8), )
if mibBuilder.loadTexts: swL3ospfIfTable.setStatus('current')
swL3ospfIfEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfIfIpAddress"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3ospfAddressLessIf"))
if mibBuilder.loadTexts: swL3ospfIfEntry.setStatus('current')
swL3ospfIfIpAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 1), IpAddress()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfIpAddress.setStatus('current')
swL3ospfAddressLessIf = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 2), Integer32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfAddressLessIf.setStatus('current')
# Area defaults to 0.0.0.0 (the backbone), encoded as four zero octets.
swL3ospfIfAreaId = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 3), AreaID().clone(hexValue="00000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAreaId.setStatus('current')
swL3ospfIfType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 4), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 5))).clone(namedValues=NamedValues(("broadcast", 1), ("nbma", 2), ("pointToPoint", 3), ("pointToMultipoint", 5)))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfType.setStatus('current')
swL3ospfIfAdminStat = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 5), Status().clone('enabled')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAdminStat.setStatus('current')
swL3ospfIfRtrPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 6), DesignatedRouterPriority().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfRtrPriority.setStatus('current')
# Timer defaults via .clone(): delay 1s, retransmit 5s, hello 10s,
# router-dead 40s, NBMA poll 120s.
swL3ospfIfTransitDelay = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 7), UpToMaxAge().clone(1)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfTransitDelay.setStatus('current')
swL3ospfIfRetransInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 8), UpToMaxAge().clone(5)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfRetransInterval.setStatus('current')
swL3ospfIfHelloInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 9), HelloRange().clone(10)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfHelloInterval.setStatus('current')
swL3ospfIfRtrDeadInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 10), PositiveInteger().clone(40)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfRtrDeadInterval.setStatus('current')
swL3ospfIfPollInterval = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 11), PositiveInteger().clone(120)).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfPollInterval.setStatus('current')
swL3ospfIfState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 12), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3, 4, 5, 6, 7))).clone(namedValues=NamedValues(("down", 1), ("loopback", 2), ("waiting", 3), ("pointToPoint", 4), ("designatedRouter", 5), ("backupDesignatedRouter", 6), ("otherDesignatedRouter", 7))).clone('down')).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfState.setStatus('current')
swL3ospfIfDesignatedRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 13), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfDesignatedRouter.setStatus('current')
swL3ospfIfBackupDesignatedRouter = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 14), IpAddress().clone(hexValue="00000000")).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfBackupDesignatedRouter.setStatus('current')
swL3ospfIfEvents = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 15), Counter32()).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3ospfIfEvents.setStatus('current')
swL3ospfIfMulticastForwarding = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 16), Integer32().subtype(subtypeSpec=ConstraintsUnion(SingleValueConstraint(1, 2, 3))).clone(namedValues=NamedValues(("blocked", 1), ("multicast", 2), ("unicast", 3))).clone('blocked')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfMulticastForwarding.setStatus('current')
swL3ospfIfDemand = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 17), TruthValue().clone('false')).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfDemand.setStatus('current')
swL3ospfIfAuthType = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 18), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAuthType.setStatus('current')
# Auth key defaults to eight zero octets.
swL3ospfIfAuthKey = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 19), OctetString().subtype(subtypeSpec=ValueSizeConstraint(0, 256)).clone(hexValue="0000000000000000")).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAuthKey.setStatus('current')
swL3ospfIfAuthKeyID = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 20), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfAuthKeyID.setStatus('current')
swL3ospfIfStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 8, 1, 21), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3ospfIfStatus.setStatus('current')
# --- Route-preference (administrative distance) scalars per route source.
# All are read-write in range 1..999 except the local preference, which is
# read-only (range 0..999). ---
swL3RoutePreference = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9))
swL3RoutePreferenceRIP = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceRIP.setStatus('current')
swL3RoutePreferenceOSPFIntra = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFIntra.setStatus('current')
swL3RoutePreferenceStatic = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceStatic.setStatus('current')
swL3RoutePreferenceLocal = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 4), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 999))).setMaxAccess("readonly")
if mibBuilder.loadTexts: swL3RoutePreferenceLocal.setStatus('current')
swL3RoutePreferenceOSPFInter = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 5), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFInter.setStatus('current')
swL3RoutePreferenceOSPFExtT1 = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 6), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFExtT1.setStatus('current')
swL3RoutePreferenceOSPFExtT2 = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 9, 7), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 999))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3RoutePreferenceOSPFExtT2.setStatus('current')
# --- PIM management subtree: candidate-BSR, candidate-RP, SPT thresholds,
# register-checksum table, and per-interface DR priority ---
swL3PimMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11))
swL3PimCbsrInfoMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1))
swL3pimCbsrBootStrapPeriod = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCbsrBootStrapPeriod.setStatus('current')
swL3pimCbsrHashMaskLen = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCbsrHashMaskLen.setStatus('current')
# Candidate-BSR priority per interface, indexed by ifIndex.
swL3pimCbsrTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3), )
if mibBuilder.loadTexts: swL3pimCbsrTable.setStatus('current')
swL3pimCbsrEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimCbsrInterface"))
if mibBuilder.loadTexts: swL3pimCbsrEntry.setStatus('current')
swL3pimCbsrInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: swL3pimCbsrInterface.setStatus('current')
# Priority range includes -1 (interface not a BSR candidate is plausible;
# exact -1 semantics not shown here -- verify against vendor MIB text).
swL3pimCbsrPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 1, 3, 1, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(-1, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCbsrPriority.setStatus('current')
swL3pimCandidateRPMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2))
swL3pimCandidateRPHoldtime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCandidateRPHoldtime.setStatus('current')
swL3pimCandidateRPPriority = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCandidateRPPriority.setStatus('current')
swL3pimCandidateRPWildcardPrefixCnt = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 2, 3), Integer32().subtype(subtypeSpec=ValueRangeConstraint(0, 1))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimCandidateRPWildcardPrefixCnt.setStatus('current')
# Shortest-path-tree switchover thresholds are DisplayStrings (1..8 chars).
swL3pimSptMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 3))
swL3pimLastHopSptThreshold = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 3, 1), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimLastHopSptThreshold.setStatus('current')
swL3pimRPSptThreshold = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 3, 2), DisplayString().subtype(subtypeSpec=ValueSizeConstraint(1, 8))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimRPSptThreshold.setStatus('current')
# Register-message checksum-includes-data option per RP address.
swL3pimRegChksumIncDataTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4), )
if mibBuilder.loadTexts: swL3pimRegChksumIncDataTable.setStatus('current')
swL3pimRegChksumIncDataEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3SwL3pimRegChksumIncDataRpAddr"))
if mibBuilder.loadTexts: swL3pimRegChksumIncDataEntry.setStatus('current')
swL3SwL3pimRegChksumIncDataRpAddr = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4, 1, 1), IpAddress())
if mibBuilder.loadTexts: swL3SwL3pimRegChksumIncDataRpAddr.setStatus('current')
swL3SwL3pimRegChksumIncDataState = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 4, 1, 2), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3SwL3pimRegChksumIncDataState.setStatus('current')
swL3PimInfoMgmt = MibIdentifier((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5))
swL3pimRegisterProbeTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 1), Integer32().subtype(subtypeSpec=ValueRangeConstraint(1, 127))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimRegisterProbeTime.setStatus('current')
swL3pimRegisterSuppressionTime = MibScalar((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 2), Integer32().subtype(subtypeSpec=ValueRangeConstraint(3, 255))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimRegisterSuppressionTime.setStatus('current')
# Per-interface PIM info: designated-router priority, indexed by ifIndex.
swL3pimInfoTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3), )
if mibBuilder.loadTexts: swL3pimInfoTable.setStatus('current')
swL3pimInfoEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimInterface"))
if mibBuilder.loadTexts: swL3pimInfoEntry.setStatus('current')
swL3pimInterface = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3, 1, 1), InterfaceIndex())
if mibBuilder.loadTexts: swL3pimInterface.setStatus('current')
swL3pimDRPriority = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 5, 3, 1, 2), Unsigned32().subtype(subtypeSpec=ValueRangeConstraint(0, 4294967294))).setMaxAccess("readwrite")
if mibBuilder.loadTexts: swL3pimDRPriority.setStatus('current')
swL3pimStaticRPTable = MibTable((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6), )
if mibBuilder.loadTexts: swL3pimStaticRPTable.setStatus('current')
swL3pimStaticRPEntry = MibTableRow((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1), ).setIndexNames((0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimStaticRPGroupAddress"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimStaticRPGroupMask"), (0, "DXS3326GSR-SWL3MGMT-MIB", "swL3pimStaticRPAddress"))
if mibBuilder.loadTexts: swL3pimStaticRPEntry.setStatus('current')
swL3pimStaticRPGroupAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 1), IpAddress())
if mibBuilder.loadTexts: swL3pimStaticRPGroupAddress.setStatus('current')
swL3pimStaticRPGroupMask = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 2), IpAddress())
if mibBuilder.loadTexts: swL3pimStaticRPGroupMask.setStatus('current')
swL3pimStaticRPAddress = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 3), IpAddress())
if mibBuilder.loadTexts: swL3pimStaticRPAddress.setStatus('current')
swL3pimStaticRPRowStatus = MibTableColumn((1, 3, 6, 1, 4, 1, 171, 11, 59, 7, 3, 11, 6, 1, 4), RowStatus()).setMaxAccess("readcreate")
if mibBuilder.loadTexts: swL3pimStaticRPRowStatus.setStatus('current')
mibBuilder.exportSymbols("DXS3326GSR-SWL3MGMT-MIB", swL3IpCtrlOperState=swL3IpCtrlOperState, swL3PimInfoMgmt=swL3PimInfoMgmt, swL3pimCbsrBootStrapPeriod=swL3pimCbsrBootStrapPeriod, swL3RouteRedistriRowStatus=swL3RouteRedistriRowStatus, swL3pimCandidateRPPriority=swL3pimCandidateRPPriority, swL3MgmtMIB=swL3MgmtMIB, swL3ospfIfDemand=swL3ospfIfDemand, swL3ospfIfDesignatedRouter=swL3ospfIfDesignatedRouter, swL3DevCtrlPIMState=swL3DevCtrlPIMState, swL3RelayDnsState=swL3RelayDnsState, swL3OspfHostEntry=swL3OspfHostEntry, swL3pimCbsrHashMaskLen=swL3pimCbsrHashMaskLen, PYSNMP_MODULE_ID=swL3MgmtMIB, swL3RouteRedistriDstProtocol=swL3RouteRedistriDstProtocol, swL3ospfIfPollInterval=swL3ospfIfPollInterval, swL3IpCtrlSecondary=swL3IpCtrlSecondary, swL3pimCandidateRPWildcardPrefixCnt=swL3pimCandidateRPWildcardPrefixCnt, swL3DevCtrlRIPState=swL3DevCtrlRIPState, swL3Md5Entry=swL3Md5Entry, swL3ospfIfAreaId=swL3ospfIfAreaId, NetAddress=NetAddress, swL3IpStaticRouteDest=swL3IpStaticRouteDest, swL3RelayDnsCtrlIpAddr=swL3RelayDnsCtrlIpAddr, swL3ospfIfBackupDesignatedRouter=swL3ospfIfBackupDesignatedRouter, swL3pimCandidateRPHoldtime=swL3pimCandidateRPHoldtime, swL3pimRegChksumIncDataTable=swL3pimRegChksumIncDataTable, swL3IpStaticRouteTable=swL3IpStaticRouteTable, swL3pimRPSptThreshold=swL3pimRPSptThreshold, swL3ospfIfAuthType=swL3ospfIfAuthType, swL3ospfIfAuthKeyID=swL3ospfIfAuthKeyID, swL3ospfIfMulticastForwarding=swL3ospfIfMulticastForwarding, swL3RoutePreference=swL3RoutePreference, swL3IpFdbInfoEntry=swL3IpFdbInfoEntry, swL3pimRegisterProbeTime=swL3pimRegisterProbeTime, swL3ospfVirtIfRetransInterval=swL3ospfVirtIfRetransInterval, swL3pimStaticRPTable=swL3pimStaticRPTable, swL3RoutePreferenceOSPFIntra=swL3RoutePreferenceOSPFIntra, swL3IpStaticRouteMask=swL3IpStaticRouteMask, swL3RelayDnsCtrlEntry=swL3RelayDnsCtrlEntry, swL3ospfIfState=swL3ospfIfState, swL3ospfIfAuthKey=swL3ospfIfAuthKey, swL3RoutePreferenceStatic=swL3RoutePreferenceStatic, 
swL3RelayDnsSecondaryServer=swL3RelayDnsSecondaryServer, swL3RouteRedistriMetric=swL3RouteRedistriMetric, swL3PimCbsrInfoMgmt=swL3PimCbsrInfoMgmt, swL3OspfHostTable=swL3OspfHostTable, swL3ospfIfAdminStat=swL3ospfIfAdminStat, swL3IpStaticRouteBkupState=swL3IpStaticRouteBkupState, swL3RelayBootpCtrlState=swL3RelayBootpCtrlState, swL3IpCtrlMgmt=swL3IpCtrlMgmt, swL3IpCtrlIpSubnetMask=swL3IpCtrlIpSubnetMask, swL3RelayBootpCtrlServer=swL3RelayBootpCtrlServer, swL3RelayDnsCtrlDomainName=swL3RelayDnsCtrlDomainName, swL3IpStaticRouteStatus=swL3IpStaticRouteStatus, swL3RouteRedistriSrcProtocol=swL3RouteRedistriSrcProtocol, swL3RelayBootpCtrlTable=swL3RelayBootpCtrlTable, swL3pimCbsrInterface=swL3pimCbsrInterface, swL3ospfIfStatus=swL3ospfIfStatus, swL3IpStaticRouteMetric=swL3IpStaticRouteMetric, swL3RelayDnsMgmt=swL3RelayDnsMgmt, swL3ospfVirtIfTransitDelay=swL3ospfVirtIfTransitDelay, swL3DevCtrlVRRPState=swL3DevCtrlVRRPState, swL3IpCtrlEntry=swL3IpCtrlEntry, swL3IpFdbMgmt=swL3IpFdbMgmt, swL3pimInterface=swL3pimInterface, swL3DevCtrlOSPFState=swL3DevCtrlOSPFState, swL3RelayBootpMgmt=swL3RelayBootpMgmt, swL3RouteRedistriType=swL3RouteRedistriType, swL3pimCbsrPriority=swL3pimCbsrPriority, swL3RelayDnsCtrlState=swL3RelayDnsCtrlState, swL3RelayBootpHopCount=swL3RelayBootpHopCount, swL3OspfHostStatus=swL3OspfHostStatus, swL3PimMgmt=swL3PimMgmt, swL3IpFdbInfoIpAddr=swL3IpFdbInfoIpAddr, NodeAddress=NodeAddress, swL3RelayBootpCtrlInterfaceName=swL3RelayBootpCtrlInterfaceName, swL3RoutePreferenceOSPFExtT2=swL3RoutePreferenceOSPFExtT2, swL3pimCbsrEntry=swL3pimCbsrEntry, swL3RoutePreferenceRIP=swL3RoutePreferenceRIP, swL3ospfVirtIfTable=swL3ospfVirtIfTable, swL3pimCbsrTable=swL3pimCbsrTable, swL3IpArpAgingTime=swL3IpArpAgingTime, swL3RouteRedistriEntry=swL3RouteRedistriEntry, swL3IpCtrlMode=swL3IpCtrlMode, swL3pimStaticRPGroupMask=swL3pimStaticRPGroupMask, swL3DevCtrl=swL3DevCtrl, swL3pimRegChksumIncDataEntry=swL3pimRegChksumIncDataEntry, swL3OspfHostAreaID=swL3OspfHostAreaID, 
swL3ospfVirtIfState=swL3ospfVirtIfState, swL3ospfIfEvents=swL3ospfIfEvents, swL3Md5KeyId=swL3Md5KeyId, swL3ospfIfIpAddress=swL3ospfIfIpAddress, swL3ospfIfRtrDeadInterval=swL3ospfIfRtrDeadInterval, swL3ospfVirtIfNeighbor=swL3ospfVirtIfNeighbor, swL3SwL3pimRegChksumIncDataState=swL3SwL3pimRegChksumIncDataState, swL3RelayDnsCacheState=swL3RelayDnsCacheState, swL3IpStaticRouteNextHop=swL3IpStaticRouteNextHop, swL3ospfIfHelloInterval=swL3ospfIfHelloInterval, swL3RoutePreferenceOSPFInter=swL3RoutePreferenceOSPFInter, swL3SwL3pimRegChksumIncDataRpAddr=swL3SwL3pimRegChksumIncDataRpAddr, swL3pimStaticRPEntry=swL3pimStaticRPEntry, swL3RelayDnsCtrlTable=swL3RelayDnsCtrlTable, swL3DevCtrlDVMRPState=swL3DevCtrlDVMRPState, swL3ospfIfEntry=swL3ospfIfEntry, swL3IpCtrlInterfaceName=swL3IpCtrlInterfaceName, swL3RelayDnsPrimaryServer=swL3RelayDnsPrimaryServer, swL3OspfHostMetric=swL3OspfHostMetric, swL3Md5Table=swL3Md5Table, swL3ospfVirtIfAuthType=swL3ospfVirtIfAuthType, swL3pimCandidateRPMgmt=swL3pimCandidateRPMgmt, swL3pimInfoEntry=swL3pimInfoEntry, swL3ospfVirtIfHelloInterval=swL3ospfVirtIfHelloInterval, swL3ospfIfRtrPriority=swL3ospfIfRtrPriority, swL3RelayBootpCtrlEntry=swL3RelayBootpCtrlEntry, swL3pimDRPriority=swL3pimDRPriority, swL3RelayBootpState=swL3RelayBootpState, swL3ospfAddressLessIf=swL3ospfAddressLessIf, swL3IpStaticRouteEntry=swL3IpStaticRouteEntry, swL3pimStaticRPGroupAddress=swL3pimStaticRPGroupAddress, swL3pimStaticRPAddress=swL3pimStaticRPAddress, swL3pimStaticRPRowStatus=swL3pimStaticRPRowStatus, swL3IpCtrlVlanName=swL3IpCtrlVlanName, swL3pimLastHopSptThreshold=swL3pimLastHopSptThreshold, swL3ospfVirtIfEvents=swL3ospfVirtIfEvents, swL3pimInfoTable=swL3pimInfoTable, swL3pimSptMgmt=swL3pimSptMgmt, swL3IpFdbInfoTable=swL3IpFdbInfoTable, swL3ospfIfType=swL3ospfIfType, swL3ospfVirtIfAreaId=swL3ospfVirtIfAreaId, swL3ospfIfTransitDelay=swL3ospfIfTransitDelay, swL3RouteRedistriTable=swL3RouteRedistriTable, swL3IpCtrlTable=swL3IpCtrlTable, 
swL3ospfIfTable=swL3ospfIfTable, swL3IpCtrlIpAddr=swL3IpCtrlIpAddr, swL3Md5RowStatus=swL3Md5RowStatus, swL3IpFdbInfoPort=swL3IpFdbInfoPort, swL3ospfVirtIfEntry=swL3ospfVirtIfEntry, swL3ospfVirtIfAuthKey=swL3ospfVirtIfAuthKey, swL3ospfVirtIfRtrDeadInterval=swL3ospfVirtIfRtrDeadInterval, swL3IpFdbInfoIpSubnetMask=swL3IpFdbInfoIpSubnetMask, swL3ospfIfRetransInterval=swL3ospfIfRetransInterval, swL3ospfVirtIfStatus=swL3ospfVirtIfStatus, swL3RelayMgmt=swL3RelayMgmt, swL3IpCtrlIfIndex=swL3IpCtrlIfIndex, swL3OspfHostIpAddress=swL3OspfHostIpAddress, swL3RoutePreferenceLocal=swL3RoutePreferenceLocal, swL3DevMgmt=swL3DevMgmt, swL3ospfVirtIfAuthKeyID=swL3ospfVirtIfAuthKeyID, swL3RelayBootpTimeThreshold=swL3RelayBootpTimeThreshold, swL3Md5Key=swL3Md5Key, swL3IpFdbInfoType=swL3IpFdbInfoType, swL3RoutePreferenceOSPFExtT1=swL3RoutePreferenceOSPFExtT1, swL3IpMgmt=swL3IpMgmt, swL3OspfHostTOS=swL3OspfHostTOS, swL3pimRegisterSuppressionTime=swL3pimRegisterSuppressionTime, swL3RelayDnsStaticTableState=swL3RelayDnsStaticTableState, swL3IpCtrlState=swL3IpCtrlState)
| [
"dcwangmit01@gmail.com"
] | dcwangmit01@gmail.com |
ed8d86ed590ce25d40ea9e13bcd9f423cfe2d0a5 | 989272dc1df2c994fa5e6ade63f844320e8def5a | /Using Regular Expression.py | 49943b9779543edb06fd89abbf2971eb13c5bd91 | [] | no_license | Mohiuddin-LU/Identifying-diabetic-users-in-twitter | b0d10ab5a65cc2e6b50280a190cf1172e8fc0b20 | 5e7611f0e45bbcfc2bb4d3c401dc6c3d90b0dacb | refs/heads/master | 2020-09-07T08:26:16.225955 | 2020-03-11T14:42:15 | 2020-03-11T14:42:15 | 220,722,319 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 767 | py | # -*- coding: utf-8 -*-
"""
Created on Tue Mar 10 14:50:05 2020
@author: mabdulq
"""
import re
def accuracy(count0, count1):
    """Return the share of pattern matches that were labelled '1', in percent.

    Args:
        count0: number of tweets that matched the keyword pattern but carry
            label '0'.
        count1: number of tweets that matched the keyword pattern and carry
            label '1'.

    Returns:
        count1 as a percentage of all matches (float), or 0.0 when there
        were no matches at all (avoids a ZeroDivisionError).
    """
    total = count0 + count1
    if total == 0:
        return 0.0
    return (count1 / total) * 100
# Read tweets and their labels from two parallel files: line i of
# text.txt pairs with line i of label.txt.
#
# Keyword pattern for flagging self-reported diabetes tweets.  re.X
# (verbose mode) makes the literal spaces around each '|' insignificant;
# re.I makes the word matches case-insensitive.  Compiled once here,
# instead of once per loop iteration as before.
pattern = re.compile(
    r'\binsulin\b | \bI\b | \bHave\b | \bdiabetes\b | \bmyself\b | \bme\b | \bmy\b',
    flags=re.I | re.X)

count0 = 0  # tweets that matched the pattern but are labelled '0'
count1 = 0  # tweets that matched the pattern and are labelled '1'

# 'with' guarantees both files are closed even on error (they were
# previously never closed).
with open("text.txt", "r") as f, open("label.txt", "r") as l:
    # NOTE(review): assumes both files contain at least 1956 lines --
    # the corpus size is hard-coded; verify against the data files.
    for _ in range(1956):
        sentence = f.readline()
        label = l.readline().strip('\n')
        if pattern.findall(sentence):
            if label == '1':
                count1 = count1 + 1
            elif label == '0':
                count0 = count0 + 1
print(accuracy(count0,count1)) | [
"55677245+mohiuddin02@users.noreply.github.com"
] | 55677245+mohiuddin02@users.noreply.github.com |
269593f796418ca7ab97be96aa100357de24da16 | 9622f1a3511ed13c79b109a92e57fed837c3f592 | /recursion1/reverse_string.py | ed5be07b56f627e48fa27710f1165e9876c284e4 | [] | no_license | elugens/leet-code-python | fb484ea52aa0dbef62d57d31a8a904e367e563f4 | 53e67e63078d629f19ed9b307b867eed912c1cec | refs/heads/main | 2023-08-07T20:58:35.001224 | 2021-09-20T18:32:25 | 2021-09-20T18:32:25 | 408,557,125 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 168 | py |
def reverseString(s):
left, right = 0, len(s) - 1
while left < right:
s[left], s[right] = s[right], s[left]
left, right = left + 1, right - 1
| [
"elugens@gmail.com"
] | elugens@gmail.com |
e877c55d2fc6c8d1ea93f772e9162375f77c38a6 | cd51ffd9715d8d5ad991db4b2d1e9da76310c0bc | /main.py | 74e0331027d03fd436dc8d48332c43726d89941f | [] | no_license | CLeDoPbIT/dl_rr_pipeline | 6235ce8e6771189aae634fad053aa709455ca540 | afba7682a0af5206a65f00e4d69aa48b05ba871d | refs/heads/master | 2023-04-04T17:42:33.055980 | 2020-04-09T20:48:33 | 2020-04-09T20:48:33 | 254,188,649 | 0 | 0 | null | 2023-03-25T00:38:49 | 2020-04-08T20:04:17 | Python | UTF-8 | Python | false | false | 2,218 | py | import importlib
import traceback
import time
import utils.mainUtils as mainUtils
import utils.misc
# Paths to the two pipeline configuration files, resolved relative to the
# current working directory.
# NOTE(review): "CONSTANS" looks like a typo for "CONSTANTS" -- kept as-is
# because the name is referenced below in the __main__ guard.
PROCESSOR_REGISTRY_CONFIG = "./processorRegistry.json"
CONSTANS = "./constants.json"
def run(processor_registry_config_path, constants_path):
    """Run every processor listed in the registry, one after another.

    Args:
        processor_registry_config_path: path to processorRegistry.json
        constants_path: path to constants.json
    """
    # Load both JSON configuration files up front.
    constants = utils.misc.read_json(constants_path)
    registry = utils.misc.read_json(processor_registry_config_path)

    # Execute the processors in declaration order; stop on the first failure.
    for processor in registry:
        # Resolve the processor's declared inputs, outputs and config
        # against the constants file.
        in_data = mainUtils.get_processor_data(processor["input"]["data"], constants)
        out_data = mainUtils.get_processor_data(processor["output"]["data"], constants)
        in_config = mainUtils.get_processor_data(processor["input"]["config"], constants)
        output_exists = mainUtils.is_processor_output_created(out_data)

        # Skip a processor whose output already exists, unless it is
        # explicitly forced to re-run.
        if processor["forceCreate"] == "False" and output_exists:
            print(f"INFO: For {processor['name']} output is existed")
            continue

        started = time.time()
        module = importlib.import_module(processor['module'])

        print(f"INFO: Running {processor['name']}")
        try:
            module.process(in_data, in_config, out_data)
        except Exception as e:
            # Log the failure (with traceback and elapsed time) and abort
            # the whole pipeline.
            print(f"ERROR: Exception during processor {processor['name']} execution: ", e)
            traceback.print_exc()
            print(f"INFO: {processor['name']} failure time = {str(time.time() - started)}")
            break
        print(f"INFO: Processor \"{processor['name']}\" execution time = {str(time.time() - started)}")
    print("Done!")
if __name__ == "__main__":
    # Entry point: run the whole pipeline with the default config paths.
    run(PROCESSOR_REGISTRY_CONFIG, CONSTANS)
| [
"Evgeny.Burashnikov@harman.com"
] | Evgeny.Burashnikov@harman.com |
82a8b3eab92e0cf6cf8a43a66206a1eef88a20d0 | 2212a32833776a5d5d2164d8efd11bd18bd3f768 | /tf_agents/networks/sequential_test.py | 001323efb5ecede3876c8fdbfa391b5b484a47d9 | [
"Apache-2.0"
] | permissive | tensorflow/agents | f39805fb98ef9af712dcaff3ba49e1ac6d42804b | eca1093d3a047e538f17f6ab92ab4d8144284f23 | refs/heads/master | 2023-08-14T04:56:30.774797 | 2023-08-02T17:43:44 | 2023-08-02T17:44:09 | 157,936,206 | 2,755 | 848 | Apache-2.0 | 2023-07-26T02:35:32 | 2018-11-17T00:29:12 | Python | UTF-8 | Python | false | false | 9,811 | py | # coding=utf-8
# Copyright 2020 The TF-Agents Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for tf_agents.networks.sequential."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import os
from absl import flags
import numpy as np
import tensorflow as tf
import tensorflow_probability as tfp
from tf_agents.distributions import utils as distribution_utils
from tf_agents.keras_layers import dynamic_unroll_layer
from tf_agents.keras_layers import inner_reshape
from tf_agents.networks import nest_map
from tf_agents.networks import network
from tf_agents.networks import sequential as sequential_lib
from tf_agents.policies import actor_policy
from tf_agents.policies import policy_saver
from tf_agents.specs import tensor_spec
from tf_agents.trajectories import time_step as ts
from tf_agents.utils import common
from tf_agents.utils import test_utils
FLAGS = flags.FLAGS
tfd = tfp.distributions
class ActorNetwork(network.Network):
  """Small test actor network: three Dense layers wrapped in a Sequential."""

  def __init__(self, input_tensor_spec, output_tensor_spec):
    # The final layer emits one output per element of the action spec.
    action_count = output_tensor_spec.shape.num_elements()
    layer_stack = [
        tf.keras.layers.Dense(50),
        tf.keras.layers.Dense(10),
        tf.keras.layers.Dense(action_count),
    ]
    self._sequential = sequential_lib.Sequential(
        layer_stack,
        input_spec=input_tensor_spec,
    )  # pytype: disable=wrong-arg-types
    super().__init__(
        input_tensor_spec=input_tensor_spec,
        state_spec=self._sequential.state_spec,
        name='TestActorNetwork',
    )

  def call(self, observations, step_type=(), network_state=(), training=False):
    """Forward pass: delegate straight to the wrapped Sequential."""
    return self._sequential(observations, network_state)
class SequentialTest(test_utils.TestCase):
  """Unit tests for `sequential_lib.Sequential`."""

  def setUp(self):
    # Sequential relies on TF2 eager behaviour; skip the suite under TF1.
    if not common.has_eager_been_enabled():
      self.skipTest('Only supported in TF2.x.')
    super(SequentialTest, self).setUp()

  def testCall(self):
    # A Dense(no-bias) + ReLU stack must compute max(inputs @ W, 0).
    sequential = sequential_lib.Sequential(
        [tf.keras.layers.Dense(4, use_bias=False), tf.keras.layers.ReLU()],
        input_spec=tf.TensorSpec((3,), tf.float32),
    )  # pytype: disable=wrong-arg-types
    inputs = np.ones((2, 3))
    out, state = sequential(inputs)
    # All layers are stateless, so the network state is the empty tuple.
    self.assertEqual(state, ())
    self.evaluate(tf.compat.v1.global_variables_initializer())
    out = self.evaluate(out)
    weights = self.evaluate(sequential.layers[0].weights[0])
    expected = np.dot(inputs, weights)
    expected[expected < 0] = 0
    self.assertAllClose(expected, out)

  def testMixOfNonRecurrentAndRecurrent(self):
    # Exercises a deep stack mixing stateless layers, Keras RNNs (plain and
    # stacked), reshapes, a conv layer, DynamicUnroll, and a distribution
    # output -- checks specs, state structure and output shape end to end.
    sequential = sequential_lib.Sequential(
        [
            tf.keras.layers.Dense(2),
            tf.keras.layers.LSTM(2, return_state=True, return_sequences=True),
            tf.keras.layers.RNN(
                tf.keras.layers.StackedRNNCells(
                    [
                        tf.keras.layers.LSTMCell(1),
                        tf.keras.layers.LSTMCell(32),
                    ],
                ),
                return_state=True,
                return_sequences=True,
            ),
            # Convert inner dimension to [4, 4, 2] for convolution.
            inner_reshape.InnerReshape([32], [4, 4, 2]),
            tf.keras.layers.Conv2D(2, 3),
            # Convert 3 inner dimensions to [?] for RNN.
            inner_reshape.InnerReshape([None] * 3, [-1]),
            tf.keras.layers.GRU(2, return_state=True, return_sequences=True),
            dynamic_unroll_layer.DynamicUnroll(tf.keras.layers.LSTMCell(2)),
            tf.keras.layers.Lambda(
                lambda x: tfd.MultivariateNormalDiag(loc=x, scale_diag=x)
            ),
        ],
        input_spec=tf.TensorSpec((3,), tf.float32),
    )  # pytype: disable=wrong-arg-types
    self.assertEqual(
        sequential.input_tensor_spec, tf.TensorSpec((3,), tf.float32)
    )

    # The final Lambda emits a distribution, so create_variables reports a
    # DistributionSpecV2 whose event spec matches the GRU/unroll width.
    output_spec = sequential.create_variables()
    self.assertIsInstance(output_spec, distribution_utils.DistributionSpecV2)
    output_event_spec = output_spec.event_spec
    self.assertEqual(output_event_spec, tf.TensorSpec((2,), dtype=tf.float32))

    # State spec collects only the stateful layers, in order.
    tf.nest.map_structure(
        self.assertEqual,
        sequential.state_spec,
        (
            (  # LSTM
                tf.TensorSpec((2,), tf.float32),
                tf.TensorSpec((2,), tf.float32),
            ),
            (  # RNN(StackedRNNCells)
                [
                    tf.TensorSpec((1,), tf.float32),
                    tf.TensorSpec((1,), tf.float32),
                ],
                [
                    tf.TensorSpec((32,), tf.float32),
                    tf.TensorSpec((32,), tf.float32),
                ],
            ),
            # GRU
            tf.TensorSpec((2,), tf.float32),
            (  # DynamicUnroll
                tf.TensorSpec((2,), tf.float32),
                tf.TensorSpec((2,), tf.float32),
            ),
        ),
    )

    inputs = tf.ones((8, 10, 3), dtype=tf.float32)
    dist, _ = sequential(inputs)
    outputs = dist.sample()
    self.assertEqual(outputs.shape, tf.TensorShape([8, 10, 2]))

  def testBuild(self):
    # Same computation as testCall, but without an explicit input_spec:
    # variables are built lazily on the first call.
    sequential = sequential_lib.Sequential(
        [tf.keras.layers.Dense(4, use_bias=False), tf.keras.layers.ReLU()]
    )
    inputs = np.ones((2, 3))
    out, _ = sequential(inputs)
    self.evaluate(tf.compat.v1.global_variables_initializer())
    out = self.evaluate(out)
    weights = self.evaluate(sequential.layers[0].weights[0])
    expected = np.dot(inputs, weights)
    expected[expected < 0] = 0
    self.assertAllClose(expected, out)

  def testAllZeroLengthStateSpecsShowAsEmptyState(self):
    # A NestMap of stateless layers must collapse to an empty state spec.
    sequential = sequential_lib.Sequential(
        [
            nest_map.NestMap({
                'a': tf.keras.layers.Dense(2),
                'b': tf.keras.layers.Dense(3),
            })
        ]
    )
    self.assertEqual(sequential.state_spec, ())

  def testTrainableVariables(self):
    # Two Dense layers -> 4 variables (kernel + bias each); flipping
    # `trainable` off empties trainable_variables but keeps variables.
    sequential = sequential_lib.Sequential(
        [tf.keras.layers.Dense(3), tf.keras.layers.Dense(4)]
    )
    sequential.create_variables(tf.TensorSpec(shape=(3, 2)))
    self.evaluate(tf.compat.v1.global_variables_initializer())
    variables = self.evaluate(sequential.trainable_variables)
    self.assertLen(variables, 4)
    self.assertLen(sequential.variables, 4)
    self.assertTrue(sequential.trainable)
    sequential.trainable = False
    self.assertFalse(sequential.trainable)
    self.assertEmpty(sequential.trainable_variables)
    self.assertLen(sequential.variables, 4)

  def testTrainableVariablesWithNonTrainableLayer(self):
    # A layer marked non-trainable contributes to `variables` but not to
    # `trainable_variables`.
    non_trainable_layer = tf.keras.layers.Dense(4)
    non_trainable_layer.trainable = False

    sequential = sequential_lib.Sequential(
        [tf.keras.layers.Dense(3), non_trainable_layer]
    )
    sequential.create_variables(tf.TensorSpec(shape=(3, 2)))
    self.evaluate(tf.compat.v1.global_variables_initializer())
    variables = self.evaluate(sequential.trainable_variables)
    self.assertLen(variables, 2)
    self.assertLen(sequential.variables, 4)
    self.assertTrue(sequential.trainable)
    sequential.trainable = False
    self.assertFalse(sequential.trainable)
    self.assertEmpty(sequential.trainable_variables)
    self.assertLen(sequential.variables, 4)

  def testTrainableVariablesNestedNetwork(self):
    # Variables of a nested Sequential are surfaced by the outer network.
    sequential_inner = sequential_lib.Sequential(
        [tf.keras.layers.Dense(3), tf.keras.layers.Dense(4)]
    )

    sequential = sequential_lib.Sequential(
        [tf.keras.layers.Dense(3), sequential_inner]
    )
    sequential.create_variables(tf.TensorSpec(shape=(3, 2)))
    self.evaluate(tf.compat.v1.global_variables_initializer())
    variables = self.evaluate(sequential.trainable_variables)
    self.assertLen(variables, 6)
    self.assertLen(sequential.variables, 6)
    self.assertLen(sequential_inner.variables, 4)
    self.assertTrue(sequential.trainable)
    sequential.trainable = False
    self.assertFalse(sequential.trainable)
    self.assertEmpty(sequential.trainable_variables)
    self.assertLen(sequential.variables, 6)

  def testCopy(self):
    # Round-trip through get_config/from_config preserves layer settings.
    sequential = sequential_lib.Sequential(
        [tf.keras.layers.Dense(3), tf.keras.layers.Dense(4, use_bias=False)]
    )
    clone = type(sequential).from_config(sequential.get_config())
    self.assertLen(clone.layers, 2)
    for l1, l2 in zip(sequential.layers, clone.layers):
      self.assertEqual(l1.dtype, l2.dtype)
      self.assertEqual(l1.units, l2.units)
      self.assertEqual(l1.use_bias, l2.use_bias)

  def testPolicySaverCompatibility(self):
    # A policy built on a Sequential-backed network must be saveable with
    # PolicySaver.
    observation_spec = tensor_spec.TensorSpec(shape=(100,), dtype=tf.float32)
    action_spec = tensor_spec.TensorSpec(shape=(5,), dtype=tf.float32)
    time_step_tensor_spec = ts.time_step_spec(observation_spec)
    net = ActorNetwork(observation_spec, action_spec)
    net.create_variables()
    policy = actor_policy.ActorPolicy(time_step_tensor_spec, action_spec, net)
    sample = tensor_spec.sample_spec_nest(
        time_step_tensor_spec, outer_dims=(5,)
    )
    policy.action(sample)

    train_step = common.create_variable('train_step')
    saver = policy_saver.PolicySaver(policy, train_step=train_step)
    self.initialize_v1_variables()

    with self.cached_session():
      saver.save(os.path.join(FLAGS.test_tmpdir, 'sequential_model'))
if __name__ == '__main__':
test_utils.main()
| [
"copybara-worker@google.com"
] | copybara-worker@google.com |
d973653f84166354990b4df25cb162438aa56b9e | ed9b286cc1fba177abae3449540e95cde558b7e3 | /tests/unit/test_logging.py | 57a6cff2087deaf7e117e341b0311904534212d9 | [
"Apache-2.0"
] | permissive | AndrewNg/anchore | e706f0a0c47e298be3295d1aa6d167ec58788cd2 | 308e91881be65dd546dbfc79b9d3982b501252a8 | refs/heads/master | 2020-09-29T04:58:03.114023 | 2019-12-09T20:07:08 | 2019-12-09T20:07:08 | 226,957,427 | 0 | 0 | Apache-2.0 | 2019-12-09T20:06:16 | 2019-12-09T20:06:15 | null | UTF-8 | Python | false | false | 2,428 | py | import logging
import unittest
import anchore.cli.logs
import anchore.cli.common
import anchore.util
class TestLogging (unittest.TestCase):
@staticmethod
def do_generic(some_logger, name=None):
assert isinstance(some_logger, logging.Logger)
some_logger.debug('debug message - ' + name)
some_logger.info('info message - ' + name)
some_logger.warn('warn message - ' + name)
some_logger.error('error message - ' + name)
try:
raise KeyError('Some key not found')
except KeyError:
some_logger.exception('Some exception caught - ' + name)
@staticmethod
def do_anchore_logging():
print '--ANCHORE LOGGER'
anchore_logger = logging.getLogger('anchore')
TestLogging.do_generic(anchore_logger, 'anchore')
@staticmethod
def do_non_anchore_logging():
print '--NON-ANCHORE LOGGER'
rand_logger = logging.getLogger('somepackage.somemodule')
TestLogging.do_generic(rand_logger, 'non-anchore')
@staticmethod
def reset_logging_config():
logging.root.setLevel('NOTSET')
for f in logging.root.filters:
logging.root.filters.remove(f)
for f in logging.root.handlers:
print 'Removing handler %s' % str(f)
logging.root.handlers.remove(f)
def test_quiet(self):
print '--STARTING TEST: quiet'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='quiet')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
def test_normal(self):
print '--STARTING TEST: normal'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='normal')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
def test_verbose(self):
print '--STARTING TEST: verbose'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='verbose')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
def test_debug(self):
print '--STARTING TEST: debug'
TestLogging.reset_logging_config()
anchore.cli.logs.init_output_formatters(output_verbosity='debug')
TestLogging.do_anchore_logging()
TestLogging.do_non_anchore_logging()
| [
"nurmi@anchore.com"
] | nurmi@anchore.com |
630a17eceb74a3892bd59ab00b61f09ff63f75c5 | 949ebd7bc2ab1526b3d535def4c90c80fab907f0 | /Decision_Tree_Classification/decision_tree_classification_f1score.py | 543282a5320bd6834cdfb946ee193307187f8799 | [] | no_license | mbhushan/ml | 1c5c0d79f56dbc374f5163a032900da14ca5bc58 | 89441760c489bb265339bcdcbe975888686fc8a5 | refs/heads/master | 2021-05-15T05:31:47.801454 | 2018-05-12T17:34:23 | 2018-05-12T17:34:23 | 116,192,180 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,358 | py | # Decision Tree Classification
# Importing the libraries
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
# Imported once here (it was previously imported twice, before each plot).
from matplotlib.colors import ListedColormap

# Importing the dataset: features in columns 2-3 (plotted below as Age and
# Estimated Salary), class label in column 4.
dataset = pd.read_csv('Social_Network_Ads.csv')
X = dataset.iloc[:, [2, 3]].values
y = dataset.iloc[:, 4].values

# Splitting the dataset into the Training set and Test set
from sklearn.cross_validation import train_test_split
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size = 0.20, random_state = 0)

# Feature Scaling: fit on the training set only, then apply the same
# transform to the test set.
from sklearn.preprocessing import StandardScaler
sc = StandardScaler()
X_train = sc.fit_transform(X_train)
X_test = sc.transform(X_test)

# Fitting Decision Tree Classification to the Training set
from sklearn.tree import DecisionTreeClassifier
classifier = DecisionTreeClassifier(criterion = 'entropy', min_samples_split=25,
                                    random_state = 1, max_depth=3,
                                    min_samples_leaf=5, splitter='best')
classifier.fit(X_train, y_train)

# Predicting the Test set results
y_pred = classifier.predict(X_test)

# Confusion matrix plus binary precision / recall / F-score on the test set.
from sklearn.metrics import confusion_matrix
from sklearn.metrics import precision_recall_fscore_support
cm = confusion_matrix(y_test, y_pred)
tn, fp, fn, tp = cm.ravel()
print ('TN: %s, FP: %s, FN: %s, TP: %s' %(tn, fp, fn, tp))

precision, recall, fscore, support = precision_recall_fscore_support(y_test, y_pred, average='binary')
print ('fscore: %s' % (fscore))
print ('precision: %s' % (precision))
print ('recall: %s' % (recall))


def _plot_decision_regions(X_set, y_set, title):
    """Shade the classifier's decision regions and overlay the given points.

    Factored out of two nearly identical copy-pasted blocks (training and
    test visualisation); uses the module-level `classifier`.
    """
    # Dense grid covering the (scaled) feature space, step 0.01.
    X1, X2 = np.meshgrid(np.arange(start = X_set[:, 0].min() - 1, stop = X_set[:, 0].max() + 1, step = 0.01),
                         np.arange(start = X_set[:, 1].min() - 1, stop = X_set[:, 1].max() + 1, step = 0.01))
    plt.contourf(X1, X2, classifier.predict(np.array([X1.ravel(), X2.ravel()]).T).reshape(X1.shape),
                 alpha = 0.75, cmap = ListedColormap(('red', 'green')))
    plt.xlim(X1.min(), X1.max())
    plt.ylim(X2.min(), X2.max())
    for i, j in enumerate(np.unique(y_set)):
        plt.scatter(X_set[y_set == j, 0], X_set[y_set == j, 1],
                    c = ListedColormap(('red', 'green'))(i), label = j)
    plt.title(title)
    plt.xlabel('Age')
    plt.ylabel('Estimated Salary')
    plt.legend()
    plt.show()


# Visualising the Training set results
_plot_decision_regions(X_train, y_train, 'Decision Tree Classification (Training set)')

# Visualising the Test set results
_plot_decision_regions(X_test, y_test, 'Decision Tree Classification (Test set)')
"manibhushan.cs@gmail.com"
] | manibhushan.cs@gmail.com |
8c049759037ec8218f98e394232db59d2162c027 | f79c7d8e68f520ccf27db7139dcdf7ade54d37d6 | /zerebralweb/goals/models.py | ec1233be4414a9835c09ba45eaa4bead5742cd73 | [] | no_license | JackMcGrath/Zerebral-Rewards | 84385693f9a34a30b98abca1908ff7dea7776a13 | cf0044985a43e7801f8b6ff9e80873d2a63b7473 | refs/heads/master | 2020-04-10T04:00:24.988277 | 2013-09-11T18:41:45 | 2013-09-11T18:41:45 | 11,531,890 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 507 | py | from django.db import models
from badges.models import Badge
class Goal(models.Model):
    """A goal that costs points and may be associated with a badge."""

    # Short display name of the goal.
    name = models.CharField(max_length=100)
    # Longer human-readable description.
    description = models.CharField(max_length=500)
    # Number of points required for this goal.
    point_cost = models.DecimalField(max_digits=15, decimal_places=2)
    # can this goal be earned?
    active = models.BooleanField(default=False)
    # the badge associated with earning this goal
    badge = models.ForeignKey(Badge, blank=True, null=True)

    def __unicode__(self):
        # Python 2 / Django 1.x string representation (admin display).
        return unicode(self.name)
"ericb@ericbarch.com"
] | ericb@ericbarch.com |
8e47ffdb9a7ff9e64f74027eb4f68986a2bc1daa | 712a0540264285214915e9fbd11c1d569622388b | /src/testFindAppAPI.py | 49db3cefd7f05cfccee98fc8ba38854769aaed50 | [] | no_license | helloluzhengfei/unitestAPI | ebd7c940aaa78024d538b2aee02609562588e225 | 4980104a260eefb02bc109e73a1540f8dcf3b54d | refs/heads/master | 2021-01-11T20:54:02.137663 | 2017-01-18T07:25:47 | 2017-01-18T07:25:47 | 79,206,416 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 237 | py | #coding=GBK
import unittest
from testFindApp import testFindApp

if __name__ == '__main__':
    # Run only the testFindApp case with verbose (verbosity=2) output,
    # instead of the default unittest.main() discovery.
    #unittest.main()
    suite = unittest.TestLoader().loadTestsFromTestCase(testFindApp)
    unittest.TextTestRunner(verbosity=2).run(suite)
| [
"1101113045@qq.com"
] | 1101113045@qq.com |
6b88975e6ef58157f6152dccba99b16643749118 | f49fbc09fce8c8a9e425ecca3695549330ad5ca0 | /vampire/praha_vfn.py | 9599e51f7bb55d01656f00d2294ea51505bf251f | [
"Unlicense",
"LicenseRef-scancode-public-domain"
] | permissive | honzasp/vampire | 716f42c205c91b9bee86c94addf8646d1a95fe75 | 752b9dea10626fd3f85d9139aff846e00ce7108c | refs/heads/master | 2021-12-14T13:53:34.295885 | 2021-01-07T23:26:32 | 2021-01-10T07:32:51 | 248,964,139 | 0 | 0 | Unlicense | 2021-12-13T20:35:13 | 2020-03-21T11:41:51 | Python | UTF-8 | Python | false | false | 907 | py | import re
from .helpers import get_html, inner_text, BLOOD_TEXT_TO_TYPE
UUID = "d052dc10eb314a96399c574fce480165"
SHORT_ID = "praha_vfn"
URL = "https://www.vfn.cz/pacienti/kliniky-ustavy/" \
"fakultni-transfuzni-oddeleni/aktualni-potreba-krve/"
NAME = "Všeobecná fakultní nemocnice v Praze"
COLOR_RE = re.compile(r'background-color:\s*#([0-9a-f]{6})')
BLOOD_COLOR_TO_STATUS = {
"32ff33": "urgent",
"21a900": "normal",
"fa0106": "full",
}
async def scrape(client):
doc = await get_html(client, URL)
blood_statuses = {}
table = doc.cssselect("#idobsahu>table")[0]
for td in table.cssselect("td"):
type_text = inner_text(td).strip()
blood_type = BLOOD_TEXT_TO_TYPE[type_text]
color = COLOR_RE.search(td.get("style"))[1]
blood_status = BLOOD_COLOR_TO_STATUS[color]
blood_statuses[blood_type] = blood_status
return blood_statuses
| [
"patek.mail@gmail.com"
] | patek.mail@gmail.com |
abf95e0dd4723e91a880d13b55defe3ee4660537 | 241579b4dba7e87bb5c3766da59b9f7bc796711e | /my_answers.py | 95736b80d4c7f236b394940a2d4c8e70b36a1e9a | [] | no_license | jmuras/bike-sharing | beac76075157391e9db352165a61f71d00513079 | e4ec3dac9488710be3efc21eee169d0ee424279f | refs/heads/master | 2021-01-04T22:11:37.182253 | 2020-02-15T20:07:46 | 2020-02-15T20:07:46 | 240,780,021 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 5,982 | py | import numpy as np
class NeuralNetwork(object):
def __init__(self, input_nodes, hidden_nodes, output_nodes, learning_rate):
# Set number of nodes in input, hidden and output layers.
self.input_nodes = input_nodes
self.hidden_nodes = hidden_nodes
self.output_nodes = output_nodes
# Initialize weights
self.weights_input_to_hidden = np.random.normal(0.0, self.input_nodes**-0.5,
(self.input_nodes, self.hidden_nodes))
self.weights_hidden_to_output = np.random.normal(0.0, self.hidden_nodes**-0.5,
(self.hidden_nodes, self.output_nodes))
self.lr = learning_rate
#### TODO: Set self.activation_function to your implemented sigmoid function ####
#
# Note: in Python, you can define a function with a lambda expression,
# as shown below.
self.activation_function = lambda x : 1 / (1 + np.exp(-x)) # Replace 0 with your sigmoid calculation.
### If the lambda code above is not something you're familiar with,
# You can uncomment out the following three lines and put your
# implementation there instead.
#
#def sigmoid(x):
# return 0 # Replace 0 with your sigmoid calculation here
#self.activation_function = sigmoid
def train(self, features, targets):
''' Train the network on batch of features and targets.
Arguments
---------
features: 2D array, each row is one data record, each column is a feature
targets: 1D array of target values
'''
n_records = features.shape[0]
delta_weights_i_h = np.zeros(self.weights_input_to_hidden.shape)
delta_weights_h_o = np.zeros(self.weights_hidden_to_output.shape)
for X, y in zip(features, targets):
final_outputs, hidden_outputs = self.forward_pass_train(X) # Implement the forward pass function below
# Implement the backproagation function below
delta_weights_i_h, delta_weights_h_o = self.backpropagation(final_outputs, hidden_outputs, X, y,
delta_weights_i_h, delta_weights_h_o)
self.update_weights(delta_weights_i_h, delta_weights_h_o, n_records)
def forward_pass_train(self, X):
''' Implement forward pass here
Arguments
---------
X: features batch
'''
#### Implement the forward pass here ####
### Forward pass ###
# TODO: Hidden layer - Replace these values with your calculations.
hidden_inputs = np.dot(X, self.weights_input_to_hidden)
hidden_outputs = self.activation_function(hidden_inputs)
# TODO: Output layer - Replace these values with your calculations.
final_inputs = np.dot(hidden_outputs, self.weights_hidden_to_output)
final_outputs = final_inputs
return final_outputs, hidden_outputs
def backpropagation(self, final_outputs, hidden_outputs, X, y, delta_weights_i_h, delta_weights_h_o):
''' Implement backpropagation
Arguments
---------
final_outputs: output from forward pass
y: target (i.e. label) batch
delta_weights_i_h: change in weights from input to hidden layers
delta_weights_h_o: change in weights from hidden to output layers
'''
#### Implement the backward pass here ####
### Backward pass ###
# TODO: Output error - Replace this value with your calculations.
output_error = y - final_outputs
# TODO: Backpropagated error terms - Replace these values with your calculations.
output_error_term = output_error * 1
# TODO: Calculate the hidden layer's contribution to the error
hidden_error = np.dot(output_error, self.weights_hidden_to_output.T)
hidden_error_term = hidden_error * hidden_outputs * (1 - hidden_outputs)
# Weight step (hidden to output)
delta_weights_h_o += output_error_term * hidden_outputs[:, None]
# Weight step (input to hidden)
delta_weights_i_h += hidden_error_term * X[:, None]
return delta_weights_i_h, delta_weights_h_o
def update_weights(self, delta_weights_i_h, delta_weights_h_o, n_records):
''' Update weights on gradient descent step
Arguments
---------
delta_weights_i_h: change in weights from input to hidden layers
delta_weights_h_o: change in weights from hidden to output layers
n_records: number of records
'''
self.weights_hidden_to_output += self.lr * delta_weights_h_o / n_records
self.weights_input_to_hidden += self.lr * delta_weights_i_h / n_records
def run(self, features):
''' Run a forward pass through the network with input features
Arguments
---------
features: 1D array of feature values
'''
#### Implement the forward pass here ####
# TODO: Hidden layer - Replace these values with your calculations.
hidden_inputs = np.dot(features, self.weights_input_to_hidden)
hidden_outputs = self.activation_function(hidden_inputs)
# TODO: Output layer - Replace these values with your calculations.
final_inputs = np.dot(hidden_outputs, self.weights_hidden_to_output)
final_outputs = final_inputs
return final_outputs
#########################################################
# Set your hyperparameters here
##########################################################
iterations = 5000
learning_rate = 0.5
hidden_nodes = 10
output_nodes = 1
| [
"noreply@github.com"
] | noreply@github.com |
f2828219a52b0c680cb628ac7cd9cf04db168ac8 | 91654c7365c42424b57d5c7304f0f7f944cb1ea7 | /ballena_custodio/app6.py | 03d97957974533c70888cc863447cac61d2a0746 | [] | no_license | Antonio2401/t09_ballena.ramos | 8d9824a9e1fbd5828f34299aa91759ee2fcd8fb5 | 966696339446abba4eb7774ebb06827f36c4d14e | refs/heads/master | 2020-11-27T03:30:13.230560 | 2019-12-20T15:34:21 | 2019-12-20T15:34:21 | 229,287,697 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 139 | py | import libreria
import os
x=int(os.sys.argv[1])
y=int(os.sys.argv[2])
res=libreria.raiz_cuadrada(x,y)
print("la raiz cuadrada es :",res)
| [
"aballenac@unprg.edu.pe"
] | aballenac@unprg.edu.pe |
9a239cb2175eaae59444de0eb0c36e3ab608a4f9 | f6f24b57f4e3f5fdcea8006615d1c59f6d4adb06 | /router/poc/poc-publisher.py | b3d3c932ed69cfaa72bc398c210453654817f1a5 | [] | no_license | jeffmurphy/cif-v2 | 542640a59462193fa1ecda887b6391e98d8e57be | 09bd6694688cfb6c962d9adb97f564eea97097a7 | refs/heads/master | 2021-01-10T20:59:40.681466 | 2013-12-31T17:52:12 | 2013-12-31T17:52:12 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,118 | py | #!/usr/bin/python
#
# poc-publisher proof of concept
#
# poc-publisher [-c 5656] [-p 5657] [-r cif-router:5555] [-t #] [-c #] [-m name] [-h]
# -c control port (REQ - for inbound messages)
# -p publisher port (PUB)
# -r cif-router hostname:port
# -t secs between publishing messages (decimal like 0.5 is ok)
# -n number of messages to send (and then quit)
# -m my name
#
# cif-publisher uses the following sockets:
# REP
# for 'control' messages
# SHUTDOWN
# STATS
# PING
# PAUSE
# RESUME
# REQ
# for requesting things
# REGISTER
# IPUBLISH
# XPUB
# for publishing messages
#
# a typical use case:
#
# poc-publisher REQ connects to cif-router's ROUTER
# sends REGISTER message to cif-router
# waits for REGISTERED message
# sends IPUBLISH message to cif-router to indicate we are a publisher
# waits for connections to our XPUB port
# publishes messages via XPUB until control-c
import sys
import zmq
import random
import time
import os
import datetime
import threading
import getopt
import socket
sys.path.append('/opt/cif/lib/cif-protocol/pb-python/gen-py')
import msg_pb2
import feed_pb2
import RFC5070_IODEF_v1_pb2
import MAEC_v2_pb2
import control_pb2
import cifsupport
sys.path.append('/opt/cif/lib')
from CIF.CtrlCommands.Clients import *
from CIF.Foundation import Foundation
def usage():
print "\
# poc-publisher [-c 5656] [-p 5657] [-r cif-router:5555] [-t #] [-c #]\n\
# -c control port (REQ - for inbound messages)\n\
# -p publisher port (PUB)\n\
# -r cif-router hostname:port\n\
# -t secs between publishing messages (decimal like 0.5 is ok)\n\
# -n number of messages to send (and then quit)\n\
# -k apikey\n"
try:
opts, args = getopt.getopt(sys.argv[1:], 'c:p:r:t:m:h')
except getopt.GetoptError, err:
print str(err)
usage()
sys.exit(2)
controlport = "5656"
publisherport = "5657"
cifrouter = "sdev.nickelsoft.com:5555"
sleeptime = 1.0
count = -1
myid = "poc-publisher"
apikey = "c31ebb50-18df-4f47-8ec9-3f7ff778114a"
for o, a in opts:
if o == "-c":
controlport = a
elif o == "-k":
apikey = a
elif o == "-p":
publisherport = a
elif o == "-m":
myid = a
elif o == "-r":
cifrouter = a
elif o == "-t":
sleeptime = float(a)
elif o == "-n":
count = int(a)
if count > 0:
count -= 1
elif o == "-h":
usage()
sys.exit(2)
myip = socket.gethostbyname(socket.gethostname()) # has caveats
global cf
cf = Foundation({'apikey' : apikey,
'myip' : myip,
'cifrouter' : cifrouter,
'controlport' : controlport,
'publisherport' : publisherport,
'myid' : myid,
'routerid' : "cif-router"
})
try:
print "Register with " + cifrouter + " (req->rep)"
cf.ctrlsocket()
(routerport, routerpubport) = cf.register()
publisher = cf.publishsocket()
cf.ipublish()
time.sleep(1) # wait for router to connect, sort of lame but see this a lot in zmq code
hasMore = True
while hasMore:
sys.stdout.write ("[forever]" if (count == -1) else str(count))
msg = msg_pb2.MessageType()
msg.version = msg.version # required
msg.apikey = apikey
msg.guid = '123456-abcdef'
msg.type = msg_pb2.MessageType.SUBMISSION
maec = MAEC_v2_pb2.maecPlaceholder()
maec.msg = "test message: " + str(count) + " " + str(time.time())
sr = msg.submissionRequest.add()
sr.baseObjectType = 'MAEC_v2'
sr.data = maec.SerializeToString()
print " publishing a message: ", maec.msg
publisher.send(msg.SerializeToString())
time.sleep(sleeptime)
if count == 0:
hasMore = False
elif count > 0:
count = count - 1
cf.unregister()
except KeyboardInterrupt:
cf.ctrlc()
| [
"jcmurphy@jeffmurphy.org"
] | jcmurphy@jeffmurphy.org |
112d2d8320692eba6ef70e6342254ab8abb37bd3 | 875921eb2b486923cfef0c2af249e8f456bdf0c9 | /config.py | 71df786a370703f818458fa7a87ac6837cb8727c | [
"Apache-2.0"
] | permissive | baozixifan/ASRFrame | c0d3d477409b0e262fbf760860c6c7b6ddd59caf | 307596dc729f7611b270b9f6d279fefa05ef488d | refs/heads/master | 2020-06-23T07:06:07.973172 | 2019-07-18T16:26:07 | 2019-07-18T16:26:07 | 198,550,805 | 1 | 0 | null | 2019-07-24T03:23:48 | 2019-07-24T03:23:47 | null | UTF-8 | Python | false | false | 1,964 | py | import platform
import os
project_path = os.path.split(os.path.realpath(__file__))[0] #
thu_datapath = None # 目录下应该有data/ dev/ 等目录
z200_datapath = None # 目录下应该有一大堆G../格式的目录
aishell_datapath = None # 目录下应有wav/和transcript/两个目录
prime_datapath = None # 目录下应有一个json文件和一个目录
stcmd_datapath = None # 目录下应该直接是音频文件
wiki_datapath = None
if platform.system() == "Linux":
thu_datapath = "/data/voicerec/thchs30/data_thchs30"
z200_datapath = "/data/voicerec/z200"
aishell_datapath = "/data/voicerec/ALShell-1/data_aishell"
prime_datapath = "/data/voicerec/Primewords Chinese Corpus Set 1/primewords_md_2018_set1"
stcmd_datapath = "/data/voicerec/Free ST Chinese Mandarin Corpus/ST-CMDS-20170001_1-OS"
wiki_datapath = "/data/voicerec/wiki/wiki_corpus_2"
elif platform.system() == "Windows":
thu_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\thchs30"
z200_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\z200"
aishell_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\data_aishell"
prime_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\primewords_md_2018_set1"
stcmd_datapath = r"C:\E\jupyter_notebook\voice_reco\Dataset\ST-CMDS-20170001_1-OS"
model_dir = os.path.join(project_path,"model") # ./model
dict_dir = os.path.join(project_path,"util","dicts") #./util/dicts
acoustic_model_dir = os.path.join(model_dir, "acoustic") # ./acoustic
language_model_dir = os.path.join(model_dir, "language") # ./language
loss_dir = "./loss_plot/"
acoustic_loss_dir = os.path.join(loss_dir,"acoustic") # ./loss_plot/acoustic
language_loss_dir = os.path.join(loss_dir,"language") # ./loss_plot/language
join_model_path = lambda x:os.path.join(model_dir, x)
chs_dict_path = os.path.join(dict_dir,"pure_chs.txt") # ./util/dicts/...
py_dict_path = os.path.join(dict_dir,"pure_py.txt") # ./util/dicts/... | [
"sailist@outlook.com"
] | sailist@outlook.com |
3f1fb4bbc6d2b5185a78deac4d61e6f99e71d675 | 6023b767f104d51288fed3e872777f4b974f083d | /4948.py | a1c036c2b0386239e0cdf05b50457a56ce356411 | [] | no_license | andyjung2104/BOJ-source-codes | daff7b804469473b898fb58500a1c8cff542b52e | a97b5e2b4bfb6d26fac67bb94d3d259836d875d3 | refs/heads/main | 2023-07-19T02:30:13.806978 | 2021-09-13T01:03:06 | 2021-09-13T01:03:06 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 462 | py | import sys
def pri(n):
sieve=[True]*n
m=int(n**.5)
for j in range(4,n,2):
sieve[j]=False
for i in range(3,m+1,2):
if sieve[i]:
for j in range(2*i,n,i):
sieve[j]=False
sieve[0]=False
sieve[1]=False
return sieve
a=[]
while True:
n=int(sys.stdin.readline())
if n==0:break
a.append(n)
s=pri(2*max(a)+1)
for u in a:
print(s[(u+1):(2*u+1)].count(True))
| [
"noreply@github.com"
] | noreply@github.com |
d5d172753e17d1517f9584944e14c6b55bd48f89 | 69aebd774103a7eccbd2a175002f3dda15734c5e | /result/server.py | e74460e3e49f79a49d096dc71c8dccff2a15e4b2 | [] | no_license | AimeTPGM/BackendAPIsGenerator | b90c855ba852325e157837fe8da0f635dff3222c | 6843894e535c53cd92750c31b202922e5239d50a | refs/heads/master | 2021-01-02T22:49:22.821832 | 2017-10-11T15:54:42 | 2017-10-11T15:54:42 | 99,402,976 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 341 | py | from flask import Flask
app = Flask(__name__)
##########
# Description:
# Req: get
# Params:
# Return:
##########
@app.route("/test")
def test()
return
##########
# Description:
# Req: post
# Params:
# Return:
##########
@app.route("/lol")
def lol()
return
if __name__ == "__main__":
app.run(host='0.0.0.0',port = 3000, debug=True) | [
"aim.ske09@gmail.com"
] | aim.ske09@gmail.com |
f66ea93a627b6da3eb08a72f0648757437501aaf | b06497b1d7cb19c03572e48a71b663185134b41b | /電子辭典client.py | d5b5ccfe516abdd9b45a9c6f77b95104addcaf43 | [] | no_license | wangdizzy/dictionary | b88f0e77adbbe1e96cd2ea77ac5ee1279eecdc57 | e64f43532b38af04357a4caac8e3d9aa30fe5e80 | refs/heads/master | 2020-04-17T11:11:57.153416 | 2019-01-19T09:54:08 | 2019-01-19T09:54:08 | 166,531,368 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,511 | py | import socket
import sys
import os
import signal
import time
import pymysql
#發送連接請求
def main():
HOST = sys.argv[1]
PORT = int(sys.argv[2])
s = socket.socket()
s.connect((HOST,PORT))
while True:
print('''
============歡迎使用============
| 1.註冊 2.登入 3.退出 |
================================
''')
try:
cmd = int(input('輸入選項:'))
except:
print('請請入1,2,3其中一個')
continue
if cmd not in [1,2,3]:
print('輸入有誤')
sys.stdin.flush()#清空緩存
continue
if cmd == 1:
if do_register(s) == 0:
print('註冊成功')
else:
print('註冊失敗')
elif cmd == 2:
name = do_login(s)
if name == -1:
print('登入失敗')
else:
print('登入成功')
login(s,name)
elif cmd == 3:
do_quit(s)
#發送註冊請求
def do_register(s):
while True:
name = input('用戶名:')
passwd = input('密碼:')
passwd1 = input('重新輸入密碼:')
if passwd != passwd1:
print('密碼不一致')
continue
msg = 'R %s %s'%(name,passwd)
s.send(msg.encode())
data = s.recv(128).decode()
if data == 'OK':
return 0
elif data == 'Fall':
print('用戶名存在')
return -1
else:
return -1
#用戶退出
def do_quit(s):
s.send('Q'.encode())
s.close()
sys.exit(0)
#二級界面
def login(s,name):
while True:
print('''
============歡迎使用==============
| 1.查單字 2.歷史紀錄 3.退出 |
==================================
''')
try:
cmd = int(input('輸入選項:'))
except:
print('請請入1,2,3其中一個')
continue
if cmd not in [1, 2,3]:
print('輸入有誤')
sys.stdin.flush() # 清空緩存
continue
if cmd == 1:
do_query(s,name)
elif cmd == 2:
do_history(s,name)
elif cmd == 3:
break
#登入請求
def do_login(s):
name = input('輸入用戶名:')
passwd = input('輸入密碼:')
msg = 'L %s %s'%(name,passwd)
s.send(msg.encode())
data = s.recv(128).decode()
if data == 'OK':
return name
else:
return -1
#循環發送查詢請求
def do_query(s,name):
while True:
word = input('查詢單字:')
if word == '#':
break
msg = 'S %s %s'%(name,word)
s.send(msg.encode())
data = s.recv(128).decode()
if data == 'OK':
data = s.recv(2048).decode()
if data == 'not found':
print('無此單字解釋')
else:
print(data)
else:
print('失敗')
#發送查看紀錄請求
def do_history(s,name):
msg = 'H %s'%(name)
s.send(msg.encode())
data = s.recv(128).decode()
if data == 'OK':
while True:
data = s.recv(1024).decode()
if data == '##':
break
print(data)
else:
print('查看失敗')
if __name__ =='__main__':
main() | [
"kw65045566@gmail.com"
] | kw65045566@gmail.com |
8cdb24abd262545c7c71d44a9a66849c7c8fa418 | 8142f6ef075773be82e34d181d89ce07e6547139 | /api/database/migrations/0008_comment.py | 5054f0f1aa5f92463295cbfd9975df9b191c18ac | [] | no_license | Dazhuzhu-github/ASTA_website | 686e532681b9d4a332e152b728ead6fc758a932b | 0de1914f1b3a5b274860e52e34ac5170f63c4088 | refs/heads/master | 2023-03-23T00:18:32.706136 | 2020-10-11T16:41:42 | 2020-10-11T16:41:42 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 925 | py | # Generated by Django 3.0.2 on 2020-08-12 13:08
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('database', '0007_auto_20200812_1703'),
]
operations = [
migrations.CreateModel(
name='Comment',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('content', models.CharField(default='', max_length=1024)),
('timestamp', models.DateTimeField(auto_now_add=True)),
('author', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to=settings.AUTH_USER_MODEL)),
('blog', models.ForeignKey(null=True, on_delete=django.db.models.deletion.SET_NULL, to='database.Blog')),
],
),
]
| [
"suikasxt@gmail.com"
] | suikasxt@gmail.com |
66e136cb864afc7b488c438d5291b80ea8ba357f | 1ba81f525d5e743dec31b9f8d9ffa29d59db38db | /manage.py | 80655c30fd495f7c62b912dcb614d1289a2ea543 | [] | no_license | apricotdonut/ConUHacks-text-recognition-server | 2dda327d12d8ac25a22e9957e2bd3d8661122040 | 425a80bbfdb50da6c38711cc6feb6fe38266e76d | refs/heads/master | 2022-12-25T01:26:34.931228 | 2020-01-26T18:54:54 | 2020-01-26T18:54:54 | 236,197,189 | 0 | 2 | null | 2022-12-11T21:59:15 | 2020-01-25T16:37:52 | Python | UTF-8 | Python | false | false | 628 | py | #!/usr/bin/env python
"""Django's command-line utility for administrative tasks."""
import os
import sys
def main():
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'NLServer.settings')
try:
from django.core.management import execute_from_command_line
except ImportError as exc:
raise ImportError(
"Couldn't import Django. Are you sure it's installed and "
"available on your PYTHONPATH environment variable? Did you "
"forget to activate a virtual environment?"
) from exc
execute_from_command_line(sys.argv)
if __name__ == '__main__':
main()
| [
"robbie.knowles@ucc.on.ca"
] | robbie.knowles@ucc.on.ca |
7c7f7d2371dc7a44fc32612820e0a7fc07035ce6 | b068787bc14c2963a9035abdc17850b6d656f59c | /lib_communication/testCommunication.py | f34d2495b7a789f08e96a67cda2ef1f4d192cd5e | [] | no_license | dariodotti/ICT4LIFE_ABD_indoor | b1f8f0b57b0c0fddf7d17d80db4be9b97f8c2158 | c1a32a5eeb5e00ac22121ae87eac540eb58319fb | refs/heads/master | 2021-07-09T15:49:49.738915 | 2020-09-19T10:56:15 | 2020-09-19T10:56:15 | 74,750,804 | 0 | 1 | null | 2018-07-02T08:56:10 | 2016-11-25T10:54:51 | Python | UTF-8 | Python | false | false | 444 | py | from communication.communication import Communication
from communication.domain.messageValueObject import MessageVO
if __name__ == '__main__':
try:
message_content = MessageVO(title='test', text="prueba envio", uuid_to="d20d7fc0-c0eb-4d49-8551-745bc149594e",
priority="HIGH")
com = Communication(message_content)
com.send
except Exception as e:
print "ERROR "+ e.message
| [
"dario.dotti@maastrichtuniversity.nl"
] | dario.dotti@maastrichtuniversity.nl |
95538e7aef91f2c0da0aabdfe2f36a93b27b9b60 | d363f11d8e5e640b365648ad544b540eb4386394 | /backend/src/people/migrations/0001_initial.py | f73b798f2fead2cc464cf61f0d237de000090661 | [] | no_license | mlytvynchuk/djreact | 0c2bd4c5bfb9fc0b1004d87121ef75b22931c878 | 0c52b08b7b31ed584e2326f2de96507df3cf1fd0 | refs/heads/master | 2020-05-05T11:50:59.016886 | 2019-04-07T18:32:51 | 2019-04-07T18:32:51 | 180,006,075 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,125 | py | # Generated by Django 2.2 on 2019-04-06 08:39
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Group',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('name', models.CharField(max_length=100)),
('is_video', models.BooleanField(default=False)),
('is_data_analytics', models.BooleanField(default=False)),
('is_voice_analytics', models.BooleanField(default=False)),
('is_services_analytics', models.BooleanField(default=False)),
],
),
migrations.CreateModel(
name='Person',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('username', models.CharField(max_length=50)),
('email', models.EmailField(max_length=100)),
],
),
]
| [
"lytvynchuk.maksym@gmail.com"
] | lytvynchuk.maksym@gmail.com |
1ecd30e9c26252e69351525d93da360caa3eaf0b | 533e773696d8cc09680eaf4c0ae6bc7cc6e7eb08 | /store/models.py | 0627c046cfad1c100c5a8998bf78991d1c120f92 | [] | no_license | pdave1573/Eshop | 2885cce3d0f157d4e9e35ee2cc911d97cbbab0ed | 788a3aab039adfcb2bdb0f9683435f382993e2e1 | refs/heads/master | 2020-04-06T03:59:49.769858 | 2017-02-24T19:40:40 | 2017-02-24T19:40:40 | 83,078,084 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 467 | py | from __future__ import unicode_literals
from django.db import models
from django.utils import timezone
# Create your models here.
class Book(models.Model):
title = models.CharField(max_length=200)
author = models.CharField(max_length=200)
description = models.TextField()
publish_date = models.DateField(default=timezone.now)
price = models.DecimalField(decimal_places=2, max_digits=8, default=0.00)
quantity = models.IntegerField(default=0) | [
"pdave1573@gmail.com"
] | pdave1573@gmail.com |
4c85a50a5aa40d7a5fff6322568792ac5f65f8a3 | 48b1feba376f24ca2e52ba0172a89cb69af852af | /hello.py | c49c730400d61888ec2cbc389dedb4fb0981c71e | [] | no_license | priyankasathiyaseelan/priyaa | 0bb205fd2e210ecd95557b51e1a70577a7c76f62 | 588d7fc47051b6a6f33238e629b5fcb988f41d77 | refs/heads/master | 2020-07-02T03:21:27.289532 | 2019-08-19T06:03:02 | 2019-08-19T06:03:02 | 201,400,250 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 57 | py | n=int(input(" "))
for i in range(n):
print ("Hello")
| [
"noreply@github.com"
] | noreply@github.com |
7cae145eeb1765e1dc1249a7c25c4f9b5a5a80c0 | 2612f336d667a087823234daf946f09b40d8ca3d | /python/lib/Lib/site-packages/django/utils/formats.py | c23a37cb2b51c0f4fca23725608c0e1326cc71ee | [
"Apache-2.0"
] | permissive | tnorbye/intellij-community | df7f181861fc5c551c02c73df3b00b70ab2dd589 | f01cf262fc196bf4dbb99e20cd937dee3705a7b6 | refs/heads/master | 2021-04-06T06:57:57.974599 | 2018-03-13T17:37:00 | 2018-03-13T17:37:00 | 125,079,130 | 2 | 0 | Apache-2.0 | 2018-03-13T16:09:41 | 2018-03-13T16:09:41 | null | UTF-8 | Python | false | false | 6,513 | py | import decimal
import datetime
from django.conf import settings
from django.utils.translation import get_language, to_locale, check_for_language
from django.utils.importlib import import_module
from django.utils.encoding import smart_str
from django.utils import dateformat, numberformat, datetime_safe
from django.utils.safestring import mark_safe
# format_cache is a mapping from (format_type, lang) to the format string.
# By using the cache, it is possible to avoid running get_format_modules
# repeatedly.
_format_cache = {}
_format_modules_cache = {}
def iter_format_modules(lang):
"""
Does the heavy lifting of finding format modules.
"""
if check_for_language(lang) or settings.USE_L10N:
format_locations = ['django.conf.locale.%s']
if settings.FORMAT_MODULE_PATH:
format_locations.append(settings.FORMAT_MODULE_PATH + '.%s')
format_locations.reverse()
locale = to_locale(lang)
locales = set((locale, locale.split('_')[0]))
for location in format_locations:
for loc in locales:
try:
yield import_module('.formats', location % loc)
except ImportError:
pass
def get_format_modules(reverse=False):
"""
Returns an iterator over the format modules found
"""
lang = get_language()
modules = _format_modules_cache.setdefault(lang, list(iter_format_modules(lang)))
if reverse:
modules.reverse()
return modules
def get_format(format_type, lang=None, use_l10n=None):
"""
For a specific format type, returns the format for the current
language (locale), defaults to the format in the settings.
format_type is the name of the format, e.g. 'DATE_FORMAT'
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
format_type = smart_str(format_type)
if use_l10n or (use_l10n is None and settings.USE_L10N):
if lang is None:
lang = get_language()
cache_key = (format_type, lang)
try:
return _format_cache[cache_key] or getattr(settings, format_type)
except KeyError:
for module in get_format_modules():
try:
val = getattr(module, format_type)
_format_cache[cache_key] = val
return val
except AttributeError:
pass
_format_cache[cache_key] = None
return getattr(settings, format_type)
def date_format(value, format=None, use_l10n=None):
"""
Formats a datetime.date or datetime.datetime object using a
localizable format
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
return dateformat.format(value, get_format(format or 'DATE_FORMAT', use_l10n=use_l10n))
def time_format(value, format=None, use_l10n=None):
"""
Formats a datetime.time object using a localizable format
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
return dateformat.time_format(value, get_format(format or 'TIME_FORMAT', use_l10n=use_l10n))
def number_format(value, decimal_pos=None, use_l10n=None):
"""
Formats a numeric value using localization settings
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
if use_l10n or (use_l10n is None and settings.USE_L10N):
lang = get_language()
else:
lang = None
return numberformat.format(
value,
get_format('DECIMAL_SEPARATOR', lang, use_l10n=use_l10n),
decimal_pos,
get_format('NUMBER_GROUPING', lang, use_l10n=use_l10n),
get_format('THOUSAND_SEPARATOR', lang, use_l10n=use_l10n),
)
def localize(value, use_l10n=None):
"""
Checks if value is a localizable type (date, number...) and returns it
formatted as a string using current locale format.
If use_l10n is provided and is not None, that will force the value to
be localized (or not), overriding the value of settings.USE_L10N.
"""
if isinstance(value, bool):
return mark_safe(unicode(value))
elif isinstance(value, (decimal.Decimal, float, int, long)):
return number_format(value, use_l10n=use_l10n)
elif isinstance(value, datetime.datetime):
return date_format(value, 'DATETIME_FORMAT', use_l10n=use_l10n)
elif isinstance(value, datetime.date):
return date_format(value, use_l10n=use_l10n)
elif isinstance(value, datetime.time):
return time_format(value, 'TIME_FORMAT', use_l10n=use_l10n)
else:
return value
def localize_input(value, default=None):
"""
Checks if an input value is a localizable type and returns it
formatted with the appropriate formatting string of the current locale.
"""
if isinstance(value, (decimal.Decimal, float, int, long)):
return number_format(value)
elif isinstance(value, datetime.datetime):
value = datetime_safe.new_datetime(value)
format = smart_str(default or get_format('DATETIME_INPUT_FORMATS')[0])
return value.strftime(format)
elif isinstance(value, datetime.date):
value = datetime_safe.new_date(value)
format = smart_str(default or get_format('DATE_INPUT_FORMATS')[0])
return value.strftime(format)
elif isinstance(value, datetime.time):
format = smart_str(default or get_format('TIME_INPUT_FORMATS')[0])
return value.strftime(format)
return value
def sanitize_separators(value):
"""
Sanitizes a value according to the current decimal and
thousand separator setting. Used with form field input.
"""
if settings.USE_L10N:
decimal_separator = get_format('DECIMAL_SEPARATOR')
if isinstance(value, basestring):
parts = []
if decimal_separator in value:
value, decimals = value.split(decimal_separator, 1)
parts.append(decimals)
if settings.USE_THOUSAND_SEPARATOR:
parts.append(value.replace(get_format('THOUSAND_SEPARATOR'), ''))
else:
parts.append(value)
value = '.'.join(reversed(parts))
return value
| [
"dmitry.trofimov@jetbrains.com"
] | dmitry.trofimov@jetbrains.com |
a73f8302a9249594d2ed5b77f6688c6768dc5b63 | 6a2b0db7d6c4ecef8434f3b35fcaef71eeb0d896 | /VENV/py3_venv/lib/python3.6/site-packages/pyntc/templates/__init__.py | f9a12282a24b39159158a59ac474ea95c08b289c | [] | no_license | pseudonode/nornircourse | 9bf890ecfadd1a08691f113e0cd2acadd4b9bffa | 1ad0372f9673de784233937cc15779bc2391e267 | refs/heads/master | 2022-11-09T20:18:22.714703 | 2019-10-04T08:06:42 | 2019-10-04T08:06:42 | 211,856,983 | 3 | 2 | null | null | null | null | UTF-8 | Python | false | false | 926 | py | import os
import textfsm
TEMPLATE_PATH_ENV_VAR = "NTC_TEMPLATES"
def get_structured_data(template_name, rawtxt):
"""Returns structured data given raw text using
TextFSM templates
"""
template_file = get_template(template_name)
with open(template_file) as template:
fsm = textfsm.TextFSM(template)
table = fsm.ParseText(rawtxt)
structured_data = []
for row in table:
temp_dict = {}
for index, element in enumerate(row):
temp_dict[fsm.header[index].lower()] = element
structured_data.append(temp_dict)
return structured_data
def get_template(template_name):
template_dir = get_template_dir()
return os.path.join(template_dir, template_name)
def get_template_dir():
try:
return os.environ[TEMPLATE_PATH_ENV_VAR]
except KeyError:
return os.path.realpath(os.path.dirname(__file__))
| [
"andre@recursivenet.com"
] | andre@recursivenet.com |
977922ac36268edcaa041e79fd97eed215a5b6ac | 179577ecdd7fda84ad970b3aad573a575fef56bc | /exercicios/ex034.py | cc2175d2d31399159743980d7251f1a8965d04fb | [] | no_license | Elvis-Lopes/Curso-em-video-Python | 6c12fa17a5c38c722a7c8e9677f6d9596bc5653c | 65f093975af9bd59c8aaa37606ba648b7ba1e1c4 | refs/heads/master | 2021-02-11T12:15:13.580496 | 2020-05-05T21:55:06 | 2020-05-05T21:55:06 | 244,490,886 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 218 | py | sal = float(input('Insira o salário: '))
aumento = float()
if sal > 1250:
aumento = (sal*15)/100
sal = sal + aumento
else:
aumento = (sal*10)/100
sal = sal + aumento
print(f'Novo salario R${sal:.2f}')
| [
"elvislopes1996@hotmail.com"
] | elvislopes1996@hotmail.com |
d97e382378d958796ce6131eaafaefa48e402bd5 | 8bb206d294da3f88cb928ea7b48cba1f13135566 | /SkilledIndia/contractor/migrations/0001_initial.py | 1fe7e724b162dfbfdcade31d05d1c4ab104f2c41 | [] | no_license | aadhityasw/Skilled-India | af3f4117e983938eda2e56a28f0362843d07be7a | 374ee40f308e0db91e56456cc5e8610b78f06ef8 | refs/heads/master | 2020-08-28T19:14:24.045836 | 2020-04-01T10:44:15 | 2020-04-01T10:44:15 | 217,796,096 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 754 | py | # Generated by Django 2.2.1 on 2019-09-20 06:48
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # First migration of the contractor app: creates the Contractor table.
    initial = True
    # Depends on the project app's initial migration because of the FK below.
    dependencies = [
        ('project', '0001_initial'),
    ]
    operations = [
        migrations.CreateModel(
            name='Contractor',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('name', models.CharField(max_length=200)),
                # Monetary budget: up to 10 digits, 2 of them decimals.
                ('budget', models.DecimalField(decimal_places=2, max_digits=10)),
                # Deleting a Project cascades to its Contractors.
                ('proj', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='project.Project')),
            ],
        ),
    ]
| [
""
] | |
ee53ecbdab905a3659c81e6f7d19a1e23fa95ec1 | 332e2c7016bffd65ce8eda9573fcc5d56f8d73cf | /02_monoplex_centralities.py | e144dd376e2321daa74b7026f44ad2f3acb21589 | [] | no_license | shizelong1985/Financial_Contagion_Network | 291a28b2ea8ae0455ab4cea84d6d7e1721dc81cc | c45674cb36667351c75402a5ee853233f3db775e | refs/heads/master | 2023-03-17T06:30:05.758133 | 2020-12-01T20:21:04 | 2020-12-01T20:21:04 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,158 | py | import sys
import os
lib_path = './lib'
sys.path.insert(0, lib_path) ## add lib path to sys path
import igraph
import random
import copy
import scipy.stats
import pandas as pd
import numpy as np
from matplotlib import pylab as plt
from scipy.sparse import linalg as LA
import MultiContagion as mc
# Analysis year; the data file names below are suffixed with it.
year = 2016
import_path = "./data/adjacency_matrix/"
export_path_csv = "./results/csv_structuralmeasurements/"
#import the aggregate adjacency matrix, and make graph with names
aggregate_am = np.genfromtxt(import_path + 'AM4_all_nodes_aggregate' + \
    str(year) + '.csv', delimiter=",")
# Country labels, one per row, in the same order as the matrix rows.
df_names = pd.read_csv(import_path + 'all_country_name4.csv', header=None)
names = list(df_names[0])
# Build a weighted directed graph from the dense adjacency matrix.
aggregate_g = igraph.Graph.Weighted_Adjacency(list(aggregate_am))
aggregate_g.vs["name"] = copy.deepcopy(names)
def order_centrality(centrality_vector, names=names):
    """Rank nodes by a centrality score, highest first.

    Args:
        centrality_vector (numpy array): one score per node.
        names (list of str): node (country) labels, same order as the scores.
    Returns:
        (sorted labels, sorted scores) as two numpy arrays, descending.
    """
    labels = np.array(copy.deepcopy(names))
    # argsort ascending, then reverse for a descending ranking.
    order = np.argsort(centrality_vector)[::-1]
    return labels[order], np.array(centrality_vector)[order]
def save_centrality(y, namefile, namecent, country, cent, path=export_path_csv):
    """Write a two-column CSV "<path><namefile><y>.csv" of ranked scores.

    Args:
        y: year used as a file-name suffix.
        namefile: base name of the output file.
        namecent: header label for the score column.
        country: ordered node labels.
        cent: scores aligned with *country*.
        path: output directory prefix (defaults to the module export path).
    """
    # ``with`` guarantees the handle is closed even if a write fails
    # (the original leaked the handle on an exception).
    with open(path + namefile + str(y) + ".csv", "w") as f:
        f.write("Country," + namecent + "\n")
        for i in range(len(country)):
            f.write(str(country[i]) + "," + str(cent[i]) + "\n")
# HITS hub/authority and PageRank centralities on the aggregate network.
hubs_names, hub_score = order_centrality(np.array(aggregate_g.hub_score(weights=aggregate_g.es["weight"])))
auth_names, auth_score = order_centrality(np.array(aggregate_g.authority_score(weights=aggregate_g.es["weight"])))
pr_names, pr_score = order_centrality(np.array(\
    aggregate_g.personalized_pagerank(weights=aggregate_g.es["weight"])))
save_centrality(year, "Aggregate_hub", "Hub score" , hubs_names, hub_score)
save_centrality(year, "Aggregate_auth", "Authority score" , auth_names, auth_score)
save_centrality(year, "Aggregate_PageRank", "PageRank score" , pr_names, pr_score)
# Same measures on the transposed matrix (all edge directions reversed).
aggregate_gT = igraph.Graph.Weighted_Adjacency(list(np.transpose(aggregate_am)))
aggregate_gT.vs["name"] = copy.deepcopy(names)
hubs_namesT, hub_scoreT = order_centrality(np.array(aggregate_gT.hub_score(weights=aggregate_gT.es["weight"])))
auth_namesT, auth_scoreT = order_centrality(np.array(aggregate_gT.authority_score(weights=aggregate_gT.es["weight"])))
pr_namesT, pr_scoreT = order_centrality(np.array(\
    aggregate_gT.personalized_pagerank(weights=aggregate_gT.es["weight"])))
save_centrality(year, "AggregateT_hub", "Hub score" , hubs_namesT, hub_scoreT)
save_centrality(year, "AggregateT_auth", "Authority score" , auth_namesT, auth_scoreT)
save_centrality(year, "AggregateT_PageRank", "PageRank score" , pr_namesT, pr_scoreT)
| [
"huangchengyu16@gmail.com"
] | huangchengyu16@gmail.com |
b30388f626ab8501ab13c025023c2cf4d5f0ef61 | 0a0b31bed500d0714daf997b2d57b1eca4160672 | /2.py | 791fa0fc73a941e5e876687f63bb7847fa4fb74b | [] | no_license | jiapinjiea/test_project | 4ed191fc54f47624bbb6a799813c3828251ffc59 | 171a818ce99d354de4805ed24dfc4d4f20013f0b | refs/heads/master | 2020-05-02T01:51:41.003430 | 2019-03-26T03:48:29 | 2019-03-26T03:48:29 | 177,693,284 | 0 | 1 | null | 2019-03-26T03:48:30 | 2019-03-26T01:35:30 | Python | UTF-8 | Python | false | false | 41 | py | #!/usr/bin/python
print ("hello pyhhon")
| [
"xiaoqicn@163.com"
] | xiaoqicn@163.com |
3c53e42d5a2371b1683e62b91621f013f2474ebd | 7e50b94379132a4156fd693bc73d640ff6752ed9 | /tests/conftest.py | 6981e1f250018bce62a66937c9462a5ed171ebab | [
"MIT"
] | permissive | Pylons/plaster_pastedeploy | 145ac4c5310babf78ea7a0f7ad0639cc1b3f8a33 | c0a146cdfac61781057ecaaa1b7938ef53dae9af | refs/heads/main | 2023-06-12T04:08:37.382145 | 2023-01-03T02:44:28 | 2023-01-03T02:44:28 | 60,292,293 | 7 | 8 | MIT | 2023-09-09T04:19:56 | 2016-06-02T19:40:32 | Python | UTF-8 | Python | false | false | 515 | py | import os.path
import sys
import pkg_resources
import pytest
@pytest.fixture(scope="session")
def fake_packages():
    """Expose the bundled FakeApp distribution for the whole test session.

    Fully monkeypatching pkg_resources has proven difficult, so instead the
    fake package directory is installed onto sys.path (and into the
    pkg_resources working set) for the duration of the suite.
    """
    fake_app_dir = os.path.join(
        os.path.dirname(__file__), "fake_packages", "FakeApp"
    )
    sys.path.insert(0, fake_app_dir)
    pkg_resources.working_set.add_entry(fake_app_dir)
| [
"michael@merickel.org"
] | michael@merickel.org |
9a0760a05815cba73bcdc18ed1a4efc2c7ce1927 | b4155a5883522546d1bb50ac6f7af3e81f674d4c | /recaman.py | e922f827e6c93d3e6f310f6f94bc2c8202f04443 | [] | no_license | isennkubilay/iterators | 07f71c26b747e2f4e53afc2d2e09727e4afbe96b | 4114197277b181d3edad08507f2cedceb8ed2956 | refs/heads/master | 2023-02-19T08:31:28.462595 | 2021-01-11T10:47:35 | 2021-01-11T10:47:35 | 328,461,735 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 259 | py | from itertools import count
def recaman():
    """Yield Recaman's sequence indefinitely.

    Starting from 0, step n goes backwards (a - n) when that value is
    non-negative and not yet seen, otherwise forwards (a + n).
    """
    visited = set()
    current = 0
    step = 0
    while True:
        yield current
        visited.add(current)
        step += 1
        candidate = current - step
        if candidate < 0 or candidate in visited:
            candidate = current + step
        current = candidate
"isen.kubilay@gmail.com"
] | isen.kubilay@gmail.com |
14c29cd5be9f391d57dffd9b957cf499b483dfd1 | 41a240365e242785bbdeaac9a75da03c8e200016 | /python_tools/src/multi.py | 3f48734103453be360770a1cbd1d3f858a05878d | [] | no_license | DHolmanCoding/python-tools | cff1e7bb9f16df803aa157c11944dc5dbfd4f839 | a65da40d801094ecfbe83b47e5e4fb1d1c16eba7 | refs/heads/master | 2023-01-23T02:44:48.509882 | 2020-12-13T00:19:43 | 2020-12-13T00:19:43 | 314,902,941 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 503 | py | import os
from multiprocessing.pool import ThreadPool
from multiprocessing.pool import Pool as ProcessPool
from enum import Enum
class MultiType(Enum):
    """Maps a concurrency flavour to its multiprocessing pool class."""
    # Each member's value is the Pool class itself, so callers can
    # instantiate it via MultiType.<member>.value(processes=...).
    threading = ThreadPool
    processing = ProcessPool
def multiplex(function, data, multi_type: MultiType):
    """Map *function* over *data* on a pool of cpu_count workers.

    Returns an iterator over the truthy results only.
    """
    with multi_type.value(processes=os.cpu_count()) as pool:
        mapped = pool.map(function, data)
    return filter(None, mapped)
if __name__ == "__main__":
    # Demo: show the Pool class backing the "processing" member.
    x = MultiType.processing
    print(x.value)
| [
"DouglasHolman9@gmail.com"
] | DouglasHolman9@gmail.com |
9840214308b8f845fa25516ca9daa12b472d91d5 | 944ea7e9f80ff61c3d317d7c17f96d2e59c9b03e | /src/Main/FJSP_FolderReader_Model_Data_Process_CPE.py | de4f6d67b59e22e50642f737b0d5446b8d96715c | [] | no_license | bholland/NLP_Platform_Installer | 17995e03d27f6b15557a11159ce73a8010141eb0 | a24ece0076913b91725a0988e9b23367ce865bfc | refs/heads/master | 2020-06-22T04:43:53.569668 | 2019-08-01T21:15:53 | 2019-08-01T21:15:53 | 197,636,114 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 12,428 | py | from string import Template
from pathlib import Path
FJSP_FolderReader_Model_Data_Process_CPE = """<?xml version="1.0" encoding="UTF-8"?>
<collectionReaderDescription xmlns="http://uima.apache.org/resourceSpecifier">
<frameworkImplementation>org.apache.uima.java</frameworkImplementation>
<implementationName>collection_readers.FolderReader</implementationName>
<processingResourceMetaData>
<name>FolderReader</name>
<description>This object reads through files and folders and selects all files that match the search criteria. It then loads the appropriate file reader object and populates the CAS.
Please note that all header parameters are case senseative. ID is not the same as id or Id. I might fix this in future revisions but for now, we expect that the CSV headers will match whichever information is in the array.
Also, this application will match headers in order. If the user specifies the CsvIdHeaders field as
CsvIdHeaders = ["id", "Id", "ID", "IDENTIFIER"]
and the spreadsheet contains an "IDENTIFIER" and an "id" column, the application marks the "id" column as the id column.</description>
<version>1.0</version>
<vendor/>
<configurationParameters>
<configurationParameter>
<name>BaseFolder</name>
<description>The base folder to walk.</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>IsRecursive</name>
<description>Should the application perform a recursive walk over the base directory or only look in the base folder? Set to "true" to use a recursive walk.</description>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>ReadText</name>
<description>Read txt files where an entry is 1 text string per row.</description>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>ReadPdf</name>
<description>Should the search include pdf documents?</description>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>ReadCsv</name>
<description>Should the search include csv documents?</description>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>ReadHtml</name>
<description>Should the search include html documents?</description>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>ReadWord</name>
<description>Should the search include word documents?</description>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>CsvIdHeaders</name>
<description>This is the set of labels that will represent the id column in csv files. If the user does not provide a list of labels to use for lookup, it will default to "id".</description>
<type>String</type>
<multiValued>true</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>CsvTextHeaders</name>
<description>The list of values that will define text columns in the csv file.</description>
<type>String</type>
<multiValued>true</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>CsvCategoryHeaders</name>
<description>The list of values that will define category columns in the csv file.</description>
<type>String</type>
<multiValued>true</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>DataType</name>
<description>This specifies how the applciation should insert the data.
DataType = 0: Document data/data to categorize
DataType = 1: Category data/model data
DataType = 2: Both for a full many to many mapping.
The default is 0.</description>
<type>Integer</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>DatabaseServer</name>
<description>The database server to connect to.</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>Database</name>
<description>Database to connect to on the server.</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>DatabaseUserName</name>
<description>The username for the connecting user</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>DatabasePassword</name>
<description>Password for the associated database user. Please note that this is clearcase in plain text. There might be additional options to connect to a database, but for now, localhost is the database to connect to.</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>DatabasePort</name>
<description>This is the port to atempt to connect to. If this is not provided, it will atempt to connect to the default port based on the type provided.</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>DatabaseType</name>
<description>This is the database type we are trying to connect to. This is required if the DatabasePort value is not assigned. The port will be assigned the default value for the database type.</description>
<type>String</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>LoggingUserId</name>
<description>This is the logging user that will run this application.</description>
<type>Integer</type>
<multiValued>false</multiValued>
<mandatory>false</mandatory>
</configurationParameter>
<configurationParameter>
<name>UseJobQueue</name>
<description>Sets if this process should use the job queue
0: disable the job queue
1: insert document
2: process documents</description>
<type>Integer</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
<configurationParameter>
<name>CleanData</name>
<type>Boolean</type>
<multiValued>false</multiValued>
<mandatory>true</mandatory>
</configurationParameter>
</configurationParameters>
<configurationParameterSettings>
<nameValuePair>
<name>BaseFolder</name>
<value>
<string>$BASE_MODEL_FOLDER</string>
</value>
</nameValuePair>
<nameValuePair>
<name>IsRecursive</name>
<value>
<boolean>true</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>ReadCsv</name>
<value>
<boolean>true</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>ReadPdf</name>
<value>
<boolean>false</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>ReadHtml</name>
<value>
<boolean>false</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>ReadWord</name>
<value>
<boolean>false</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>CsvIdHeaders</name>
<value>
<array>
<string>IDENTIFIER</string>
<string>id</string>
<string>Id</string>
<string>ID</string>
</array>
</value>
</nameValuePair>
<nameValuePair>
<name>CsvTextHeaders</name>
<value>
<array>
<string>text</string>
<string>Text</string>
<string>TEXT</string>
<string>SEARCH FIELD</string>
<string>charge</string>
</array>
</value>
</nameValuePair>
<nameValuePair>
<name>DataType</name>
<value>
<integer>1</integer>
</value>
</nameValuePair>
<nameValuePair>
<name>DatabaseServer</name>
<value>
<string>$DATABASE_SERVER</string>
</value>
</nameValuePair>
<nameValuePair>
<name>DatabaseUserName</name>
<value>
<string>$DATABASE_USER</string>
</value>
</nameValuePair>
<nameValuePair>
<name>Database</name>
<value>
<string>$DATABASE</string>
</value>
</nameValuePair>
<nameValuePair>
<name>DatabasePassword</name>
<value>
<string>$DATABASE_PASSWORD</string>
</value>
</nameValuePair>
<nameValuePair>
<name>DatabasePort</name>
<value>
<string>$DATABASE_PORT</string>
</value>
</nameValuePair>
<nameValuePair>
<name>ReadText</name>
<value>
<boolean>true</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>UseJobQueue</name>
<value>
<integer>$JOB_QUEUE_VALUE</integer>
</value>
</nameValuePair>
<nameValuePair>
<name>CleanData</name>
<value>
<boolean>true</boolean>
</value>
</nameValuePair>
<nameValuePair>
<name>CsvCategoryHeaders</name>
<value>
<array>
<string>ncic_code</string>
</array>
</value>
</nameValuePair>
</configurationParameterSettings>
<typeSystemDescription>
<imports>
<import location="../../objects/DatabaseConnection.xml"/>
<import location="../../objects/UnprocessedText.xml"/>
</imports>
</typeSystemDescription>
<typePriorities/>
<fsIndexCollection/>
<capabilities>
<capability>
<inputs/>
<outputs>
<type allAnnotatorFeatures="true">objects.UnprocessedText</type>
<type allAnnotatorFeatures="true">objects.DatabaseConnection</type>
</outputs>
<languagesSupported/>
</capability>
</capabilities>
<operationalProperties>
<modifiesCas>true</modifiesCas>
<multipleDeploymentAllowed>false</multipleDeploymentAllowed>
<outputsNewCASes>true</outputsNewCASes>
</operationalProperties>
</processingResourceMetaData>
<resourceManagerConfiguration/>
</collectionReaderDescription>
"""
def generate_folder_reader_model_data_process_cpe(output_file, base_model_folder, database_server, database, database_user, database_password, database_port, use_job_queue):
    """Render the FolderReader CPE descriptor XML and write it to disk.

    output_file: a pathlib object the rendered XML is written to.
    base_model_folder / database_*: values substituted into the template.
    use_job_queue: when True the descriptor uses job-queue mode "2"
    (process documents); otherwise "0" (queue disabled).
    """
    # ``== True`` (not truthiness) kept: any non-True value maps to "0".
    job_queue_value = "2" if use_job_queue == True else "0"
    rendered = Template(FJSP_FolderReader_Model_Data_Process_CPE).substitute(
        DATABASE_SERVER=database_server,
        DATABASE_USER=database_user,
        DATABASE=database,
        DATABASE_PASSWORD=database_password,
        DATABASE_PORT=database_port,
        BASE_MODEL_FOLDER=base_model_folder,
        JOB_QUEUE_VALUE=job_queue_value,
    )
    with output_file.open(mode="w") as out_file:
        out_file.write(rendered)
| [
"ben_holland@abtassoc.com"
] | ben_holland@abtassoc.com |
789d37118fd4b3fcb65325a1552969fa73b32b45 | 964408953a7caf5f3f76f3e2f8539617ad2cdb1c | /config.py | ceff9672ffe611a83b45ef26ca3cdb1489a77b6c | [] | no_license | HS-1/udacityProj3Web | e94b0c994c9de0c46a9ef08f50a36c6dcad1e79f | 194610ef097d6ad092e34efec9b2ec16bdaeebfd | refs/heads/main | 2023-06-13T07:32:27.517521 | 2021-07-03T10:55:14 | 2021-07-03T10:55:14 | 381,586,152 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,128 | py | import os
app_dir = os.path.abspath(os.path.dirname(__file__))
class BaseConfig:
    """Shared TechConf settings; subclasses only toggle DEBUG."""
    DEBUG = True
    POSTGRES_URL="hsproj3dbserver.postgres.database.azure.com" #TODO: Update value
    POSTGRES_USER="serverAdmin@hsproj3dbserver" #TODO: Update value
    POSTGRES_PW="P@ssword" #TODO: Update value
    POSTGRES_DB="techconfdb" #TODO: Update value
    DB_URL = 'postgresql://{user}:{pw}@{url}/{db}'.format(user=POSTGRES_USER,pw=POSTGRES_PW,url=POSTGRES_URL,db=POSTGRES_DB)
    # Environment override wins; otherwise fall back to the URL built above.
    SQLALCHEMY_DATABASE_URI = os.getenv('SQLALCHEMY_DATABASE_URI') or DB_URL
    CONFERENCE_ID = 1
    # NOTE(review): secrets are hard-coded here; consider moving them to
    # environment variables or a secret store.
    SECRET_KEY = 'LWd2tzlprdGHCIPHTd4tp5SBFgDszm'
    SERVICE_BUS_CONNECTION_STRING ='Endpoint=sb://hsproj3servbus.servicebus.windows.net/;SharedAccessKeyName=RootManageSharedAccessKey;SharedAccessKey=QtzwV6qquhijzlp9MkyKKOF14tRRz1V5bfeov8/9DaU=' #TODO: Update value
    SERVICE_BUS_QUEUE_NAME ='notificationqueue'
    # Bug fix: this previously used ':' (a bare annotation), so the
    # attribute was never actually defined on the class.
    ADMIN_EMAIL_ADDRESS = 'info@techconf.com'
    SENDGRID_API_KEY = '' #Configuration not required, required SendGrid Account
class DevelopmentConfig(BaseConfig):
    # Development profile: debug mode on (everything else inherited).
    DEBUG = True
class ProductionConfig(BaseConfig):
    # Production profile: debug must stay off.
    DEBUG = False
| [
"noreply@github.com"
] | noreply@github.com |
b4d24569e6cdaf40b25de41df54651caa9e68e90 | 69b66781ed31d316df76a7cc2e9a3bf2d5bf6aee | /filed_payments-main/application/services/payment/external.py | b7bd380f8a26ad6a777900a78eb326319f0daf2f | [] | no_license | Kiran2814/Filed_Payments | df27d7c2faa36b6c210d00b00ea3b8f7b327c8d6 | 594c45176ae136d6f93554c30ebeffea14eda6ee | refs/heads/master | 2023-03-04T16:43:04.483375 | 2021-02-03T05:10:22 | 2021-02-03T05:10:22 | 335,382,794 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,060 | py | from application.services.payment import Card
class BasePaymentGateway:
    """Common behaviour shared by the concrete payment gateways.

    ``repeat`` is the number of extra attempts ``pay`` makes after the
    first failure; it is decremented in place across retries.
    """

    def __init__(self, repeat=0):
        self.repeat = repeat
        self.gateway = None

    def __repr__(self):
        return "<BasePaymentGateway>"

    def connect(self, gateway=None, details=None):
        """Return True when a gateway is given and the details authenticate."""
        return gateway is not None and self.authenticate(details)

    def authenticate(self, details=None):
        """A request authenticates whenever any details are supplied."""
        return details is not None

    def pay(self, amount, user_details=None, gateway=None):
        """Attempt the payment, retrying up to ``self.repeat`` extra times.

        Returns True on the first successful connection, False otherwise.
        Note: ``self.repeat`` is consumed (decremented) by failed attempts.
        """
        if gateway is None:
            gateway = self.gateway
        while self.repeat + 1 > 0:
            if self.connect(gateway, user_details):
                print("payment of {} in gateway {} sucessful".format(amount, self.gateway))
                return True
            self.repeat -= 1
        return False
class PremiumBasePaymentGateway(BasePaymentGateway):
    """High-value gateway: retries up to three extra times on failure."""

    def __init__(self, repeat=3):
        super().__init__(repeat)
        # NOTE(review): "Gatway" spelling preserved byte-for-byte --
        # downstream code may match on this exact label.
        self.gateway = "PremiumBasePaymentGatway"

    def __repr__(self):
        return "<PremiumBasePaymentGateway>"
class ExpensiveBasePaymentGateway(BasePaymentGateway):
    """Mid-range gateway: a single extra retry after a failed attempt."""

    def __init__(self, repeat=1):
        super().__init__(repeat)
        self.gateway = "ExpensiveBasePaymentGateway"

    def __repr__(self):
        return "<ExpensiveBasePaymentGateway>"
class CheapBasePaymentGateway(BasePaymentGateway):
    """Low-value gateway: a single attempt, no retries."""

    def __init__(self, repeat=0):
        super().__init__(repeat)
        self.gateway = "CheapBasePaymentGateway"

    def __repr__(self):
        return "<CheapBasePaymentGateway>"
class ExternalPayment:
    """Routes a payment to a gateway tier chosen by the amount."""

    def __init__(self, amount, card_details=None):
        self.amount = amount
        self.card_details = card_details

    def make_payment(self):
        """Pick a gateway by amount and attempt the payment.

        Tiers: <= 20 cheap, 20-500 expensive, >= 500 premium.
        Returns True on success, False on failure or any processing error.
        """
        try:
            if self.amount <= 20:
                payment_mode = CheapBasePaymentGateway()
            elif 20 < self.amount < 500:
                payment_mode = ExpensiveBasePaymentGateway()
            elif self.amount >= 500:
                payment_mode = PremiumBasePaymentGateway()
            else:
                # Unreachable for ordinary numbers; guards odd values (NaN).
                return False
            return payment_mode.pay(self.amount, self.card_details)
        except Exception:
            # Narrowed from a bare ``except`` so SystemExit and
            # KeyboardInterrupt are no longer swallowed; any gateway
            # error still maps to a failed payment.
            return False
| [
"rajputkiran1410@gmail.com"
] | rajputkiran1410@gmail.com |
911067f08b17feb6bdfe3f68e6a758165fe00ec3 | 92126b8dcdf8b305abfbf31f3d2a7d8b8ae0daa5 | /nn_transfer/test/architectures/simplenet.py | 1444a04c10707c0ed285d321eea0be05fbf06368 | [
"MIT"
] | permissive | dantodor/nn-transfer | 52f943d71aa915b2c3c31ada39d551b8775e82d2 | 306f548ebb92ff6824c030352e5fe79fed8d1f55 | refs/heads/master | 2021-06-28T06:04:15.400892 | 2017-09-18T04:32:20 | 2017-09-18T04:32:20 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,119 | py | import torch.nn as nn
import torch.nn.functional as F
import keras
from keras import backend as K
from keras.models import Sequential
from keras.layers import Dense, Flatten
from keras.layers import Conv2D, MaxPooling2D
K.set_image_data_format('channels_first')
class SimpleNetPytorch(nn.Module):
    """Minimal LeNet-style net: one 5x5 conv, 2x2 max-pool, linear head."""

    def __init__(self):
        super().__init__()
        # 1 input channel -> 6 feature maps; a 32x32 input yields 28x28
        # maps, halved to 14x14 by the pooling step in forward().
        self.conv1 = nn.Conv2d(1, 6, 5)
        self.fc1 = nn.Linear(6 * 14 * 14, 10)

    def forward(self, x):
        x = F.max_pool2d(F.relu(self.conv1(x)), 2)
        return self.fc1(x.view(x.size(0), -1))
def simplenet_keras():
    """Keras twin of SimpleNetPytorch: conv1 -> max-pool -> flatten -> fc1."""
    model = Sequential([
        Conv2D(6, kernel_size=(5, 5), activation='relu',
               input_shape=(1, 32, 32), name='conv1'),
        MaxPooling2D(pool_size=(2, 2)),
        Flatten(),
        Dense(10, activation=None, name='fc1'),
    ])
    model.compile(loss=keras.losses.categorical_crossentropy,
                  optimizer=keras.optimizers.SGD())
    return model
| [
"me@guido.io"
] | me@guido.io |
8cae790bc88676a3b4ec3c1aef59e1a0ab4e5ecb | 50cf7a64b16ae511b4ce331d7f58f76117daedf4 | /codigos/machineLearning/KNNpandas.py | fd00598be189c7babae07e355e519441cd6edbc8 | [] | no_license | rachelreuters/cursoMachineLearningPython | 9870c9e8535b016bd7d2ad5ab82ec8495e0c6233 | 521b67e8c7c95209385ee8f96681421781ba4787 | refs/heads/master | 2021-06-21T22:31:05.203473 | 2017-07-16T20:38:25 | 2017-07-16T20:38:25 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 581 | py | import pandas as pd
import numpy as np
from sklearn.neighbors import KNeighborsClassifier
# Load the labelled training and evaluation sets.
train = pd.read_csv('train.csv')
test = pd.read_csv('test.csv')
# Feature columns and the target column.
cols = ['shoe size','height']
cols2 = ['class']
# DataFrame.as_matrix() was removed in pandas 1.0; selecting the columns
# and calling to_numpy() returns the same ndarray.
x_train = train[cols].to_numpy()
y_train = train[cols2].to_numpy()
x_test = test[cols].to_numpy()
y_test = test[cols2].to_numpy()
# weights='distance': nearer neighbours get a larger weight in the vote.
knn = KNeighborsClassifier(n_neighbors=3, weights='distance')
knn.fit(x_train, y_train.ravel())
output = knn.predict(x_test)
# Mean accuracy on the held-out set.
perc_acertos = knn.score(x_test, y_test)
print(perc_acertos)
"belchel@hotmail.com"
] | belchel@hotmail.com |
b86779e76407172f1c69c9bf41d4f6dd127dd9c5 | e4166b4467af98fa940d09be3a43c36b5702dd6f | /Train.py | c150bb020bc1a262fda3531c7aa0b902ba7fbcf6 | [] | no_license | parvathy22/Traffic-Sign-Detection | ead0621bfdb11a4061267177ca08491928c31151 | 8074c737ea61bb34d2fc916088ee107fe6ba1ea7 | refs/heads/main | 2023-06-02T12:35:27.700061 | 2021-06-26T07:04:49 | 2021-06-26T07:04:49 | 380,433,971 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 3,098 | py | from tensorflow.keras.models import Sequential
from tensorflow.keras.layers import Conv2D
from tensorflow.keras.layers import MaxPooling2D
from tensorflow.keras.layers import Activation
from tensorflow.keras.layers import Flatten
from tensorflow.keras.layers import Dense
from tensorflow.keras.layers import Dropout
from tensorflow.keras.preprocessing.image import ImageDataGenerator
from tensorflow.keras.optimizers import Adam
from sklearn.model_selection import train_test_split
from tensorflow.keras.utils import to_categorical
from tensorflow.keras.models import model_from_json
from tensorflow.keras.preprocessing.image import img_to_array
from imutils import paths
import numpy as np
import random
import cv2
import os
##Creating a CNN Model
# Three conv blocks (conv -> relu -> 2x2 max-pool -> dropout), then a
# dense layer and a 43-way softmax (one class per traffic-sign label).
model = Sequential()
inputShape = (32, 32,3)
##First Convolution Layer
model.add(Conv2D(32, (5, 5), padding="same",input_shape=inputShape))
model.add(Activation("relu"))
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(rate=0.25))
##Second Convolution Layer
model.add(Conv2D(32, (5, 5), padding="same"))
model.add(Activation("relu"))
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(rate=0.25))
##Third Convolution Layer
model.add(Conv2D(64, (5, 5), padding="same"))
model.add(Activation("relu"))
model.add(MaxPooling2D(pool_size=(2, 2), strides=(2, 2)))
model.add(Dropout(rate=0.25))
##flattening the output
model.add(Flatten())
##adding Dense layer of 500 nodes
model.add(Dense(500))
model.add(Activation("relu"))
##softmax classifier
model.add(Dense(43))
model.add(Activation("softmax"))
model.summary()
# Load every image under dataset/<label>/..., resized to 32x32; the
# class label is taken from the parent directory name.
data = []
labels = []
print("[INFO] loading images...")
img_dir=sorted(list(paths.list_images("dataset")))
random.shuffle(img_dir)
for i in img_dir:
    img = cv2.imread(i)
    img=cv2.resize(img, (32,32))
    img = img_to_array(img)
    data.append(img)
    lab=i.split(os.path.sep)[-2]
    labels.append(lab)
print(len(data))
print(len(labels))
print("[INFO] splitting datas for training...")
# partition the data into training and testing splits using 75% of
# the data for training and the remaining 25% for testing
(trainX, testX, trainY, testY) = train_test_split(data,labels, test_size=0.25, random_state=42)
# convert the labels from integers to one-hot vectors (43 classes)
trainY = to_categorical(trainY, num_classes=43)
testY = to_categorical(testY, num_classes=43)
print("[INFO] Training Started...")
print(len(trainY))
print(len(trainX))
print(np.array(trainY).shape)
print(np.array(trainX).shape)
model.compile(loss='categorical_crossentropy', metrics=['accuracy'], optimizer='adam')
# training the model for 20 epochs
model.fit(np.array(trainX), np.array(trainY), batch_size=32, epochs=20, validation_data=(np.array(testX), np.array(testY)))
# serialize model to JSON
model_json = model.to_json()
with open("ch_model.json", "w") as json_file:
    json_file.write(model_json)
# serialize weights to HDF5
model.save_weights("ch_model.h5")
print("[INFO] Saved model to disk")
| [
"noreply@github.com"
] | noreply@github.com |
cab6ce569e9667c2e8b5c3775c3cb0b9822e8160 | 530e37341af91071ffde01f6f84d39ec039272eb | /fetch.py | cf90eed1fcfb38c6900f528048bdad4a0bf9f79c | [
"MIT"
] | permissive | pauloadaoag/MMDA | da29e7187fd2406e005b9e5babfa881b577abd4d | 5fd69badb2a95d63c57a0eaf9748fc7052e7bbfe | refs/heads/master | 2020-05-29T12:45:24.530782 | 2015-06-28T07:22:09 | 2015-06-28T07:22:09 | 35,107,812 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 4,087 | py | #!/usr/bin/python
import json
import urllib2
import time
import collections
import ConfigParser
import MySQLdb as db
import os
# Read MySQL connection settings from config.cfg next to this script.
config = ConfigParser.RawConfigParser()
config_filename = os.path.join(os.path.dirname(__file__), 'config.cfg')
config.read(config_filename)
db_host = config.get('mysql', 'host')
db_user = config.get('mysql', 'uname')
db_pass = config.get('mysql', 'pw')
db_name = config.get('mysql', 'db_name')
# Wall-clock start of this sampling run (seconds since the epoch).
sample_time = int(time.time())
con = db.connect(db_host, db_user, db_pass, db_name)
cursor = con.cursor()
def save_run_to_db(db_cursor, sampling_time):
    """Insert a new sampling-run row and return its generated id."""
    insert_sql = """
    INSERT INTO sampling_runs (sample_time) VALUES (%s)
    """
    db_cursor.execute(insert_sql, (sampling_time,))
    return db_cursor.lastrowid
def update_run_with_end_time(db_cursor, run_id, end_time):
    """Stamp the finish time onto an existing sampling-run row."""
    update_sql = """
    UPDATE sampling_runs set end_time = %s where id = %s
    """
    db_cursor.execute(update_sql, (end_time, run_id))
# Open a run row up front; every sample below is tagged with its id.
run_id = save_run_to_db(cursor, sample_time)
# One traffic reading for one road segment in one direction.
TrafficSample = collections.namedtuple('TrafficSample', [
    'segment_id',
    'direction',
    'road_status',
    'update_time',
    'aa',
    'alert_counts',
    'ac',
    'alert_text'])
# Lookup tables mirroring the MMDA feed's numeric codes.
ROADS = {
    1: "EDSA",
    2: "Q.AVE",
    3: "ESPANA",
    4: "C5",
    5: "ROXAS BLVD",
    6: "SLEX",
    7: "COMMONWEALTH",
    8: "ORTIGAS",
    9: "MARCOS HIWAY"
}
ROAD_STATUSES = {
    0: "NO INFO",
    1: "LIGHT",
    2: "LIGHT-MED",
    3: "MEDIUM",
    4: "MEDIUM-HEAVY",
    5: "HEAVY"
}
NODE_TYPES = {
    0: 'TERMINATION',
    1: 'ROAD',
    2: 'INTERSECTION'
}
# Interaksyon/MMDA JSON feeds polled by this script.
TRAFFIC_ENDPOINT = 'http://mmdatraffic.interaksyon.com/data.traffic.status.php'
ADVISORIES_ENDPOINT = 'http://mmdatraffic.interaksyon.com/data.traffic.advisories.php'
# Fetch the current traffic statuses and advisories from the MMDA feed.
response = urllib2.urlopen(TRAFFIC_ENDPOINT)
traffic_points = json.load(response)
response = urllib2.urlopen(ADVISORIES_ENDPOINT)
advisories_list = json.load(response)
# Group advisory texts per segment/direction:
#   {segment_id: {'NB': [texts...], 'SB': [texts...]}}
advisories = {}
for advisory in advisories_list:
    segment_id = advisory[1]
    info = advisory[2]
    direction = 'NB'
    text = ''
    # NOTE(review): the feed apparently encodes southbound advisories as a
    # dict and northbound ones as a nested list -- confirm against the API.
    if type(info) is dict:
        direction = 'SB'
        text = str(info)
    elif type(info) is list:
        direction = 'NB'
        text = str(info[0][0])
    if segment_id not in advisories:
        advisories[segment_id] = { 'NB': [], 'SB': []}
    advisories[segment_id][direction].append(text)
def mmda_time_to_timestamp(mmdatime):
a = time.strptime(mmdatime, "%Y%m%d%H%M%S")
return time.mktime(a)
def save_to_db(db_cursor, traffic_sample, run_id):
    """Persist one TrafficSample row, converting its MMDA time string first."""
    insert_sql = """
    INSERT INTO traffic_samples
    (segment_id, direction, road_status, update_time, run_id, aa, alert_counts, ac, alert_text)
    VALUES (%s, %s, %s, %s, %s, %s, %s, %s, %s)
    """
    row = (
        traffic_sample.segment_id,
        traffic_sample.direction,
        traffic_sample.road_status,
        mmda_time_to_timestamp(traffic_sample.update_time),
        run_id,
        traffic_sample.aa,
        traffic_sample.alert_counts,
        traffic_sample.ac,
        traffic_sample.alert_text,
    )
    db_cursor.execute(insert_sql, row)
# Each feed entry is [node_info, northbound_status, southbound_status].
for point in traffic_points:
    node_info = point[0]
    node_north = point[1]
    node_south = point[2]
    (road_id, node_id, intersecting_node_id, node_type_arr, is_service_node, related_node_id, is_major_intersection) = node_info
    (road_status, update_time, aa, alert_counts, ac) = node_north
    north_alert_text = ''
    south_alert_text = ''
    # Attach any advisories collected above for this segment.
    if node_id in advisories:
        alerts = advisories[node_id]
        if len(alerts['NB']) > 0:
            north_alert_text = " | ".join(alerts['NB'])
        if len(alerts['SB']) > 0:
            south_alert_text = " | ".join(alerts['SB'])
    # Store one row per direction ('N' then 'S').
    n_sample = TrafficSample(node_id, 'N', road_status, update_time, aa, alert_counts, ac, north_alert_text)
    save_to_db(cursor, n_sample, run_id)
    (road_status, update_time, aa, alert_counts, ac) = node_south
    s_sample = TrafficSample(node_id, 'S', road_status, update_time, aa, alert_counts, ac, south_alert_text)
    save_to_db(cursor, s_sample, run_id)
# Close out the run and flush everything in one transaction.
end_time = int(time.time())
update_run_with_end_time(cursor, run_id, end_time)
con.commit()
con.close()
"paulo.adaoag@gmail.com"
] | paulo.adaoag@gmail.com |
af23bfe2581b749cad1c35dc75d23d8ece968b41 | e756bfb5290cd336d20f0cf0cde04eec2a35caae | /src/actions/custom/utils/pick_card.py | 9847ba74cd50d6deef5415fb9cb4dde04b52eee6 | [
"MIT"
] | permissive | StetHD/Lonabot | ff1b9113f1e8d6618a271a17752e86679e0c6274 | 615ce2c176607d6da71c84d38644d8aaaf0d3a0b | refs/heads/master | 2021-01-22T10:40:55.989293 | 2016-08-24T10:17:29 | 2016-08-24T10:17:29 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,101 | py | from actions.action_base import ActionBase
from random import choice, randint
class PickCardAction(ActionBase):
    """Picks one or more distinct random cards from a standard 52-card deck."""

    def __init__(self):
        super().__init__(name="PICK A CARD FROM THE DECK",
                         keywords=['pick a card (INT)',
                                   'pick (INT) cards?'])

    def act(self, data):
        """Reply with `times` unique random cards, laid out as a table if > 4."""
        times = data.get_match_int(1, fallback=1)
        # The deck generated below has 13 ranks (2..ace) x 4 suits = 52 unique
        # cards, so more than 52 can never be picked without repeats.
        # (The previous limit and messages incorrectly said 48.)
        if times > 52:
            self.send_msg(data,
                          "there are 52 cards in a deck (no joker here), "
                          "how am i supposed to pick {}?!".format(times))
            return

        if times == 52:
            self.send_msg(data, "there are 52 cards in the deck, BUT, if that makes you happy:")

        # Face-card values map to their names; 2..10 keep the number itself.
        face_names = {11: 'jack', 12: 'queen', 13: 'king', 14: 'ace'}

        # Rejection-sample until we hold `times` distinct cards.
        result = []
        while len(result) < times:
            value = randint(2, 14)
            value = face_names.get(value, value)
            suit = choice(['♠️', '♣️', '♥️', '♦️'])
            current = '{}{}'.format(suit, value)
            if current not in result:
                result.append(current)

        if times > 4:  # If too many times, let's make a pretty table!
            row_size = 4
            spacing = 7
            msg = '```\n'
            for i in range(0, times, row_size):
                # Join the results from i..i+row_size with a '.'
                msg += '.'.join(str(result[j]).ljust(spacing, '.')
                                for j in range(i, i + row_size) if j < times)
                msg += '\n'
            msg += '```'
            self.send_msg(data, msg, markdown=True)
        else:  # Else just join multiline
            self.send_msg(data, '\n'.join(result), markdown=True)
| [
"totufals@hotmail.com"
] | totufals@hotmail.com |
c8b9cc3afa07f5a836da6e16b24aa34987c51fca | 3ea2ea9923ff838add0048f57e3609d48403421c | /code-imports-status.py | 4c457081f6726eb925407e1a276de068bbaa16fe | [] | no_license | ajkavanagh/release-tools | 9a85c2042de01bde13a27f2e0069622d2fd51378 | e7aa78af8f66868f85e5f331516b6fc0bad0f51f | refs/heads/master | 2023-05-11T00:55:27.644836 | 2022-10-18T12:42:24 | 2022-10-18T12:42:24 | 220,310,845 | 0 | 0 | null | 2022-02-24T22:12:41 | 2019-11-07T19:13:51 | Shell | UTF-8 | Python | false | false | 5,740 | py | #!/usr/bin/env python3
import argparse
import glob
import json
import os
import sys
from datetime import datetime, timedelta
from zoneinfo import ZoneInfo

import git
import humanize
import yaml
from launchpadlib.launchpad import Launchpad
__author__ = "Felipe Reyes <felipe.reyes@canonical.com>"
__copyright__ = "Copyright 2022, Canonical Ltd."
__description__ = 'Check the status of code imports in Launchpad.'
# ANSI escape sequences used to colorize the terminal report.
OKGREEN = '\033[92m'
WARNING = '\033[93m'
FAIL = '\033[91m'
ENDC = '\033[0m'
# Single timestamp for the whole run so every age calculation agrees.
NOW = datetime.now(tz=ZoneInfo("UTC") )
# Imports whose last success is older than this are flagged in red.
MAX_IMPORT_AGE = timedelta(days=1)
# Launchpad code-import review states grouped by severity for coloring.
CODE_IMPORT_ERROR_CODES = ['Failed', 'Suspended']
CODE_IMPORT_WARN_CODES = ['Pending Review', 'Invalid']
CODE_IMPORT_OK_CODES = ['Reviewed']
# Local cache for Launchpad data and the cloned git repositories.
cachedir = os.path.expanduser("~/.release-tools/cache")
os.makedirs(cachedir, exist_ok=True)
# NOTE: logs in at import time; anonymous access is read-only.
launchpad = Launchpad.login_anonymously('charmed-openstack release-tools',
                                        'production', cachedir, version='devel')
def setup_options():
    """Build the command-line parser and return the parsed arguments."""
    parser = argparse.ArgumentParser(description=__description__)
    # Table of (flags, keyword options) keeps the argument definitions compact.
    arg_specs = [
        (('-c', '--category'),
         dict(dest='category', metavar='CATEGORY',
              help='Category of charms to check')),
        (('--charm',),
         dict(dest='charms', action='append', metavar='CHARM',
              help='Charm to check')),
        (('-f', '--format'),
         dict(dest='format', default='human', choices=['human', 'json'],
              metavar='FORMAT', help='Output format')),
    ]
    for flags, options in arg_specs:
        parser.add_argument(*flags, **options)
    return parser.parse_args()
def get_lp_repo(project: str):
    """Look up and return the Launchpad git repository for *project*."""
    return launchpad.git_repositories.getByPath(path=project)
def get_repo(repo_dst, upstream_url, mirror_url):
    """Return a git.Repo at *repo_dst* tracking upstream and the LP mirror.

    If *repo_dst* already exists it is refreshed (all remotes fetched, origin
    pulled); otherwise it is cloned from *upstream_url* and a 'mirror' remote
    pointing at *mirror_url* is created and fetched.
    """
    if os.path.isdir(repo_dst):
        git_repo = git.Repo(repo_dst)
        for remote in git_repo.remotes:
            remote.fetch()
        git_repo.remotes.origin.pull()
    else:
        git_repo = git.Repo.clone_from(upstream_url, repo_dst)
        # Use the mirror_url parameter: the previous code referenced an
        # undefined name `repo` (NameError on every fresh clone) and left
        # mirror_url unused.
        mirror_remote = git_repo.create_remote('mirror', mirror_url)
        mirror_remote.fetch()
    return git_repo
def find_missing_commits(git_repo):
    """Map branch name -> one-line git log of commits the mirror lacks."""
    behind = {}
    mirror_refs = git_repo.remotes.mirror.refs
    for upstream_ref in git_repo.remote().refs:
        # 'origin/HEAD' is just an alias that only exists on git's upstream.
        if upstream_ref.name == 'origin/HEAD':
            continue
        branch = upstream_ref.name.split('/', maxsplit=1)[1]
        upstream_sha = upstream_ref.commit.hexsha
        mirrored_sha = mirror_refs[branch].commit.hexsha
        if mirrored_sha != upstream_sha:
            behind[branch] = git_repo.git.log(
                f'{mirrored_sha}..{upstream_sha}', oneline=True)
    return behind
def print_report(output):
    """Pretty-print the import status gathered for each project."""
    for name, project in output.items():
        if not project['code_import_available']:
            print(name, f'{FAIL}no code import found{ENDC}')
            continue

        # Color the review status by its severity group.
        status = project['code_import_review_status']
        if status in CODE_IMPORT_ERROR_CODES:
            status_color = FAIL
        elif status in CODE_IMPORT_WARN_CODES:
            status_color = WARNING
        elif status in CODE_IMPORT_OK_CODES:
            status_color = OKGREEN
        else:
            status_color = ''
        print(name, f"{status_color}{status}{ENDC}", end='')

        # Highlight imports whose last success is older than the threshold.
        last_ok = project['code_import_last_successful']
        age_color = FAIL if NOW - last_ok > MAX_IMPORT_AGE else ''
        age = humanize.naturaltime(last_ok, when=NOW)
        print(f' ({age_color}{age}{ENDC})', project['code_import_web_link'])

        # Show any commits the Launchpad mirror is missing, per branch.
        for branch, log in project['missing_commits'].items():
            print(f'  {branch}:')
            for line in log.split('\n'):
                print(f'    {line}')
def main():
    """Collect and report the Launchpad code-import status of each charm."""
    opts = setup_options()

    if opts.category:
        fpath = f'lp-builder-config/{opts.category}.yaml'
        if not os.path.isfile(fpath):
            # Explicit exit instead of assert: asserts vanish under `python -O`.
            sys.exit(f'No such file or directory: {fpath}')
        lp_builder_files = [fpath]
    else:
        lp_builder_files = glob.glob('lp-builder-config/*.yaml')

    output = {}
    for fname in lp_builder_files:
        with open(fname, 'r') as f:
            lp_builder_config = yaml.safe_load(f)

        for project in lp_builder_config['projects']:
            if opts.charms and project['charmhub'] not in opts.charms:
                # Skip charms that are not in the user-provided filter list.
                continue

            lp_prj_name = project['launchpad']
            repo = get_lp_repo(lp_prj_name)
            try:
                # Accessing repo.code_import inside the try also covers the
                # case where getByPath() returned None (repo not found), which
                # previously raised an uncaught AttributeError.
                code_import = repo.code_import
                output[lp_prj_name] = {
                    'code_import_available': True,
                    'code_import_review_status': code_import.review_status,
                    'code_import_last_successful': code_import.date_last_successful,
                    'code_import_web_link': repo.web_link,
                }
            except AttributeError:
                output[lp_prj_name] = {'code_import_available': False}
                continue

            # Compare the upstream repository against the Launchpad mirror.
            repo_dst = f'{cachedir}/git_repos/{lp_prj_name}'
            git_repo = get_repo(repo_dst,
                                upstream_url=project['repository'],
                                mirror_url=repo.git_https_url)
            output[lp_prj_name]['missing_commits'] = find_missing_commits(git_repo)

    if opts.format == 'json':
        # default=str makes the datetime in code_import_last_successful
        # serializable; a plain json.dumps would raise TypeError on it.
        print(json.dumps(output, default=str))
    else:
        print_report(output)
# Allow running directly: python code-imports-status.py [options]
if __name__ == '__main__':
    main()
| [
"felipe.reyes@canonical.com"
] | felipe.reyes@canonical.com |
ab49e989f2bae37ff7ec31c1ea2d57c21e95e39a | e9bcabc640d02dcd5d46f0ecccdd6c62a822ff2f | /Day_96_100/Day-99.py | fd108addde3ae80e96082a9477d6847612ae167f | [
"MIT"
] | permissive | analuisadev/100-Days-Of-Code | 8ef87aca0a3d21a88f23f3e9951ede8d2a2e7a25 | b1dafabc335cd2c3c9b1cecd50597b42d8959d4a | refs/heads/main | 2023-03-24T14:38:43.127753 | 2021-03-16T17:36:13 | 2021-03-16T17:36:13 | 316,727,335 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 1,836 | py | """"
Parte 5: Criando Colisões
"""
# Imports needed to create the window and handle input/randomness.
import pygame
from pygame.locals import *
from sys import exit
from random import randint

# Initialize all pygame modules.
pygame.init()

# Window dimensions; the red rectangle starts at the center.
width = 640
height = 480
x = width/2
y = height/2

# Random starting position for the blue rectangle (respawned on each collision).
x_blue = randint(40, 600)
y_blue = randint(50, 430)

screen = pygame.display.set_mode((width, height))
pygame.display.set_caption('Game')

# Clock used to cap the frame rate (and therefore the movement speed).
clock = pygame.time.Clock()

# Main game loop.
while True:
    clock.tick(30)  # limit to 30 frames per second
    screen.fill((0, 0, 0))
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            exit()
        # Move the red rectangle one step when a WASD key is pressed.
        if event.type == KEYDOWN:
            if event.key == K_a:
                x = x - 20
            if event.key == K_d:
                x = x + 20
            if event.key == K_w:
                y = y - 20
            if event.key == K_s:
                y = y + 20
    # Keep moving while a key is held down.
    # NOTE(review): a single key press also triggers the KEYDOWN branch above,
    # so one press moves the rectangle twice (40 px) — confirm this is intended.
    if pygame.key.get_pressed()[K_a]:
        x = x - 20
    if pygame.key.get_pressed()[K_d]:
        x = x + 20
    if pygame.key.get_pressed()[K_w]:
        y = y - 20
    if pygame.key.get_pressed()[K_s]:
        y = y + 20

    # Draw both rectangles at their current positions.
    ret_red = pygame.draw.rect(screen, (255, 0, 0), (x, y, 40, 50))
    ret_blue = pygame.draw.rect(screen, (0, 0, 255), (x_blue, y_blue, 40, 50))

    # On collision, respawn the blue rectangle at a new random position.
    if ret_red.colliderect(ret_blue):
        x_blue = randint(40, 600)
        y_blue = randint(50, 430)

    pygame.display.update()
"71856519+analuisadev@users.noreply.github.com"
] | 71856519+analuisadev@users.noreply.github.com |
a40183d91dc5ab741e0f4a91dfb2c05c5b73b66f | ca7aa979e7059467e158830b76673f5b77a0f5a3 | /Python_codes/p02628/s785161467.py | 3f9c154f8635eed3d0cb56f4538868a10a28d93d | [] | no_license | Aasthaengg/IBMdataset | 7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901 | f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8 | refs/heads/main | 2023-04-22T10:22:44.763102 | 2021-05-13T17:27:22 | 2021-05-13T17:27:22 | 367,112,348 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 64 | py | n,k,*p=map(int,open(0).read().split());print(sum(sorted(p)[:k])) | [
"66529651+Aastha2104@users.noreply.github.com"
] | 66529651+Aastha2104@users.noreply.github.com |
ecdf91c2095e7d8ddff72afdfa9208fa7c93d5cd | 911728d1099740b7a6236db4f99df18a11f17fbe | /manage.py | 03a6362786e5c53e0b4c07ea678bce5d396feb9c | [] | no_license | A-deLuna/proyectopa | 2871f7bd9870e5ddc7c84f66cc32810701a3cff4 | 48b11ce37c114a2359635745d213addc3950175c | refs/heads/master | 2021-01-18T02:40:21.496850 | 2015-11-18T04:13:48 | 2015-11-18T04:13:48 | 46,501,565 | 1 | 0 | null | 2015-11-19T15:33:57 | 2015-11-19T15:33:56 | null | UTF-8 | Python | false | false | 253 | py | #!/usr/bin/env python
import os
import sys
# Standard Django entry point: run a management command (runserver,
# migrate, ...) against this project's settings.
if __name__ == "__main__":
    # Point Django at the settings module before its machinery is imported.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "proyectopa.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)
| [
"alonsowilhelmy@hotmail.com"
] | alonsowilhelmy@hotmail.com |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.