blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
69d86fa62f83e5039a98dc34c4e0e97e473f8332
|
fb29cb7c89e13b780a88c1cc6b3321c8c5502660
|
/src/price_bot.py
|
168460e5c0a77f64df1238ffda863355db07c6db
|
[
"MIT"
] |
permissive
|
GrantGyo/price-bots
|
d3e86f23647de6a4e23cb69422c7fe6d6a0d1497
|
9acf60c43602827bff00bd7c52c8843928ad0693
|
refs/heads/main
| 2023-04-29T09:08:24.280291
| 2021-05-07T20:38:15
| 2021-05-07T20:38:15
| 366,849,878
| 0
| 0
|
MIT
| 2021-05-12T20:48:22
| 2021-05-12T20:48:21
| null |
UTF-8
|
Python
| false
| false
| 4,965
|
py
|
import discord
from discord.ext import commands, tasks
import json
import logging
import math
import os
import requests
from web3 import Web3
logging.basicConfig(
# filename="price_bots_log.txt",
# filemode='a',
# format='%(asctime)s,%(msecs)d %(name)s %(levelname)s %(message)s',
# datefmt='%H:%M:%S',
level=logging.INFO
)
UPDATE_INTERVAL_SECONDS = 45
cache = {}
class PriceBot(discord.Client):
    """
    Discord bot that shows a token's USD price as its guild nickname and its
    market cap as the activity status, refreshed every UPDATE_INTERVAL_SECONDS.

    Keyword args consumed from **kwargs:
        coingecko_token_id: token id on CoinGecko (e.g. "bitcoin")
        token_display:      ticker text shown in the nickname (e.g. "BTC")
        token_address:      ERC-20 contract address (optional)
        token_abi:          contract ABI; used together with token_address
        discord_id:         this bot's own member id, used to find itself
    """

    def __init__(self, *args, **kwargs):
        super().__init__(*args, **kwargs)
        self.logger = logging.getLogger("price-bot")
        # Share one HTTP session and one web3 provider across every bot
        # instance in this process (module-level cache).
        if cache.get("session") is None:  # fixed: identity check, not == None
            cache["session"] = requests.Session()
        if cache.get("web3") is None:
            cache["web3"] = Web3(Web3.HTTPProvider(os.getenv("INFURA_URL")))
        self.session = cache.get("session")
        self.coingecko_token_id = kwargs.get("coingecko_token_id")
        self.token_display = kwargs.get("token_display")
        self.token_address = kwargs.get("token_address")
        # .get() already returns None when the key is absent; the old
        # `x if x else None` conditional was redundant.
        self.token_abi = kwargs.get("token_abi")
        if self.token_address and self.token_abi:
            self.web3 = cache.get("web3")
            self.token_contract = self.web3.eth.contract(
                address=self.web3.toChecksumAddress(self.token_address),
                abi=self.token_abi,
            )
        self.discord_id = kwargs.get("discord_id")
        # Prime token data so the first loop tick has something to display.
        self._get_token_data()
        self.update_price.start()

    async def on_ready(self):
        self.logger.info(f"Logged in as {self.user.name} {self.user.id}")

    @tasks.loop(seconds=UPDATE_INTERVAL_SECONDS)
    async def update_price(self):
        """
        Runs every UPDATE_INTERVAL_SECONDS: refresh token data, set the
        activity to the market cap, and rename this bot's own member in every
        guild to "<token> $<price>". Nickname failures are logged and
        reported to a monitoring webhook.
        """
        # first get latest token data
        self._get_token_data()
        activity_string = "mcap=$" + self._get_number_label(
            self.token_data.get("market_cap")
        )
        activity = discord.Activity(
            name=activity_string,
            type=discord.ActivityType.playing,
        )
        await self.change_presence(activity=activity)
        for guild in self.guilds:
            self.logger.info(guild.members)
            for member in guild.members:
                if str(member.id) == self.discord_id:
                    try:
                        await member.edit(
                            nick=f"{self.token_display} $"
                            + str(self.token_data.get("token_price_usd"))
                        )
                    except Exception as e:
                        # fixed wording: "updated" -> "updating"
                        self.logger.error("Error updating nickname")
                        self.logger.error(e)
                        webhook = discord.Webhook.from_url(
                            os.getenv("DISCORD_MONITORING_WEBHOOK_URL"),
                            adapter=discord.RequestsWebhookAdapter(),
                        )
                        embed = discord.Embed(
                            title=f"**{self.token_display} Price Bot Error**",
                            description=f"Error message: {e}"
                        )
                        webhook.send(embed=embed, username="Price Bot Monitoring")

    @update_price.before_loop
    async def before_update_price(self):
        await self.wait_until_ready()  # wait until the bot logs in

    def _get_token_data(self):
        """
        Private function to make call to coingecko to retrieve price and
        market cap for the token and update the token data property.
        """
        response = self.session.get(
            f"https://api.coingecko.com/api/v3/coins/{self.coingecko_token_id}"
        ).content
        token_data = json.loads(response)
        market_data = token_data.get("market_data")
        self.token_data = {
            "token_price_usd": market_data.get("current_price").get("usd"),
            "token_price_btc": market_data.get("current_price").get("btc"),
            "market_cap": market_data.get("market_cap").get("usd"),
        }

    def _get_number_label(self, value) -> str:
        """
        Formats a number in billions, millions, or thousands into a Discord
        name friendly string.

        Args:
            value: numeric value between 0 and 999 billion. (The previous
                ``value: str`` annotation was wrong -- the body calls
                ``int(value)`` on a number returned by the CoinGecko API.)

        Returns:
            str: formatted string, e.g. 1,000,000,000 -> "1B".
        """
        magnitude = abs(int(value))  # hoisted: was recomputed in every branch
        # Nine zeroes for billions
        if magnitude >= 1.0e9:
            return str(round(magnitude / 1.0e9)) + "B"
        # Six zeroes for millions
        elif magnitude >= 1.0e6:
            return str(round(magnitude / 1.0e6)) + "M"
        # Three zeroes for thousands
        elif magnitude >= 1.0e3:
            return str(round(magnitude / 1.0e3)) + "K"
        else:
            return str(magnitude)
|
[
"brooks.o.taylor@vanderbilt.edu"
] |
brooks.o.taylor@vanderbilt.edu
|
1b1ab8d341f276998b7fe7972480130557b03e29
|
9a486a87e028303a551fbd0d1e1b6b650387ea14
|
/theirFTP/dialog.py
|
22451a7c7acef19c44a2afb4c160e39357e40610
|
[] |
no_license
|
shanlihou/pythonFunc
|
7b8e7064fddd4522e492c915c086cc6c5abc6eec
|
646920256551ccd8335446dd4fe11aa4b9916f64
|
refs/heads/master
| 2022-08-24T20:33:12.287464
| 2022-07-21T12:00:10
| 2022-07-21T12:00:10
| 24,311,639
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,125
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
from PyQt4 import QtGui
from PyQt4 import QtCore
class LoginDialog(QtGui.QDialog):
    """Modal login dialog offering Register (name/password) or Visitor mode.

    After construction, ``self.isAccepted`` holds the ``exec_()`` result
    (truthy when the user pressed Ok).
    """

    def __init__(self, parent=None):
        # Name the class explicitly: super(self.__class__, ...) recurses
        # infinitely if this class is ever subclassed.
        super(LoginDialog, self).__init__(parent)
        import os, pwd  # local import: pwd is Unix-only
        self.setFixedSize(400, 250)
        self.nameLabel = QtGui.QLabel('Name:')
        self.passwdLabel = QtGui.QLabel('Password:')
        self.nameEdit = QtGui.QLineEdit()
        self.passwdEdit = QtGui.QLineEdit()
        # Pre-fill the name with the current OS user's login name.
        self.nameEdit.setText(pwd.getpwuid(os.getuid()).pw_name)
        self.passwdEdit.setEchoMode(QtGui.QLineEdit.Password)
        self.buttonBox = QtGui.QDialogButtonBox()
        self.buttonBox.setStandardButtons(QtGui.QDialogButtonBox.Cancel|QtGui.QDialogButtonBox.Ok)
        self.registerRadio = QtGui.QRadioButton('Register')
        self.visitorRadio = QtGui.QRadioButton('Visitor')
        self.registerRadio.setChecked(True)
        self.groupBox = QtGui.QGroupBox('Login')
        self.groupBox.setStyleSheet('''
        QGroupBox
        {
        font-size: 18px;
        font-weight: bold;
        font-family: Monaco
        }
        ''')
        self.layout = QtGui.QGridLayout()
        self.layout.addWidget(self.registerRadio, 0, 0, 2, 1)
        self.layout.addWidget(self.visitorRadio, 1, 0, 3, 1)
        self.layout.addWidget(self.nameLabel, 3, 0, 3, 1)
        self.layout.addWidget(self.nameEdit, 3, 1, 3, 1)
        self.layout.addWidget(self.passwdLabel, 4, 0, 6, 1)
        self.layout.addWidget(self.passwdEdit, 4, 1, 6, 1)
        self.groupBox.setLayout(self.layout)
        self.mainLayout = QtGui.QVBoxLayout()
        self.mainLayout.addWidget(self.groupBox)
        self.mainLayout.addWidget(self.buttonBox)
        self.setLayout(self.mainLayout)
        self.registerRadio.clicked.connect(self.enableEdit)
        self.visitorRadio.clicked.connect(self.disableEdit)
        self.nameEdit.textEdited.connect(self.checkNameEdit)
        self.buttonBox.accepted.connect(self.accept)
        self.buttonBox.rejected.connect(self.reject)
        self.show()
        # Run modally right away; record whether the user accepted.
        self.isAccepted = self.exec_()

    def checkNameEdit(self):
        """Enable Ok only when Register mode has a non-empty name."""
        if not self.nameEdit.text() and not self.visitorRadio.isChecked():
            self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setEnabled(False)
        elif self.nameEdit.text() and self.registerRadio.isChecked():
            self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setEnabled(True)

    def enableEdit(self):
        """Register mode selected: re-enable inputs and refresh Ok state."""
        self.nameEdit.setEnabled(True)
        self.passwdEdit.setEnabled(True)
        self.checkNameEdit()
        # BUG FIX: removed the call to self.checkNameField(), a method that
        # does not exist and raised AttributeError whenever the Register
        # radio button was re-selected.

    def disableEdit(self):
        """Visitor mode selected: disable inputs; Ok is always available."""
        self.nameEdit.setEnabled(False)
        self.passwdEdit.setEnabled(False)
        self.buttonBox.button(QtGui.QDialogButtonBox.Ok).setFocus()
class BaseProgressWidget(QtGui.QWidget):
    """Labelled progress-bar row with a 1px bottom border.

    Subclasses only restyle ``self.progressbar``. Progress is cumulative:
    each ``set_value`` call adds ``len(value)`` to a running total, so the
    widget is fed the transferred chunks, not absolute positions.
    """
    # Signal carrying a received data chunk; connected to set_value so other
    # threads can report progress safely via emit().
    updateProgress = QtCore.pyqtSignal(str)

    def __init__(self, text='', parent=None):
        super(BaseProgressWidget, self).__init__(parent)
        self.setFixedHeight(50)
        self.text = text
        self.progressbar = QtGui.QProgressBar( )
        self.progressbar.setTextVisible(True)
        self.updateProgress.connect(self.set_value)
        # Thin separator line under the row.
        self.bottomBorder = QtGui.QWidget( )
        self.bottomBorder.setStyleSheet("""
        background: palette(shadow);
        """)
        self.bottomBorder.setSizePolicy(QtGui.QSizePolicy(QtGui.QSizePolicy.Preferred, QtGui.QSizePolicy.Fixed))
        self.bottomBorder.setMinimumHeight(1)
        self.label = QtGui.QLabel(self.text)
        self.label.setStyleSheet("""
        font-weight: bold;
        """)
        # Inner layout: label above the bar, with side margins.
        self.layout = QtGui.QVBoxLayout( )
        self.layout.setContentsMargins(10,0,10,0)
        self.layout.addWidget(self.label)
        self.layout.addWidget(self.progressbar)
        # Outer layout adds the bottom border flush with the edges.
        self.mainLayout = QtGui.QVBoxLayout( )
        self.mainLayout.setContentsMargins(0,0,0,0)
        self.mainLayout.addLayout(self.layout)
        self.mainLayout.addWidget(self.bottomBorder)
        self.setLayout(self.mainLayout)
        # Running byte/character count accumulated by set_value.
        self.totalValue = 0

    def set_value(self, value):
        """Add len(value) to the total and move the bar accordingly."""
        self.totalValue += len(value)
        self.progressbar.setValue(self.totalValue)

    def set_max(self, value):
        """Set the bar's maximum (expected total length)."""
        self.progressbar.setMaximum(value)
class DownloadProgressWidget(BaseProgressWidget):
    """Progress row styled with a green chunk, for downloads."""

    def __init__(self, text='Downloading', parent=None):
        # BUG FIX: super(self.__class__, ...) recurses infinitely if this
        # class is subclassed; name the class explicitly.
        super(DownloadProgressWidget, self).__init__(text, parent)
        style ="""
        QProgressBar {
        border: 2px solid grey;
        border-radius: 5px;
        text-align: center;
        }
        QProgressBar::chunk {
        background-color: #37DA7E;
        width: 20px;
        }"""
        self.progressbar.setStyleSheet(style)
class UploadProgressWidget(BaseProgressWidget):
    """Progress row styled with a blue chunk, for uploads."""

    def __init__(self, text='Uploading', parent=None):
        # BUG FIX: super(self.__class__, ...) recurses infinitely if this
        # class is subclassed; name the class explicitly.
        super(UploadProgressWidget, self).__init__(text, parent)
        style ="""
        QProgressBar {
        border: 2px solid grey;
        border-radius: 5px;
        text-align: center;
        }
        QProgressBar::chunk {
        background-color: #88B0EB;
        width: 20px;
        }"""
        self.progressbar.setStyleSheet(style)
class ProgressDialog(QtGui.QMainWindow):
    """Scrollable window that stacks progress-bar widgets vertically."""

    def __init__(self, parent=None):
        # BUG FIX: super(self.__class__, ...) recurses infinitely if this
        # class is subclassed; name the class explicitly.
        super(ProgressDialog, self).__init__(parent)
        self.resize(500, 250)
        self.scrollArea = QtGui.QScrollArea()
        self.scrollArea.setWidgetResizable(True)
        self.setCentralWidget(self.scrollArea)
        # NOTE(review): this attribute shadows QMainWindow.centralWidget();
        # kept as-is since renaming would change the public attribute.
        self.centralWidget = QtGui.QWidget()
        self.scrollArea.setWidget(self.centralWidget)
        self.layout = QtGui.QVBoxLayout()
        self.layout.setAlignment(QtCore.Qt.AlignTop)
        self.layout.setContentsMargins(0,10,0,0)
        self.centralWidget.setLayout(self.layout)

    def addProgressbar(self, progressbar):
        """Append *progressbar* to the bottom of the stack."""
        self.layout.addWidget(progressbar)
def loginDialog(parent=None):
    """Show the login dialog and return its result.

    Returns False when cancelled, ('anonymous', 'anonymous', True) for
    visitor mode, or (name, password, True) for a registered login.
    """
    dlg = LoginDialog(parent)
    if not dlg.isAccepted:
        return False
    if dlg.visitorRadio.isChecked():
        return ('anonymous', 'anonymous', True)
    return (str(dlg.nameEdit.text()), str(dlg.passwdEdit.text()), True)
if __name__ == '__main__':
    # Manual smoke tests; each creates its own QApplication.

    def testLoginDialog():
        """Exercise the login dialog and print its result."""
        app = QtGui.QApplication([])
        print(loginDialog())

    # BUG FIX: two functions named testProgressDialog were defined; the
    # first (which only instantiated a ProgressDialog and returned) was
    # dead code shadowed by the second and has been removed. The
    # misspelled testLoinDialog was also renamed (definition and call
    # are both local to this block).
    def testProgressDialog():
        """Fill a ProgressDialog with random download/upload rows."""
        import random
        number = [x for x in range(1, 101)]
        progresses = []
        while len(progresses) <= 20:
            progresses.append(random.choice(number))
        app = QtGui.QApplication([])
        w = ProgressDialog()
        for i in progresses:
            pb = DownloadProgressWidget(text='download')
            pb.set_max(100)
            pb.set_value(' '*i)
            w.addProgressbar(pb)
        for i in progresses:
            pb = UploadProgressWidget(text='upload')
            pb.set_max(100)
            pb.set_value(' '*i)
            w.addProgressbar(pb)
        w.show()
        app.exec_()

    testProgressDialog()
    testLoginDialog()
|
[
"shanlihou@gmail.com"
] |
shanlihou@gmail.com
|
ec337e201edcf48b3fffffa27c23f2e8ea0559f6
|
8692807f1dfa8c18c61df07cfafbbd27d4e66fba
|
/LONG-CHALLENGE/KS2.sol.py
|
0d98e4ffbadbeb3ac519a48ce9c13e026385864f
|
[] |
no_license
|
sharmakajal0/codechef_problems
|
00381e9bf1996b859e46f087c2ffafd9d7a10ef1
|
0b979029e0a821f47fbdd6f9c624daee785a02e7
|
refs/heads/master
| 2020-05-29T15:04:40.459979
| 2020-03-29T08:44:53
| 2020-03-29T08:44:53
| 189,212,028
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 488
|
py
|
#!/usr/bin/env python
'''Module for guddu on a date'''
##
# Question URL: https://www.codechef.com/problems/KS2
##
def guddu_date(int_n):
    '''Recursively sum the decimal digits of *int_n* (0 for 0).'''
    if int_n == 0:
        return 0
    last_digit = int_n % 10
    remaining = int(int_n / 10)
    return last_digit + guddu_date(remaining)
# Read T test cases; for each N, append the check digit that makes the
# total digit sum a multiple of 10, then print the extended number.
for _ in range(int(input())):
    N = int(input())
    total_sum = guddu_date(N)
    total_sum = total_sum % 10
    # The check digit is the complement to the next multiple of 10;
    # a complement of 10 means the sum is already a multiple, so use 0.
    req_ans = 10 - total_sum
    if req_ans == 10:
        req_ans = 0
    print(int(str(N) + str(req_ans)))
|
[
"sharma.kajal.sitm@gmail.com"
] |
sharma.kajal.sitm@gmail.com
|
90704fc15f04198d0d1ef2795230f270ec8abd5d
|
43953b9520b7530abe1c14eda182ffffbef71a8e
|
/Ecommerce/migrations/0026_auto_20200627_2359.py
|
44b2f50d3c570d2d537485614bf054e03ff332e2
|
[
"MIT"
] |
permissive
|
aryanshridhar/Ecommerce-Website
|
66277d830f3c9f506721d1e9eab557c5edf2d665
|
c582659e9b530555b9715ede7bb774c39f101c7e
|
refs/heads/master
| 2021-09-23T01:08:02.834766
| 2021-04-08T18:50:18
| 2021-04-23T10:17:39
| 240,899,510
| 1
| 0
|
MIT
| 2021-09-22T18:36:16
| 2020-02-16T13:48:59
|
Python
|
UTF-8
|
Python
| false
| false
| 774
|
py
|
# Generated by Django 3.0.6 on 2020-06-27 18:29
import datetime
from django.db import migrations, models
import django.db.models.deletion
from django.utils.timezone import utc
class Migration(migrations.Migration):
    """Auto-generated migration (Django 3.0.6, 2020-06-27).

    Re-pins Review.Date's default (frozen at generation time, as Django does
    for callable-less defaults) and re-points Review.for_product at
    Ecommerce.Product with CASCADE deletion.
    """

    dependencies = [
        ('Ecommerce', '0025_auto_20200625_2207'),
    ]

    operations = [
        migrations.AlterField(
            model_name='review',
            name='Date',
            field=models.DateTimeField(default=datetime.datetime(2020, 6, 27, 18, 29, 1, 750503, tzinfo=utc)),
        ),
        migrations.AlterField(
            model_name='review',
            name='for_product',
            field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, related_name='for_product', to='Ecommerce.Product'),
        ),
    ]
|
[
"aryanshridhar7@gmail.com"
] |
aryanshridhar7@gmail.com
|
47c1bb886155817b9c0b06a4a8b8af8b78921ed6
|
95f349340866c4b5a50284444b361a180fb709b4
|
/scripts/get_price_history_commsec.py
|
fb0cfafd652359b113c74244155daa0f4ec2f806
|
[] |
no_license
|
rs77/asx-historical-data
|
16bc943f8852d09431bc218954ca0613e679ab8c
|
59dd6ac50e3ead1970544092d96d3ab4d0b0d271
|
refs/heads/master
| 2022-11-30T12:08:23.948552
| 2020-08-10T01:18:02
| 2020-08-10T01:18:02
| 257,584,156
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,848
|
py
|
import os
import time
import glob
import csv
from selenium import webdriver
from selenium.webdriver import Chrome
from datetime import date, datetime, timedelta
from decimal import Decimal
from re import sub
from selenium.webdriver.chrome.webdriver import WebDriver
from selenium.webdriver.common.keys import Keys
from selenium.common import exceptions
from typing import List, Dict, Optional, KeysView, Tuple
from selenium.webdriver.remote.webelement import WebElement
from selenium.webdriver.common.by import By
from selenium.webdriver.support.ui import WebDriverWait
from selenium.webdriver.support import expected_conditions as EC
cwd: str = os.getcwd()
def select_option(browser: WebDriver, id: str):
    """
    On the Commsec end-of-day prices page, click every security-type option
    whose text contains *id*, then click the Download button (the CSV lands
    in Chrome's configured download directory).

    :param browser: logged-in Selenium Chrome driver, already on the page
    :param id: substring of the option text to select, e.g. 'ETO' or 'Equities'
    :return: None
    """
    security_el: WebElement = browser.find_element_by_xpath("//select[@id='ctl00_BodyPlaceHolder_EndOfDayPricesView1_ddlAllSecurityType_field']")
    # work through the select options
    security_opts: List[WebElement] = security_el.find_elements_by_tag_name('option')
    for o in security_opts:
        if id in o.text:
            o.click()
    # click download button
    download_btn: WebElement = browser.find_element_by_xpath("//input[@name='ctl00$BodyPlaceHolder$EndOfDayPricesView1$btnAllDownload$implementation$field']")
    download_btn.click()
    return
def __main__():
    """
    Log in to Commsec and download end-of-day price CSVs (ETO and Equities)
    for every weekday from 2020-01-01 up to today that is not already saved
    under <parent-dir>/prices/. Credentials come from the USERNAME_COMMSEC
    and PASSWORD_COMMSEC environment variables; downloads land in
    <parent-dir>/csv via Chrome's download preference.
    """
    username: str = os.getenv("USERNAME_COMMSEC")
    password: str = os.getenv("PASSWORD_COMMSEC")
    chrome_options = webdriver.ChromeOptions()
    chrome_options.add_argument("--incognito")
    main_dir: str = os.path.dirname(os.getcwd())
    csv_path: str = main_dir + "/csv"
    # Route Chrome's downloads into the csv directory.
    prefs = {
        "download.default_directory": csv_path
    }
    chrome_options.add_experimental_option("prefs", prefs)
    # get browser up and running
    browser: WebDriver = Chrome(options=chrome_options)
    browser.implicitly_wait(5)
    # log in to Commsec website
    url: str = 'https://www2.commsec.com.au/Private/Charts/EndOfDayPrices.aspx'
    browser.get(url)
    user_fld: WebElement = browser.find_element_by_xpath("//input[@id='ctl00_cpContent_txtLogin']")
    user_fld.send_keys(username)
    user_fld.send_keys(Keys.TAB)
    # After TAB the password field has focus; type into the active element.
    el: WebElement = browser.switch_to.active_element
    el.send_keys(password)
    browser.find_element_by_xpath("//input[@id='ctl00_cpContent_btnLogin']").click()
    time.sleep(3)  # allow the post-login redirect to complete
    format_el: WebElement = browser.find_element_by_xpath(
        "//select[@id='ctl00_BodyPlaceHolder_EndOfDayPricesView1_ddlAllFormat_field']")
    format_opts: List[WebElement] = format_el.find_elements_by_tag_name('option')
    for o in format_opts:
        if 'Stock Easy' in o.text:  # Stock Easy format preferred as it lists the dates as YYYYMMDD
            o.click()
    start_date: date = date(2020, 1, 1)
    end_date: date = date.today()
    while start_date < end_date:
        date_fld: WebElement = browser.find_element_by_xpath(
            "//input[@id='ctl00_BodyPlaceHolder_EndOfDayPricesView1_txtAllDate_field']")
        date_txt: str = start_date.strftime("%d/%m/%Y")
        # Set the date field's value via JavaScript rather than send_keys.
        # NOTE(review): presumably the field rejects direct typing -- confirm.
        js_txt: str = f"arguments[0].value = '{date_txt}'"
        browser.execute_script(js_txt, date_fld)
        # check if we already have this date
        eto_loc: str = f"{main_dir}/prices/etos/*/{start_date:%Y%m%d}.csv"
        eqt_loc: str = f"{main_dir}/prices/equities/*/{start_date:%Y%m%d}.csv"
        if start_date.isoweekday() < 6:  # weekdays only (Mon=1 .. Fri=5)
            find_eto_file: List[str] = glob.glob(eto_loc)
            if len(find_eto_file) == 0:
                select_option(browser, 'ETO')
            find_eqt_file: List[str] = glob.glob(eqt_loc)
            if len(find_eqt_file) == 0:
                select_option(browser, 'Equities')
        start_date = start_date + timedelta(days=1)

# Run immediately on import/execution (no __name__ guard in the original).
__main__()
|
[
"rdsheehy@gmail.com"
] |
rdsheehy@gmail.com
|
b0e033ccb4cffc961d51689e54991791e4bc3538
|
7f7b54bc1206413c33cb2089feec92defa199d53
|
/bootstrapFileInput-master/ConDoc.py
|
f1be4979cfbd82efc68deb75ddd4e541e9b52572
|
[
"Apache-2.0"
] |
permissive
|
lazyleonliu/leonhub
|
baaf4c0b1ad21a5e6d8889b335330548bdc49c11
|
4418cc0783cf624e87742436db69d17468424a1c
|
refs/heads/master
| 2020-07-07T06:20:34.494057
| 2020-03-12T13:01:18
| 2020-03-12T13:01:18
| 203,276,353
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,286
|
py
|
import os
from win32com import client as wc
import docx
import pythoncom
# Save each passed-in .doc file as a .docx next to the original
def con_doc_to_docx(file_paths):
    """
    Convert every .doc entry in *file_paths* to .docx via WPS COM automation,
    delete the original .doc, and rewrite the list entry in place to point at
    the new .docx file.

    :param file_paths: list of absolute document paths (mutated in place)
    :return: the same list, with .doc entries replaced by .docx paths
    """
    print(file_paths)
    # Initialize COM for the calling thread (needed outside the main thread).
    pythoncom.CoInitialize()
    for i in range(len(file_paths)):
        file = os.path.splitext(file_paths[i])
        # print(file[1])
        if file[1] == u'.doc':
            # 'kwps.Application' is WPS Office's Word-compatible COM server.
            word = wc.Dispatch('kwps.Application')
            doc = word.Documents.Open(file_paths[i])  # the file at the target path
            # Format code 12 saves as .docx; appending 'x' turns foo.doc into foo.docx.
            doc.SaveAs(file_paths[i] + u'x', 12, False, "", True, "", False, False, False, False)
            # NOTE: this closes all currently open Word documents
            doc.Close()
            word.Quit()
            dele_doc(file_paths[i])
            file_paths[i] = file_paths[i] + u'x'
    return file_paths
def dele_doc(file_path):
    """Delete *file_path* if it exists; return True when a file was removed."""
    if not os.path.exists(file_path):
        return False
    os.remove(file_path)
    return True
if __name__ == '__main__':
    # Manual smoke test with two sample upload paths (one .doc to convert,
    # one .docx that should pass through untouched).
    file_paths = [u"D:\\mygit\leonhub\\bootstrapFileInput-master\\app\lib\\static\\upload\\20200312a7baac84b3c44754b0f78e53cd9b1e4f/stest1.doc",
                  u"D:\\mygit\leonhub\\bootstrapFileInput-master\\app\lib\\static\\upload\\20200312a7baac84b3c44754b0f78e53cd9b1e4f/stest2.docx"]
    con_doc_to_docx(file_paths)
|
[
"lazyleonliu@163.com"
] |
lazyleonliu@163.com
|
b659f4d8d6022539e9581d123b757cf416a3d853
|
104b4048116140b2cae29e59ce40c1ca668e63f9
|
/sample_2/views.py
|
7cf4da40cbe9e2d1bba9474e845cde1e93c883e5
|
[] |
no_license
|
daseulll/django_apiserver
|
fe20045778071e790b0395db6443e386d2074183
|
05cce3841bb4f8ce166f7db926dd1f37dd822b30
|
refs/heads/master
| 2020-05-24T18:49:06.405320
| 2019-05-30T13:36:42
| 2019-05-30T13:36:42
| 187,417,086
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 993
|
py
|
from rest_framework import generics
from rest_framework import viewsets
from django.contrib.auth import get_user_model
from .models import Post
from .serializers import PostSerializer, UserSerializer
class PostViewSet(viewsets.ModelViewSet):
    """Full CRUD API for Post (list/create/retrieve/update/destroy);
    URL routes are generated by a DRF router."""
    queryset = Post.objects.all()
    serializer_class = PostSerializer
# class PostListAPIView(generics.ListCreateAPIView):
# queryset = Post.objects.all()
# serializer_class = PostSerializer
# class PostDetailAPIView(generics.RetrieveUpdateDestroyAPIView):
# queryset = Post.objects.all()
# serializer_class = PostSerializer
class UserViewSet(viewsets.ReadOnlyModelViewSet):
    """Read-only API (list/retrieve) over the project's active user model."""
    queryset = get_user_model().objects.all()
    serializer_class = UserSerializer
# user_list = UserViewSet.as_view({
#     'get' : 'list',  # map the HTTP method to the viewset action to invoke
# })
# user_detail = UserViewSet.as_view({
#     'get' : 'retrieve',
# })
### When a router is used, this code is no longer needed.
|
[
"nldaseul@gmail.com"
] |
nldaseul@gmail.com
|
86033eb1b28f20d89bb1c08fbd7ce49732b99078
|
592f4fd54b16f3daaa64756fef86dd240114ce61
|
/tello_controler.py
|
715abaa1d12fb7623e37980ddaea8cb3461f1fb7
|
[] |
no_license
|
mediof/ustria
|
5d9c8ece6322e9406b3daa5b2e58ea09d2bbcff0
|
32c753fe1118efcdf345c49aaca957606969d9e4
|
refs/heads/master
| 2020-11-24T15:35:14.215999
| 2019-12-15T17:00:23
| 2019-12-15T17:00:23
| 228,218,719
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,299
|
py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Thu Dec 5 16:26:10 2019
@author: mioooo
"""
# Import the necessary modules
import socket
import threading
import time
import sys
# IP and port of Tello (the drone's fixed SDK command endpoint)
tello_address = ('192.168.10.1', 8889)
# IP and port of local computer ('' binds on all interfaces)
local_address = ('', 9000)
# Create a UDP connection that we'll send the command to
sock = socket.socket(socket.AF_INET, socket.SOCK_DGRAM)
# Bind to the local address and port so replies arrive on this socket
sock.bind(local_address)
def send(message):
    """Transmit *message* to the Tello over UDP, logging failures
    instead of raising."""
    try:
        sock.sendto(message.encode(), tello_address)
        print("Sending message: " + message)
    except Exception as err:
        print("Error sending: " + str(err))
def receive():
    """Print every response from the Tello until a socket error occurs,
    then close the socket and stop."""
    while True:
        try:
            payload, sender = sock.recvfrom(128)
            print("Received message: " + payload.decode(encoding='utf-8'))
        except Exception as err:
            # Any failure (including after sock.close() elsewhere) ends the loop.
            sock.close()
            print("Error receiving: " + str(err))
            break
# Create and start a listening thread that runs in the background.
# This uses our receive function to continuously print incoming messages.
receiveThread = threading.Thread(target=receive)
receiveThread.daemon = True  # don't block interpreter exit
receiveThread.start()

# Tell the user what to do
print('Type in a Tello SDK command and press the enter key. Enter "quit" to exit this program.')

# Loop infinitely waiting for commands until the user types quit or hits ctrl-c
while True:
    try:
        # Read keyboard input from the user
        if (sys.version_info > (3, 0)):
            # Python 3 compatibility
            message = input('')
        else:
            # Python 2 compatibility
            message = raw_input('')
        # If the user types quit, exit and close the socket
        if 'quit' in message:
            # fixed typo in the user-facing message ("sucessfully")
            print("Program exited successfully")
            sock.close()
            break
        # Send the command to Tello
        send(message)
    # Handle ctrl-c: close the socket and quit
    except KeyboardInterrupt as e:
        sock.close()
        break
|
[
"noreply@github.com"
] |
mediof.noreply@github.com
|
1d85d8c7fd0dee6f91dc943e60d126aa7f3dc2b0
|
30e2407d44083cf5fa3737f56d40c32c9df31bf8
|
/exp_7/yolov3/yolov3-bcl/demo/image_demo.py
|
c879fb0e9e956af9870029d0f10569a1ae6eacd8
|
[
"MIT"
] |
permissive
|
gzq942560379/ICSE
|
1179c8189df574f14bd2389c93d1510bb0489743
|
ca433ec5fa022c5b31cdf47730ffaee070dea9ca
|
refs/heads/master
| 2023-05-08T11:09:41.648880
| 2021-06-03T03:19:11
| 2021-06-03T03:19:11
| 351,345,412
| 5
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,651
|
py
|
#! /usr/bin/env python
# coding=utf-8
#================================================================
# Copyright (C) 2019 * Ltd. All rights reserved.
#
# Editor : VIM
# File name : image_demo.py
# Author : YunYang1994
# Created date: 2019-01-20 16:06:06
# Description :
#
#================================================================
import argparse
import cv2
import os
import numpy as np
import time
import core.utils as utils
import tensorflow as tf
from PIL import Image
def load_graph(model_file):
    """Deserialize a frozen GraphDef from *model_file* into a new tf.Graph."""
    loaded = tf.Graph()
    proto = tf.GraphDef()
    with open(model_file, "rb") as handle:
        proto.ParseFromString(handle.read())
    with loaded.as_default():
        tf.import_graph_def(proto)
    return loaded
def get_sample_counts(filename):
    """Return the number of records in the TFRecord file at *filename*."""
    return sum(1 for _ in tf.python_io.tf_record_iterator(path=filename))
if __name__ == "__main__":
    # ---------- defaults; each may be overridden by a CLI flag below ----------
    pb_file = "./yolov3_coco.pb"
    file_list = "./file_list"
    number = 1
    num_classes = 80          # COCO class count
    input_size = 544          # square network input resolution
    graph = tf.Graph()        # placeholder; replaced by load_graph() below
    data_parallelism = 1
    model_parallelism = 1
    core_num = 1
    core_version = "MLU100"   # Cambricon MLU accelerator target
    batch_size = 1
    precision = "float32"
    # Input placeholder plus the three YOLOv3 heads (small/medium/large boxes).
    return_elements = ["input/input_data:0", "pred_sbbox/concat_2:0",
                       "pred_mbbox/concat_2:0", "pred_lbbox/concat_2:0"]
    output_layer = ["input/input_data:0", "pred_sbbox/concat_2:0",
                    "pred_mbbox/concat_2:0", "pred_lbbox/concat_2:0"]
    parser = argparse.ArgumentParser()
    parser.add_argument("--graph", help="graph/model to be executed")
    parser.add_argument("--file_list", help="file_list to be processed")
    parser.add_argument("--number", type=int, help="number of file_list to be processed")
    parser.add_argument("--batch_size", type=int, help="batch size")
    parser.add_argument("--data_parallelism", type=int, help="data_parallelism")
    parser.add_argument("--model_parallelism", type=int, help="model_parallelism")
    parser.add_argument("--core_num", type=int, help="core_num")
    parser.add_argument("--output_layer", help="name of output layer")
    parser.add_argument("--precision", help="datatype")
    parser.add_argument("--core_version", help="MLU100")
    args = parser.parse_args()
    # Apply CLI overrides (note: falsy values such as 0 are silently ignored).
    if args.file_list:
        file_list = args.file_list
    if args.graph:
        pb_file = args.graph
    if args.output_layer:
        output_layer = args.output_layer
    if args.number:
        number = args.number
    if args.batch_size:
        batch_size = args.batch_size
    if args.data_parallelism:
        data_parallelism = args.data_parallelism
    if args.model_parallelism:
        model_parallelism = args.model_parallelism
    if args.core_num:
        core_num = args.core_num
    if args.precision:
        precision = args.precision
    if args.core_version:
        core_version = args.core_version
    # some check: parallelism degrees must be powers of two and fit 32 cores
    if data_parallelism not in [1, 2, 4, 8, 16, 32]:
        print ("Error! data_parallelism should be one of [1, 2, 4, 8, 16, 32]")
        exit(0)
    if model_parallelism not in [1, 2, 4, 8, 16, 32]:
        print ("Error! model_parallelism should be one of [1, 2, 4, 8, 16, 32]")
        exit(0)
    if model_parallelism * data_parallelism > 32:
        print ("Error! model_parallellism * data_parallelism should less than 32.")
        exit(0)
    if data_parallelism > 1:
        if batch_size < data_parallelism:
            print ("Error! batch_size must >= data_parallelism")
            exit(0)
        if batch_size % data_parallelism != 0:
            print ("Error! batch_size must be multiple of data_parallelism")
            exit(0)
    # MLU session configuration (single-threaded TF op scheduling).
    config = tf.ConfigProto(allow_soft_placement=True,
                            inter_op_parallelism_threads=1,
                            intra_op_parallelism_threads=1)
    config.mlu_options.save_offline_model = False
    config.mlu_options.data_parallelism = data_parallelism
    config.mlu_options.model_parallelism = model_parallelism
    config.mlu_options.core_num = core_num
    #config.mlu_options.fusion = True
    config.mlu_options.core_version = core_version
    config.mlu_options.precision = precision
    #config.graph_options.rewrite_options.remapping = 2
    #config.graph_options.rewrite_options.constant_folding = 2
    #config.graph_options.rewrite_options.arithmetic_optimization = 2
    # NOTE(review): presumably StridedSlice is kept off the MLU because the
    # backend handles it poorly -- confirm before changing.
    config.mlu_options.optype_black_list ="StridedSlice"
    f = open(file_list)
    file_list_lines = f.readlines()
    sample_counts = len(file_list_lines)
    print("number = ", number,
          "sample_counts = ", sample_counts,
          "batch_size = ", batch_size)
    #if number > sample_counts:
    #    number = sample_counts
    # NOTE(review): this raises `number` UP to sample_counts (always process
    # the whole list); the commented-out code above capped it instead.
    # Looks deliberate for benchmarking, but verify the intent.
    if number < sample_counts:
        number = sample_counts
    if number < batch_size:
        print("Error! number of images must be >= batch_size")
        exit(0)
    graph = load_graph(pb_file)
    return_tensors = utils.read_pb_return_tensors(graph, pb_file, return_elements)
    # Load every listed image up front (converted to RGB), remembering names.
    all_images = []
    all_images_name = []
    for i in range(number):
        line = file_list_lines[i]
        img_name = line.strip().split("/")[-1]
        original_image = cv2.imread(line.rstrip())
        h, w, _ = original_image.shape
        original_image = cv2.cvtColor(original_image, cv2.COLOR_BGR2RGB)
        new_h, new_w, _ = original_image.shape
        # NOTE(review): original_image_size is overwritten each iteration yet
        # later used to rescale boxes for *every* image -- only correct when
        # all inputs share one size. Confirm.
        original_image_size = original_image.shape[:2]
        all_images_name.append(img_name)
        all_images.append(original_image)
    image_data = utils.images_preporcess(all_images, [input_size, input_size])
    print(image_data.shape)
    run_times = np.ceil(number/batch_size)
    print(run_times)
    all_time = 0.0
    with tf.Session(config = config, graph = graph) as sess:
        for t in range(int(run_times)):
            # Slice out this batch: preprocessed tensors, names, raw frames.
            batch_images = image_data[t*batch_size:(t+1)*batch_size, ...]
            batch_images_name = all_images_name[t*batch_size:(t+1)*batch_size]
            batch_origin_images = all_images[t*batch_size:(t+1)*batch_size]
            start = time.time()
            print("batch_images shape:",batch_images.shape)
            pred_sbbox, pred_mbbox, pred_lbbox = sess.run(
                [return_tensors[1], return_tensors[2], return_tensors[3]],
                feed_dict={ return_tensors[0]: batch_images})
            end = time.time()
            # Exclude the first (warm-up) batch from the timing total.
            if t > 0:
                all_time = all_time + (end - start)
            single_pred_sbbox = np.split(pred_sbbox, batch_size, axis = 0)
            single_pred_mbbox = np.split(pred_mbbox, batch_size, axis = 0)
            single_pred_lbbox = np.split(pred_lbbox, batch_size, axis = 0)
            for i in range(batch_size):
                # Flatten the three heads into one (N, 5 + num_classes) array.
                pred_bbox = np.concatenate(
                    [np.reshape(single_pred_sbbox[i], (-1, 5 + num_classes)),
                     np.reshape(single_pred_mbbox[i], (-1, 5 + num_classes)),
                     np.reshape(single_pred_lbbox[i], (-1, 5 + num_classes))],
                    axis=0)
                bboxes = utils.postprocess_boxes(pred_bbox, original_image_size, input_size, 0.3)
                bboxes = utils.nms(bboxes, 0.45, method='nms')
                image = utils.draw_bbox(batch_origin_images[i], bboxes)
                image = Image.fromarray(image)
                img_path = "./result_img"
                if not (os.path.exists(img_path)):
                    os.mkdir(img_path)
                new_img_location = os.path.join(img_path, batch_images_name[i])
                image.save(new_img_location, 'jpeg')
    # Report throughput over the timed (non-warm-up) batches.
    if run_times > 1:
        print('end2end fps: %f' %(((run_times-1) * batch_size)/all_time))
|
[
"gzq9425@qq.com"
] |
gzq9425@qq.com
|
eca3b02401d05b883dc124ec52126e7992f8c9d1
|
2827d7a837eb29c3cb07793ab6d3d5a753e18669
|
/alipay/aop/api/request/KoubeiMarketingCampaignUserAssetQueryRequest.py
|
c460199126ea5d80aab58502068482b17b78da94
|
[
"Apache-2.0"
] |
permissive
|
shaobenbin/alipay-sdk-python
|
22e809b8f5096bec57d2bb25414f64bdc87fa8b3
|
5232ad74dff2e8a6e0e7646ab3318feefa07a37d
|
refs/heads/master
| 2020-03-21T04:51:39.935692
| 2018-06-21T07:03:31
| 2018-06-21T07:03:31
| 138,131,022
| 0
| 0
| null | 2018-06-21T06:50:24
| 2018-06-21T06:50:24
| null |
UTF-8
|
Python
| false
| false
| 4,022
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import json
from alipay.aop.api.FileItem import FileItem
from alipay.aop.api.constant.ParamConstants import *
from alipay.aop.api.domain.KoubeiMarketingCampaignUserAssetQueryModel import KoubeiMarketingCampaignUserAssetQueryModel
class KoubeiMarketingCampaignUserAssetQueryRequest(object):
    """Request wrapper for the ``koubei.marketing.campaign.user.asset.query`` API.

    Holds the business payload (``biz_model``/``biz_content``) plus the common
    gateway parameters (version, terminal info, callback URLs, ...) and
    serializes everything into a flat dict via :meth:`get_params`.
    """

    def __init__(self, biz_model=None):
        self._biz_model = biz_model
        self._biz_content = None
        self._version = "1.0"
        self._terminal_type = None
        self._terminal_info = None
        self._prod_code = None
        self._notify_url = None
        self._return_url = None
        self._udf_params = None
        self._need_encrypt = False

    @property
    def biz_model(self):
        return self._biz_model

    @biz_model.setter
    def biz_model(self, value):
        self._biz_model = value

    @property
    def biz_content(self):
        return self._biz_content

    @biz_content.setter
    def biz_content(self, value):
        # Accept either a ready model instance or a plain dict, which is
        # coerced into the model type.
        if isinstance(value, KoubeiMarketingCampaignUserAssetQueryModel):
            self._biz_content = value
        else:
            self._biz_content = KoubeiMarketingCampaignUserAssetQueryModel.from_alipay_dict(value)

    @property
    def version(self):
        return self._version

    @version.setter
    def version(self, value):
        self._version = value

    @property
    def terminal_type(self):
        return self._terminal_type

    @terminal_type.setter
    def terminal_type(self, value):
        self._terminal_type = value

    @property
    def terminal_info(self):
        return self._terminal_info

    @terminal_info.setter
    def terminal_info(self, value):
        self._terminal_info = value

    @property
    def prod_code(self):
        return self._prod_code

    @prod_code.setter
    def prod_code(self, value):
        self._prod_code = value

    @property
    def notify_url(self):
        return self._notify_url

    @notify_url.setter
    def notify_url(self, value):
        self._notify_url = value

    @property
    def return_url(self):
        # BUG FIX: this getter previously returned self._notify_url
        # (copy/paste error), so the configured return URL was unreadable.
        return self._return_url

    @return_url.setter
    def return_url(self, value):
        self._return_url = value

    @property
    def udf_params(self):
        return self._udf_params

    @udf_params.setter
    def udf_params(self, value):
        # Silently ignore non-dict assignments (matches the SDK-wide style).
        if not isinstance(value, dict):
            return
        self._udf_params = value

    @property
    def need_encrypt(self):
        return self._need_encrypt

    @need_encrypt.setter
    def need_encrypt(self, value):
        self._need_encrypt = value

    def add_other_text_param(self, key, value):
        """Attach an extra free-form text parameter to the request."""
        if not self.udf_params:
            self.udf_params = dict()
        self.udf_params[key] = value

    def get_params(self):
        """Serialize all configured fields into the gateway parameter dict.

        ``biz_model``/``biz_content`` are JSON-encoded with sorted keys and
        compact separators, as required for request signing.
        """
        params = dict()
        params[P_METHOD] = 'koubei.marketing.campaign.user.asset.query'
        params[P_VERSION] = self.version
        if self.biz_model:
            params[P_BIZ_CONTENT] = json.dumps(obj=self.biz_model.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
        if self.biz_content:
            if hasattr(self.biz_content, 'to_alipay_dict'):
                params['biz_content'] = json.dumps(obj=self.biz_content.to_alipay_dict(), ensure_ascii=False, sort_keys=True, separators=(',', ':'))
            else:
                params['biz_content'] = self.biz_content
        if self.terminal_type:
            params['terminal_type'] = self.terminal_type
        if self.terminal_info:
            params['terminal_info'] = self.terminal_info
        if self.prod_code:
            params['prod_code'] = self.prod_code
        if self.notify_url:
            params['notify_url'] = self.notify_url
        if self.return_url:
            params['return_url'] = self.return_url
        if self.udf_params:
            params.update(self.udf_params)
        return params

    def get_multipart_params(self):
        """Return file-upload parameters; this API uploads no files."""
        multipart_params = dict()
        return multipart_params
|
[
"liuqun.lq@alibaba-inc.com"
] |
liuqun.lq@alibaba-inc.com
|
8acf44e9129a645e1a87abb871519789cdf160bb
|
0588e908e8a7a7b87367c036c3df407d88b38700
|
/src/tor/src/test/test_rebind.py
|
232b2003265ab7d1e20eb4298260217e1c85095e
|
[
"MIT",
"BSD-3-Clause",
"LicenseRef-scancode-maxmind-odl",
"Autoconf-exception-generic",
"NCSA",
"OpenSSL",
"GPL-2.0-or-later",
"ISC",
"BSD-2-Clause"
] |
permissive
|
ShroudProtocol/ShroudX
|
d4f06ef262b28aeb115f1ebbd9432e1744829ab0
|
369c800cbf4a50f4a7cd01f5c155833f1ade3093
|
refs/heads/master
| 2023-04-23T05:12:02.347418
| 2021-05-04T20:28:55
| 2021-05-04T20:28:55
| 302,146,008
| 2
| 4
|
MIT
| 2020-12-07T10:18:21
| 2020-10-07T19:56:30
|
C
|
UTF-8
|
Python
| false
| false
| 4,511
|
py
|
from __future__ import print_function
import errno
import logging
import os
import random
import socket
import subprocess
import sys
import time
LOG_TIMEOUT = 60.0
LOG_WAIT = 0.1
def fail(msg):
    """Log a FAIL marker and abort the whole test, using *msg* as exit status."""
    logging.error('FAIL')
    sys.exit(msg)
def skip(msg):
    """Log *msg* as the skip reason and exit with status 77 (conventionally
    treated as 'test skipped' by autotools-style harnesses)."""
    logging.warning('SKIP: {}'.format(msg))
    sys.exit(77)
def try_connecting_to_socksport():
    """Open (and immediately close) a TCP connection to tor's SOCKSPort.

    Terminates the tor subprocess and aborts the test if the connection
    fails. Relies on the module-level ``socks_port`` and ``tor_process``.
    """
    probe = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
    err = probe.connect_ex(('127.0.0.1', socks_port))
    if err:
        tor_process.terminate()
        fail('Cannot connect to SOCKSPort: error ' + os.strerror(err))
    probe.close()
def wait_for_log(s):
    """Block until a line of tor's stdout contains the substring *s*.

    Polls ``tor_process.stdout`` line by line for up to LOG_TIMEOUT
    seconds; aborts the whole test via fail() on timeout.
    """
    cutoff = time.time() + LOG_TIMEOUT
    while time.time() < cutoff:
        l = tor_process.stdout.readline()
        l = l.decode('utf8')
        if s in l:
            logging.info('Tor logged: "{}"'.format(l.strip()))
            return
        logging.info('Tor logged: "{}", waiting for "{}"'.format(l.strip(), s))
        # readline() returns a blank string when there is no output;
        # sleep briefly to avoid busy-waiting.
        # BUG FIX: this previously tested len(s) — the search pattern,
        # which is never empty — instead of len(l), the line just read,
        # so the anti-busy-wait sleep never ran.
        if len(l) == 0:
            time.sleep(LOG_WAIT)
    fail('Could not find "{}" in logs after {} seconds'.format(s, LOG_TIMEOUT))
def pick_random_port():
    """Return a TCP port in [10000, 60000] with no local listener.

    Tries up to 8 random ports; a successful connect_ex() (returning 0)
    means something is listening there, so the search continues. Aborts
    the test if no candidate is found.
    """
    port = 0
    random.seed()
    for i in range(8):
        port = random.randint(10000, 60000)
        s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
        try:
            in_use = s.connect_ex(('127.0.0.1', port)) == 0
        finally:
            # BUG FIX: the probe socket was previously closed only when the
            # port was in use; on the success (break) path it leaked.
            s.close()
        if not in_use:
            break
    if port == 0:
        fail('Could not find a random free port between 10000 and 60000')
    return port
# Timestamped debug logging for this harness (separate from tor's own log).
logging.basicConfig(level=logging.DEBUG,
                    format='%(asctime)s.%(msecs)03d %(message)s',
                    datefmt='%Y-%m-%d %H:%M:%S')

# Interpreter version gates: Python >= 2.7, and for Python 3, >= 3.1.
if sys.hexversion < 0x02070000:
    fail("ERROR: unsupported Python version (should be >= 2.7)")

if sys.hexversion > 0x03000000 and sys.hexversion < 0x03010000:
    fail("ERROR: unsupported Python3 version (should be >= 3.1)")

# Allow environments to opt out of this test entirely (exit 77 = skipped).
if 'TOR_SKIP_TEST_REBIND' in os.environ:
    skip('$TOR_SKIP_TEST_REBIND is set')

# Pick two distinct-enough ports for tor's ControlPort and SOCKSPort.
control_port = pick_random_port()
socks_port = pick_random_port()

assert control_port != 0
assert socks_port != 0

# Validate command-line arguments: path to the tor binary and a data dir.
if len(sys.argv) < 3:
    fail('Usage: %s <path-to-tor> <data-dir>' % sys.argv[0])

if not os.path.exists(sys.argv[1]):
    fail('ERROR: cannot find tor at %s' % sys.argv[1])

if not os.path.exists(sys.argv[2]):
    fail('ERROR: cannot find datadir at %s' % sys.argv[2])

tor_path = sys.argv[1]
data_dir = sys.argv[2]

# Create empty torrc files so no system-wide configuration leaks into the test.
empty_torrc_path = os.path.join(data_dir, 'empty_torrc')
open(empty_torrc_path, 'w').close()
empty_defaults_torrc_path = os.path.join(data_dir, 'empty_defaults_torrc')
open(empty_defaults_torrc_path, 'w').close()

# Launch tor with only a ControlPort and SOCKSPort, debug-logging to stdout
# so wait_for_log() can watch its progress.
tor_process = subprocess.Popen([tor_path,
                               '-DataDirectory', data_dir,
                               '-ControlPort', '127.0.0.1:{}'.format(control_port),
                               '-SOCKSPort', '127.0.0.1:{}'.format(socks_port),
                               '-Log', 'debug stdout',
                               '-LogTimeGranularity', '1',
                               '-FetchServerDescriptors', '0',
                               '-f', empty_torrc_path,
                               '--defaults-torrc', empty_defaults_torrc_path,
                               ],
                              stdout=subprocess.PIPE,
                              stderr=subprocess.PIPE)

if tor_process == None:
    fail('ERROR: running tor failed')

wait_for_log('Opened Control listener on')

try_connecting_to_socksport()

# Drive tor over the control port: rebind the SOCKS listener to different
# addresses via SETCONF and verify it is reachable after each rebind.
control_socket = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
if control_socket.connect_ex(('127.0.0.1', control_port)):
    tor_process.terminate()
    fail('Cannot connect to ControlPort')

control_socket.sendall('AUTHENTICATE \r\n'.encode('utf8'))

control_socket.sendall('SETCONF SOCKSPort=0.0.0.0:{}\r\n'.format(socks_port).encode('utf8'))
wait_for_log('Opened Socks listener')
try_connecting_to_socksport()

control_socket.sendall('SETCONF SOCKSPort=127.0.0.1:{}\r\n'.format(socks_port).encode('utf8'))
wait_for_log('Opened Socks listener')
try_connecting_to_socksport()

# Ask tor to shut down cleanly, then reap the process.
control_socket.sendall('SIGNAL HALT\r\n'.encode('utf8'))

wait_for_log('exiting cleanly')
logging.info('OK')

try:
    tor_process.terminate()
except OSError as e:
    if e.errno == errno.ESRCH:  # errno 3: No such process
        # assume tor has already exited due to SIGNAL HALT
        logging.warn("Tor has already exited")
    else:
        raise
|
[
"edgar.keek@gmail.com"
] |
edgar.keek@gmail.com
|
79934e61d08159229edad81ddfe11a4c0569514f
|
252298818f8aa1e3b1ee1fbce884ca0e7a0370a4
|
/store/migrations/0012_auto_20191112_1037.py
|
8306ad3bc0482ba18045292644a390e3871d2819
|
[] |
no_license
|
zxj17815/wechat_store
|
3484d5ce474a62fa2bdfae8173dea7e91bb42515
|
0e8296ede62d4cc968b343a3203ba97e1b7b1afc
|
refs/heads/master
| 2023-05-28T20:09:15.843318
| 2020-04-03T07:42:53
| 2020-04-03T07:42:53
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 445
|
py
|
# Generated by Django 2.2.3 on 2019-11-12 02:37
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: redefines ActivePorduct.feature as a
    # CharField(max_length=500) with admin help text.

    dependencies = [
        # Must run after the previous 'store' migration.
        ('store', '0011_auto_20191112_0826'),
    ]

    operations = [
        migrations.AlterField(
            model_name='activeporduct',
            name='feature',
            field=models.CharField(help_text='text,特征', max_length=500, verbose_name='Features'),
        ),
    ]
|
[
"597952291@qq.com"
] |
597952291@qq.com
|
2530779610ba3004120af9f0722ad14dfd0f4070
|
3909266b59ae80e38ea89e232f2741b1e6175739
|
/dictionaries_functions/dictionaries_functions.py
|
0dc59d42f69a4fa7362264b28be5c8a5c2a4c1f6
|
[] |
no_license
|
Bardonkle/learntocode
|
a48cdc78cff57167125b4abbd1b188ad56243f82
|
51870aa9b96d1761848ae01f510318e87a80250d
|
refs/heads/master
| 2023-04-18T17:39:53.030558
| 2021-05-08T02:40:33
| 2021-05-08T02:40:33
| 359,315,940
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,848
|
py
|
# A dictionary stores key/value records ("key": value); a key looks up
# its value directly, and values may be of any type.
from pprint import pprint

my_dict = {
    "juice": "a tasty drink",
    "apple": "a fruit thats grows on trees"
}

# Read a value by its key.
my_value = my_dict["juice"]
print(my_value)

# "Upsert": assigning to a key updates it if present, inserts otherwise.
my_dict["apple"] = "a round, red fruit that grows on trees"
my_dict["coke"] = "favorite thing to snort"
pprint(my_dict)

# PROGRAM: find the most frequently used letter in a sentence.
# Plan: walk the sentence character by character, skip spaces/symbols,
# lowercase each letter, tally occurrences in a dictionary, and finally
# take the key with the largest count.
sentence = "the quick brown fox jumps over the lazy dog. then it fell on its face"

# Maps each letter to how many times it appears in the sentence.
letters_dict = {}

for letter in sentence:
    # Spaces and punctuation don't count as letters.
    if letter in ". !@#$%^&*(){}":
        continue
    # Normalize case so 'T' and 't' share one tally.
    letter = letter.lower()
    # get() returns None for unseen letters, so this seeds new entries
    # at 1 and increments existing ones.
    if letters_dict.get(letter):
        letters_dict[letter] += 1
    else:
        letters_dict[letter] = 1

# max() with key=letters_dict.get returns the KEY with the largest count.
max_value = max(letters_dict, key=letters_dict.get)
print(max_value)
|
[
"ryan.agharaad@gmail.com"
] |
ryan.agharaad@gmail.com
|
4892a9180da0063bd2762dc3b65d9533e8a3fbd7
|
90d2aa164829fc1f9c26e22d912c1e690e6f74bb
|
/SARIMA.py
|
cf916e8707fbaef80d0f368c4cadd498b45b9916
|
[] |
no_license
|
zied-prog/cryptomonnaies_prediction
|
f53ab428ec6b51e641ac38a0062a5643e600c018
|
616ea5224b7becbd254869c8909115eff6f8edc8
|
refs/heads/main
| 2023-07-19T16:47:09.145317
| 2021-09-03T12:26:12
| 2021-09-03T12:26:12
| 402,563,209
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,014
|
py
|
#!/usr/bin/env python
# coding: utf-8
# In[1]:
import pandas as pd
import numpy as np
import pandas_datareader.data as pdr
import matplotlib.pyplot as plt
from statsmodels.tsa.stattools import adfuller
from statsmodels.tsa.seasonal import seasonal_decompose
from statsmodels.tsa.arima_model import ARIMA
from pandas.plotting import register_matplotlib_converters
register_matplotlib_converters()
import warnings
import itertools
import statsmodels.api as sm
from pylab import rcParams
from sklearn.metrics import mean_squared_error
from sklearn.metrics import mean_absolute_error
from sklearn.metrics import mean_absolute_percentage_error
import datetime
from datetime import timedelta
#!pip install yfinance
import yfinance as yf
# In[11]:
start = datetime.datetime(2019,1,1)
end = datetime.datetime(2021,7,31)
ETH = yf.download('ETH-USD', start, end)
# In[12]:
ETH
# In[13]:
ETH_DATA=ETH.filter(['Adj Close'])
ETH_DATA.tail()
# In[14]:
ETH.dtypes
ETH.info()
ETH.describe()
# In[16]:
Data_viz = ETH.plot(y= 'Adj Close', figsize=(12,6), legend=True, grid=True, use_index=True)
plt.show()
# In[17]:
ETH_price = ETH['Adj Close']
# In[18]:
# Tracer la moyenne mobile et l’écart-type mobile
ETH_price.rolling(12).mean().plot(figsize=(20,10), linewidth=5, fontsize=20)
plt.xlabel('time', fontsize=20);
rolling_mean = ETH_price.rolling(window = 12).mean()
rolling_std = ETH_price.rolling(window = 12).std()
plt.plot(ETH_price, color = 'blue', label = 'Origine')
plt.plot(rolling_mean, color = 'red', label = 'Moyenne mobile')
plt.plot(rolling_std, color = 'green', label = 'Ecart-type mobile')
plt.legend(loc = 'best')
plt.title('Moyenne et Ecart-type mobiles')
plt.show()
# In[19]:
# Test de Dickey-Fuller augmenté qui est un test de stationnarité:
Test_ADF = adfuller(ETH_price)
print('Statistiques ADF : {}'.format(Test_ADF[0]))
print('p-value : {}'.format(Test_ADF[1]))
# In[20]:
ETH_price_1= ETH_price-ETH_price.shift()
ETH_price_1.dropna(inplace=True)
ETH_price_1.head()
# In[21]:
ETH_price_1.rolling(12).mean().plot(figsize=(20,10), linewidth=5, fontsize=20)
plt.xlabel('time', fontsize=20);
rolling_mean = ETH_price_1.rolling(window = 12).mean()
rolling_std = ETH_price_1.rolling(window = 12).std()
plt.plot(ETH_price_1, color = 'blue', label = 'Origine')
plt.plot(rolling_mean, color = 'red', label = 'Moyenne mobile')
plt.plot(rolling_std, color = 'green', label = 'Ecart-type mobile')
plt.legend(loc = 'best')
plt.title('Moyenne et Ecart-type mobiles de ETH data')
plt.show()
# In[22]:
Test_ADF = adfuller(ETH_price_1)
print('Statistiques ADF : {}'.format(Test_ADF[0]))
print('p-value : {}'.format(Test_ADF[1]))
# In[23]:
# Convertion en moyennes hebdomadaires de nos séries chronologiques
ETH_price_2 = ETH_price_1.resample('W').mean()
ETH_price_2.head()
# In[24]:
# Somme des données manquantes
ETH_price_2.isnull().sum()
# In[25]:
ETH_price_2.plot(figsize=(15, 6))
plt.show()
# In[26]:
# Decomposition de la serie chronologique pour identifier la saisonnalité
rcParams['figure.figsize'] = 11, 9
decomposition = sm.tsa.seasonal_decompose(ETH_price_2, model='additive')
decomposition_plot = decomposition.plot()
plt.show()
# In[27]:
# Generation des différentes combinaisons de paramètres (p, d et q) qui peuvent prendre n'importe quelles valeurs entre 0 et 2
p = d = q = range(0, 2)
pdq = list(itertools.product(p, d, q))
parametre_pdq = [(x[0], x[1], x[2], 52) for x in list(itertools.product(p, d, q))]
parametre_pdq
# In[28]:
# Utilisation du modele SARIMA suite à l'identification de la saisonnalité
for parametre in pdq:
for parametre_seasonal in parametre_pdq:
try:
model_SARIMA = sm.tsa.statespace.SARIMAX(ETH_price_2, order=parametre, seasonal_order=parametre_seasonal, enforce_stationarity=False, enforce_invertibility=False)
resultats = model_SARIMA.fit()
print('ARIMA{}x{}52 - AIC:{}'.format(parametre, parametre_seasonal, resultats.aic))
except:
continue
# In[29]:
# The grid search above selected ARIMA(1, 1, 1)x(1, 1, 0, 52) as the model
# with the lowest AIC (330.11656280001256).
# BUG FIX: the seasonal period was written as 12 here although both the
# grid search (parametre_pdq uses period 52) and the comment above refer
# to a weekly seasonality of 52.
model = sm.tsa.statespace.SARIMAX(ETH_price_2, order=(1, 1, 1), seasonal_order=(1, 1, 0, 52), enforce_stationarity=False, enforce_invertibility=False)
arima = model.fit()
print(arima.summary())
# In[30]:
arima.plot_diagnostics(figsize=(15, 12))
plt.show()
# In[31]:
# Construction d'un intervalle de confiance pour les paramètres ajustés.
prediction = arima.get_prediction(start=pd.to_datetime('2021-01-03'), dynamic=False)
prediction_intervalle = prediction.conf_int()
# In[ ]:
# In[32]:
pred = ETH_price_2['2019':].plot(label='observed')
prediction.predicted_mean.plot(ax=pred, label='Prediction', alpha=.7)
pred.fill_between(prediction_intervalle.index, prediction_intervalle.iloc[:, 0], prediction_intervalle.iloc[:, 1], color='k', alpha=.2)
pred.set_xlabel('Date')
pred.set_ylabel('Price')
plt.legend()
plt.show()
# In[33]:
forecast = arima.get_forecast(steps=8)
forecast_intervalle = forecast.conf_int()
# In[34]:
print(forecast_intervalle)
# In[35]:
ax = ETH_price_2.plot(label='observed', figsize=(20, 15))
forecast.predicted_mean.plot(ax=ax, label='Forecast')
ax.fill_between(forecast_intervalle.index, forecast_intervalle.iloc[:, 0], forecast_intervalle.iloc[:, 1], color='k', alpha=.25)
ax.set_xlabel('Date')
ax.set_ylabel('Price')
plt.legend()
plt.show()
# In[41]:
# Validation croisée hors du temps
# In[42]:
ETH_price_1 = ETH['Close']
ETH_price_2 = ETH_price_1-ETH_price_1.shift()
ETH_price_2.dropna(inplace=True)
#split data
train, test=ETH_price_2[:-20], ETH_price_2[-20:]
#Train
train = train.resample('w').mean()
#Test
test = test.resample('w').mean()
# In[43]:
test.describe()
# In[44]:
test
# In[45]:
p = d = q = range(0, 2)
pdq = list(itertools.product(p, d, q))
parametre_pdq = [(x[0], x[1], x[2], 52) for x in list(itertools.product(p, d, q))]
# In[46]:
for parametre in pdq:
for parametre_seasonal in parametre_pdq:
try:
model_SARIMA = sm.tsa.statespace.SARIMAX(train, order=parametre, seasonal_order=parametre_seasonal, enforce_stationarity=False, enforce_invertibility=False)
resultats = model_SARIMA.fit()
print('ARIMA{}x{}52 - AIC:{}'.format(parametre, parametre_seasonal, resultats.aic))
except:
continue
# In[47]:
model = sm.tsa.statespace.SARIMAX(train,order=(1, 1, 1),seasonal_order=(1, 1, 0, 52),enforce_stationarity=False, enforce_invertibility=False)
arima = model.fit()
print(arima.summary())
# In[49]:
arima.plot_diagnostics(figsize=(15, 12))
plt.show()
# In[50]:
prediction = arima.get_prediction(start=pd.to_datetime('2021-01-03'), dynamic=False)
prediction_intervalle = prediction.conf_int()
# In[51]:
pred = train['2019':].plot(label='observed')
prediction.predicted_mean.plot(ax=pred, label='Prediction', alpha=.7)
pred.fill_between(prediction_intervalle.index, prediction_intervalle.iloc[:, 0], prediction_intervalle.iloc[:, 1], color='k', alpha=.2)
pred.set_xlabel('Date')
pred.set_ylabel('Price')
plt.legend()
plt.show()
# In[52]:
forecast = arima.get_forecast(steps=4)
forecast_intervalle = forecast.conf_int()
ax = train.plot(label='observed', figsize=(20, 15))
forecast.predicted_mean.plot(ax=ax, label='Forecast')
ax.fill_between(forecast_intervalle.index, forecast_intervalle.iloc[:, 0], forecast_intervalle.iloc[:, 1], color='k', alpha=.25)
ax.set_xlabel('Date')
ax.set_ylabel('Price')
plt.legend()
plt.show()
# In[58]:
Prediction =(prediction_intervalle["lower Close"]+prediction_intervalle["upper Close"]) /2
Prediction.index
# In[59]:
#RMSE
rmse = np.mean((Prediction - test)**2)**.5
rmse
# In[60]:
#MAPE
mape = np.mean(np.abs(Prediction- test)/np.abs(test))
mape
# In[61]:
#MAE
mae = np.mean(np.abs(Prediction - test))
mae
# In[62]:
#MPE
mpe = np.mean((Prediction - test)/test)
mpe
# In[ ]:
|
[
"noreply@github.com"
] |
zied-prog.noreply@github.com
|
33e2f7a6ea12e80f0db9d59c65b6ada8014c9ae8
|
786f29dc7be151b716433bbe14db7552a683b8af
|
/sort/xuanzhe.py
|
8835f2bc2b83302d041e5b668f28c59ecc465f87
|
[] |
no_license
|
Hengle/my_test_project
|
bfea2e32734b921ee7ff6da8d18956a24f0ad64f
|
a9f30f883a7bd906c7ed346c79bc8f1fd53db11d
|
refs/heads/master
| 2023-03-22T08:36:32.102048
| 2021-03-08T08:10:25
| 2021-03-08T08:10:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 262
|
py
|
# Selection sort: each outer pass records the index of the smallest
# remaining element and performs exactly one swap to put it in place.
l = [4, 5, 6, 7, 3, 2, 6, 9, 8, 1]
n = len(l)
for i in range(n):
    smallest = i
    # Scan the unsorted tail for anything smaller than the current pick.
    for j in range(i + 1, n):
        if l[j] < l[smallest]:
            smallest = j
    l[smallest], l[i] = l[i], l[smallest]
    print(l)  # show the list after each pass
print(l)
|
[
"m15927260404@163.com"
] |
m15927260404@163.com
|
a7b343baa55b765432f1cc02f7001ade630607e2
|
1e43fd5e134157e6f034327ffbf3e6501c67275d
|
/mlps/core/data/cnvrtr/functions/ExtractDomain.py
|
2df337b646a23da67d34de7acf0c60a346e626e6
|
[
"Apache-2.0"
] |
permissive
|
sone777/automl-mlps
|
f15780e23142e0f3f368815678959c7954966e71
|
a568b272333bc22dc979ac3affc9762ac324efd8
|
refs/heads/main
| 2023-08-24T10:07:30.834883
| 2021-11-03T07:41:15
| 2021-11-03T07:41:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,786
|
py
|
# -*- coding: utf-8 -*-
# Author : Seungyeon Jo
# e-mail : syjo@seculayer.co.kr
# Powered by Seculayer © 2018 AI-Core Team
import re
from mlps.core.data.cnvrtr.ConvertAbstract import ConvertAbstract
class ExtractDomain(ConvertAbstract):
    """Converter that reduces a URL/hostname string to its base domain."""

    def __init__(self, **kwargs):
        super().__init__(**kwargs)

    def apply(self, data):
        """Return ``[base_domain]`` for *data*, or ``['']`` for blank input."""
        if self._isBlank(data):
            return ['']

        # 0. Strip any query string and a leading http(s):// scheme.
        #    NOTE(review): the scheme pattern matches any 4-5 characters
        #    drawn from [hHtTpPsS] before "://", not strictly http/https —
        #    confirm this looseness is intended.
        _regex = '(\\?.+)|([hHtTpPsS]{4,5}://)'
        domain = re.sub(_regex, '', data)

        # 1. Plain IPv4 addresses (digits and dots only) pass through as-is.
        if len(re.sub('[0-9.]', '', domain)) == 0:
            return [domain]

        # 2. *.com / *.net: keep only the last two labels.
        labels = []
        label_count = 0
        try:
            labels = domain.split('.')
            label_count = len(labels)
            if domain.endswith('.com') or domain.endswith('.net'):
                return [labels[label_count - 2] + "." + labels[label_count - 1]]
        except Exception as e:
            self.LOGGER.error(e)

        # 3. Exactly one dot: already a base domain.
        if label_count == 2:
            return [domain]

        # 4. Two dots: keep the last two labels.
        try:
            if label_count == 3:
                return [labels[label_count - 2] + "." + labels[label_count - 1]]
        except Exception as e:
            self.LOGGER.error(e)

        # 5. Three or more dots: keep the last three labels.
        try:
            if label_count >= 4:
                return [labels[label_count - 3] + "." + labels[label_count - 2] + "." + labels[label_count - 1]]
        except Exception as e:
            self.LOGGER.error(e)

        # Fallback: return whatever survived the stripping.
        return [domain]
if __name__ == "__main__":
    # Ad-hoc smoke test: strips the scheme and query string, then reduces
    # the hostname to its base domain.
    _str = "http://www.seculayer.com/index.html?arg1=0"
    print(ExtractDomain(stat_dict=None, arg_list=None).apply(_str))
|
[
"bmg8551@naver.com"
] |
bmg8551@naver.com
|
4654e109eabd93199f5b40797255599c87d6d042
|
f0d0ea29240c53b6ce1c4b06095b528ece02fdd7
|
/views/purchase.py
|
057659a9fa45c49744f7fae684b314fc6b10d307
|
[] |
no_license
|
zhifuliu/dianjing
|
477529ccd6159329e1bc121aeb2ff328ee499f4a
|
7b3f6d58f5bc0738651d8d72c9a24df4ade0ed36
|
refs/heads/master
| 2020-03-21T09:10:28.343268
| 2017-03-24T03:06:24
| 2017-03-24T03:06:24
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,737
|
py
|
# -*- coding: utf-8 -*-
"""
Author: Wang Chao <yueyoum@gmail.com>
Filename: purchase
Date Created: 2016-08-03 17:19
Description:
"""
from django.http import HttpResponse
from dianjing.exception import GameException
from utils.http import ProtobufResponse
from config import ConfigErrorMessage
from core.purchase import Purchase, platform_callback_1sdk, platform_callback_stars_cloud
from protomsg.purchase_pb2 import PurchaseVerifyResponse, PurchaseGetFirstRewardResponse
def verify(request):
    """Verify a purchase receipt for the requesting character.

    Rejects the 'debug' provider outright, dispatches 'ios' receipts to
    the App Store verifier and everything else to the generic verifier,
    then wraps the verdict in a PurchaseVerifyResponse protobuf.
    """
    session = request._game_session
    param = request._proto.param

    purchase = Purchase(session.server_id, session.char_id)

    if session.provider == 'debug':
        raise GameException(ConfigErrorMessage.get_error_id("INVALID_OPERATE"))

    if session.provider == 'ios':
        goods_id, status = purchase.verify_ios(param)
    else:
        goods_id, status = purchase.verify_other(param)

    response = PurchaseVerifyResponse()
    response.ret = 0
    response.status = status
    response.goods_id = goods_id
    return ProtobufResponse(response)
def get_first_reward(request):
    """Claim the one-time first-purchase reward and return its drop."""
    session = request._game_session

    drop = Purchase(session.server_id, session.char_id).get_first_reward()

    response = PurchaseGetFirstRewardResponse()
    response.ret = 0
    response.drop.MergeFrom(drop.make_protomsg())
    return ProtobufResponse(response)
def callback_1sdk(request):
    """Payment-platform callback endpoint for 1sdk (parameters via GET)."""
    return HttpResponse(content=platform_callback_1sdk(request.GET))
def callback_stars_cloud(request):
    """Payment-platform callback endpoint for Stars Cloud (parameters via POST)."""
    return HttpResponse(content=platform_callback_stars_cloud(request.POST))
|
[
"yueyoum@gmail.com"
] |
yueyoum@gmail.com
|
dfa1466ec0fc43552e544cf5b6cb6fa848342685
|
767fb467f5948ed371d238d2672711262639838d
|
/collections_extended/_util.py
|
479d740185ff8a4b3cfa0aed13b9ac2172fdb4e2
|
[
"Apache-2.0"
] |
permissive
|
aishp9/collections-extended
|
48a378c6cf871c40f30884d40ab7d50d77584823
|
917d95d43e431779c423945e117d13005ef3b0c6
|
refs/heads/master
| 2022-04-11T01:58:26.215185
| 2020-02-12T07:52:23
| 2020-02-12T07:52:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,765
|
py
|
"""util functions for collections_extended."""
from functools import wraps
import textwrap
import warnings
__all__ = ('hash_iterable', 'deprecated')
def hash_iterable(it):
    """Hash an iterable of arbitrary length in O(1) memory.

    ``hash(tuple(it))`` would materialise every element at once, which is
    a problem for large iterables; instead each element is folded into a
    running hash seeded with the iterable's type.

    See discussion at:
    https://groups.google.com/forum/#!msg/python-ideas/XcuC01a8SYs/e-doB9TbDwAJ
    """
    acc = hash(type(it))
    for item in it:
        acc = hash((acc, item))
    return acc
def deprecation_warning(msg):
    """Emit *msg* as a DeprecationWarning attributed to the caller's caller."""
    warnings.warn(msg, category=DeprecationWarning, stacklevel=2)


def deprecated(msg, dep_version):
    """Decorate a function, method or class to mark as deprecated.

    The wrapped callable raises a DeprecationWarning when invoked, and a
    Sphinx ``.. deprecated::`` notice is appended to its docstring.

    Args:
        msg: The message to document
        dep_version: The version in which this was deprecated

    See:
        https://www.sphinx-doc.org/en/master/usage/restructuredtext/directives.html#directive-deprecated
    """
    def wrapper(func):
        docstring = func.__doc__ or ''
        docstring_msg = '.. deprecated:: {version} {msg}'.format(
            version=dep_version,
            msg=msg,
        )
        if docstring:
            # We don't know how far to indent this message
            # so instead we just dedent everything.
            string_list = docstring.splitlines()
            first_line = string_list[0]
            # BUG FIX: the continuation lines must be rejoined with
            # newlines; ''.join() collapsed the docstring body into one
            # long line, which also defeated textwrap.dedent() (it strips
            # a common per-line margin).
            remaining = textwrap.dedent('\n'.join(string_list[1:]))
            docstring = '\n'.join([
                first_line,
                remaining,
                '',
                docstring_msg,
            ])
        else:
            docstring = docstring_msg
        func.__doc__ = docstring

        @wraps(func)
        def inner(*args, **kwargs):
            deprecation_warning(msg)
            return func(*args, **kwargs)

        return inner
    return wrapper
|
[
"m.lenzen@gmail.com"
] |
m.lenzen@gmail.com
|
78537f99a66e92733a53372a677967bf72e3b17b
|
0fa1e5c8adea10601bd0ebed0524b8f5169b663b
|
/3rd Class (Ziyads)/P4.Jarvis_Security_System/script1.py
|
533af2fc910e37b0d475b0407dbb04a16fddf9dc
|
[] |
no_license
|
ivanstewart2001/projects1
|
1de638b665450772422eb1b5b6b935826a18fd6f
|
eefe3637e4c9c34712e7f85c771c64727740d91c
|
refs/heads/main
| 2022-12-26T23:41:02.084566
| 2020-10-14T22:17:42
| 2020-10-14T22:17:42
| 304,146,993
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 820
|
py
|
# Simple interactive "security system": greets users it knows and lets
# anyone add or remove themselves from the known_users list. Loops forever.
known_users = ["Brittany", "Courtney", "Geoffrey", "Sydney", "Isaiah"]

while True:
    print("Hi my name is Jarvis")
    # Normalize the typed name so "sydney " matches "Sydney".
    name = input("What is your name?: ").strip().capitalize()

    if name in known_users:
        # Returning user: offer to remove them from the system.
        print("Hello {}!".format(name))
        answer = input("Would you like to be removed from the system (Y/N)?: ").strip().lower()
        if answer == "y":
            known_users.remove(name)
        elif answer == "n":
            print("No problem, I didn't want to see you go anyway!")
    else:
        # Unknown user: offer to register them.
        print("Hmmmm I dont think I have met you yet {}.".format(name))
        answer = input("Would you like to be added to the system (Y/N)?: ").strip().lower()
        if answer == "y":
            known_users.append(name)
        elif answer == "n":
            print("No worries, see you around!")
|
[
"noreply@github.com"
] |
ivanstewart2001.noreply@github.com
|
bb67b015e755fce7523a3b6b93afea8bf06f7435
|
fb1aa2e8ab142b41b0621174ededf165f94b7deb
|
/strongdoc/proto/documentNoStore_pb2.py
|
ef81d528eb716d12fab94948f9dc7f281cc01881
|
[
"MIT"
] |
permissive
|
overnest/strongdoc-python-sdk
|
382a7603bd3d6a729f8b055c527c7571d8b7a242
|
586d1b22be855ccf275191e12def3228bff18803
|
refs/heads/master
| 2023-07-24T08:47:37.473879
| 2020-06-01T20:56:50
| 2020-06-01T20:56:50
| 244,732,124
| 0
| 0
|
MIT
| 2023-07-05T21:00:37
| 2020-03-03T20:13:04
|
Python
|
UTF-8
|
Python
| false
| true
| 16,593
|
py
|
# -*- coding: utf-8 -*-
# Generated by the protocol buffer compiler. DO NOT EDIT!
# source: documentNoStore.proto
from google.protobuf import descriptor as _descriptor
from google.protobuf import message as _message
from google.protobuf import reflection as _reflection
from google.protobuf import symbol_database as _symbol_database
# @@protoc_insertion_point(imports)
_sym_db = _symbol_database.Default()
from strongdoc.proto.protoc_gen_swagger.options import annotations_pb2 as protoc__gen__swagger_dot_options_dot_annotations__pb2
DESCRIPTOR = _descriptor.FileDescriptor(
name='documentNoStore.proto',
package='proto',
syntax='proto3',
serialized_options=b'\n\"com.strongsalt.strongdoc.sdk.protoB\020DocumentsNoStore\210\001\001',
serialized_pb=b'\n\x15\x64ocumentNoStore.proto\x12\x05proto\x1a,protoc-gen-swagger/options/annotations.proto\"K\n\x13\x45ncryptDocStreamReq\x12\x11\n\x07\x64ocName\x18\x01 \x01(\tH\x00\x12\x13\n\tplaintext\x18\x02 \x01(\x0cH\x00\x42\x0c\n\nNameOrData\"9\n\x14\x45ncryptDocStreamResp\x12\r\n\x05\x64ocID\x18\x01 \x01(\t\x12\x12\n\nciphertext\x18\x02 \x01(\x0c\"H\n\x13\x44\x65\x63ryptDocStreamReq\x12\x0f\n\x05\x64ocID\x18\x01 \x01(\tH\x00\x12\x14\n\nciphertext\x18\x02 \x01(\x0cH\x00\x42\n\n\x08IdOrData\"8\n\x14\x44\x65\x63ryptDocStreamResp\x12\r\n\x05\x64ocID\x18\x01 \x01(\t\x12\x11\n\tplaintext\x18\x02 \x01(\x0c\"\x92\x01\n\rEncryptDocReq\x12\x0f\n\x07\x64ocName\x18\x01 \x01(\t\x12\x11\n\tplaintext\x18\x02 \x01(\t:]\x92\x41Z\n\x0f*\rEncryptDocReq2G\x12\x45{\"docName\": \"bribeamount.pdf\", \"plaintext\": \"I paid 1M to Volodymyr\"}\"\x93\x01\n\x0e\x45ncryptDocResp\x12\r\n\x05\x64ocID\x18\x01 \x01(\t\x12\x12\n\nciphertext\x18\x02 \x01(\t:^\x92\x41[\n\x10*\x0e\x45ncryptDocResp2G\x12\x45{\"docID\": \"bribeamount_ID.pdf\", \"ciphertext\": \"very encrypted stuff\"}\"\x91\x01\n\rDecryptDocReq\x12\r\n\x05\x64ocID\x18\x01 \x01(\t\x12\x12\n\nciphertext\x18\x02 \x01(\t:]\x92\x41Z\n\x0f*\rDecryptDocReq2G\x12\x45{\"docID\": \"bribeamount_ID.pdf\", \"ciphertext\": \"very encrypted stuff\"}\"\x93\x01\n\x0e\x44\x65\x63ryptDocResp\x12\r\n\x05\x64ocID\x18\x01 \x01(\t\x12\x11\n\tplaintext\x18\x02 \x01(\t:_\x92\x41\\\n\x10*\x0e\x44\x65\x63ryptDocResp2H\x12\x46{\"docID\": \"bribeamount_ID.pdf\", \"plaintext\": \"I paid 1M to Volodymyr\"}B9\n\"com.strongsalt.strongdoc.sdk.protoB\x10\x44ocumentsNoStore\x88\x01\x01\x62\x06proto3'
,
dependencies=[protoc__gen__swagger_dot_options_dot_annotations__pb2.DESCRIPTOR,])
_ENCRYPTDOCSTREAMREQ = _descriptor.Descriptor(
name='EncryptDocStreamReq',
full_name='proto.EncryptDocStreamReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docName', full_name='proto.EncryptDocStreamReq.docName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plaintext', full_name='proto.EncryptDocStreamReq.plaintext', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='NameOrData', full_name='proto.EncryptDocStreamReq.NameOrData',
index=0, containing_type=None, fields=[]),
],
serialized_start=78,
serialized_end=153,
)
_ENCRYPTDOCSTREAMRESP = _descriptor.Descriptor(
name='EncryptDocStreamResp',
full_name='proto.EncryptDocStreamResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docID', full_name='proto.EncryptDocStreamResp.docID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ciphertext', full_name='proto.EncryptDocStreamResp.ciphertext', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=155,
serialized_end=212,
)
_DECRYPTDOCSTREAMREQ = _descriptor.Descriptor(
name='DecryptDocStreamReq',
full_name='proto.DecryptDocStreamReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docID', full_name='proto.DecryptDocStreamReq.docID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ciphertext', full_name='proto.DecryptDocStreamReq.ciphertext', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
_descriptor.OneofDescriptor(
name='IdOrData', full_name='proto.DecryptDocStreamReq.IdOrData',
index=0, containing_type=None, fields=[]),
],
serialized_start=214,
serialized_end=286,
)
_DECRYPTDOCSTREAMRESP = _descriptor.Descriptor(
name='DecryptDocStreamResp',
full_name='proto.DecryptDocStreamResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docID', full_name='proto.DecryptDocStreamResp.docID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plaintext', full_name='proto.DecryptDocStreamResp.plaintext', index=1,
number=2, type=12, cpp_type=9, label=1,
has_default_value=False, default_value=b"",
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=None,
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=288,
serialized_end=344,
)
_ENCRYPTDOCREQ = _descriptor.Descriptor(
name='EncryptDocReq',
full_name='proto.EncryptDocReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docName', full_name='proto.EncryptDocReq.docName', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plaintext', full_name='proto.EncryptDocReq.plaintext', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\222AZ\n\017*\rEncryptDocReq2G\022E{\"docName\": \"bribeamount.pdf\", \"plaintext\": \"I paid 1M to Volodymyr\"}',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=347,
serialized_end=493,
)
_ENCRYPTDOCRESP = _descriptor.Descriptor(
name='EncryptDocResp',
full_name='proto.EncryptDocResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docID', full_name='proto.EncryptDocResp.docID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ciphertext', full_name='proto.EncryptDocResp.ciphertext', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\222A[\n\020*\016EncryptDocResp2G\022E{\"docID\": \"bribeamount_ID.pdf\", \"ciphertext\": \"very encrypted stuff\"}',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=496,
serialized_end=643,
)
_DECRYPTDOCREQ = _descriptor.Descriptor(
name='DecryptDocReq',
full_name='proto.DecryptDocReq',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docID', full_name='proto.DecryptDocReq.docID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='ciphertext', full_name='proto.DecryptDocReq.ciphertext', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\222AZ\n\017*\rDecryptDocReq2G\022E{\"docID\": \"bribeamount_ID.pdf\", \"ciphertext\": \"very encrypted stuff\"}',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=646,
serialized_end=791,
)
_DECRYPTDOCRESP = _descriptor.Descriptor(
name='DecryptDocResp',
full_name='proto.DecryptDocResp',
filename=None,
file=DESCRIPTOR,
containing_type=None,
fields=[
_descriptor.FieldDescriptor(
name='docID', full_name='proto.DecryptDocResp.docID', index=0,
number=1, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
_descriptor.FieldDescriptor(
name='plaintext', full_name='proto.DecryptDocResp.plaintext', index=1,
number=2, type=9, cpp_type=9, label=1,
has_default_value=False, default_value=b"".decode('utf-8'),
message_type=None, enum_type=None, containing_type=None,
is_extension=False, extension_scope=None,
serialized_options=None, file=DESCRIPTOR),
],
extensions=[
],
nested_types=[],
enum_types=[
],
serialized_options=b'\222A\\\n\020*\016DecryptDocResp2H\022F{\"docID\": \"bribeamount_ID.pdf\", \"plaintext\": \"I paid 1M to Volodymyr\"}',
is_extendable=False,
syntax='proto3',
extension_ranges=[],
oneofs=[
],
serialized_start=794,
serialized_end=941,
)
_ENCRYPTDOCSTREAMREQ.oneofs_by_name['NameOrData'].fields.append(
_ENCRYPTDOCSTREAMREQ.fields_by_name['docName'])
_ENCRYPTDOCSTREAMREQ.fields_by_name['docName'].containing_oneof = _ENCRYPTDOCSTREAMREQ.oneofs_by_name['NameOrData']
_ENCRYPTDOCSTREAMREQ.oneofs_by_name['NameOrData'].fields.append(
_ENCRYPTDOCSTREAMREQ.fields_by_name['plaintext'])
_ENCRYPTDOCSTREAMREQ.fields_by_name['plaintext'].containing_oneof = _ENCRYPTDOCSTREAMREQ.oneofs_by_name['NameOrData']
_DECRYPTDOCSTREAMREQ.oneofs_by_name['IdOrData'].fields.append(
_DECRYPTDOCSTREAMREQ.fields_by_name['docID'])
_DECRYPTDOCSTREAMREQ.fields_by_name['docID'].containing_oneof = _DECRYPTDOCSTREAMREQ.oneofs_by_name['IdOrData']
_DECRYPTDOCSTREAMREQ.oneofs_by_name['IdOrData'].fields.append(
_DECRYPTDOCSTREAMREQ.fields_by_name['ciphertext'])
_DECRYPTDOCSTREAMREQ.fields_by_name['ciphertext'].containing_oneof = _DECRYPTDOCSTREAMREQ.oneofs_by_name['IdOrData']
DESCRIPTOR.message_types_by_name['EncryptDocStreamReq'] = _ENCRYPTDOCSTREAMREQ
DESCRIPTOR.message_types_by_name['EncryptDocStreamResp'] = _ENCRYPTDOCSTREAMRESP
DESCRIPTOR.message_types_by_name['DecryptDocStreamReq'] = _DECRYPTDOCSTREAMREQ
DESCRIPTOR.message_types_by_name['DecryptDocStreamResp'] = _DECRYPTDOCSTREAMRESP
DESCRIPTOR.message_types_by_name['EncryptDocReq'] = _ENCRYPTDOCREQ
DESCRIPTOR.message_types_by_name['EncryptDocResp'] = _ENCRYPTDOCRESP
DESCRIPTOR.message_types_by_name['DecryptDocReq'] = _DECRYPTDOCREQ
DESCRIPTOR.message_types_by_name['DecryptDocResp'] = _DECRYPTDOCRESP
_sym_db.RegisterFileDescriptor(DESCRIPTOR)
EncryptDocStreamReq = _reflection.GeneratedProtocolMessageType('EncryptDocStreamReq', (_message.Message,), {
'DESCRIPTOR' : _ENCRYPTDOCSTREAMREQ,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.EncryptDocStreamReq)
})
_sym_db.RegisterMessage(EncryptDocStreamReq)
EncryptDocStreamResp = _reflection.GeneratedProtocolMessageType('EncryptDocStreamResp', (_message.Message,), {
'DESCRIPTOR' : _ENCRYPTDOCSTREAMRESP,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.EncryptDocStreamResp)
})
_sym_db.RegisterMessage(EncryptDocStreamResp)
DecryptDocStreamReq = _reflection.GeneratedProtocolMessageType('DecryptDocStreamReq', (_message.Message,), {
'DESCRIPTOR' : _DECRYPTDOCSTREAMREQ,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.DecryptDocStreamReq)
})
_sym_db.RegisterMessage(DecryptDocStreamReq)
DecryptDocStreamResp = _reflection.GeneratedProtocolMessageType('DecryptDocStreamResp', (_message.Message,), {
'DESCRIPTOR' : _DECRYPTDOCSTREAMRESP,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.DecryptDocStreamResp)
})
_sym_db.RegisterMessage(DecryptDocStreamResp)
EncryptDocReq = _reflection.GeneratedProtocolMessageType('EncryptDocReq', (_message.Message,), {
'DESCRIPTOR' : _ENCRYPTDOCREQ,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.EncryptDocReq)
})
_sym_db.RegisterMessage(EncryptDocReq)
EncryptDocResp = _reflection.GeneratedProtocolMessageType('EncryptDocResp', (_message.Message,), {
'DESCRIPTOR' : _ENCRYPTDOCRESP,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.EncryptDocResp)
})
_sym_db.RegisterMessage(EncryptDocResp)
DecryptDocReq = _reflection.GeneratedProtocolMessageType('DecryptDocReq', (_message.Message,), {
'DESCRIPTOR' : _DECRYPTDOCREQ,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.DecryptDocReq)
})
_sym_db.RegisterMessage(DecryptDocReq)
DecryptDocResp = _reflection.GeneratedProtocolMessageType('DecryptDocResp', (_message.Message,), {
'DESCRIPTOR' : _DECRYPTDOCRESP,
'__module__' : 'documentNoStore_pb2'
# @@protoc_insertion_point(class_scope:proto.DecryptDocResp)
})
_sym_db.RegisterMessage(DecryptDocResp)
DESCRIPTOR._options = None
_ENCRYPTDOCREQ._options = None
_ENCRYPTDOCRESP._options = None
_DECRYPTDOCREQ._options = None
_DECRYPTDOCRESP._options = None
# @@protoc_insertion_point(module_scope)
|
[
"kathy@overnest.com"
] |
kathy@overnest.com
|
e6a060ee16aa5e2d01c1a48b30174dda08b64f4a
|
377a966978c2c801f909f51a692d9856eac9b012
|
/skip/mahasiswa/models.py
|
785637eb4422193959a756e9ef0fa6f01d264f30
|
[
"MIT"
] |
permissive
|
timmysutanto/Skippedia
|
d6e6ccf2bb3aac42fdb6197666b11a4c70e7bee4
|
97cd3c95c87b3bb865f8ecd71e7f9064b34f1e6a
|
refs/heads/master
| 2020-05-29T12:54:51.545179
| 2019-05-30T15:05:27
| 2019-05-30T15:05:27
| 189,143,566
| 0
| 0
|
MIT
| 2019-05-29T03:24:13
| 2019-05-29T03:24:13
| null |
UTF-8
|
Python
| false
| false
| 640
|
py
|
from django.db import models
# Create your models here.
class Mahasiswa(models.Model):
    """Student record, keyed directly by NIM (student ID number)."""
    # NIM replaces the default auto-increment primary key.
    nim = models.IntegerField(primary_key=True)
    nama = models.CharField(max_length=50)

    def __str__(self):
        # Human-readable label used by the Django admin and shell.
        return self.nama
class Komentar(models.Model):
    """A comment left about a student; deleted together with the student."""
    mahasiswa = models.ForeignKey(Mahasiswa, on_delete=models.CASCADE)
    isi_komentar = models.CharField(max_length=300)

    def __str__(self):
        # Show the comment text itself in admin listings.
        return self.isi_komentar
class Rating(models.Model):
    """A numeric rating given to a student; deleted together with the student."""
    mahasiswa = models.ForeignKey(Mahasiswa, on_delete=models.CASCADE)
    # Integer score; 0 means "not yet rated". Valid range not enforced here —
    # presumably constrained by the form/view layer; TODO confirm.
    rate = models.IntegerField(default=0)

    def __str__(self):
        return str(self.rate)
|
[
"noreply@github.com"
] |
timmysutanto.noreply@github.com
|
b61302e00f0ee1d54192e358cab62f426f900e21
|
2c845cbd0ee9d3f54ad47a5207f417cf5277b0b2
|
/rango/admin.py
|
54dc0227943f34cb082bd440889f033e6ccc6b9f
|
[] |
no_license
|
QIANSUMORRIS/tango_with_django_project
|
8c7ab87fe1a985a9e7454c34db8a700daa331114
|
ff4d47a6e8de1bbf3f36aeb766a82eb51b437d63
|
refs/heads/master
| 2021-05-10T15:01:46.210046
| 2018-02-12T18:27:19
| 2018-02-12T18:27:19
| 118,533,740
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 334
|
py
|
# -*- coding: utf-8 -*-
from django.contrib import admin
from rango.models import Category, Page
class PageAdmin(admin.ModelAdmin):
    """Admin configuration for Page: show title, category and URL columns."""
    # To display individual fields
    list_display = ('title', 'category', 'url')
    # prepopulated_fields = {'slug': ('title',)}

# Register models with the admin site; Page uses the customised admin above.
admin.site.register(Category)
admin.site.register(Page, PageAdmin)
|
[
"2274593s@student.gla.ac.uk"
] |
2274593s@student.gla.ac.uk
|
385d3e5cdaf7c8dfce4264669d9a57e8a7117cef
|
2902629c76b48ecf9c28b1bf61bb0405a9f58140
|
/jarvis.py
|
e4619b1a005f4dec27d71527fc73e7490acd735e
|
[] |
no_license
|
ashish29nov/jarvis
|
a700ebbc765ffce6e403727db3f0031a3a210d22
|
a008b3cf4b2224b56114c2a86fa96f1a814532c5
|
refs/heads/main
| 2023-05-31T23:24:32.377274
| 2021-07-06T14:31:10
| 2021-07-06T14:31:10
| 383,497,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 14,503
|
py
|
import pyttsx3
import requests
import urllib
import pyaudio
import webbrowser
import smtplib
import random
from PyDictionary import PyDictionary
from googlesearch import search
import ety
from nltk.corpus import wordnet
import speech_recognition as sr
import wikipedia
import datetime
import time
import wolframalpha
import os
import sys
import subprocess
from pygame import mixer
# Initialise the text-to-speech engine using the Windows SAPI5 backend.
engine = pyttsx3.init('sapi5')
#client = wolframalpha.Client('Your_App_ID')
voices = engine.getProperty('voices')
# voices[0] is presumably the system default voice — TODO confirm per machine.
engine.setProperty('voice', voices[0].id)
rate = engine.getProperty('rate')
engine.setProperty('rate', rate-50) # Slows down the speaking speed of the engine voice.
def speak(audio):
    """Echo *audio* to the console prefixed with 'Jarvis:', then speak it."""
    console_line = 'Jarvis: ' + audio
    print(console_line)
    engine.say(audio)
    engine.runAndWait()
def greetMe():
    """Speak a greeting appropriate to the current hour of day."""
    hour = int(datetime.datetime.now().hour)
    if 0 <= hour < 12:
        speak('Good Morning!')
    if 12 <= hour < 18:
        speak('Good Afternoon!')
    if hour >= 18 and hour != 0:
        speak('Good Evening!')
# Start-up banner: greet once at launch, then prompt for the first command.
greetMe()
speak('Hello Sir, I am your digital assistant JARVIS !')
speak('Welcome to Aptech Konnagar.')
speak('How may I help you?')
def myCommand():
    """Listen on the microphone and return the recognised command text.

    Blocks until speech is captured. If Google's recogniser cannot understand
    the audio, listens again (recursively) so the function only ever returns
    an actual transcription.
    """
    r = sr.Recognizer()
    with sr.Microphone() as source:
        print("Listening...")
        # Calibrate for background noise before capturing the phrase.
        r.adjust_for_ambient_noise(source)
        audio = r.listen(source)
    # NOTE: the original spawned r.listen_in_background(sr.Microphone(), myCommand)
    # here; its callback signature did not match (the callback receives
    # (recognizer, audio)), so it only raised in a background thread. Removed.
    try:
        query = r.recognize_google(audio, language='en-in')
        print('User: ' + query + '\n')
    except sr.UnknownValueError:
        # Bug fix: the original called myCommand() here, discarded its result,
        # and then hit an UnboundLocalError on `return query`. Return the
        # retry's transcription instead.
        return myCommand()
    return query
def playMusic():
    """Pick a random file from the music folder and start playing it."""
    # Hard-coded Windows path — assumes this drive/folder exists and contains
    # only playable audio files; TODO confirm.
    music_folder ='F:\My hit songs\\'
    music = os.listdir(music_folder)
    random_music = music_folder + random.choice(music)
    mixer.init()
    mixer.music.load(random_music)
    mixer.music.play()
def find(name, path):
    """Walk *path* recursively and return the full path of the first file
    named *name*, or None if it is not found.
    """
    # Bug fix: os.walk yields (root, dirs, files) 3-tuples; the original
    # unpacked only (root, files), which always raised ValueError.
    for root, dirs, files in os.walk(path):
        if name in files:
            return os.path.join(root, name)
    return None
def searchOnGoogle(query, outputList):
    """Append the top five Google results for *query* to *outputList*.

    Returns the same list for convenience; results are also printed.
    """
    speak('The top five search results from Google are listed below.')
    # Bug fix: googlesearch's `tld` parameter expects only the top-level-domain
    # suffix (e.g. "co.in"); the original passed a full URL, which produces an
    # invalid search host.
    for output in search(query, tld="co.in", num=10, stop=5, pause=2):
        print(output)
        outputList.append(output)
    return outputList
def openLink(outputList):
    """Open the first URL in *outputList* in the default web browser."""
    speak("Here's the first link for you.")
    first_result = outputList[0]
    webbrowser.open(first_result)
def playOnYoutube(query_string):
    """Open a YouTube search-results page for *query_string* in a new browser tab."""
    # Bug fix: the original encoded the module-global `query` instead of the
    # `query_string` parameter, so the function only worked by accident when
    # the two happened to hold the same value.
    encoded = urllib.parse.urlencode({"search_query": query_string})
    search_string = str("http://www.youtube.com/results?" + encoded)
    speak("Here's what you asked for. Enjoy!")
    webbrowser.open_new_tab(search_string)
def tellAJoke():
    """Fetch a random dad joke from icanhazdadjoke.com and speak it."""
    res = requests.get(
        'https://icanhazdadjoke.com/',
        # The Accept header asks the API for JSON instead of its HTML page.
        headers={"Accept":"application/json"}
    )
    if res.status_code == 200:
        speak("Okay. Here's one")
        speak(str(res.json()['joke']))
    else:
        # Any non-200 response (network/API failure) is reported verbally.
        speak('Oops!I ran out of jokes')
def getCompleteInfo(word):
    """Speak and print meanings, synonyms, antonyms and etymology of *word*.

    Uses PyDictionary for meanings, WordNet (nltk) for synonyms/antonyms,
    and the `ety` package for word origins.
    """
    dictionary = PyDictionary()
    mean = {}
    mean = dictionary.meaning(word)
    synonyms = []
    antonyms = []
    speak("Alright. Here is the information you asked for.")
    # `mean` maps part-of-speech -> list of definition strings.
    for key in mean.keys():
        speak("When "+str(word)+" is used as a "+str(key)+" then it has the following meanings")
        for val in mean[key]:
            print(val)
        print()
    speak("The possible synonyms and antonyms of "+str(word)+" are given below.")
    # Collect unique lemma names (synonyms) and their antonyms from WordNet.
    for syn in wordnet.synsets(word):
        for l in syn.lemmas():
            if l.name() not in synonyms:
                synonyms.append(l.name())
            if l.antonyms() and l.antonyms()[0].name() not in antonyms:
                antonyms.append(l.antonyms()[0].name())
    print("Synonyms: ", end = " ")
    print(' '.join(synonyms), end = " ")
    print("\n")
    print("Antonyms: ", end = " ")
    print(' '.join(antonyms), end = " ")
    print("\n")
    ori = ety.origins(word)
    if len(ori) > 0:
        speak("There are "+str(len(ori))+" possible origins found.")
        for origin in ori:
            print(origin)
    else:
        speak("I'm sorry. No data regarding the origin of "+str(word)+" was found.")
if __name__ == '__main__':
    # Main dispatch loop: capture a voice command, match it against known
    # phrases, and fall back to WolframAlpha / Wikipedia / Google search.
    while True:
        query = myCommand()
        query = query.lower()
        if 'open youtube' in query or 'open my youtube' in query:
            speak('okay')
            webbrowser.open('www.youtube.com')
            time.sleep(2)
            speak('this is your youtube sir and enjoy it')
        #elif 'play music' in query:
        #    speak("Here's your music. Enjoy !")
        #    playMusic()
        # elif 'stop the music' in query or 'stop the song' in query or 'stop' in query :
        #    mixer.music.stop()
        #    speak('The music is stopped.')
        elif 'find file' in query:
            # Ask for name and extension separately, then search drive F:\.
            speak('What is the name of the file that I should find ?')
            query = myCommand()
            filename = query
            print(filename)
            speak('What would be the extension of the file ?')
            query = myCommand()
            query = query.lower()
            extension = query
            print(extension)
            fullname = str(filename) + '.' + str(extension)
            print(fullname)
            path = 'F:\\'
            location = find(fullname,path)
            speak('File is found at the below location')
            print(location)
        elif 'search' in query:
            outputList = []
            speak('What should I search for ?')
            query = myCommand()
            searchOnGoogle(query, outputList)
            speak('Should I open up the first link for you ?')
            query = myCommand()
            if 'yes' in query or 'sure' in query:
                openLink(outputList)
            if 'no' in query:
                speak('Alright.')
        elif 'play on youtube' in query:
            speak('What should I look up for ?')
            query = myCommand()
            playOnYoutube(query)
        elif 'open dictionary' in query or 'dictionary' in query:
            speak('What word should I look up for ?')
            word = myCommand()
            getCompleteInfo(word)
        elif 'joke' in query:
            tellAJoke()
        elif 'open flipkart' in query:
            speak("okay sir")
            webbrowser.open("https://www.flipkart.com")
        elif 'open chrome' in query:
            speak('opening your chrome browser sir')
            subprocess.call("C:\Program Files (x86)\Google\Chrome\Application\chrome.exe")
            time.sleep(2)
            speak('this is your chrome browser and search your query on chrome')
        elif 'open firefox' in query or 'open my firefox' in query:
            speak('openning your firefox browser sir.')
            subprocess.call("C:/Program Files/Mozilla Firefox/firefox.exe")
            time.sleep(2)
            speak('this is your firefox browser and search your query on firefox')
        # Bug fix: `query` is lowercased above, so the original phrase
        # 'open file Explorer' (capital E) could never match.
        elif 'open file explorer' in query or 'open file' in query:
            speak('opening your file Explorer ')
            # NOTE(review): "Quick access" is not an executable path — this
            # call presumably fails; confirm the intended shell target.
            subprocess.call("Quick access")
        elif 'open webcam' in query:
            speak('opening your webcam camera')
            subprocess.call("C:\Program Files (x86)\CyberLink\YouCam\YouCam.exe")
            time.sleep(4)
        elif 'open photoshop' in query:
            speak('opening your photoshop')
            subprocess.call("C:/Program Files (x86)/Adobe/Photoshop 7.0/Photoshop.exe")
            time.sleep(5)
        elif 'open adobe reader' in query or 'open adobe' in query:
            speak('opening your Adobe Reader')
            subprocess.call("C:/Program Files (x86)/Adobe/Reader 11.0/Reader/AcroRd32.exe")
            time.sleep(4)
        elif 'open microsoft office excel' in query or 'open microsoft excel' in query:
            speak('opening your microsoft office excel')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/EXCEL.EXE")
            time.sleep(4)
        elif 'open microsoft office groove' in query or 'open microsoft groove' in query:
            speak('opening your microsoft office groove')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/GROOVE.EXE")
            time.sleep(4)
        elif 'open microsoft office infopath' in query or 'open microsoft infopath' in query:
            speak('opening your microsoft office infopath')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/INFOPATH.EXE")
            time.sleep(4)
        elif 'open microsoft onenote' in query or 'open onenote' in query:
            speak('opening your microsoft office onenote')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/ONENOTE.EXE")
            time.sleep(4)
        elif 'open microsoft outlook' in query or 'open outlook' in query:
            speak('opening your microsoft office outlook')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/OUTLOOK.EXE")
            time.sleep(4)
        elif 'open microsoft powerpoint' in query or 'open powerpoint' in query:
            speak('opening your microsoft office powerpoint')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/POWERPNT.EXE")
            time.sleep(4)
        elif 'open microsoft word' in query or 'open word' in query:
            speak('opening your microsoft office word')
            subprocess.call("C:/Program Files (x86)/Microsoft Office/Office12/WINWORD.EXE")
            time.sleep(4)
        # Bug fix: the original condition was `... or 'open animator':` — a
        # bare non-empty string, which is always truthy, so every command not
        # matched above fell into this branch and all later branches
        # (including the search fallback) were unreachable.
        elif 'open pivot animator' in query or 'open animator' in query:
            speak('opening your microsoft office pivot animator')
            subprocess.call("C:/Program Files (x86)/Pivot Animator/pivot.exe")
            time.sleep(4)
        elif 'open shareit' in query or 'open share' in query:
            speak('opening your shareit')
            subprocess.call("C:/Program Files (x86)/SHAREit Technologies/SHAREit/SHAREit.exe")
        elif 'open team viewer' in query or 'team viewer' in query:
            speak('opening your team viewer')
            subprocess.call("C:/Program Files (x86)/TeamViewer/TeamViewer.exe")
            time.sleep(4)
        elif 'open windows media player' in query or 'open media player' in query:
            speak('opening your windows media player')
            subprocess.call("C:/Program Files (x86)/Windows Media Player/wmplayer.exe")
            time.sleep(4)
        elif 'current time' in query:
            #speak("It's your current time sir")
            strTime = datetime.datetime.now().strftime("%H:%M:%S")
            speak(f"Sir, the time is {strTime}")
        elif 'open google' in query or 'open my google' in query:
            speak('okay sir, i am opening your Google sir')
            webbrowser.open('www.google.co.in')
            time.sleep(2)
            speak('this is your Google search engine sir...')
        elif 'open facebook' in query or 'open my facebook' in query:
            speak('okay sir, i am opening your facebook sir')
            webbrowser.open('https://www.facebook.com/')
            time.sleep(2)
            speak('this is your social media "Facebook" sir')
        elif 'open gmail' in query or 'open my gmail' in query:
            speak('okay sir, i am opening your Gmail sir')
            webbrowser.open('www.gmail.com')
            time.sleep(2)
            speak('this is your Gmail and now you are using gmail sir')
        elif "what\'s up" in query or 'how are you' in query:
            stMsgs = ['I am fine!', 'Nice!', 'I am nice and full of energy']
            speak(random.choice(stMsgs))
        elif 'send my email' in query or 'send my mail' in query:
            speak('Who is the recipient? ')
            # speak('please enter an email id.')
            recipient = input('please enter an email id: ')
            fromaddr='ramzna96@gmail.com'
            try:
                speak('What should I say? ')
                content = myCommand()
                server = smtplib.SMTP('smtp.gmail.com', 587)
                server.ehlo()
                server.starttls()
                server.login("ramzna96@gmail.com", 'your password')
                server.sendmail(fromaddr, recipient, content)
                server.close()
                speak('Email sent!')
            # Narrowed from a bare `except:` so Ctrl-C is not swallowed.
            except Exception:
                speak('Sorry Sir! I am unable to send your message at this moment!')
        elif 'bye' in query:
            speak('Bye Sir, have a good day.')
            sys.exit()
        elif 'shutdown my laptop' in query or 'shutdown laptop' in query:
            os.system("shutdown /s /t 1")
        elif 'restart my laptop' in query or 'restart laptop' in query:
            os.system("shutdown /r /t 1")
        else:
            #speak('Searching...')
            try:
                try:
                    # NOTE(review): wolframalpha.Client is never instantiated
                    # (the client line at the top of the file is commented
                    # out), so this call always raises and control falls
                    # through to Wikipedia. Left as-is pending a real App ID.
                    res = wolframalpha.Client.query(query)
                    results = next(res.results).text
                    speak('search in wikipedia ')
                    speak("Here's your result")
                    speak(results)
                except Exception:
                    results = wikipedia.summary(query, sentences=2)
                    speak('search in wikipedia.')
                    speak('WIKIPEDIA says - ')
                    speak(results)
            except Exception:
                # Last resort: open a Google search for the raw query.
                speak('search in google')
                url='https://www.google.co.in/search?q='
                result=url+query
                webbrowser.open(result)
                # webbrowser.open('www.google.com')
            speak('Next Command! Sir!')
|
[
"noreply@github.com"
] |
ashish29nov.noreply@github.com
|
e588db5fe9e4897d809d9053180573bd9d19e749
|
8d4c91a38ec51beb7c0e81f24855502a9b83f0e9
|
/procgen.py
|
3a91154f71345d85e41c3ff980c35b5f6a92c5fa
|
[] |
no_license
|
Tanttinator/PythonRoguelike
|
74959f1eadc0b52dbda11937db1fd1699f143d7f
|
ca268b78311042e6e1f0cfb114c186515a8eed0c
|
refs/heads/master
| 2022-12-27T02:31:05.145858
| 2020-10-08T22:25:17
| 2020-10-08T22:25:17
| 287,374,755
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,262
|
py
|
from __future__ import annotations
from typing import Tuple, Iterator, List, Dict, TYPE_CHECKING
import random
import tcod
import entity_factories
from game_map import GameMap
import tile_types
if TYPE_CHECKING:
from engine import Engine
from entity import Entity
# Per-floor caps as (floor_minimum, value) pairs sorted by floor: the value
# applies from that floor downward until a deeper entry overrides it
# (resolved by get_max_value_for_floor).
max_items_by_floor = [
    (1, 1),
    (4, 2),
]
max_monsters_by_floor = [
    (1, 2),
    (4, 3),
    (6, 5),
]
# floor -> list of (entity template, spawn weight). Deeper keys add to or
# override the weights of shallower ones (resolved by get_entities_at_random).
item_chances: Dict[int, List[Tuple[Entity, int]]] = {
    0: [(entity_factories.health_potion, 35)],
    2: [(entity_factories.confusion_scroll, 10)],
    4: [(entity_factories.lightning_scroll, 25), (entity_factories.sword, 5)],
    6: [(entity_factories.fireball_scroll, 25), (entity_factories.chain_mail, 15)],
}
enemy_chances: Dict[int, List[Tuple[Entity, int]]] = {
    0: [(entity_factories.orc, 80)],
    3: [(entity_factories.troll, 15)],
    5: [(entity_factories.troll, 30)],
    7: [(entity_factories.troll, 60)],
}
def get_max_value_for_floor(weighted_chances_by_floor: List[Tuple[int, int]], floor: int) -> int:
    """Return the value of the deepest entry whose minimum floor is <= *floor*.

    Assumes the (floor_minimum, value) pairs are sorted by ascending floor;
    returns 0 when no entry applies.
    """
    result = 0
    for minimum_floor, value in weighted_chances_by_floor:
        if minimum_floor > floor:
            return result
        result = value
    return result
def get_entities_at_random(weighted_chances_by_floor: Dict[int, List[Tuple[Entity, int]]], number_of_entities: int, floor: int) -> List[Entity]:
    """Draw *number_of_entities* entities, weighted by the chances for *floor*.

    Entries keyed deeper than *floor* are ignored; deeper applicable entries
    override shallower weights for the same entity. Assumes the dict's keys
    are in ascending order (insertion order).
    """
    weights_by_entity = {}
    for minimum_floor, pairs in weighted_chances_by_floor.items():
        if minimum_floor > floor:
            break
        for candidate, weight in pairs:
            weights_by_entity[candidate] = weight
    return random.choices(
        list(weights_by_entity.keys()),
        weights=list(weights_by_entity.values()),
        k=number_of_entities,
    )
class RectangularRoom:
    """Axis-aligned rectangular room defined by its two opposite corners."""

    def __init__(self, x: int, y: int, width: int, height: int):
        self.x1 = x
        self.y1 = y
        self.x2 = x + width
        self.y2 = y + height

    @property
    def center(self) -> Tuple[int, int]:
        """The (x, y) coordinates of the middle of the room."""
        return int((self.x1 + self.x2) / 2), int((self.y1 + self.y2) / 2)

    @property
    def inner(self) -> Tuple[slice, slice]:
        """2D slices selecting the walkable interior (outer wall excluded)."""
        return slice(self.x1 + 1, self.x2), slice(self.y1 + 1, self.y2)

    def intersects(self, other: RectangularRoom) -> bool:
        """Return True if this room overlaps *other* (touching counts)."""
        if self.x2 < other.x1 or other.x2 < self.x1:
            return False
        return self.y1 <= other.y2 and other.y1 <= self.y2
def place_entities(room: RectangularRoom, dungeon: GameMap, floor_number: int) -> None:
    """Scatter a random number of monsters and items inside *room*.

    Entity counts and kinds are drawn from the per-floor progression tables;
    a spawn is skipped if its chosen tile is already occupied.
    """
    # RNG call order matches the original for seed-reproducible maps.
    number_of_monsters = random.randint(0, get_max_value_for_floor(max_monsters_by_floor, floor_number))
    number_of_items = random.randint(0, get_max_value_for_floor(max_items_by_floor, floor_number))
    monsters: List[Entity] = get_entities_at_random(enemy_chances, number_of_monsters, floor_number)
    items: List[Entity] = get_entities_at_random(item_chances, number_of_items, floor_number)
    for template in monsters + items:
        spawn_x = random.randint(room.x1 + 1, room.x2 - 1)
        spawn_y = random.randint(room.y1 + 1, room.y2 - 1)
        occupied = any(e.x == spawn_x and e.y == spawn_y for e in dungeon.entities)
        if not occupied:
            template.spawn(dungeon, spawn_x, spawn_y)
def tunnel_between(start: Tuple[int, int], end: Tuple[int, int]) -> Iterator[Tuple[int, int]]:
    """Yield the tile coordinates of an L-shaped corridor from ``start`` to ``end``."""
    x1, y1 = start
    x2, y2 = end
    # Coin flip chooses the elbow: horizontal-then-vertical vs vertical-then-horizontal.
    corner = (x2, y1) if random.random() < 0.5 else (x1, y2)
    for leg_start, leg_end in (((x1, y1), corner), (corner, (x2, y2))):
        for point in tcod.los.bresenham(leg_start, leg_end).tolist():
            yield point[0], point[1]
def generate_dungeon(max_rooms: int, room_min_size: int, room_max_size: int, map_width: int, map_height: int, engine: Engine) -> GameMap:
    """Carve and return a fresh dungeon map.

    Places up to ``max_rooms`` non-overlapping rooms, tunnels between
    consecutive rooms, entities per room, and down-stairs in the last room
    that was successfully carved.
    """
    player = engine.player
    dungeon = GameMap(engine, map_width, map_height, entities=[player])

    rooms: List[RectangularRoom] = []
    center_of_last_room = (0, 0)

    for _ in range(max_rooms):
        room_width = random.randint(room_min_size, room_max_size)
        room_height = random.randint(room_min_size, room_max_size)
        x = random.randint(0, dungeon.width - room_width - 1)
        y = random.randint(0, dungeon.height - room_height - 1)

        new_room = RectangularRoom(x, y, room_width, room_height)

        # Discard candidates that overlap an already-placed room.
        if any(new_room.intersects(existing) for existing in rooms):
            continue

        dungeon.tiles[new_room.inner] = tile_types.floor

        if not rooms:
            # The very first room is where the player starts.
            player.place(*new_room.center, dungeon)
        else:
            # Every later room gets a corridor back to the previous one.
            for tunnel_x, tunnel_y in tunnel_between(rooms[-1].center, new_room.center):
                dungeon.tiles[tunnel_x, tunnel_y] = tile_types.floor

        center_of_last_room = new_room.center

        place_entities(new_room, dungeon, engine.game_world.current_floor)

        # Re-stamped every iteration so the stairs end up in the final room.
        dungeon.tiles[center_of_last_room] = tile_types.down_stairs
        dungeon.downstairs_location = center_of_last_room

        rooms.append(new_room)

    return dungeon
|
[
"antti.kangas99@gmail.com"
] |
antti.kangas99@gmail.com
|
edadc48ef4cab1bc6a6b66dd2f78e28382bf8839
|
d653d720ec4f3e2913c50b11380755cfe79ca419
|
/main.py
|
c30d799ff2d5954c2c1bd3cb7ef2bcff14860290
|
[
"MIT"
] |
permissive
|
Newbtrainee/code2vec
|
d324eb253fb85f673272643acbde2cad9b7f7171
|
dac6389bb0dc6d7fb231e10c677282a1829ea8c0
|
refs/heads/master
| 2022-12-05T20:33:18.702712
| 2020-08-27T10:23:25
| 2020-08-27T10:23:25
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21,628
|
py
|
# -*- coding: utf8 -*-
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__author__ = "Isao Sonobe"
__copyright__ = "Copyright (C) 2018 Isao Sonobe"
import sys
import argparse
import numpy as np
import torch
from os import path
from distutils.util import strtobool
from torch.utils.data import DataLoader
from model.model import *
from model.dataset_builder import *
from sklearn.metrics import precision_recall_fscore_support, accuracy_score
sys.path.append('.')

# Root logger mirrors everything to the console with a timestamped format.
logger = logging.getLogger()
logger.setLevel(logging.INFO)
fmt = logging.Formatter('%(asctime)s: %(message)s', '%m/%d/%Y %I:%M:%S %p')
console = logging.StreamHandler()
console.setFormatter(fmt)
logger.addHandler(console)

# Command-line interface: dataset locations, model hyperparameters,
# training options and task selection.
parser = argparse.ArgumentParser()
parser.add_argument('--random_seed', type=int, default=123, help="random_seed")
parser.add_argument('--corpus_path', type=str, default="./dataset/corpus.txt", help="corpus_path")
parser.add_argument('--path_idx_path', type=str, default="./dataset/path_idxs.txt", help="path_idx_path")
parser.add_argument('--terminal_idx_path', type=str, default="./dataset/terminal_idxs.txt", help="terminal_idx_path")
parser.add_argument('--batch_size', type=int, default=32, help="batch_size")
parser.add_argument('--terminal_embed_size', type=int, default=100, help="terminal_embed_size")
parser.add_argument('--path_embed_size', type=int, default=100, help="path_embed_size")
parser.add_argument('--encode_size', type=int, default=300, help="encode_size")
parser.add_argument('--max_path_length', type=int, default=200, help="max_path_length")
parser.add_argument('--model_path', type=str, default="./output", help="model_path")
parser.add_argument('--vectors_path', type=str, default="./output/code.vec", help="vectors_path")
parser.add_argument('--test_result_path', type=str, default=None, help="test_result_path")
parser.add_argument("--max_epoch", type=int, default=40, help="max_epoch")
parser.add_argument('--lr', type=float, default=0.01, help="lr")
parser.add_argument('--beta_min', type=float, default=0.9, help="beta_min")
parser.add_argument('--beta_max', type=float, default=0.999, help="beta_max")
parser.add_argument('--weight_decay', type=float, default=0.0, help="weight_decay")
parser.add_argument('--dropout_prob', type=float, default=0.25, help="dropout_prob")
parser.add_argument("--no_cuda", action="store_true", default=False, help="no_cuda")
parser.add_argument("--gpu", type=str, default="cuda:0", help="gpu")
parser.add_argument("--num_workers", type=int, default=4, help="num_workers")
parser.add_argument("--env", type=str, default=None, help="env")
parser.add_argument("--print_sample_cycle", type=int, default=10, help="print_sample_cycle")
parser.add_argument("--eval_method", type=str, default="subtoken", help="eval_method")
parser.add_argument("--find_hyperparams", action="store_true", default=False, help="find optimal hyperparameters")
parser.add_argument("--num_trials", type=int, default=100, help="num_trials")
parser.add_argument("--angular_margin_loss", action="store_true", default=False, help="use angular margin loss")
parser.add_argument("--angular_margin", type=float, default=0.5, help="angular margin")
parser.add_argument("--inverse_temp", type=float, default=30.0, help="inverse temperature")
parser.add_argument("--infer_method_name", type=lambda b: bool(strtobool(b)), default=True, help="infer method name like code2vec task")
parser.add_argument("--infer_variable_name", type=lambda b: bool(strtobool(b)), default=False, help="infer variable name like context2name task")
parser.add_argument("--shuffle_variable_indexes", type=lambda b: bool(strtobool(b)), default=False, help="shuffle variable indexes in the variable name inference task")
args = parser.parse_args()

# Run on the requested GPU unless CUDA is disabled or unavailable.
device = torch.device(args.gpu if not args.no_cuda and torch.cuda.is_available() else "cpu")
logger.info("device: {0}".format(device))

# Optional dependencies are imported only for the modes that need them.
if args.env == "tensorboard":
    from tensorboardX import SummaryWriter
if args.find_hyperparams:
    import optuna
class Option(object):
    """Configuration for the model: vocabulary sizes taken from the dataset
    reader plus hyperparameters taken from the command-line arguments."""
    def __init__(self, reader):
        self.max_path_length = args.max_path_length
        # Vocabulary sizes come from the parsed dataset.
        self.terminal_count = reader.terminal_vocab.len()
        self.path_count = reader.path_vocab.len()
        self.label_count = reader.label_vocab.len()
        # Embedding / encoder dimensions and regularisation.
        self.terminal_embed_size = args.terminal_embed_size
        self.path_embed_size = args.path_embed_size
        self.encode_size = args.encode_size
        self.dropout_prob = args.dropout_prob
        self.batch_size = args.batch_size
        self.eval_method = args.eval_method
        # Angular-margin (ArcFace-style) loss settings.
        self.angular_margin_loss = args.angular_margin_loss
        self.angular_margin = args.angular_margin
        self.inverse_temp = args.inverse_temp
        self.device = device
def train():
    """Run one full training session with the command-line hyperparameters."""
    torch.manual_seed(args.random_seed)

    reader = DatasetReader(args.corpus_path, args.path_idx_path, args.terminal_idx_path,
                           infer_method=args.infer_method_name, infer_variable=args.infer_variable_name,
                           shuffle_variable_indexes=args.shuffle_variable_indexes)
    option = Option(reader)
    builder = DatasetBuilder(reader, option)

    # Weight the loss by inverse label frequency so rare labels still matter.
    label_freq = torch.tensor(reader.label_vocab.get_freq_list(), dtype=torch.float32).to(device)
    criterion = nn.NLLLoss(weight=1 / label_freq).to(device)

    model = Code2Vec(option).to(device)
    optimizer = torch.optim.Adam(
        model.parameters(),
        lr=args.lr,
        betas=(args.beta_min, args.beta_max),
        weight_decay=args.weight_decay,
    )
    _train(model, optimizer, criterion, option, reader, builder, None)
def _train(model, optimizer, criterion, option, reader, builder, trial):
    """Train the model, evaluating after every epoch.

    When ``trial`` is an Optuna trial, intermediate values are reported for
    pruning and no vector/model files are written.  Returns ``1.0 - f1`` of
    the last completed epoch (the quantity the hyperparameter search minimises).
    """
    f1 = 0.0
    best_f1 = None
    last_loss = None
    last_accuracy = None
    bad_count = 0  # consecutive epochs without a train-loss or accuracy improvement
    if args.env == "tensorboard":
        summary_writer = SummaryWriter()
    else:
        summary_writer = None
    try:
        for epoch in range(args.max_epoch):
            train_loss = 0.0
            builder.refresh_train_dataset()
            train_data_loader = DataLoader(builder.train_dataset, batch_size=option.batch_size, shuffle=True, num_workers=args.num_workers)
            model.train()
            for i_batch, sample_batched in enumerate(train_data_loader):
                starts = sample_batched['starts'].to(option.device)
                paths = sample_batched['paths'].to(option.device)
                ends = sample_batched['ends'].to(option.device)
                label = sample_batched['label'].to(device)
                optimizer.zero_grad()
                preds, _, _ = model.forward(starts, paths, ends, label)
                loss = calculate_loss(preds, label, criterion, option)
                loss.backward()
                optimizer.step()
                train_loss += loss.item()
            # Evaluate on a freshly refreshed test split after every epoch.
            builder.refresh_test_dataset()
            test_data_loader = DataLoader(builder.test_dataset, batch_size=option.batch_size, shuffle=True, num_workers=args.num_workers)
            test_loss, accuracy, precision, recall, f1 = test(model, test_data_loader, criterion, option, reader.label_vocab)
            # Metrics go to stdout on FloydHub, to the logger otherwise,
            # and additionally to TensorBoard when requested.
            if args.env == "floyd":
                print("epoch {0}".format(epoch))
                print('{{"metric": "train_loss", "value": {0}}}'.format(train_loss))
                print('{{"metric": "test_loss", "value": {0}}}'.format(test_loss))
                print('{{"metric": "accuracy", "value": {0}}}'.format(accuracy))
                print('{{"metric": "precision", "value": {0}}}'.format(precision))
                print('{{"metric": "recall", "value": {0}}}'.format(recall))
                print('{{"metric": "f1", "value": {0}}}'.format(f1))
            else:
                logger.info("epoch {0}".format(epoch))
                logger.info('{{"metric": "train_loss", "value": {0}}}'.format(train_loss))
                logger.info('{{"metric": "test_loss", "value": {0}}}'.format(test_loss))
                logger.info('{{"metric": "accuracy", "value": {0}}}'.format(accuracy))
                logger.info('{{"metric": "precision", "value": {0}}}'.format(precision))
                logger.info('{{"metric": "recall", "value": {0}}}'.format(recall))
                logger.info('{{"metric": "f1", "value": {0}}}'.format(f1))
            if args.env == "tensorboard":
                summary_writer.add_scalar('metric/train_loss', train_loss, epoch)
                summary_writer.add_scalar('metric/test_loss', test_loss, epoch)
                summary_writer.add_scalar('metric/accuracy', accuracy, epoch)
                summary_writer.add_scalar('metric/precision', precision, epoch)
                summary_writer.add_scalar('metric/recall', recall, epoch)
                summary_writer.add_scalar('metric/f1', f1, epoch)
            if trial is not None:
                # Optuna minimises 1 - f1; report it so weak trials are pruned early.
                intermediate_value = 1.0 - f1
                trial.report(intermediate_value, epoch)
                if trial.should_prune(epoch):
                    raise optuna.structs.TrialPruned()
            if epoch > 1 and epoch % args.print_sample_cycle == 0 and trial is None:
                print_sample(reader, model, test_data_loader, option)
            if best_f1 is None or best_f1 < f1:
                # New best epoch: log it and (outside hyperparameter search)
                # persist the code vectors and model weights.
                if args.env == "floyd":
                    print('{{"metric": "best_f1", "value": {0}}}'.format(f1))
                else:
                    logger.info('{{"metric": "best_f1", "value": {0}}}'.format(f1))
                if args.env == "tensorboard":
                    summary_writer.add_scalar('metric/best_f1', f1, epoch)
                best_f1 = f1
                if trial is None:
                    vector_file = args.vectors_path
                    with open(vector_file, "w") as f:
                        f.write("{0}\t{1}\n".format(len(reader.items), option.encode_size))
                    write_code_vectors(reader, model, train_data_loader, option, vector_file, "a", None)
                    write_code_vectors(reader, model, test_data_loader, option, vector_file, "a", args.test_result_path)
                    torch.save(model.state_dict(), path.join(args.model_path, "code2vec.model"))
            # Early stopping: give up after more than 10 epochs in a row with
            # neither a lower train loss nor a higher accuracy.
            if last_loss is None or train_loss < last_loss or last_accuracy is None or last_accuracy < accuracy:
                last_loss = train_loss
                last_accuracy = accuracy
                bad_count = 0
            else:
                bad_count += 1
            if bad_count > 10:
                print('early stop loss:{0}, bad:{1}'.format(train_loss, bad_count))
                print_sample(reader, model, test_data_loader, option)
                break
    finally:
        if args.env == "tensorboard":
            summary_writer.close()
    return 1.0 - f1
def calculate_loss(predictions, label, criterion, option):
    """Apply ``criterion`` to the log-softmax of the raw ``predictions``.

    ``option`` is not used here; it is kept so all loss helpers share
    one call signature.
    """
    log_probs = F.log_softmax(predictions, dim=1)
    return criterion(log_probs, label)
def test(model, data_loader, criterion, option, label_vocab):
    """Evaluate ``model`` on ``data_loader``.

    Returns ``(test_loss, accuracy, precision, recall, f1)``.  The metric
    flavour is selected by the --eval_method option; all four metrics are
    None for an unrecognised method.
    """
    model.eval()
    with torch.no_grad():
        test_loss = 0.0
        expected_labels = []
        actual_labels = []
        for sample_batched in data_loader:
            starts = sample_batched['starts'].to(option.device)
            paths = sample_batched['paths'].to(option.device)
            ends = sample_batched['ends'].to(option.device)
            label = sample_batched['label'].to(device)
            expected_labels.extend(label)

            preds, _, _ = model.forward(starts, paths, ends, label)
            test_loss += calculate_loss(preds, label, criterion, option).item()

            # The predicted label is the arg-max over the label scores.
            _, preds_label = torch.max(preds, dim=1)
            actual_labels.extend(preds_label)

        expected_labels = np.array(expected_labels)
        actual_labels = np.array(actual_labels)

        evaluators = {
            'exact': exact_match,
            'subtoken': lambda e, a: subtoken_match(e, a, label_vocab),
            'ave_subtoken': lambda e, a: averaged_subtoken_match(e, a, label_vocab),
        }
        accuracy = precision = recall = f1 = None
        if args.eval_method in evaluators:
            accuracy, precision, recall, f1 = evaluators[args.eval_method](expected_labels, actual_labels)
    return test_loss, accuracy, precision, recall, f1
def exact_match(expected_labels, actual_labels):
    """Whole-label metrics: a prediction counts only when the entire label matches."""
    expected = np.array(expected_labels, dtype=np.uint64)
    actual = np.array(actual_labels, dtype=np.uint64)
    # Weighted averaging accounts for label imbalance.
    precision, recall, f1, _ = precision_recall_fscore_support(expected, actual, average='weighted')
    accuracy = accuracy_score(expected, actual)
    return accuracy, precision, recall, f1
def averaged_subtoken_match(expected_labels, actual_labels, label_vocab):
    """Subtoken metrics averaged per example (macro average over predictions)."""
    accuracies, precisions, recalls, f1_scores = [], [], [], []
    for expected, actual in zip(expected_labels.tolist(), actual_labels.tolist()):
        expected_subs = label_vocab.itosubtokens[expected]
        actual_subs = label_vocab.itosubtokens[actual]

        hits = sum(1 for sub in expected_subs if sub in actual_subs)

        # Accuracy is a Jaccard-style overlap of the two subtoken bags.
        acc = hits / float(len(expected_subs) + len(actual_subs) - hits)
        rec = hits / float(len(expected_subs))
        prec = hits / float(len(actual_subs))
        f1 = 2.0 * prec * rec / (prec + rec) if prec + rec > 0 else 0.0

        accuracies.append(acc)
        precisions.append(prec)
        recalls.append(rec)
        f1_scores.append(f1)
    return (np.average(accuracies), np.average(precisions),
            np.average(recalls), np.average(f1_scores))
def subtoken_match(expected_labels, actual_labels, label_vocab):
    """Subtoken metrics pooled over the whole evaluation set (micro average)."""
    hits = 0.0
    expected_total = 0.0
    actual_total = 0.0
    for expected, actual in zip(expected_labels.tolist(), actual_labels.tolist()):
        # Elements are tensor-like scalars; .item() unwraps the raw index.
        expected_subs = label_vocab.itosubtokens[expected.item()]
        actual_subs = label_vocab.itosubtokens[actual.item()]
        hits += sum(1 for sub in expected_subs if sub in actual_subs)
        expected_total += len(expected_subs)
        actual_total += len(actual_subs)
    accuracy = hits / (expected_total + actual_total - hits)
    precision = hits / actual_total
    recall = hits / expected_total
    f1 = 2.0 * precision * recall / (precision + recall) if precision + recall > 0 else 0.0
    return accuracy, precision, recall, f1
def print_sample(reader, model, data_loader, option):
    """Log one correctly-predicted example: its context paths, attention weights and labels."""
    model.eval()
    with torch.no_grad():
        for sample_batched in data_loader:
            starts = sample_batched['starts'].to(option.device)
            paths = sample_batched['paths'].to(option.device)
            ends = sample_batched['ends'].to(option.device)
            label = sample_batched['label'].to(option.device)

            preds, code_vector, attn = model.forward(starts, paths, ends, label)
            _, preds_label = torch.max(preds, dim=1)

            for i in range(len(starts)):
                if preds_label[i] != label[i]:
                    continue
                # Show only one example whose prediction matched the true label.
                start_names = [reader.terminal_vocab.itos[v.item()] for v in starts[i]]
                path_names = [reader.path_vocab.itos[v.item()] for v in paths[i]]
                end_names = [reader.terminal_vocab.itos[v.item()] for v in ends[i]]
                label_name = reader.label_vocab.itos[label[i].item()]
                pred_label_name = reader.label_vocab.itos[preds_label[i].item()]
                attentions = attn.cpu()[i]
                for start, path, end, attention in zip(start_names, path_names, end_names, attentions):
                    if start != "<PAD/>":
                        logger.info("{0} {1} {2} [{3}]".format(start, path, end, attention))
                logger.info('expected label: {0}'.format(label_name))
                logger.info('actual label: {0}'.format(pred_label_name))
                return
def write_code_vectors(reader, model, data_loader, option, vector_file, mode, test_result_file):
    """Save the code vectors (and, optionally, per-example test results).

    Writes one ``label<TAB>vector`` line per example to ``vector_file``,
    opened with ``mode`` (e.g. "a" to append after a header line).  When
    ``test_result_file`` is not None, also writes one line per example:
    ``id<TAB>correct<TAB>expected<TAB>predicted<TAB>confidence``.
    """
    model.eval()
    with torch.no_grad():
        # try/finally guarantees the result file is closed even if a write
        # fails part-way through (the original leaked it on exceptions).
        fr = open(test_result_file, "w") if test_result_file is not None else None
        try:
            with open(vector_file, mode) as fv:
                for sample_batched in data_loader:
                    example_ids = sample_batched['id']  # renamed: 'id' shadows the builtin
                    starts = sample_batched['starts'].to(option.device)
                    paths = sample_batched['paths'].to(option.device)
                    ends = sample_batched['ends'].to(option.device)
                    label = sample_batched['label'].to(option.device)
                    preds, code_vector, _ = model.forward(starts, paths, ends, label)
                    # Max score = model confidence; its index = predicted label.
                    preds_prob, preds_label = torch.max(preds, dim=1)
                    for i in range(len(starts)):
                        label_name = reader.label_vocab.itos[label[i].item()]
                        vec = code_vector.cpu()[i]
                        fv.write(label_name + "\t" + " ".join([str(e.item()) for e in vec]) + "\n")
                        if fr is not None:
                            pred_name = reader.label_vocab.itos[preds_label[i].item()]
                            fr.write("{0}\t{1}\t{2}\t{3}\t{4}\n".format(example_ids[i].item(), label_name == pred_name, label_name, pred_name, preds_prob[i].item()))
        finally:
            if fr is not None:
                fr.close()
#
# for optuna
#
def find_optimal_hyperparams():
    """Search for optimal hyperparameters with Optuna (median-pruned study)."""
    torch.manual_seed(args.random_seed)
    reader = DatasetReader(args.corpus_path, args.path_idx_path, args.terminal_idx_path,
                           infer_method=args.infer_method_name, infer_variable=args.infer_variable_name,
                           shuffle_variable_indexes=args.shuffle_variable_indexes)
    option = Option(reader)
    builder = DatasetBuilder(reader, option)
    label_freq = torch.tensor(reader.label_vocab.get_freq_list(), dtype=torch.float32).to(device)
    criterion = nn.NLLLoss(weight=1 / label_freq).to(device)
    def objective(trial):
        # NOTE: the closure mutates the shared ``option`` instance, so each
        # trial overwrites the settings of the previous one before training.
        # option.max_path_length = int(trial.suggest_loguniform('max_path_length', 50, 200))
        # option.terminal_embed_size = int(trial.suggest_loguniform('terminal_embed_size', 50, 200))
        # option.path_embed_size = int(trial.suggest_loguniform('path_embed_size', 50, 200))
        option.encode_size = int(trial.suggest_loguniform('encode_size', 100, 300))
        option.dropout_prob = trial.suggest_loguniform('dropout_prob', 0.5, 0.9)
        option.batch_size = int(trial.suggest_loguniform('batch_size', 256, 2048))
        model = Code2Vec(option).to(device)
        optimizer = get_optimizer(trial, model)
        # _train returns 1 - f1, the value the study minimises.
        return _train(model, optimizer, criterion, option, reader, builder, trial)
    study = optuna.create_study(pruner=optuna.pruners.MedianPruner())
    study.optimize(objective, n_trials=args.num_trials)
    best_params = study.best_params
    best_value = study.best_value
    if args.env == "floyd":
        print('best hyperparams: {0}'.format(best_params))
        print('best value: {0}'.format(best_value))
    else:
        logger.info("optimal hyperparams: {0}".format(best_params))
        logger.info('best value: {0}'.format(best_value))
def get_optimizer(trial, model):
    """Build the optimizer for this Optuna trial (Adam only; SGD path is disabled)."""
    decay = trial.suggest_loguniform('weight_decay', 1e-10, 1e-3)
    return adam(model, trial, decay)
def adam(model, trial, weight_decay):
    """Adam optimizer with a trial-sampled learning rate."""
    return torch.optim.Adam(
        model.parameters(),
        lr=trial.suggest_loguniform('adam_lr', 1e-5, 1e-1),
        weight_decay=weight_decay,
    )
def momentum(model, trial, weight_decay):
    """Momentum-SGD optimizer (momentum fixed at 0.9) with a trial-sampled learning rate."""
    return torch.optim.SGD(
        model.parameters(),
        lr=trial.suggest_loguniform('momentum_sgd_lr', 1e-5, 1e-1),
        momentum=0.9,
        weight_decay=weight_decay,
    )
#
# entry point
#
def main():
    """Dispatch to hyperparameter search or a plain training run."""
    entry = find_optimal_hyperparams if args.find_hyperparams else train
    entry()


if __name__ == '__main__':
    main()
|
[
"sonoisa@gmail.com"
] |
sonoisa@gmail.com
|
64059b9f7d7de6483cfefce20910ddc3c734d3d4
|
50b928cd421d0474ba08c4a80c883e988e5ed7bf
|
/test/gentest_multiple/test_multiple.py
|
d729db09166fa3996e7c79bc99419a140c4dad85
|
[
"BSD-3-Clause",
"MIT"
] |
permissive
|
thirtytwobits/nunavut
|
41a32fab32a99b3e7368648961abafb0a4f54dcd
|
71fdbbc5423e898ef37a1345156132d65d866c12
|
refs/heads/main
| 2023-06-24T01:03:57.270411
| 2023-06-07T22:32:49
| 2023-06-07T22:36:42
| 178,792,985
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,604
|
py
|
#
# Copyright 2019 Amazon.com, Inc. or its affiliates. All Rights Reserved.
# Copyright (C) 2018-2019 OpenCyphal Development Team <opencyphal.org>
# This software is distributed under the terms of the MIT License.
#
import json
from pathlib import Path
import pytest
from pydsdl import FrontendError, read_namespace
from nunavut import build_namespace_tree
from nunavut.jinja import DSDLCodeGenerator
from nunavut.lang import LanguageContextBuilder
def test_two_root_error(gen_paths):  # type: ignore
    """Verify that reading fails when a type refers to something outside the provided root."""
    scotec_root = str(gen_paths.dsdl_dir / Path("scotec"))
    with pytest.raises(FrontendError):
        read_namespace(scotec_root, [], allow_unregulated_fixed_port_id=True)
def test_three_roots(gen_paths):  # type: ignore
    """Generate a type that uses types from two other root namespaces and verify the output."""
    scotec_root = str(gen_paths.dsdl_dir / Path("scotec"))
    lookup_dirs = [str(gen_paths.dsdl_dir / Path(extra)) for extra in ("huckco", "esmeinc")]
    compound_types = read_namespace(scotec_root, lookup_dirs, allow_unregulated_fixed_port_id=True)

    lang_ctx = LanguageContextBuilder(include_experimental_languages=True).set_target_language("js").create()
    namespace = build_namespace_tree(compound_types,
                                     scotec_root,
                                     gen_paths.out_dir,
                                     lang_ctx)
    DSDLCodeGenerator(namespace, templates_dir=gen_paths.templates_dir).generate_all(False)

    # Read the generated JSON back and check the cross-namespace references.
    outfile = gen_paths.find_outfile_in_namespace("scotec.FatherType", namespace)
    assert (outfile is not None)
    with open(str(outfile), 'r') as json_file:
        json_blob = json.load(json_file)

    assert len(json_blob) > 0
    attributes = json_blob['scotec.FatherType']['attributes']
    assert len(attributes) == 2
    assert attributes[0]['type'] == 'huckco.SonType.0.1'
    assert attributes[1]['type'] == 'esmeinc.DaughterType.0.1'
def test_three_roots_using_nnvg(gen_paths, run_nnvg):  # type: ignore
    """Drive the same three-root generation scenario through the nnvg command line."""
    nnvg_cmd = ['--templates', str(gen_paths.templates_dir)]
    # Both extra roots are supplied as lookup directories.
    for lookup in ("huckco", "esmeinc"):
        nnvg_cmd += ['-I', str(gen_paths.dsdl_dir / Path(lookup))]
    nnvg_cmd += ['-O', str(gen_paths.out_dir)]
    nnvg_cmd += ['-e', str('.json')]
    nnvg_cmd.append(str(gen_paths.dsdl_dir / Path("scotec")))
    run_nnvg(gen_paths, nnvg_cmd)
|
[
"noreply@github.com"
] |
thirtytwobits.noreply@github.com
|
847d9ba6d4c23f31651d6b3a72144b8a339c01b0
|
feb35f45e015befe9f2a35348be3809e0d30229d
|
/Join/save_best_joins.py
|
61db512b6b57ce2d48b56afc1899524fafaa5906
|
[
"MIT"
] |
permissive
|
csaybar/qgis-earthengine-examples
|
9d65bb31f74921c78edc576973a2021f3b4bb79a
|
ba8942683834d2847ff3246bdd1859b36e50fe44
|
refs/heads/master
| 2020-12-30T05:51:36.960385
| 2020-03-21T15:52:38
| 2020-03-21T15:52:38
| 238,882,033
| 4
| 0
|
MIT
| 2020-02-07T09:04:57
| 2020-02-07T09:04:56
| null |
UTF-8
|
Python
| false
| false
| 824
|
py
|
import ee
from ee_plugin import Map

# Primary collection: Landsat 8 TOA scenes over the point of interest.
primary = ee.ImageCollection('LANDSAT/LC08/C01/T1_TOA') \
    .filterDate('2014-04-01', '2014-06-01') \
    .filterBounds(ee.Geometry.Point(-122.092, 37.42))

# Secondary collection: GRIDMET meteorological data.
gridmet = ee.ImageCollection('IDAHO_EPSCOR/GRIDMET')

# Only pair images whose timestamps differ by at most two days (in milliseconds).
maxDiffFilter = ee.Filter.maxDifference(
    difference=2 * 24 * 60 * 60 * 1000,
    leftField='system:time_start',
    rightField='system:time_start',
)

# saveBest keeps only the single closest secondary match per primary image.
saveBestJoin = ee.Join.saveBest(
    matchKey='bestImage',
    measureKey='timeDiff',
)

# Apply the join and print the result.
landsatMet = saveBestJoin.apply(primary, gridmet, maxDiffFilter)
print(landsatMet.getInfo())
|
[
"giswqs@gmail.com"
] |
giswqs@gmail.com
|
91a74818d7be1e945c52b732f86833c55532da7a
|
a5794c4ca3477ac4925ba6cb90e07cf0ffa82752
|
/Cwiczenia/1.py
|
3fe1c28b28981a3ab8c84ca2573b497ea74a27d8
|
[] |
no_license
|
Rages84/python-3-formatowanie-napisow
|
665d760daefffa54cec2457bf449bc39b198339e
|
f8ed2fa80b0851683e2db467712555169b4173f3
|
refs/heads/master
| 2020-04-17T19:59:30.214550
| 2018-03-12T22:14:18
| 2018-03-12T22:14:18
| 166,887,035
| 0
| 0
| null | 2019-01-21T22:03:38
| 2019-01-21T22:03:38
| null |
UTF-8
|
Python
| false
| false
| 325
|
py
|
# -*- coding: utf8 -*-
"""
A.Utwórz nowy plik, który po podaniu przez użytkownika długości w cm będzie wyświetlał wynik w metrach i calach zaokrąglając do 4 miejsc po przecinku.
B.Podobny skrypt możesz wykonać dla zamiany kg na funty.
Wynik wyświetl używając dowolnego sposobu formatowania tekstu.
"""
|
[
"malgorzata.lyczywek@gmail.com"
] |
malgorzata.lyczywek@gmail.com
|
237fcd8897a09d8a98ffa3b077e795264dd293c5
|
06f7ffdae684ac3cc258c45c3daabce98243f64f
|
/vsts/vsts/work/v4_0/models/timeline_team_data.py
|
6d29a511443e00f0f368a3aaab0a8af0fe282ab7
|
[
"MIT",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
kenkuo/azure-devops-python-api
|
7dbfb35f1c9637c9db10207824dd535c4d6861e8
|
9ac38a97a06ee9e0ee56530de170154f6ed39c98
|
refs/heads/master
| 2020-04-03T17:47:29.526104
| 2018-10-25T17:46:09
| 2018-10-25T17:46:09
| 155,459,045
| 0
| 0
|
MIT
| 2018-10-30T21:32:43
| 2018-10-30T21:32:42
| null |
UTF-8
|
Python
| false
| false
| 4,678
|
py
|
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# Generated file, DO NOT EDIT
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------------------------
from msrest.serialization import Model
# NOTE(review): this module is code-generated (see the "Generated file, DO NOT
# EDIT" header) — prefer regenerating over hand edits.
class TimelineTeamData(Model):
    """TimelineTeamData.

    :param backlog: Backlog matching the mapped backlog associated with this team.
    :type backlog: :class:`BacklogLevel <work.v4_0.models.BacklogLevel>`
    :param field_reference_names: The field reference names of the work item data
    :type field_reference_names: list of str
    :param id: The id of the team
    :type id: str
    :param is_expanded: Was iteration and work item data retrieved for this team. <remarks> Teams with IsExpanded false have not had their iteration, work item, and field related data queried and will never contain this data. If true then these items are queried and, if there are items in the queried range, there will be data. </remarks>
    :type is_expanded: bool
    :param iterations: The iteration data, including the work items, in the queried date range.
    :type iterations: list of :class:`TimelineTeamIteration <work.v4_0.models.TimelineTeamIteration>`
    :param name: The name of the team
    :type name: str
    :param order_by_field: The order by field name of this team
    :type order_by_field: str
    :param partially_paged_field_reference_names: The field reference names of the partially paged work items, such as ID, WorkItemType
    :type partially_paged_field_reference_names: list of str
    :param project_id: The project id the team belongs team
    :type project_id: str
    :param status: Status for this team.
    :type status: :class:`TimelineTeamStatus <work.v4_0.models.TimelineTeamStatus>`
    :param team_field_default_value: The team field default value
    :type team_field_default_value: str
    :param team_field_name: The team field name of this team
    :type team_field_name: str
    :param team_field_values: The team field values
    :type team_field_values: list of :class:`TeamFieldValue <work.v4_0.models.TeamFieldValue>`
    :param work_item_type_colors: Colors for the work item types.
    :type work_item_type_colors: list of :class:`WorkItemColor <work.v4_0.models.WorkItemColor>`
    """

    # Maps each Python attribute to its REST wire name and msrest type
    # descriptor for (de)serialization by the Model base class.
    _attribute_map = {
        'backlog': {'key': 'backlog', 'type': 'BacklogLevel'},
        'field_reference_names': {'key': 'fieldReferenceNames', 'type': '[str]'},
        'id': {'key': 'id', 'type': 'str'},
        'is_expanded': {'key': 'isExpanded', 'type': 'bool'},
        'iterations': {'key': 'iterations', 'type': '[TimelineTeamIteration]'},
        'name': {'key': 'name', 'type': 'str'},
        'order_by_field': {'key': 'orderByField', 'type': 'str'},
        'partially_paged_field_reference_names': {'key': 'partiallyPagedFieldReferenceNames', 'type': '[str]'},
        'project_id': {'key': 'projectId', 'type': 'str'},
        'status': {'key': 'status', 'type': 'TimelineTeamStatus'},
        'team_field_default_value': {'key': 'teamFieldDefaultValue', 'type': 'str'},
        'team_field_name': {'key': 'teamFieldName', 'type': 'str'},
        'team_field_values': {'key': 'teamFieldValues', 'type': '[TeamFieldValue]'},
        'work_item_type_colors': {'key': 'workItemTypeColors', 'type': '[WorkItemColor]'}
    }

    def __init__(self, backlog=None, field_reference_names=None, id=None, is_expanded=None, iterations=None, name=None, order_by_field=None, partially_paged_field_reference_names=None, project_id=None, status=None, team_field_default_value=None, team_field_name=None, team_field_values=None, work_item_type_colors=None):
        super(TimelineTeamData, self).__init__()
        self.backlog = backlog
        self.field_reference_names = field_reference_names
        self.id = id
        self.is_expanded = is_expanded
        self.iterations = iterations
        self.name = name
        self.order_by_field = order_by_field
        self.partially_paged_field_reference_names = partially_paged_field_reference_names
        self.project_id = project_id
        self.status = status
        self.team_field_default_value = team_field_default_value
        self.team_field_name = team_field_name
        self.team_field_values = team_field_values
        self.work_item_type_colors = work_item_type_colors
|
[
"tedchamb@microsoft.com"
] |
tedchamb@microsoft.com
|
2d5ba96b6122f417ed8eb666b3be501b01c91d5c
|
36e12b65922ebbb6d95aff6cbac0777c47e24153
|
/runCutadapt.py
|
faed1060856ca3d5fd0f4201522429e23eb9a4ea
|
[
"MIT"
] |
permissive
|
NailouZhang/AnalysisScripts
|
d0d00174f642d6722cc907f9a392084600630780
|
3df37d2f8fca9bc402afe5ea870c42200fca1ed3
|
refs/heads/master
| 2023-06-06T08:14:39.064920
| 2021-06-22T16:46:26
| 2021-06-22T16:46:26
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,727
|
py
|
# python3
# Trim Illumina adapters from paired-end FASTQ files with cutadapt.
# For each sample, reads <sample>.fq.gz / <sample>_2.fq.gz and writes
# <name>.R1.trimmed.fq.gz / <name>.R2.trimmed.fq.gz plus a per-sample
# cutadapt stats file.
import subprocess

samples = ['Ctrl.Rep1_1', 'Ctrl.Rep2_1', 'Ctrl.Rep3_1', 'KD.Rep1_1', 'KD.Rep2_1', 'KD.Rep3_1']

# Standard Illumina adapters, trimmed from read 1 (-a) and read 2 (-A).
adapter1 = 'AGATCGGAAGAGCACACGTCTGAACTCCAGTCA'
adapter2 = 'AGATCGGAAGAGCGTCGTGTAGGGAAAGAGTGT'
# NOTE(review): adapters 3/4 (and the removed FF/GFP selection logic) were
# dead code — the sample-name checks never matched the samples above and
# their result was never passed to cutadapt. The constants are kept only
# for provenance; the command below always uses adapter1/adapter2.
adapter3 = 'TGGTGGCTGGTGTGGCCAAGCTTCGATATCCGCATGCTA'
adapter4 = 'GGGAAAAAGATCTCAGTGGTATTTGTGAGCCAGCACTAGTGCGACCGCAAGAG'

for idx, sample in enumerate(samples):
    print('Trimming {0}, sample {1} of {2}...'.format(sample, idx + 1, len(samples)))
    # Read 1 is <sample>.fq.gz; read 2 swaps the _1/_2 suffix.
    input1 = sample + '.fq.gz'
    input2 = input1.replace('_1.fq.gz', '_2.fq.gz')
    # Outputs are named by the condition prefix, e.g. 'Ctrl.Rep1'.
    samplename = sample.split('_')[0]
    output1 = '{0}.R1.trimmed.fq.gz'.format(samplename)
    output2 = '{0}.R2.trimmed.fq.gz'.format(samplename)
    statsout = '{0}.cutadaptstats.txt'.format(samplename)
    command = ['cutadapt', '-a', adapter1, '-A', adapter2, '--minimum-length', '25', '-j', '8', '-o', output1, '-p', output2, input1, input2]
    # cutadapt prints its report to stdout; capture it per sample.
    with open(statsout, 'w') as outfh:
        subprocess.call(command, stdout=outfh)
|
[
"taliaferrojm@gmail.com"
] |
taliaferrojm@gmail.com
|
d97b0a4b7ef7ec687bc1f711f9fa6e0cb6fc3fcd
|
8ff8ca93b66121492c437810181b94560b2c5334
|
/test_utils.py
|
877c867567a5d18183ef48b64fa098a2b2378d15
|
[] |
no_license
|
mxscott/rna-aligner
|
3e7cf88b2eb07c10c5d4f2597196e063dbbeda8d
|
8280567b92c7b56455d3c44cc0fec6db93029115
|
refs/heads/master
| 2020-04-04T08:07:33.345061
| 2018-11-29T07:28:34
| 2018-11-29T07:28:34
| 155,772,129
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,402
|
py
|
import utils
import evaluation
import shared
from project import *
from timeit import default_timer as timer
toy_genome = 'AGAAGCAACCTGAGAAACGTCTTTGGGATGTGTTCATTCACTTCACAATGATGAACGTTTCTTTTGATTGAGAAGTTTGTAAGGATAACTTTTGTAGAATCTGCAAAGGGATATATGTGAGCCCCTTGATTCCTATGGCAAAATAGGAATTATCTTGAGATAAAAGCCAGACAGAAGATTTCTGAGAAACTTTTTTGTGATGTGTACTTTCATCTCACAGAGTTGAAAAATTCTTTTGATTGAGCAGTTTGGAAACAGTCTTTTCGTATCATCTGCAAATGGATGTTTGGGGCGCTTTGTGGCCTAAGGTGAAAATGGAAACACCTTCACATAAAAACTAGACAGAAGAATTCTGAGGAACCTCTTTATGATGTGTGCATTCATCTCAGATGGGTGAAATTTTCTTTTGATGGAGCAGTTTGGAAACAGTCTTTTTCTAGTATCTGCAGAAGGATATTTGTGAGCGGTGTAAGGCCTATGGTGAAAAAGGAAATATCTTCACATAAAAAACAGACAGAAGCTTTCTGAGGAACTTTTTGTGAGGTGTGCATTCATCTCACCGTGTTGAAACTTTATTTTATTTGAGCAGTTTAGAGACAGTCTTTCTCTGCAATCTGCAAAGGTCTAATTCTGAGCCCTTTGAGGTCTATGGTGAAAAAGAAATATCTTCCCATTTAAACTAGACAGAAGCATTCTGAGGAACTTCGTTGTGATGCCTCTCCATTCATCGGACAGAGTTGAAGGTTTCTTTTAATTCAGCACTTTGGAAAGCATATTTTTGTAGAATCTGCAAAGGGATATTTTTGAGACATTTGAAGCCTAGAGTGAAATAGTAAATATCTTCCCATGAAAACTAGACAGGAGAATTCTGAGAAACTTCATTCTGATGTGTGCATTAACCTCACAGAATTTAACCTTTCTTTTGATTGAGAAGTATGGAAATGGTGGTCTTTTAGAACCTGGAAAGGGATATTTCTTAGCCCTTTGAGGCCTATGGTGAGACTGGAAATATCATCACATGAAAACTAGTCCGAAGCTTTCGGAGAAACTTCTTTGAGATGTGTGCTTTCACCTCACAGAGTTAATCACTTTCTTTTGATTGAGCAGTTTGGAAACACTCTTTCTGTGACATCTGTAAATGGATATTAGGAGTGCTTTGAGGCCAATG'
iso_1 = shared.Isoform('1', [shared.Exon('1', 100, 200), shared.Exon('2', 250, 400)])
iso_2 = shared.Isoform('2', [shared.Exon('1', 100, 200), shared.Exon('3', 700, 850)])
iso_3 = shared.Isoform('3', [shared.Exon('2', 250, 400), shared.Exon('3', 700, 850)])
iso_4 = shared.Isoform('4', [shared.Exon('4', 900, 1000)])
gene1 = shared.Gene('1', [iso_1, iso_2, iso_3])
gene2 = shared.Gene('2', [iso_1, iso_3])
gene3 = shared.Gene('3', [iso_4])
toy_genes = {gene1, gene2, gene3}
read = 'GATTTCTGAGAAACTTTTTTGTGA'
'''
with open('genome.fa') as f:
f.readline()
genome = f.readline()
genes = utils.parse_tab_file('genes.tab')
'''
start_construct = timer()
aligner = Aligner(toy_genome, toy_genes)
end_construct = timer()
print('Initialization took: ' + str(end_construct - start_construct) + ' seconds')
#print(aligner.index_dict[100])
#print(aligner.index_dict[350])
#print(aligner.index_dict[1000])
#print(aligner.transcriptome)
start_align = timer()
alignment = aligner.align(read)
end_align = timer()
print('Alignment took: ' + str(end_align - start_align) + ' seconds')
print(alignment)
|
[
"mscott@lexentbio.com"
] |
mscott@lexentbio.com
|
65a768c55f088fdb2ae6ec670697a327fde4ef06
|
686781cbf18d2a34a3bb9c902c29f96402fbe032
|
/classes/inner_classes.py
|
21893eb0056daf80c28a1002b87586dcbb1ea9a6
|
[] |
no_license
|
arvind-mocha/Python-Basics
|
962c4088361aee89b53f47f4d8e925b12722e0ee
|
6f079543b4cf69c18ed65e5b618851d24f36e716
|
refs/heads/main
| 2023-05-10T13:55:53.459362
| 2021-06-17T12:35:13
| 2021-06-17T12:35:13
| 343,633,921
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,701
|
py
|
#inner calsses are basically used when you are creating
# another class but it is only useful for your previous class
class student:
    """Demonstrates an inner class: ``laptop`` is only meaningful in the
    context of a ``student``, so it is nested inside the outer class."""

    class laptop:
        """Inner class describing one of a student's laptops."""

        def __init__(self, graphics, fan):
            # Brand and specs are fixed; graphics and fan vary per machine.
            self.brand = "lenova"
            self.specs = "i9"
            self.graphics = graphics
            self.fan = fan

        def show(self):
            """Return the fixed (brand, specs) pair."""
            return self.brand, self.specs

    def __init__(self, name, dept):
        self.name = name
        self.dept = dept
        # Inner-class instances are normally built from inside the outer
        # class; creating them externally works too but is unusual.
        self.lap = self.laptop("RTX 2080ti and", "quad cooler")
        self.lap1 = self.laptop("intel and", "single fan")

    def show(self):
        """Print the student's data followed by both laptop configurations."""
        print(self.name, self.dept)
        first = True
        for machine in (self.lap, self.lap1):
            if not first:
                print()
            print(machine.show())
            print(machine.graphics, machine.fan)
            first = False
# Demo: create an outer-class instance (its __init__ builds the laptops).
obj=student("Arvind","CSE")
# The inner class can also be instantiated from outside via the outer class,
# though this is unusual -- the outer class already builds its own laptops,
# so this external object duplicates what __init__ does internally.
lap1=student.laptop("intel and","single fan")
obj.show()  # prints the student's data and both laptop configurations
|
[
"arvindarvind2210@.com"
] |
arvindarvind2210@.com
|
b143b869106aa912f578f44e93da58b05ecdd5e3
|
d37c9b469988b66a6abb3a06aa55dea29156eeb7
|
/ginomypy/__init__.py
|
b2fe634542d871ba8aa3837e56bcc933d036f780
|
[
"BSD-3-Clause"
] |
permissive
|
vppuzakov/gino-stubs-1
|
98f9501281be355545f941dc72e12359cd50fbc6
|
291b390566416103ce4c88c10521c40344645dea
|
refs/heads/master
| 2020-04-18T02:20:18.001114
| 2019-01-21T18:02:40
| 2019-01-21T18:02:40
| 167,158,656
| 0
| 1
|
BSD-3-Clause
| 2019-01-23T09:48:54
| 2019-01-23T09:48:53
| null |
UTF-8
|
Python
| false
| false
| 776
|
py
|
from typing import TYPE_CHECKING, Optional, Callable
from mypy.plugin import Plugin, FunctionContext, MethodContext
from mypy.types import Type
from sqlmypy import column_hook
if TYPE_CHECKING:
from typing_extensions import Final # noqa
COLUMN_NAME = 'sqlalchemy.sql.schema.Column' # type: Final
class BasicGinoPlugin(Plugin):
    """Mypy plugin that applies sqlmypy's ``column_hook`` to every call of
    ``sqlalchemy.sql.schema.Column``, whether invoked as a function or as a
    method."""

    def _hook_for(self, fullname: str):
        # Both hook kinds share the same dispatch: only Column gets a hook.
        return column_hook if fullname == COLUMN_NAME else None

    def get_function_hook(
        self, fullname: str
    ) -> Optional[Callable[[FunctionContext], Type]]:
        return self._hook_for(fullname)

    def get_method_hook(
        self, fullname: str
    ) -> Optional[Callable[[MethodContext], Type]]:
        return self._hook_for(fullname)
def plugin(version):
    """Mypy plugin entry point: return the plugin class.

    *version* is the mypy version string; it is ignored here.
    """
    return BasicGinoPlugin
|
[
"bryan@reigndropsfall.net"
] |
bryan@reigndropsfall.net
|
3dfb56f12b66a8cb242eb134e51f202a71db7d0f
|
2581f2c98d497a6adf9bbb62730b02efea08cf80
|
/stubs/scales/formats_test.pyi
|
536e563227bf193a0e77af86c73fccd1b106b645
|
[] |
no_license
|
drewp/rdfdb
|
1ebbb5cf892fd86f6e3c571b94a97ecd07dd7340
|
8c71f02f989b2de1a4921640d1ca765e6d9efdb6
|
refs/heads/master
| 2021-04-27T00:31:22.493060
| 2019-08-09T06:15:15
| 2019-08-09T06:15:15
| 123,776,296
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 694
|
pyi
|
# Stubs for scales.formats_test (Python 3)
#
# NOTE: This dynamically typed stub was automatically generated by stubgen.
import unittest
from typing import Any
class Root:
    # Test double with a child factory -- presumably mirrors scales' stat
    # tree root; confirm against the scales.formats_test implementation.
    def __init__(self) -> None: ...
    def getChild(self, name: Any, collapsed: Any): ...

class Child:
    # Child node carrying a single counter stat.
    countStat: Any = ...
    def __init__(self, name: Any, collapsed: Any) -> None: ...

class StatsTest(unittest.TestCase):
    def setUp(self) -> None: ...
    def testJsonCollapse(self) -> None: ...

class UnicodeFormatTest(unittest.TestCase):
    # Exercises HTML/JSON formatting of non-ASCII and binary data.
    UNICODE_VALUE: Any = ...
    def testHtmlFormat(self) -> None: ...
    def testJsonFormat(self) -> None: ...
    def testJsonFormatBinaryGarbage(self) -> None: ...
|
[
"drewp@bigasterisk.com"
] |
drewp@bigasterisk.com
|
7069fca095f6baf2063be9bbc63196896714d52b
|
c00006a7faccc175420aeb6d8f0a754bd3c1fc59
|
/Ejecucion/SportCompanyTraining.py
|
93de28f2aff40f6587cf5fdc8df1d958d97d344f
|
[] |
no_license
|
palomin98/PythonGS
|
de527f10d2fe9cdf95110a109b7e99a730789868
|
c24f363901f3b86c885fbe5b894e8be7652896a6
|
refs/heads/main
| 2023-03-29T06:26:56.468988
| 2021-03-28T00:09:35
| 2021-03-28T00:09:35
| 352,205,560
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 26,603
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# sportCompanyTraining.py
#
# Copyright 2018 fp <fp@bachilllerato-07>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
#
import random
import pedirdatos as datos
class SportCompanyTraining:
    """A sports-training company.

    Keeps the coaches (entrenadores), students (alumnos), workers
    (trabajadores) and training sessions (entrenamientos), runs the three
    competition types (carreras / peso / salto) and logs every winner to the
    "vicorias.txt" history file.
    """

    def __init__(self, nombre):
        """Create an empty company called *nombre*."""
        self.nombre = nombre
        self.entrenadores = []    # coaches
        self.alumnos = []         # students
        self.trabajadores = []    # workers
        self.entrenamientos = []  # training sessions
        # Last competition run of each kind ("" until one has taken place).
        self.carrera = ""
        self.peso = ""
        self.salto = ""

    def get_nombre(self):
        """Return the company name."""
        return self.nombre

    def set_nombre(self, nombre):
        """Set the company name."""
        self.nombre = nombre

    def get_entrenadores(self):
        """Return the list of coaches."""
        return self.entrenadores

    def set_entrenadores(self, entrenadores):
        """Replace the list of coaches."""
        self.entrenadores = entrenadores

    def get_alumnos(self):
        """Return the list of students."""
        return self.alumnos

    def set_alumnos(self, alumnos):
        """Replace the list of students."""
        self.alumnos = alumnos

    def get_trabajadores(self):
        """Return the list of workers."""
        return self.trabajadores

    def set_trabajadores(self, trabajadores):
        """Replace the list of workers."""
        self.trabajadores = trabajadores

    def get_entrenamientos(self):
        """Return the list of training sessions."""
        return self.entrenamientos

    def set_entrenamientos(self, entrenamientos):
        """Replace the list of training sessions."""
        self.entrenamientos = entrenamientos

    def get_carrera(self):
        """Return the last race competition."""
        return self.carrera

    def set_carrera(self, carrera):
        """Set the last race competition."""
        self.carrera = carrera

    def get_peso(self):
        """Return the last weight competition."""
        return self.peso

    def set_peso(self, peso):
        """Set the last weight competition."""
        self.peso = peso

    def get_salto(self):
        """Return the last jump competition."""
        return self.salto

    def set_salto(self, salto):
        """Set the last jump competition."""
        self.salto = salto

    def agregarEntrenador(self, entrenador2):
        """Add a coach unless one with the same dni is already registered."""
        if any(e.get_dni() == entrenador2.get_dni() for e in self.entrenadores):
            print("Ya existe este entrenador.")
        else:
            self.entrenadores.append(entrenador2)
            print("Se ha agregado un nuevo entrenador.")

    def agregarAlumnos(self, alumno2):
        """Add a student unless one with the same dni is already registered."""
        if any(a.get_dni() == alumno2.get_dni() for a in self.alumnos):
            print("Ya existe este alumno.")
        else:
            self.alumnos.append(alumno2)
            print("Se ha agregado un nuevo alumno.")

    def agregartrabajador(self, trabajador2):
        """Add a worker unless one with the same dni is already registered."""
        if any(t.get_dni() == trabajador2.get_dni() for t in self.trabajadores):
            print("Ya existe este trabajador.")
        else:
            self.trabajadores.append(trabajador2)
            print("Se ha agregado un nuevo trabajador.")

    def agregarentrenamiento(self, entrenamiento2):
        """Register a new training session, interactively attaching a coach
        and any number of students to it."""
        if any(e.get_nombre() == entrenamiento2.get_nombre() for e in self.entrenamientos):
            print("Ya existe este entrenamiento.")
            return
        print("Añadir entrenador: ")
        # Ask for coach dnis until a registered one is given.
        # NOTE(review): there is no way to abort this loop if the dni never
        # matches -- confirm whether an "exit" escape (as in the student
        # prompt) should be added here too.
        entrenador2, encontradoEntrenador = self.anadirEntrenadorEntrenamiento()
        while not encontradoEntrenador:
            print("No se ha añadido el entrenador al entrenamiento")
            entrenador2, encontradoEntrenador = self.anadirEntrenadorEntrenamiento()
        entrenamiento2.set_entrenador(entrenador2)
        print("Se ha añadido el entrenador al entrenamiento")
        print("Añadir alumnos: ")
        pedirAlumnos = "s"
        while pedirAlumnos != "n":
            alumno2, encontradoAlumno = self.anadirAlumnoEntrenaCompe()
            # "" is the "exit" sentinel: stop enrolling students instead of
            # passing the empty string on (which crashed the original).
            if alumno2 == "":
                break
            if encontradoAlumno:
                print("Se ha añadido el alumno al entrenamiento")
                entrenamiento2.agregarAlumnos(alumno2)
                pedirAlumnos = input("¿Quieres añadir un alumno mas?s/n")
            else:
                print("No se ha añadido el alumno al entrenamiento")
        self.entrenamientos.append(entrenamiento2)
        print("Se ha agregado un nuevo entrenamiento.")

    def borrarAlumno(self):
        """Delete the student whose dni the user types."""
        borrado = False
        dniAlumno = datos.pedirDni()
        for alumno in self.alumnos:
            if dniAlumno == alumno.get_dni():
                self.alumnos.remove(alumno)
                borrado = True
                break  # dnis are unique; also avoids mutating while iterating
        if borrado:
            print("Se ha borrado correctamente.")
        else:
            print("No se ha borrado correctamente.")

    def borrarEntrenador(self):
        """Delete the coach whose dni the user types."""
        borrado = False
        dniEntrenadores = datos.pedirDni()
        for entrenador in self.entrenadores:
            if dniEntrenadores == entrenador.get_dni():
                self.entrenadores.remove(entrenador)
                borrado = True
                break  # dnis are unique; also avoids mutating while iterating
        if borrado:
            print("Se ha borrado correctamente.")
        else:
            print("No se ha borrado correctamente.")

    def borrarTrabajador(self):
        """Delete the worker whose dni the user types."""
        borrado = False
        dniTrabajador = datos.pedirDni()
        for trabajador in self.trabajadores:
            if dniTrabajador == trabajador.get_dni():
                self.trabajadores.remove(trabajador)
                borrado = True
                break  # dnis are unique; also avoids mutating while iterating
        if borrado:
            print("Se ha borrado correctamente.")
        else:
            print("No se ha borrado correctamente.")

    def borrarEntrenamiento(self):
        """Delete the training session whose name the user types."""
        borrado = False
        nombreEntrenamiento = input("Dame el nombre del Entrenamiento: ")
        for entrenamiento in self.entrenamientos:
            if nombreEntrenamiento == entrenamiento.get_nombre():
                self.entrenamientos.remove(entrenamiento)
                borrado = True
                break  # names are unique; also avoids mutating while iterating
        if borrado:
            print("Se ha borrado correctamente.")
        else:
            print("No se ha borrado correctamente.")

    def anadirAlumnoEntrenaCompe(self):
        """Ask for student dnis until a registered one (or "exit") is typed.

        Returns ``(alumno, True)`` on a match and ``("", True)`` when the
        user aborts with "exit" -- callers must check for the "" sentinel.
        """
        alumno2 = ""
        encontrado = False
        while not encontrado:
            print("Escribe exit para salir.")
            dniAlumno = datos.pedirDni()
            for alumno in self.alumnos:
                if dniAlumno == alumno.get_dni():
                    alumno2 = alumno
                    encontrado = True
            if dniAlumno == "exit":
                encontrado = True
            if not encontrado:
                print("Este alumno no existe o no pertenece a esta competicion")
        return alumno2, encontrado

    def anadirEntrenadorEntrenamiento(self):
        """Ask once for a coach dni; return (coach, True) or ("", False)."""
        entrenador2 = ""
        encontrado = False
        dniEntrenador = datos.pedirDni()
        for entrenador in self.entrenadores:
            if dniEntrenador == entrenador.get_dni():
                entrenador2 = entrenador
                encontrado = True
        return entrenador2, encontrado

    def _realizarCompeticion(self, competicion, tipo, msgOk, msgFail, registrar):
        """Shared driver for the three competition kinds.

        Enrols students whose ``get_tipoCompeticion()`` equals *tipo*, stores
        the competition via *registrar*, draws a uniformly random winner,
        announces it and appends it to the victories file.
        """
        pedirAlumnos = "s"
        while pedirAlumnos != "n":
            alumno2, encontradoAlumno = self.anadirAlumnoEntrenaCompe()
            # "" is the "exit" sentinel: stop enrolling (the original crashed
            # here calling get_tipoCompeticion() on the empty string).
            if alumno2 == "":
                break
            if encontradoAlumno and alumno2.get_tipoCompeticion() == tipo:
                print(msgOk)
                competicion.agregarAlumnosParticipantes(alumno2)
                pedirAlumnos = input("¿Quieres añadir un alumno mas?s/n")
            else:
                print(msgFail)
        registrar(competicion)
        alumnosParticipantes = competicion.get_alumnosParticipantes()
        if not alumnosParticipantes:
            # Nothing to draw from; the original raised on the empty list.
            print("No hay participantes en la competicion.")
            return
        # randrange gives every participant the same probability; the original
        # randint(0, n) - 1 indexing gave the last participant a double chance.
        ganador = alumnosParticipantes[random.randrange(len(alumnosParticipantes))]
        competicion.set_ganador(ganador)
        print("El ganador de la competicion con id ", competicion.get_idCompeticion(), " es ", ganador.get_dni())
        # The log entry mirrors the printed tuple, as in the original.
        self.historialGanadores(("El ganador de la competicion con id ", competicion.get_idCompeticion(), " es ", ganador.get_dni()))

    def realizarCarrera(self, carrera):
        """Run a race ("carreras") competition."""
        self._realizarCompeticion(
            carrera, "carreras",
            "Se ha añadido el alumno a la competicion de carreras.",
            "No se ha añadido el alumno a la competicion de carreras",
            self.set_carrera)

    def realizarPeso(self, peso):
        """Run a weight ("peso") competition."""
        # Message typo fixed: "competicionde" -> "competicion de".
        self._realizarCompeticion(
            peso, "peso",
            "Se ha añadido el alumno a la competicion de peso.",
            "No se ha añadido el alumno a la competicion de peso.",
            self.set_peso)

    def realizarSalto(self, salto):
        """Run a jump ("salto") competition."""
        self._realizarCompeticion(
            salto, "salto",
            "Se ha añadido el alumno a la competicion de salto.",
            "No se ha añadido el alumno a la competicion de salto.",
            self.set_salto)

    def historialGanadores(self, ganador):
        """Append *ganador* to "vicorias.txt", keeping all previous entries.

        Fixes the original, where ``guardar`` was undefined (NameError) when
        the file did not exist yet and the handles were never closed/flushed.
        """
        guardar = ""  # previous file contents; stays "" on the first run
        try:
            with open("vicorias.txt") as victorias:
                guardar = victorias.read()
        except OSError:
            print("No hay victorias, se van a crear")
        with open("vicorias.txt", "w") as victorias:
            victorias.write(str(guardar) + "\n" + str(ganador))

    def historialUnGanador(self):
        """Print every stored victory that mentions the player id typed in."""
        jugadorId = input("Dame el id del jugador para ver su historial: ")
        with open("vicorias.txt") as victorias:
            for leerVictorias in victorias:
                # Substring match, as in the original.
                if leerVictorias.find(jugadorId) != -1:
                    print(leerVictorias)

    def verVictorias(self):
        """Print the whole victories history."""
        try:
            with open("vicorias.txt") as victorias:
                for leerVictorias in victorias:
                    print(leerVictorias)
        except OSError:
            print("No hay victorias, se van a crear")

    def mostrarAlumnos(self):
        """Print every student, one per indented line."""
        cadena = "Alumnos: "
        for alumno in self.alumnos:
            cadena = cadena + "\n\t" + str(alumno)
        print(cadena)

    def mostrarEntrenadores(self):
        """Print every coach, one per indented line."""
        cadena = "Entrenadores: "
        for entrenador in self.entrenadores:
            cadena = cadena + "\n\t" + str(entrenador)
        print(cadena)

    def mostrarTrabajadores(self):
        """Print every worker, one per indented line."""
        cadena = "Trabajadores: "
        for trabajador in self.trabajadores:
            cadena = cadena + "\n\t" + str(trabajador)
        print(cadena)

    def mostrarEntrenamientos(self):
        """Print every training session, one per indented line."""
        cadena = "Entrenamientos: "
        for entrenamiento in self.entrenamientos:
            cadena = cadena + "\n\t" + str(entrenamiento)
        print(cadena)

    def mostrarUnAlumno(self, dni):
        """Print the student matching *dni*.

        NOTE(review): compares the lowercased input against the stored dni
        verbatim -- confirm that dnis are stored lowercase.
        """
        cadena = "Alumno: "
        for alumno in self.alumnos:
            if dni.lower() == alumno.get_dni():
                cadena = cadena + "\n\t" + str(alumno)
        print(cadena)

    def mostrarUnEntrenador(self, dni):
        """Print the coach matching *dni* (same lowercase caveat as above)."""
        cadena = "Entrenadores: "
        for entrenador in self.entrenadores:
            if dni.lower() == entrenador.get_dni():
                cadena = cadena + "\n\t" + str(entrenador)
        print(cadena)

    def mostrarUnTrabajador(self, dni):
        """Print the worker matching *dni* (same lowercase caveat as above)."""
        cadena = "Trabajadores: "
        for trabajador in self.trabajadores:
            if dni.lower() == trabajador.get_dni():
                cadena = cadena + "\n\t" + str(trabajador)
        print(cadena)

    def mostrarUnEntrenamiento(self, nombre):
        """Print the training session whose name equals *nombre*."""
        cadena = "Entrenamientos: "
        for entrenamiento in self.entrenamientos:
            if nombre == entrenamiento.get_nombre():
                cadena = cadena + "\n\t" + str(entrenamiento)
        print(cadena)

    def mostrarNumAlumnosTipo(self):
        """Print how many students there are per competition type and which
        type predominates (ties are reported for every tied type)."""
        contarTipoCarrera = 0
        contarTipoSalto = 0
        contarTipoPeso = 0
        for alumno in self.alumnos:
            tipo = alumno.get_tipoCompeticion()
            if tipo == "carreras":
                contarTipoCarrera += 1
            elif tipo == "salto":
                contarTipoSalto += 1
            elif tipo == "peso":
                contarTipoPeso += 1
        print("Hay ", contarTipoCarrera, " Alumnos de carreras")
        print("Hay ", contarTipoSalto, " Alumnos de salto")
        print("Hay ", contarTipoPeso, " Alumnos de peso")
        if contarTipoCarrera >= contarTipoSalto and contarTipoCarrera >= contarTipoPeso:
            print("Los alumnos de carreras predominan en nuestra empresa")
        if contarTipoSalto >= contarTipoCarrera and contarTipoSalto >= contarTipoPeso:
            print("Los alumnos de salto predominan en nuestra empresa")
        if contarTipoPeso >= contarTipoCarrera and contarTipoPeso >= contarTipoSalto:
            print("Los alumnos de peso predominan en nuestra empresa")

    def mostrarNumEntrenadoresTipo(self):
        """Print how many coaches there are per competition type and which
        type predominates (ties are reported for every tied type)."""
        contarTipoCarrera = 0
        contarTipoSalto = 0
        contarTipoPeso = 0
        for entrenador in self.entrenadores:
            tipo = entrenador.get_tipoEntrenador()
            if tipo == "carreras":
                contarTipoCarrera += 1
            elif tipo == "salto":
                contarTipoSalto += 1
            elif tipo == "peso":
                contarTipoPeso += 1
        print("Hay ", contarTipoCarrera, " Entrenadores de carreras")
        print("Hay ", contarTipoSalto, " Entrenadores de salto")
        print("Hay ", contarTipoPeso, " Entrenadores de peso")
        if contarTipoCarrera >= contarTipoSalto and contarTipoCarrera >= contarTipoPeso:
            print("Los Entrenadores de carreras predominan en nuestra empresa")
        if contarTipoSalto >= contarTipoCarrera and contarTipoSalto >= contarTipoPeso:
            print("Los Entrenadores de salto predominan en nuestra empresa")
        if contarTipoPeso >= contarTipoCarrera and contarTipoPeso >= contarTipoSalto:
            print("Los Entrenadores de peso predominan en nuestra empresa")

    def __str__(self):
        """Render the whole company as a multi-line string."""
        cadena = str(self.nombre) + "\nEntrenadores: "
        for entrenador in self.entrenadores:
            cadena = cadena + "\n\t" + str(entrenador)
        cadena = cadena + "\nTrabajadores"
        for trabajador in self.trabajadores:
            cadena = cadena + "\n\t" + str(trabajador)
        cadena = cadena + "\nAlumnos"
        for alumno in self.alumnos:
            cadena = cadena + "\n\t" + str(alumno)
        cadena = cadena + "\nEntrenamientos"
        for entrenamiento in self.entrenamientos:
            cadena = cadena + "\n\t" + str(entrenamiento)
        cadena = cadena + "\nCarrera: " + str(self.carrera) + "\nPeso: " + str(self.peso) + "\nSalto: " + str(self.salto)
        return cadena
|
[
"rubenp98@hotmail.com"
] |
rubenp98@hotmail.com
|
a012af3522c8bd71947b29ca99cd9d2593829514
|
aad87bb44c736128d4eba57427a32f0c4c5342c8
|
/RoGCN.py
|
1cb57678724ce27d74287d018b1283caef5e5dde
|
[] |
no_license
|
zhouxianchen/military_project
|
d5efcbe29d26472fa8f11a2e0ff85f5d53ba4294
|
882ba4bdf8f623c1982c7859f6c698c0a16a3bf1
|
refs/heads/master
| 2023-09-02T05:53:44.992575
| 2021-11-22T01:35:17
| 2021-11-22T01:35:17
| 353,210,060
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,481
|
py
|
"""
(original method) This model is using the GCN with the procedure of original article.
"""
import time
import numpy as np
from copy import deepcopy
import torch
import torch.nn as nn
import torch.nn.functional as F
import torch.optim as optim
from DeepRobust.deeprobust.graph.utils import accuracy
from DeepRobust.deeprobust.graph.defense.pgd import PGD, prox_operators
class ProGCN:
    def __init__(self, model, args, device):
        """Wrap *model* (the backbone GNN) for joint adjacency/GNN training.

        args:   namespace of hyper-parameters (lr, lr_adj, epochs, ...)
        device: torch device the model (and later the estimator) is moved to
        """
        self.device = device
        self.args = args
        # Best validation metrics seen so far (accuracy maximised, loss
        # minimised) -- 0 and 10 are starting sentinels.
        self.best_val_acc = 0
        self.best_val_loss = 10
        # Snapshots of the best adjacency / model weights for final restore.
        self.best_graph = None
        self.weights = None
        self.estimator = None  # adjacency estimator, created in fit()
        self.model = model.to(device)
    def fit(self, features, adj, labels, idx_train, idx_val,feature_id):
        """Train the model, alternating ``args.outer_steps`` adjacency-estimator
        updates with ``args.inner_steps`` GNN updates per epoch (GNN-only when
        ``args.only_gcn``), then restore the weights that scored best on the
        validation set."""
        args = self.args
        self.optimizer = optim.Adam(self.model.parameters(),
                                    lr=args.lr, weight_decay=args.weight_decay)
        # Learnable (non-symmetric) estimate of the adjacency matrix.
        estimator = EstimateAdj(adj, symmetric=False).to(self.device)
        self.estimator = estimator
        self.optimizer_adj = optim.SGD(estimator.parameters(),
                                       momentum=0.9, lr=args.lr_adj)

        # Train model
        t_total = time.time()
        for epoch in range(args.epochs):
            if args.only_gcn:
                # Plain GCN training on the current adjacency estimate.
                for i in range(int(args.inner_steps)):
                    self.train_gcn(epoch, features, estimator.estimated_adj,
                                   labels, idx_train, idx_val,feature_id)
            else:
                # Outer loop refines the adjacency; inner loop refines the GNN.
                for i in range(int(args.outer_steps)):
                    self.train_adj(epoch, features, adj, labels,
                                   idx_train, idx_val,feature_id)
                for i in range(int(args.inner_steps)):
                    self.train_gcn(epoch, features, estimator.estimated_adj,
                                   labels, idx_train, idx_val,feature_id)

        print("Optimization Finished!")
        print("Total time elapsed: {:.4f}s".format(time.time() - t_total))
        print(args)

        # Testing
        print("picking the best model according to validation performance")
        self.model.load_state_dict(self.weights)
    def train_gcn(self, epoch, features, adj, labels, idx_train, idx_val,feature_id):
        """One GNN optimisation step on the estimator's normalised adjacency,
        snapshotting the graph/weights whenever validation accuracy improves
        or validation loss drops.

        NOTE(review): the *adj* argument is immediately overwritten by
        ``estimator.normalize()`` and *feature_id* is unused in this method.
        """
        args = self.args
        estimator = self.estimator
        adj = estimator.normalize()

        t = time.time()
        self.model.train()
        self.optimizer.zero_grad()

        output = self.model(features, adj)
        loss_train = F.nll_loss(output[idx_train], labels[idx_train])
        acc_train = accuracy(output[idx_train], labels[idx_train])
        loss_train.backward()
        self.optimizer.step()

        # Evaluate validation set performance separately,
        # deactivates dropout during validation run.
        self.model.eval()
        output = self.model(features, adj)

        loss_val = F.nll_loss(output[idx_val], labels[idx_val])
        acc_val = accuracy(output[idx_val], labels[idx_val])

        # Snapshot on best validation accuracy...
        if acc_val > self.best_val_acc:
            self.best_val_acc = acc_val
            self.best_graph = adj.detach()
            self.weights = deepcopy(self.model.state_dict())
            if args.debug:
                print('\t=== saving current graph/gcn, best_val_acc: %s' % self.best_val_acc.item())

        # ...and, independently, on best validation loss (either may win).
        if loss_val < self.best_val_loss:
            self.best_val_loss = loss_val
            self.best_graph = adj.detach()
            self.weights = deepcopy(self.model.state_dict())
            if args.debug:
                print(f'\t=== saving current graph/gcn, best_val_loss: %s' % self.best_val_loss.item())

        if args.debug:
            if epoch % 100 == 0:
                print('Epoch: {:04d}'.format(epoch+1),
                      'loss_train: {:.4f}'.format(loss_train.item()),
                      'acc_train: {:.4f}'.format(acc_train.item()),
                      'loss_val: {:.4f}'.format(loss_val.item()),
                      'acc_val: {:.4f}'.format(acc_val.item()),
                      'time: {:.4f}s'.format(time.time() - t))
def train_adj(self, epoch, features, adj, labels, idx_train, idx_val, feature_id):
estimator = self.estimator
args = self.args
if args.debug:
print("\n=== train_adj ===")
t = time.time()
estimator.train()
self.optimizer_adj.zero_grad()
loss_l1 = torch.norm(estimator.estimated_adj, 1)
loss_fro = torch.norm(estimator.estimated_adj - adj, p='fro')
normalized_adj = estimator.normalize()
if args.lambda_:
loss_smooth_feat = self.feature_smoothing(estimator.estimated_adj, feature_id)
else:
loss_smooth_feat = 0 * loss_l1
output = self.model(features, normalized_adj)
loss_gcn = F.nll_loss(output[idx_train], labels[idx_train])
acc_train = accuracy(output[idx_train], labels[idx_train])
loss_diffiential = loss_fro + args.gamma * loss_gcn + args.lambda_ * loss_smooth_feat
loss_diffiential.backward()
self.optimizer_adj.step()
total_loss = loss_fro \
+ args.gamma * loss_gcn \
+ args.alpha * loss_l1 \
estimator.estimated_adj.data.copy_(torch.clamp(
estimator.estimated_adj.data, min=0, max=1))
# Evaluate validation set performance separately,
# deactivates dropout during validation run.
self.model.eval()
normalized_adj = estimator.normalize()
output = self.model(features, normalized_adj)
loss_val = F.nll_loss(output[idx_val], labels[idx_val])
acc_val = accuracy(output[idx_val], labels[idx_val])
print('Epoch: {:04d}'.format(epoch+1),
'acc_train: {:.4f}'.format(acc_train.item()),
'loss_val: {:.4f}'.format(loss_val.item()),
'acc_val: {:.4f}'.format(acc_val.item()),
'time: {:.4f}s'.format(time.time() - t))
if acc_val > self.best_val_acc:
self.best_val_acc = acc_val
self.best_graph = normalized_adj.detach()
self.weights = deepcopy(self.model.state_dict())
if args.debug:
print(f'\t=== saving current graph/gcn, best_val_acc: %s' % self.best_val_acc.item())
if loss_val < self.best_val_loss:
self.best_val_loss = loss_val
self.best_graph = normalized_adj.detach()
self.weights = deepcopy(self.model.state_dict())
if args.debug:
print(f'\t=== saving current graph/gcn, best_val_loss: %s' % self.best_val_loss.item())
if args.debug:
if epoch % 1 == 0:
print('Epoch: {:04d}'.format(epoch+1),
'loss_fro: {:.4f}'.format(loss_fro.item()),
'loss_gcn: {:.4f}'.format(loss_gcn.item()),
'loss_feat: {:.4f}'.format(loss_smooth_feat.item()),
'delta_l1_norm: {:.4f}'.format(torch.norm(estimator.estimated_adj-adj, 1).item()),
'loss_l1: {:.4f}'.format(loss_l1.item()),
'loss_total: {:.4f}'.format(total_loss.item()),
)
def test(self, features, labels, idx_test):
print("\t=== testing ===")
self.model.eval()
adj = self.best_graph
if self.best_graph is None:
adj = self.estimator.normalize()
output = self.model(features, adj)
loss_test = F.nll_loss(output[idx_test], labels[idx_test])
acc_test = accuracy(output[idx_test], labels[idx_test])
print("\tTest set results:",
"loss= {:.4f}".format(loss_test.item()),
"accuracy= {:.4f}".format(acc_test.item()))
return acc_test.item()
def feature_smoothing(self, adj, X):
adj = (adj.t() + adj)/2
rowsum = adj.sum(1)
r_inv = rowsum.flatten()
D = torch.diag(r_inv)
L = D - adj
r_inv = r_inv + 1e-3
r_inv = r_inv.pow(-1/2).flatten()
r_inv[torch.isinf(r_inv)] = 0.
r_mat_inv = torch.diag(r_inv)
# L = r_mat_inv @ L
L = r_mat_inv @ L @ r_mat_inv
XLXT = torch.matmul(torch.matmul(X.t(), L), X)
loss_smooth_feat = torch.trace(XLXT)
return loss_smooth_feat
class EstimateAdj(nn.Module):
    """A learnable dense adjacency matrix, initialised from a given graph.

    ``estimated_adj`` is an (n, n) trainable parameter seeded with the
    observed adjacency; ``normalize`` returns the symmetrically normalized
    matrix D^{-1/2} (A + I) D^{-1/2}.
    """
    def __init__(self, adj, symmetric=False, device='cpu'):
        super(EstimateAdj, self).__init__()
        size = len(adj)
        self.estimated_adj = nn.Parameter(torch.FloatTensor(size, size))
        self._init_estimation(adj)
        self.symmetric = symmetric
        self.device = device

    def _init_estimation(self, adj):
        # Seed the trainable parameter with the observed graph (no gradients).
        with torch.no_grad():
            self.estimated_adj.data.copy_(adj)

    def forward(self):
        return self.estimated_adj

    def normalize(self):
        # Optionally symmetrise, add self-loops, then degree-normalize.
        if self.symmetric:
            adj = self.estimated_adj + self.estimated_adj.t()
        else:
            adj = self.estimated_adj
        return self._normalize(adj + torch.eye(adj.shape[0]))

    def _normalize(self, mx):
        d_inv_sqrt = mx.sum(1).pow(-1 / 2).flatten()
        d_inv_sqrt[torch.isinf(d_inv_sqrt)] = 0.
        d_mat = torch.diag(d_inv_sqrt)
        return d_mat @ mx @ d_mat
|
[
"zhouxianchen@163.com"
] |
zhouxianchen@163.com
|
5a34ada849ffd1501419485bd5be65707f1ddfb2
|
4fd9b2e78a989543dd9357f3e5c890e55f6da6aa
|
/raspi/light/blink.py
|
1030f3f9ba2193c16bd3a220efae8052e1d728f0
|
[] |
no_license
|
zyndagj/coda-at-tacc-summer-2015
|
5c3b35a2525844ee71b9c4a77ab86c8da8f9934f
|
ebe0cbe79245995a556861beaf7443f346117abb
|
refs/heads/master
| 2023-03-16T20:35:47.903079
| 2015-07-31T22:57:04
| 2015-07-31T22:57:04
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 659
|
py
|
#!/bin/env python
import RPi.GPIO as GPIO
import time
# blinking function
def blink(pin):
    """Pulse `pin` once: drive it high (3.3V) for a second, then low for a second."""
    for level in (GPIO.HIGH, GPIO.LOW):
        GPIO.output(pin, level)
        time.sleep(1)
    return
# Use BCM pin numbers
GPIO.setmode(GPIO.BCM)
# Set up GPIO25 output channel
GPIO.setup(25, GPIO.OUT)
try:
    # Blink GPIO25 5 times
    # ### What do you think happens if you modify the numbers in range(0, 5)?
    for i in range(0, 5):
        blink(25)
        # FIX: print() call form works under both Python 2 and Python 3;
        # the original bare `print '...'` statement is Python-2 only.
        print('blink #' + str(i + 1))
finally:
    # Reset all the pins when we are done — even if the loop is interrupted
    # (e.g. Ctrl-C), so the GPIO state is not left dangling.
    GPIO.cleanup()
|
[
"jychuah@gmail.com"
] |
jychuah@gmail.com
|
d9ba26aa4bf18f381093c342ba9d1f535b929b22
|
bf55dd4139f73022dc2630b60a81e6737796b6f9
|
/exercises/chapter02-exercises.py
|
8fa6e1e8cc6f7ef1bb5c046bd85bd2e5b0d3d6f4
|
[] |
no_license
|
serenascalzi/python-specialization
|
92210cf3a11f3617be6ded5ddc83805c21e1ae14
|
605fd6665320adfd000a826a3ed3346db97e0bec
|
refs/heads/master
| 2020-12-29T07:26:50.993232
| 2020-02-05T18:10:02
| 2020-02-05T18:10:02
| 175,999,628
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 567
|
py
|
# chapter 2
# variables, expressions, and statements
# A bare expression: evaluated and then discarded when run as a script.
5
x = 5
x + 1  # 6
# welcome program: echo the user's name back
name = input('Enter your name:\n')
print('Hello ', name)
# pay program: gross pay = hours * hourly rate
hours = input('Enter Hours:\n')
rate = input('Enter Rate:\n')
pay = float(hours) * float(rate)
print('Pay:', pay)
# expression values/types
width = 17
height = 12.0
width // 2   # 8, int (floor division of two ints)
width / 2.0  # 8.5, float
height / 3   # 4.0, float (true division always yields float)
1 + 2 * 5    # 11, int (multiplication binds tighter than addition)
# temperature program: Celsius -> Fahrenheit
tempC = input('Enter Celsius Temperature:\n')
tempF = (float(tempC) * 9.0 / 5.0) + 32.0
print('Fahrenheit Temperature:', tempF)
|
[
"serenascalzi@alumni.iu.edu"
] |
serenascalzi@alumni.iu.edu
|
e37b015e717086548816dfe25e19163649bfdec6
|
9cedc2db68f2a3d65cdfb79cea39a62acaa9d9a0
|
/firstpython.py
|
9bfc477bc9425828381b24b96e3f31cd293f3d1c
|
[] |
no_license
|
nadianasrin/testrepo
|
fd960191da67ce33d0337702331f532d2e157e53
|
5154861c61cf33fd1849fe7fa0460b5e896411a0
|
refs/heads/master
| 2022-12-07T19:12:46.917024
| 2020-09-03T09:49:44
| 2020-09-03T09:49:44
| 292,528,822
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 51
|
py
|
# Display the output
# FIX: the original `print (*new python file * )` was a SyntaxError —
# the message must be a quoted string literal.
print("new python file")
|
[
"noreply@github.com"
] |
nadianasrin.noreply@github.com
|
15ff734a6e07ceccad08434f652c55eb4bd20655
|
d818e3a532f57f4f27db1aa9b795f82e91950371
|
/KPCB - Answer & Tests. Andrei Lyskov/KPCB_UnitTest.py
|
a08d109054d0661321a1f7583d03edbdef33a73e
|
[] |
no_license
|
Andreilys/Thinkful-Data-Science
|
0c83420ce2abb234c681a5f3aa49feec597adfc0
|
de046e7e4377c562f94923e6fa1c20849bfa9037
|
refs/heads/master
| 2020-04-17T08:36:54.137529
| 2017-12-19T22:48:44
| 2017-12-19T22:48:44
| 67,144,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,437
|
py
|
import unittest
from KPCB import hashMap
# This class tests all the given methods within the hashMap class
class hashMapTests(unittest.TestCase):
    # NOTE(review): setUp here RETURNS a pre-populated map and is invoked
    # explicitly by each test, instead of unittest's usual implicit
    # setUp/self-attribute convention — confirm this is intentional.
    def setUp(self):
        # Fixture: capacity-8 map holding three items.
        newHash = hashMap(8)
        newHash.set("Blueberries", 5)
        newHash.set("Apples", 9)
        newHash.set("Lemons", 10)
        return newHash
    def testCreatingHashMap(self):
        # The backing array should match the requested capacity.
        newHash = self.setUp()
        self.assertEqual(len(newHash.data), 8)
    def testSettingValue(self):
        # set() returns True on success, False once the map is full.
        newHash = hashMap(2)
        newHash.set("Blueberries", 5)
        self.assertEqual(newHash.set("Lemons", 10), True)
        # Testing whether setting after the hashMap is full returns False
        self.assertEqual(newHash.set("Melons", 10), False)
        self.assertEqual(newHash.itemCount, 2)
        self.assertEqual(newHash.get("Blueberries"), 5)
    def testGettingValue(self):
        newHash = self.setUp()
        self.assertEqual(newHash.get("Blueberries"), 5)
        # Testing a false input: missing keys yield None
        self.assertEqual(newHash.get("Melons"), None)
    def testDeletingValue(self):
        newHash = self.setUp()
        # delete() returns the value that was removed.
        self.assertEqual(newHash.delete("Blueberries"), 5)
        # Testing a false input
        self.assertEqual(newHash.get("Melons"), None)
    def testLoadMethod(self):
        # 3 items in 8 slots -> load factor 3/8.
        newHash = self.setUp()
        self.assertEqual(newHash.load(), float(3)/8)
def main():
    # Entry point: hand control to unittest's test discovery/runner.
    unittest.main()
if __name__ == '__main__':
    main()
|
[
"andreilyskov@gmail.com"
] |
andreilyskov@gmail.com
|
defa9d815ef1032c95ea48ccc4d79e4180de74ad
|
5c8346597e3690eec3939f56f233eb5fafd336bc
|
/test/test_search_criteria_request_dto.py
|
577599052c16581d540ae2942752b116df1bd679
|
[] |
no_license
|
NVE/python-varsom-regobs-client
|
be44befd04ca07058f8b46ec69bf1659d3ee422b
|
8bb7fc06d2f6da36a5fa4a475d4f036ebe3cfd72
|
refs/heads/master
| 2022-12-27T19:09:54.761318
| 2020-06-24T08:56:15
| 2020-06-24T08:56:15
| 274,619,205
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,107
|
py
|
# coding: utf-8
"""
RegObs API
## Introduction RegObs is a tool for collecting observations and events related to natural hazards. It is currently used by the Norwegian flood, landslide and avalanche warning service in Norway, but the data is openly available for anyone through this API. Regobs has been developed by the Norwegian Water resources and Energy Directorate (NVE), in collaboration with the Norwegian Meteorological Institute (MET) and the Norwegian Public Roads Administration (NPRA). You can check out our representation of the data at [regobs.no](http://regobs.no). ## Authentication Some endpoints require an api key. You can get an API key by sending an email to [raek@nve.no](mailto:raek@nve.no?subject=RegObs%20API%20Key). To use the api key with the swagger ui, fill in the api\\_key input above. It should then be included with every request in the `regObs_apptoken` header. ## Getting started Get the last 10 observations using python: ```python import requests r = requests.post('https://api.regobs.no/v4/Search', data={'NumberOfRecords': 10}, headers={'Content-Type': 'application/json'} ) data = r.json() print(len(data)) # 10 ``` # noqa: E501
OpenAPI spec version: v4
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import unittest
import varsom_regobs_client
from models.search_criteria_request_dto import SearchCriteriaRequestDto # noqa: E501
from varsom_regobs_client.rest import ApiException
class TestSearchCriteriaRequestDto(unittest.TestCase):
    """SearchCriteriaRequestDto unit test stubs (swagger-codegen generated)."""
    def setUp(self):
        # No fixtures needed yet for this generated stub.
        pass
    def tearDown(self):
        pass
    def testSearchCriteriaRequestDto(self):
        """Test SearchCriteriaRequestDto"""
        # FIXME: construct object with mandatory attributes with example values
        # model = varsom_regobs_client.models.search_criteria_request_dto.SearchCriteriaRequestDto()  # noqa: E501
        pass
if __name__ == '__main__':
    # Allow running this generated stub module directly.
    unittest.main()
|
[
"jorgen.kvalberg@gmail.com"
] |
jorgen.kvalberg@gmail.com
|
8f96f5219407d27edd6ef67496357be324ace1a0
|
58bca29946133281eca5bf1f255b10d497ae2f13
|
/ros_ws/src/servo_visao/scripts/camera_node_mask.py
|
6324e4a8a044df3ca668f34823b351204979f09a
|
[] |
no_license
|
AlexanderVieira/robotics
|
d3656f72f0f375d0229bef923d15d6ffe3d8750f
|
0c54b200ccbc702f807212cfe5c40b6ca865b16f
|
refs/heads/main
| 2023-03-31T05:15:19.378479
| 2021-04-09T01:53:26
| 2021-04-09T01:53:26
| 355,740,229
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,525
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#############################################
# #
# Node to control the simulator TurtleSim. #
# This node receives a goal coordinate from #
# topc /goal and controls the turtle. #
# #
# Author: Adalberto Oliveira #
# Autonomous Vehicle - Infnet #
# Version: 1.0 #
# Date: 02-03-2021 #
# #
#############################################
# importing libraries
import rospy, time, sys, cv2
import numpy as np
import image_lib as img
from geometry_msgs.msg import Pose2D
from sensor_msgs.msg import Image
from cv_bridge import CvBridge, CvBridgeError
def camera_main():
    """
    Main loop of the camera node: grabs frames from the global capture
    device, builds one binary mask per configured colour range
    (orange/green), extracts centroid and base coordinates, shows debug
    windows, and publishes the raw frame on the `image_raw` topic.
    """
    # Global variables (initialised at module import time, below)
    global cap
    global mask_h
    global mask_l
    # Initializing ros node
    rospy.init_node('camera_node', anonymous=True) # node name
    # Publishers
    pub_image = rospy.Publisher('image_raw', Image, queue_size=10) # send control signals
    # control rate — NOTE(review): original comment said "15H" but the code
    # runs the loop at 30 Hz; confirm the intended rate.
    rate = rospy.Rate(30)
    pub_img = Image()
    # main loop
    while not rospy.is_shutdown():
        print('Camera node running ok!')
        #get_img(cap)
        cv_image, pub_img = img.get_img_ros(cap)
        try:
            # Mask 0: orange ("laranja"); mask 1: green ("verde").
            mask_laranja = img.get_mask(cv_image,
                                        mask_l[0], mask_h[0],
                                        im_blur=True)
            mask_verde = img.get_mask(cv_image,
                                      mask_l[1], mask_h[1],
                                      im_blur=True)
            # Centroid of each colour blob, annotated onto the frame.
            cent_l, img_cont = img.get_centroid(cv_image,
                                                mask_laranja,
                                                put_text=True,
                                                drawn_contour=False)
            cent_v, img_cont = img.get_centroid(img_cont,
                                                mask_verde,
                                                put_text=True,
                                                drawn_contour=False)
            # Base (bottom) point of each blob.
            base_l, img_cont = img.get_base(img_cont, mask_laranja, put_text=True)
            base_v, img_cont = img.get_base(img_cont, mask_verde, put_text=True)
        except:
            # On any detection failure fall back to zeroed coordinates and
            # the raw frame. NOTE(review): bare except hides real errors —
            # consider narrowing the exception type.
            cent_v = [0, 0]
            cent_l = [0, 0]
            base_v = [0, 0]
            base_l = [0, 0]
            img_cont = cv_image
        cv2.namedWindow('Original')
        cv2.imshow('Original', cv_image)
        print(cent_v, cent_l)
        print(base_v, base_l)
        cv2.waitKey(5)
        cv2.namedWindow('Centroides')
        cv2.imshow('Centroides', img_cont)
        pub_image.publish(pub_img)
        rate.sleep()
# loading params from rosparam
num_masks = rospy.get_param('/num_masks')
# creating masks: one low/high HSV threshold triple per configured mask
mask_h = np.empty([num_masks, 3], dtype=np.uint8)
mask_l = np.empty([num_masks, 3], dtype=np.uint8)
for i in range(0, num_masks):
    mask_l[i, :] = rospy.get_param('/mask_'+str(i+1)+'/low')
    mask_h[i, :] = rospy.get_param('/mask_'+str(i+1)+'/high')
video = int(sys.argv[1])  # webcam index taken from the command line
# Create the video capture port ("Criando a porta de captura de video")
cap = cv2.VideoCapture(video)
if __name__ == '__main__':
    camera_main()
|
[
"alexander.silva@al.infnet.edu.br"
] |
alexander.silva@al.infnet.edu.br
|
738c6fc1c3e28f68432e3d844956b5ba271481b0
|
fa1a84f571715bf378e13d40ab2f3682466146d4
|
/scripts/payment.py
|
defc26cd1fdbb477717eeecf0f756bc9a4293e6b
|
[] |
no_license
|
Drucia/zsbd-shop
|
ea82c0c29633429e60ef5a8e3d75489cb42b46a3
|
53fc0f10d7acf5d03fa662fa00c8fbbbaf0db839
|
refs/heads/master
| 2023-02-20T13:09:49.103214
| 2021-01-12T18:15:59
| 2021-01-12T18:15:59
| 304,911,722
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,319
|
py
|
import random
from random import randrange
import time
def generateBankAccount(TYPEOFPAYMENTID):
    """Return a random 26-digit account number as a string.

    Payment type 6 has no bank account, so the literal string "null" is
    returned instead (it is spliced verbatim into the generated SQL).
    """
    if TYPEOFPAYMENTID == 6:
        return "null"
    digits = [str(random.randint(0, 9)) for _ in range(26)]
    return "".join(digits)
# Scratch/legacy SQL fragments and draws, defined at import time.
# NOTE(review): none of these module-level values are used by the generator
# below — the get* helper functions build their own fragments; confirm these
# can be removed.
EXECUTIONDATE = 'Select RECEIPTDATE from "Order" INNER JOIN "Order" ON "Order".paymentid = payment.paymentID;'
CONFIRMED = 'Select PAID from "Order" INNER JOIN "Order" ON "Order".paymentid = payment.paymentID;'
TYPEOFPAYMENTID = random.randint(1,6)  # one random draw at import; superseded per-row in getPayment
ACCOUNTNUMBER = generateBankAccount(TYPEOFPAYMENTID)
CURRENTPRICE = '(SELECT CurrentPrice FROM Product AS pr Inner Join OrderDetails AS od ON pr.ProductID = od.ProductID Inner Join Order AS o ON od.OrderID = o.OrderID Inner Join On o.OrderID = pa.OrderID)'
QUANTITY = 'SELECT QUANTITY FROM ORDERDETAIL AS od INNER JOIN '
def getExecutionDate(paymentid):
    """SQL subquery: the order's receipt date plus a random offset of up to 14 days."""
    return ('(Select RECEIPTDATE + (1/1440 * dbms_random.value(0, 14*24*60)) '
            'from "ORDER" where paymentid = {})'.format(paymentid))
def getConfirmed(paymentid):
    """SQL subquery: whether the order behind `paymentid` is marked PAID."""
    return '(Select PAID from "ORDER" WHERE paymentID = {})'.format(paymentid)
def getCost(paymentid):
    """SQL subquery: total cost (price x quantity) of the order behind `paymentid`."""
    order_lookup = '(SELECT ORDERID FROM "ORDER" WHERE PAYMENTID = {})'.format(paymentid)
    return ('(SELECT SUM(P.CurrentPrice * OD.QUANTITY) FROM ORDERDETAILS OD '
            'JOIN PRODUCT P ON OD.PRODUCTID = P.PRODUCTID '
            'WHERE OD.ORDERID = ' + order_lookup + ')')
def str_time_prop(start, end, format):
    """Return a random timestamp string between `start` and `end`.

    Both bounds are strings in `format`; the result uses the same format.
    """
    stime = time.mktime(time.strptime(start, format))
    etime = time.mktime(time.strptime(end, format))
    # FIX: time.mktime returns floats, and random.randrange no longer accepts
    # non-integer arguments on modern Python (deprecated 3.10, rejected later)
    # — convert to int explicitly.
    return time.strftime(format, time.localtime(randrange(int(stime), int(etime))))
def random_date(start, end):
    # Convenience wrapper: random timestamp between start and end, using the
    # fixed '%Y-%m-%d %I:%M:%S' (12-hour) format expected by the generator.
    return str_time_prop(start, end, '%Y-%m-%d %I:%M:%S')
def getPayment(paymentid):
    """Build the comma-separated VALUES fragment for one PAYMENT row:
    cost subquery, account number, confirmed subquery, execution-date
    subquery, and a freshly drawn payment-type id."""
    payment_type = random.randint(1, 6)
    account = generateBankAccount(payment_type)
    pieces = [getCost(paymentid), account, getConfirmed(paymentid),
              getExecutionDate(paymentid), str(payment_type)]
    return ", ".join(pieces)
# Generate the bulk-load script: one INSERT per payment row, ids 1..100202.
with open('Payment.sql', 'w') as file:
    # for x in range(160000):
    for x in range(100202):
        file.write("INSERT INTO PAYMENT (COST,ACCOUNTNUMBER,CONFIRMED,EXECUTIONDATE,TYPEOFPAYMENTID) VALUES ( "+ getPayment(x+1) + ");\n")
# INSERT INTO PAYMENT (ACCOUNTNUMBER,TYPEOFPAYMENTID)
# VALUES (generateBankAccount(TYPEOFPAYMENTID),TYPEOFPAYMENTID);
|
[
"ola.druciak@gmail.com"
] |
ola.druciak@gmail.com
|
3302fe355169f3eb2ff9c8330a0bc8e88b511176
|
1be3db9efa7c1a087969cf0b048e43a93c1be62e
|
/decimal-hexadecimal.py
|
bc0f7f48841b09e654b62b14a01757114818e3ff
|
[] |
no_license
|
Hezel254/python_projects
|
358fd7e37282b0e84b727bf0c962f3313e8517f6
|
793b6714785f432c26ea3baa23aabaaf5ffb1b82
|
refs/heads/master
| 2020-08-18T06:12:21.730600
| 2020-02-25T09:09:24
| 2020-02-25T09:09:24
| 215,756,648
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 58
|
py
|
# Demonstrate decimal -> hexadecimal -> decimal round-tripping.
a = 1234
hex_a = hex(a)
for shown in (hex_a, int(hex_a, 16)):
    print(shown)
|
[
"hezelandahwa@gmail.com"
] |
hezelandahwa@gmail.com
|
dfe884cc74537bf3ab3e2f32315819705edd026e
|
f3494a6910d6015d11729969a29536cb6329eedb
|
/nova/tests/hyperv/stubs/test_hypervapi_HyperVAPITestCase_test_pause_already_paused_wmi.py
|
6b7d5ef9875622781467710ad2e6d145a6100fb1
|
[
"Apache-2.0"
] |
permissive
|
alexpilotti/nova
|
2baf303fe969447aeebc686697f864c991b15825
|
ec7e9286877b02627276597880c9388dabe2c8d3
|
refs/heads/master
| 2021-01-16T22:07:10.512879
| 2013-01-07T20:36:46
| 2013-01-07T20:36:46
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 145,062
|
py
|
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
'''
This file contains auto generated mock classes and functions.
'''
def WMI(computer='', impersonation_level='', authentication_level='',
    authority='', privileges='', moniker='', wmi=None, namespace='', suffix='',
    user='', password='', find_classes=False, debug=False):
    """Auto-generated stand-in for ``wmi.WMI()``.

    Each recognised moniker replays a recorded sequence of mock objects.
    Module-level ``_WMI_count_N`` globals count how many times each moniker
    has been requested, so successive calls return successive recordings.
    NOTE(review): generated code — requesting a moniker more times than was
    recorded leaves ``v`` unbound (UnboundLocalError); unknown monikers
    return None. The ``ret_value`` locals are dead code from the generator.
    """
    if moniker == '//./Root/Microsoft/Windows/Storage':
        ret_value = None
        global _WMI_count_0
        if not '_WMI_count_0' in globals():
            _WMI_count_0 = 0
        if _WMI_count_0 == 0:
            v = _wmi_namespace(2)
        elif _WMI_count_0 == 1:
            v = _wmi_namespace(7)
        _WMI_count_0 += 1
        return v
    elif moniker == '//./root/wmi':
        ret_value = None
        global _WMI_count_1
        if not '_WMI_count_1' in globals():
            _WMI_count_1 = 0
        if _WMI_count_1 == 0:
            v = _wmi_namespace(3)
        elif _WMI_count_1 == 1:
            v = _wmi_namespace(8)
        _WMI_count_1 += 1
        return v
    elif moniker == '//./root/cimv2':
        ret_value = None
        global _WMI_count_2
        if not '_WMI_count_2' in globals():
            _WMI_count_2 = 0
        if _WMI_count_2 == 0:
            v = _wmi_namespace(4)
        elif _WMI_count_2 == 1:
            v = _wmi_namespace(378)
        _WMI_count_2 += 1
        return v
    elif moniker == '//./root/virtualization':
        ret_value = None
        global _WMI_count_3
        if not '_WMI_count_3' in globals():
            _WMI_count_3 = 0
        if _WMI_count_3 == 0:
            v = _wmi_namespace(9)
        elif _WMI_count_3 == 1:
            v = _wmi_namespace(11)
        _WMI_count_3 += 1
        return v
    elif moniker ==\
            u'//HV12OSDEMO1/root/virtualization:Msvm_StorageJob.InstanceID="1FFFA674-9739-4475-806E-B8A59D599D41"':
        ret_value = None
        global _WMI_count_4
        if not '_WMI_count_4' in globals():
            _WMI_count_4 = 0
        if _WMI_count_4 == 0:
            v = _wmi_object(14)
        elif _WMI_count_4 == 1:
            v = _wmi_object(15)
        elif _WMI_count_4 == 2:
            v = _wmi_object(16)
        elif _WMI_count_4 == 3:
            v = _wmi_object(17)
        elif _WMI_count_4 == 4:
            v = _wmi_object(18)
        elif _WMI_count_4 == 5:
            v = _wmi_object(19)
        elif _WMI_count_4 == 6:
            v = _wmi_object(20)
        elif _WMI_count_4 == 7:
            v = _wmi_object(21)
        elif _WMI_count_4 == 8:
            v = _wmi_object(22)
        elif _WMI_count_4 == 9:
            v = _wmi_object(23)
        _WMI_count_4 += 1
        return v
    elif moniker ==\
            u'//HV12OSDEMO1/root/virtualization:Msvm_StorageJob.InstanceID="8D5B2476-1065-4F21-A487-7D72B201AFC0"':
        ret_value = None
        global _WMI_count_5
        if not '_WMI_count_5' in globals():
            _WMI_count_5 = 0
        if _WMI_count_5 == 0:
            v = _wmi_object(26)
        elif _WMI_count_5 == 1:
            v = _wmi_object(27)
        elif _WMI_count_5 == 2:
            v = _wmi_object(28)
        elif _WMI_count_5 == 3:
            v = _wmi_object(29)
        elif _WMI_count_5 == 4:
            v = _wmi_object(30)
        elif _WMI_count_5 == 5:
            v = _wmi_object(31)
        elif _WMI_count_5 == 6:
            v = _wmi_object(32)
        elif _WMI_count_5 == 7:
            v = _wmi_object(33)
        elif _WMI_count_5 == 8:
            v = _wmi_object(34)
        _WMI_count_5 += 1
        return v
    elif moniker ==\
            u'//HV12OSDEMO1/root/virtualization:Msvm_ConcreteJob.InstanceID="AED4B786-5F52-41FB-904D-FF0324976F5D"':
        ret_value = None
        global _WMI_count_6
        if not '_WMI_count_6' in globals():
            _WMI_count_6 = 0
        if _WMI_count_6 == 0:
            v = _wmi_object(355)
        elif _WMI_count_6 == 1:
            v = _wmi_object(356)
        elif _WMI_count_6 == 2:
            v = _wmi_object(357)
        elif _WMI_count_6 == 3:
            v = _wmi_object(358)
        elif _WMI_count_6 == 4:
            v = _wmi_object(359)
        elif _WMI_count_6 == 5:
            v = _wmi_object(360)
        _WMI_count_6 += 1
        return v
    elif moniker ==\
            u'//HV12OSDEMO1/root/virtualization:Msvm_ConcreteJob.InstanceID="8B33221D-9E8A-41B9-96FB-DDAC49D5E8B3"':
        v = _wmi_object(364)
        return v
    elif moniker ==\
            u'//HV12OSDEMO1/root/virtualization:Msvm_ConcreteJob.InstanceID="D3ECBA7D-C750-40AB-B11F-8658FF528483"':
        ret_value = None
        global _WMI_count_8
        if not '_WMI_count_8' in globals():
            _WMI_count_8 = 0
        if _WMI_count_8 == 0:
            v = _wmi_object(386)
        elif _WMI_count_8 == 1:
            v = _wmi_object(387)
        _WMI_count_8 += 1
        return v
    elif moniker ==\
            u'//HV12OSDEMO1/root/virtualization:Msvm_ConcreteJob.InstanceID="D6B00745-2CCD-4A58-8B51-0AA585932392"':
        ret_value = None
        global _WMI_count_9
        if not '_WMI_count_9' in globals():
            _WMI_count_9 = 0
        if _WMI_count_9 == 0:
            v = _wmi_object(408)
        elif _WMI_count_9 == 1:
            v = _wmi_object(409)
        _WMI_count_9 += 1
        return v
class _wmi_namespace(object):
    """Auto-generated mock of a WMI namespace.

    ``instance_id`` selects which recorded namespace this object plays back.
    Properties mimic WMI class attributes; per-property ``_*_count_*``
    instance attributes step through the recorded sequence of results on
    repeated access. Unknown ids/queries fall through and return None.
    """
    def __init__(self, instance_id=1, *args, **kwargs):
        self.__instance_id__ = instance_id
    @property
    def __id__(self):
        return self.__instance_id__
    @property
    def Win32_OperatingSystem(self):
        if self.__id__ == 4:
            v = _wmi_class(5)
            return v
    @Win32_OperatingSystem.setter
    def Win32_OperatingSystem(self, value):
        pass
    @property
    def Msvm_ResourceAllocationSettingData(self):
        if self.__id__ == 9:
            ret_value = None
            if not hasattr(self,
                    '_Msvm_ResourceAllocationSettingData_count_0'):
                self._Msvm_ResourceAllocationSettingData_count_0 = 0
            if self._Msvm_ResourceAllocationSettingData_count_0 == 0:
                v = _wmi_class(65)
            elif self._Msvm_ResourceAllocationSettingData_count_0 == 1:
                v = _wmi_class(159)
            elif self._Msvm_ResourceAllocationSettingData_count_0 == 2:
                v = _wmi_class(254)
            self._Msvm_ResourceAllocationSettingData_count_0 += 1
            return v
    @Msvm_ResourceAllocationSettingData.setter
    def Msvm_ResourceAllocationSettingData(self, value):
        pass
    @property
    def Msvm_VirtualSystemManagementService(self):
        if self.__id__ == 9:
            ret_value = None
            if not hasattr(self,
                    '_Msvm_VirtualSystemManagementService_count_0'):
                self._Msvm_VirtualSystemManagementService_count_0 = 0
            if self._Msvm_VirtualSystemManagementService_count_0 == 0:
                v = _wmi_class(35)
            elif self._Msvm_VirtualSystemManagementService_count_0 == 1:
                v = _wmi_class(154)
            elif self._Msvm_VirtualSystemManagementService_count_0 == 2:
                v = _wmi_class(247)
            elif self._Msvm_VirtualSystemManagementService_count_0 == 3:
                v = _wmi_class(342)
            elif self._Msvm_VirtualSystemManagementService_count_0 == 4:
                v = _wmi_class(348)
            self._Msvm_VirtualSystemManagementService_count_0 += 1
            return v
        elif self.__id__ == 11:
            ret_value = None
            if not hasattr(self,
                    '_Msvm_VirtualSystemManagementService_count_1'):
                self._Msvm_VirtualSystemManagementService_count_1 = 0
            if self._Msvm_VirtualSystemManagementService_count_1 == 0:
                v = _wmi_class(370)
            elif self._Msvm_VirtualSystemManagementService_count_1 == 1:
                v = _wmi_class(381)
            self._Msvm_VirtualSystemManagementService_count_1 += 1
            return v
    @Msvm_VirtualSystemManagementService.setter
    def Msvm_VirtualSystemManagementService(self, value):
        pass
    @property
    def Msvm_VirtualSystemGlobalSettingData(self):
        if self.__id__ == 9:
            v = _wmi_class(37)
            return v
    @Msvm_VirtualSystemGlobalSettingData.setter
    def Msvm_VirtualSystemGlobalSettingData(self, value):
        pass
    @property
    def Msvm_ComputerSystem(self):
        if self.__id__ == 9:
            ret_value = None
            if not hasattr(self, '_Msvm_ComputerSystem_count_0'):
                self._Msvm_ComputerSystem_count_0 = 0
            if self._Msvm_ComputerSystem_count_0 == 0:
                v = _wmi_class(10)
            elif self._Msvm_ComputerSystem_count_0 == 1:
                v = _wmi_class(40)
            elif self._Msvm_ComputerSystem_count_0 == 2:
                v = _wmi_class(346)
            elif self._Msvm_ComputerSystem_count_0 == 3:
                v = _wmi_class(352)
            elif self._Msvm_ComputerSystem_count_0 == 4:
                v = _wmi_class(365)
            self._Msvm_ComputerSystem_count_0 += 1
            return v
        elif self.__id__ == 11:
            ret_value = None
            if not hasattr(self, '_Msvm_ComputerSystem_count_1'):
                self._Msvm_ComputerSystem_count_1 = 0
            if self._Msvm_ComputerSystem_count_1 == 0:
                v = _wmi_class(361)
            elif self._Msvm_ComputerSystem_count_1 == 1:
                v = _wmi_class(368)
            elif self._Msvm_ComputerSystem_count_1 == 2:
                v = _wmi_class(376)
            elif self._Msvm_ComputerSystem_count_1 == 3:
                v = _wmi_class(379)
            elif self._Msvm_ComputerSystem_count_1 == 4:
                v = _wmi_class(383)
            elif self._Msvm_ComputerSystem_count_1 == 5:
                v = _wmi_class(388)
            self._Msvm_ComputerSystem_count_1 += 1
            return v
    @Msvm_ComputerSystem.setter
    def Msvm_ComputerSystem(self, value):
        pass
    @property
    def MSVM_ComputerSystem(self):
        if self.__id__ == 9:
            ret_value = None
            if not hasattr(self, '_MSVM_ComputerSystem_count_0'):
                self._MSVM_ComputerSystem_count_0 = 0
            if self._MSVM_ComputerSystem_count_0 == 0:
                v = _wmi_class(49)
            elif self._MSVM_ComputerSystem_count_0 == 1:
                v = _wmi_class(251)
            self._MSVM_ComputerSystem_count_0 += 1
            return v
    @MSVM_ComputerSystem.setter
    def MSVM_ComputerSystem(self, value):
        pass
    def query(self, wql, instance_of=None, fields='[]'):
        # Replay recorded WQL queries. The wrapped string literals of the
        # generated original were reassembled onto single lines here;
        # NOTE(review): confirm whitespace against the generator's output.
        if self.__id__ == 3 and wql == 'SELECT * FROM MSiSCSIInitiator_SessionClass WHERE TargetName=\'iqn.2010-10.org.openstack:volume-ccb39627-34fa-4f47-968b-6580d9d7ee2b\'':
            v = []
            return v
        elif self.__id__ == 378 and wql == u'Select * from CIM_DataFile where Name = \'C:\\Hyper-V\\test\\instances\\openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0\\openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0.vhd\'':
            v = []
            v1 = _wmi_object(410)
            v.append(v1)
            return v
        elif self.__id__ == 9 and wql == 'Select * from Msvm_ImageManagementService':
            v = []
            v1 = _wmi_object(24)
            v.append(v1)
            return v
        elif self.__id__ == 9 and wql == 'SELECT * FROM Msvm_ResourceAllocationSettingData WHERE ResourceSubType LIKE \'Microsoft Synthetic Disk Drive\' AND InstanceID LIKE \'%Default%\'':
            v = []
            v1 = _wmi_object(64)
            v.append(v1)
            return v
        elif self.__id__ == 9 and wql == 'SELECT * FROM Msvm_ResourceAllocationSettingData WHERE ResourceSubType LIKE \'Microsoft Virtual Hard Disk\' AND InstanceID LIKE \'%Default%\' ':
            v = []
            v1 = _wmi_object(158)
            v.append(v1)
            return v
        elif self.__id__ == 9 and wql == 'SELECT * FROM Msvm_ResourceAllocationSettingData WHERE ResourceSubType = \'Microsoft Synthetic SCSI Controller\' AND InstanceID LIKE \'%Default%\'':
            v = []
            v1 = _wmi_object(253)
            v.append(v1)
            return v
        elif self.__id__ == 11 and wql == 'Select * from Msvm_ImageManagementService':
            v = []
            v1 = _wmi_object(12)
            v.append(v1)
            return v
class _wmi_class(object):
    """Auto-generated mock of a WMI class object.

    Calling the instance replays the recorded instance-enumeration results
    (optionally keyed on the ``ElementName`` where-clause); ``new`` returns
    the recorded settings-data objects. Unknown ids return None.
    """
    def __init__(self, instance_id=1, *args, **kwargs):
        self.__instance_id__ = instance_id
    @property
    def __id__(self):
        return self.__instance_id__
    def __call__(self, fields='[]', **where_clause):
        if self.__id__ == 5:
            v = []
            v1 = _wmi_object(6)
            v.append(v1)
            return v
        elif self.__id__ == 49 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(50)
            v.append(v1)
            return v
        elif self.__id__ == 251 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(252)
            v.append(v1)
            return v
        elif self.__id__ == 10 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            # Recorded as "no such VM yet": an empty result list.
            v = []
            return v
        elif self.__id__ == 40 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(41)
            v.append(v1)
            return v
        elif self.__id__ == 346 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(347)
            v.append(v1)
            return v
        elif self.__id__ == 352 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(353)
            v.append(v1)
            return v
        elif self.__id__ == 365 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(366)
            v.append(v1)
            return v
        elif self.__id__ == 35:
            v = []
            v1 = _wmi_object(36)
            v.append(v1)
            return v
        elif self.__id__ == 154:
            v = []
            v1 = _wmi_object(155)
            v.append(v1)
            return v
        elif self.__id__ == 247:
            v = []
            v1 = _wmi_object(248)
            v.append(v1)
            return v
        elif self.__id__ == 342:
            v = []
            v1 = _wmi_object(343)
            v.append(v1)
            return v
        elif self.__id__ == 348:
            v = []
            v1 = _wmi_object(349)
            v.append(v1)
            return v
        elif self.__id__ == 361 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(362)
            v.append(v1)
            return v
        elif self.__id__ == 368 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(369)
            v.append(v1)
            return v
        elif self.__id__ == 376 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(377)
            v.append(v1)
            return v
        elif self.__id__ == 379 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(380)
            v.append(v1)
            return v
        elif self.__id__ == 383 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(384)
            v.append(v1)
            return v
        elif self.__id__ == 388 and where_clause.get('ElementName') ==\
                'openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0':
            v = []
            v1 = _wmi_object(389)
            v.append(v1)
            return v
        elif self.__id__ == 370:
            v = []
            v1 = _wmi_object(371)
            v.append(v1)
            return v
        elif self.__id__ == 381:
            v = []
            v1 = _wmi_object(382)
            v.append(v1)
            return v
    def new(self, **kwargs):
        # Recorded "create new settings data" results per class id.
        if self.__id__ == 37:
            v = _wmi_object(38)
            return v
        elif self.__id__ == 65:
            v = _wmi_object(66)
            return v
        elif self.__id__ == 159:
            v = _wmi_object(160)
            return v
        elif self.__id__ == 254:
            v = _wmi_object(255)
            return v
class _wmi_object(object):
def __init__(self, instance_id=1, *args, **kwargs):
self.__instance_id__ = instance_id
@property
def __id__(self):
return self.__instance_id__
@property
def CreateDifferencingVirtualHardDisk(self):
if self.__id__ == 24:
v = _wmi_method(25)
return v
@CreateDifferencingVirtualHardDisk.setter
def CreateDifferencingVirtualHardDisk(self, value):
pass
@property
def RequestStateChange(self):
if self.__id__ == 353:
v = _wmi_method(354)
return v
elif self.__id__ == 366:
v = _wmi_method(367)
return v
elif self.__id__ == 362:
v = _wmi_method(363)
return v
elif self.__id__ == 384:
v = _wmi_method(385)
return v
@RequestStateChange.setter
def RequestStateChange(self, value):
pass
@property
def ModifyVirtualSystemResources(self):
if self.__id__ == 36:
ret_value = None
if not hasattr(self, '_ModifyVirtualSystemResources_count_0'):
self._ModifyVirtualSystemResources_count_0 = 0
if self._ModifyVirtualSystemResources_count_0 == 0:
v = _wmi_method(44)
elif self._ModifyVirtualSystemResources_count_0 == 1:
v = _wmi_method(47)
self._ModifyVirtualSystemResources_count_0 += 1
return v
@ModifyVirtualSystemResources.setter
def ModifyVirtualSystemResources(self, value):
pass
@property
def Description(self):
if self.__id__ == 34:
return u'Creating Virtual Hard Disk'
elif self.__id__ == 360:
return u'Initializing and Starting Virtual Machine'
@Description.setter
def Description(self, value):
pass
@property
def SettingType(self):
if self.__id__ == 42:
return 3
@SettingType.setter
def SettingType(self, value):
pass
@property
def AddVirtualSystemResources(self):
if self.__id__ == 155:
v = _wmi_method(156)
return v
elif self.__id__ == 248:
v = _wmi_method(249)
return v
elif self.__id__ == 343:
v = _wmi_method(344)
return v
elif self.__id__ == 349:
v = _wmi_method(350)
return v
@AddVirtualSystemResources.setter
def AddVirtualSystemResources(self, value):
pass
@property
def Address(self):
if self.__id__ == 60:
return u'0'
elif self.__id__ == 61:
return u'1'
@Address.setter
def Address(self, value):
pass
@property
def ResourceSubType(self):
if self.__id__ == 52:
return u'Microsoft Virtual Keyboard'
elif self.__id__ == 53:
return u'Microsoft Virtual PS2 Mouse'
elif self.__id__ == 54:
return u'Microsoft S3 Display Controller'
elif self.__id__ == 55:
return u'Microsoft Synthetic Diskette Drive'
elif self.__id__ == 56:
return None
elif self.__id__ == 57:
return u'Microsoft Serial Controller'
elif self.__id__ == 58:
return u'Microsoft Serial Port'
elif self.__id__ == 59:
return u'Microsoft Serial Port'
elif self.__id__ == 60:
return u'Microsoft Emulated IDE Controller'
elif self.__id__ == 61:
return u'Microsoft Emulated IDE Controller'
elif self.__id__ == 62:
return u'Microsoft Synthetic Mouse'
elif self.__id__ == 63:
return u'Microsoft Synthetic Display Controller'
elif self.__id__ == 391:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_12'):
self._ResourceSubType_count_12 = 0
if self._ResourceSubType_count_12 == 0:
v = u'Microsoft Virtual Keyboard'
elif self._ResourceSubType_count_12 == 1:
v = u'Microsoft Virtual Keyboard'
elif self._ResourceSubType_count_12 == 2:
v = u'Microsoft Virtual Keyboard'
self._ResourceSubType_count_12 += 1
return v
elif self.__id__ == 392:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_13'):
self._ResourceSubType_count_13 = 0
if self._ResourceSubType_count_13 == 0:
v = u'Microsoft Virtual PS2 Mouse'
elif self._ResourceSubType_count_13 == 1:
v = u'Microsoft Virtual PS2 Mouse'
elif self._ResourceSubType_count_13 == 2:
v = u'Microsoft Virtual PS2 Mouse'
self._ResourceSubType_count_13 += 1
return v
elif self.__id__ == 393:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_14'):
self._ResourceSubType_count_14 = 0
if self._ResourceSubType_count_14 == 0:
v = u'Microsoft S3 Display Controller'
elif self._ResourceSubType_count_14 == 1:
v = u'Microsoft S3 Display Controller'
elif self._ResourceSubType_count_14 == 2:
v = u'Microsoft S3 Display Controller'
self._ResourceSubType_count_14 += 1
return v
elif self.__id__ == 394:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_15'):
self._ResourceSubType_count_15 = 0
if self._ResourceSubType_count_15 == 0:
v = u'Microsoft Synthetic Diskette Drive'
elif self._ResourceSubType_count_15 == 1:
v = u'Microsoft Synthetic Diskette Drive'
elif self._ResourceSubType_count_15 == 2:
v = u'Microsoft Synthetic Diskette Drive'
self._ResourceSubType_count_15 += 1
return v
elif self.__id__ == 395:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_16'):
self._ResourceSubType_count_16 = 0
if self._ResourceSubType_count_16 == 0:
v = None
elif self._ResourceSubType_count_16 == 1:
v = None
elif self._ResourceSubType_count_16 == 2:
v = None
self._ResourceSubType_count_16 += 1
return v
elif self.__id__ == 396:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_17'):
self._ResourceSubType_count_17 = 0
if self._ResourceSubType_count_17 == 0:
v = u'Microsoft Serial Controller'
elif self._ResourceSubType_count_17 == 1:
v = u'Microsoft Serial Controller'
elif self._ResourceSubType_count_17 == 2:
v = u'Microsoft Serial Controller'
self._ResourceSubType_count_17 += 1
return v
elif self.__id__ == 397:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_18'):
self._ResourceSubType_count_18 = 0
if self._ResourceSubType_count_18 == 0:
v = u'Microsoft Serial Port'
elif self._ResourceSubType_count_18 == 1:
v = u'Microsoft Serial Port'
elif self._ResourceSubType_count_18 == 2:
v = u'Microsoft Serial Port'
self._ResourceSubType_count_18 += 1
return v
elif self.__id__ == 398:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_19'):
self._ResourceSubType_count_19 = 0
if self._ResourceSubType_count_19 == 0:
v = u'Microsoft Serial Port'
elif self._ResourceSubType_count_19 == 1:
v = u'Microsoft Serial Port'
elif self._ResourceSubType_count_19 == 2:
v = u'Microsoft Serial Port'
self._ResourceSubType_count_19 += 1
return v
elif self.__id__ == 399:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_20'):
self._ResourceSubType_count_20 = 0
if self._ResourceSubType_count_20 == 0:
v = u'Microsoft Synthetic Disk Drive'
elif self._ResourceSubType_count_20 == 1:
v = u'Microsoft Synthetic Disk Drive'
elif self._ResourceSubType_count_20 == 2:
v = u'Microsoft Synthetic Disk Drive'
self._ResourceSubType_count_20 += 1
return v
elif self.__id__ == 400:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_21'):
self._ResourceSubType_count_21 = 0
if self._ResourceSubType_count_21 == 0:
v = u'Microsoft Virtual Hard Disk'
elif self._ResourceSubType_count_21 == 1:
v = u'Microsoft Virtual Hard Disk'
elif self._ResourceSubType_count_21 == 2:
v = u'Microsoft Virtual Hard Disk'
self._ResourceSubType_count_21 += 1
return v
elif self.__id__ == 401:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_22'):
self._ResourceSubType_count_22 = 0
if self._ResourceSubType_count_22 == 0:
v = u'Microsoft Emulated IDE Controller'
elif self._ResourceSubType_count_22 == 1:
v = u'Microsoft Emulated IDE Controller'
elif self._ResourceSubType_count_22 == 2:
v = u'Microsoft Emulated IDE Controller'
self._ResourceSubType_count_22 += 1
return v
elif self.__id__ == 402:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_23'):
self._ResourceSubType_count_23 = 0
if self._ResourceSubType_count_23 == 0:
v = u'Microsoft Emulated IDE Controller'
elif self._ResourceSubType_count_23 == 1:
v = u'Microsoft Emulated IDE Controller'
elif self._ResourceSubType_count_23 == 2:
v = u'Microsoft Emulated IDE Controller'
self._ResourceSubType_count_23 += 1
return v
elif self.__id__ == 403:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_24'):
self._ResourceSubType_count_24 = 0
if self._ResourceSubType_count_24 == 0:
v = u'Microsoft Synthetic Mouse'
elif self._ResourceSubType_count_24 == 1:
v = u'Microsoft Synthetic Mouse'
elif self._ResourceSubType_count_24 == 2:
v = u'Microsoft Synthetic Mouse'
self._ResourceSubType_count_24 += 1
return v
elif self.__id__ == 404:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_25'):
self._ResourceSubType_count_25 = 0
if self._ResourceSubType_count_25 == 0:
v = u'Microsoft Synthetic Display Controller'
elif self._ResourceSubType_count_25 == 1:
v = u'Microsoft Synthetic Display Controller'
elif self._ResourceSubType_count_25 == 2:
v = u'Microsoft Synthetic Display Controller'
self._ResourceSubType_count_25 += 1
return v
elif self.__id__ == 405:
ret_value = None
if not hasattr(self, '_ResourceSubType_count_26'):
self._ResourceSubType_count_26 = 0
if self._ResourceSubType_count_26 == 0:
v = u'Microsoft Synthetic SCSI Controller'
elif self._ResourceSubType_count_26 == 1:
v = u'Microsoft Synthetic SCSI Controller'
elif self._ResourceSubType_count_26 == 2:
v = u'Microsoft Synthetic SCSI Controller'
self._ResourceSubType_count_26 += 1
return v
@ResourceSubType.setter
def ResourceSubType(self, value):
pass
@property
def path_(self):
if self.__id__ == 60:
v = CDispatch(153)
return v
elif self.__id__ == 50:
ret_value = None
if not hasattr(self, '_path__count_1'):
self._path__count_1 = 0
if self._path__count_1 == 0:
v = CDispatch(157)
elif self._path__count_1 == 1:
v = CDispatch(250)
self._path__count_1 += 1
return v
elif self.__id__ == 252:
v = CDispatch(345)
return v
elif self.__id__ == 41:
ret_value = None
if not hasattr(self, '_path__count_3'):
self._path__count_3 = 0
if self._path__count_3 == 0:
v = CDispatch(45)
elif self._path__count_3 == 1:
v = CDispatch(48)
self._path__count_3 += 1
return v
elif self.__id__ == 347:
v = CDispatch(351)
return v
elif self.__id__ == 372:
v = CDispatch(373)
return v
elif self.__id__ == 380:
v = CDispatch(407)
return v
@path_.setter
def path_(self, value):
pass
@property
def ElapsedTime(self):
if self.__id__ == 34:
return u'00000000000001.009551:000'
elif self.__id__ == 360:
return u'00000000000000.644864:000'
@ElapsedTime.setter
def ElapsedTime(self, value):
pass
@property
def Properties_(self):
if self.__id__ == 64:
ret_value = None
if not hasattr(self, '_Properties__count_0'):
self._Properties__count_0 = 0
if self._Properties__count_0 == 0:
v = CDispatch(67)
elif self._Properties__count_0 == 1:
v = CDispatch(71)
elif self._Properties__count_0 == 2:
v = CDispatch(75)
elif self._Properties__count_0 == 3:
v = CDispatch(79)
elif self._Properties__count_0 == 4:
v = CDispatch(83)
elif self._Properties__count_0 == 5:
v = CDispatch(87)
elif self._Properties__count_0 == 6:
v = CDispatch(91)
elif self._Properties__count_0 == 7:
v = CDispatch(95)
elif self._Properties__count_0 == 8:
v = CDispatch(99)
elif self._Properties__count_0 == 9:
v = CDispatch(103)
elif self._Properties__count_0 == 10:
v = CDispatch(107)
elif self._Properties__count_0 == 11:
v = CDispatch(111)
elif self._Properties__count_0 == 12:
v = CDispatch(115)
elif self._Properties__count_0 == 13:
v = CDispatch(119)
elif self._Properties__count_0 == 14:
v = CDispatch(123)
elif self._Properties__count_0 == 15:
v = CDispatch(127)
elif self._Properties__count_0 == 16:
v = CDispatch(131)
elif self._Properties__count_0 == 17:
v = CDispatch(135)
elif self._Properties__count_0 == 18:
v = CDispatch(139)
elif self._Properties__count_0 == 19:
v = CDispatch(143)
elif self._Properties__count_0 == 20:
v = CDispatch(149)
self._Properties__count_0 += 1
return v
elif self.__id__ == 158:
ret_value = None
if not hasattr(self, '_Properties__count_1'):
self._Properties__count_1 = 0
if self._Properties__count_1 == 0:
v = CDispatch(161)
elif self._Properties__count_1 == 1:
v = CDispatch(165)
elif self._Properties__count_1 == 2:
v = CDispatch(169)
elif self._Properties__count_1 == 3:
v = CDispatch(173)
elif self._Properties__count_1 == 4:
v = CDispatch(177)
elif self._Properties__count_1 == 5:
v = CDispatch(181)
elif self._Properties__count_1 == 6:
v = CDispatch(185)
elif self._Properties__count_1 == 7:
v = CDispatch(189)
elif self._Properties__count_1 == 8:
v = CDispatch(193)
elif self._Properties__count_1 == 9:
v = CDispatch(197)
elif self._Properties__count_1 == 10:
v = CDispatch(201)
elif self._Properties__count_1 == 11:
v = CDispatch(205)
elif self._Properties__count_1 == 12:
v = CDispatch(209)
elif self._Properties__count_1 == 13:
v = CDispatch(213)
elif self._Properties__count_1 == 14:
v = CDispatch(217)
elif self._Properties__count_1 == 15:
v = CDispatch(221)
elif self._Properties__count_1 == 16:
v = CDispatch(225)
elif self._Properties__count_1 == 17:
v = CDispatch(229)
elif self._Properties__count_1 == 18:
v = CDispatch(233)
elif self._Properties__count_1 == 19:
v = CDispatch(237)
elif self._Properties__count_1 == 20:
v = CDispatch(243)
self._Properties__count_1 += 1
return v
elif self.__id__ == 253:
ret_value = None
if not hasattr(self, '_Properties__count_2'):
self._Properties__count_2 = 0
if self._Properties__count_2 == 0:
v = CDispatch(256)
elif self._Properties__count_2 == 1:
v = CDispatch(260)
elif self._Properties__count_2 == 2:
v = CDispatch(264)
elif self._Properties__count_2 == 3:
v = CDispatch(268)
elif self._Properties__count_2 == 4:
v = CDispatch(272)
elif self._Properties__count_2 == 5:
v = CDispatch(276)
elif self._Properties__count_2 == 6:
v = CDispatch(280)
elif self._Properties__count_2 == 7:
v = CDispatch(284)
elif self._Properties__count_2 == 8:
v = CDispatch(288)
elif self._Properties__count_2 == 9:
v = CDispatch(292)
elif self._Properties__count_2 == 10:
v = CDispatch(296)
elif self._Properties__count_2 == 11:
v = CDispatch(300)
elif self._Properties__count_2 == 12:
v = CDispatch(304)
elif self._Properties__count_2 == 13:
v = CDispatch(308)
elif self._Properties__count_2 == 14:
v = CDispatch(312)
elif self._Properties__count_2 == 15:
v = CDispatch(316)
elif self._Properties__count_2 == 16:
v = CDispatch(320)
elif self._Properties__count_2 == 17:
v = CDispatch(324)
elif self._Properties__count_2 == 18:
v = CDispatch(328)
elif self._Properties__count_2 == 19:
v = CDispatch(332)
elif self._Properties__count_2 == 20:
v = CDispatch(338)
self._Properties__count_2 += 1
return v
elif self.__id__ == 66:
ret_value = None
if not hasattr(self, '_Properties__count_3'):
self._Properties__count_3 = 0
if self._Properties__count_3 == 0:
v = CDispatch(69)
elif self._Properties__count_3 == 1:
v = CDispatch(73)
elif self._Properties__count_3 == 2:
v = CDispatch(77)
elif self._Properties__count_3 == 3:
v = CDispatch(81)
elif self._Properties__count_3 == 4:
v = CDispatch(85)
elif self._Properties__count_3 == 5:
v = CDispatch(89)
elif self._Properties__count_3 == 6:
v = CDispatch(93)
elif self._Properties__count_3 == 7:
v = CDispatch(97)
elif self._Properties__count_3 == 8:
v = CDispatch(101)
elif self._Properties__count_3 == 9:
v = CDispatch(105)
elif self._Properties__count_3 == 10:
v = CDispatch(109)
elif self._Properties__count_3 == 11:
v = CDispatch(113)
elif self._Properties__count_3 == 12:
v = CDispatch(117)
elif self._Properties__count_3 == 13:
v = CDispatch(121)
elif self._Properties__count_3 == 14:
v = CDispatch(125)
elif self._Properties__count_3 == 15:
v = CDispatch(129)
elif self._Properties__count_3 == 16:
v = CDispatch(133)
elif self._Properties__count_3 == 17:
v = CDispatch(137)
elif self._Properties__count_3 == 18:
v = CDispatch(141)
elif self._Properties__count_3 == 19:
v = CDispatch(145)
elif self._Properties__count_3 == 20:
v = CDispatch(147)
elif self._Properties__count_3 == 21:
v = CDispatch(151)
self._Properties__count_3 += 1
return v
elif self.__id__ == 160:
ret_value = None
if not hasattr(self, '_Properties__count_4'):
self._Properties__count_4 = 0
if self._Properties__count_4 == 0:
v = CDispatch(163)
elif self._Properties__count_4 == 1:
v = CDispatch(167)
elif self._Properties__count_4 == 2:
v = CDispatch(171)
elif self._Properties__count_4 == 3:
v = CDispatch(175)
elif self._Properties__count_4 == 4:
v = CDispatch(179)
elif self._Properties__count_4 == 5:
v = CDispatch(183)
elif self._Properties__count_4 == 6:
v = CDispatch(187)
elif self._Properties__count_4 == 7:
v = CDispatch(191)
elif self._Properties__count_4 == 8:
v = CDispatch(195)
elif self._Properties__count_4 == 9:
v = CDispatch(199)
elif self._Properties__count_4 == 10:
v = CDispatch(203)
elif self._Properties__count_4 == 11:
v = CDispatch(207)
elif self._Properties__count_4 == 12:
v = CDispatch(211)
elif self._Properties__count_4 == 13:
v = CDispatch(215)
elif self._Properties__count_4 == 14:
v = CDispatch(219)
elif self._Properties__count_4 == 15:
v = CDispatch(223)
elif self._Properties__count_4 == 16:
v = CDispatch(227)
elif self._Properties__count_4 == 17:
v = CDispatch(231)
elif self._Properties__count_4 == 18:
v = CDispatch(235)
elif self._Properties__count_4 == 19:
v = CDispatch(239)
elif self._Properties__count_4 == 20:
v = CDispatch(241)
elif self._Properties__count_4 == 21:
v = CDispatch(245)
self._Properties__count_4 += 1
return v
elif self.__id__ == 255:
ret_value = None
if not hasattr(self, '_Properties__count_5'):
self._Properties__count_5 = 0
if self._Properties__count_5 == 0:
v = CDispatch(258)
elif self._Properties__count_5 == 1:
v = CDispatch(262)
elif self._Properties__count_5 == 2:
v = CDispatch(266)
elif self._Properties__count_5 == 3:
v = CDispatch(270)
elif self._Properties__count_5 == 4:
v = CDispatch(274)
elif self._Properties__count_5 == 5:
v = CDispatch(278)
elif self._Properties__count_5 == 6:
v = CDispatch(282)
elif self._Properties__count_5 == 7:
v = CDispatch(286)
elif self._Properties__count_5 == 8:
v = CDispatch(290)
elif self._Properties__count_5 == 9:
v = CDispatch(294)
elif self._Properties__count_5 == 10:
v = CDispatch(298)
elif self._Properties__count_5 == 11:
v = CDispatch(302)
elif self._Properties__count_5 == 12:
v = CDispatch(306)
elif self._Properties__count_5 == 13:
v = CDispatch(310)
elif self._Properties__count_5 == 14:
v = CDispatch(314)
elif self._Properties__count_5 == 15:
v = CDispatch(318)
elif self._Properties__count_5 == 16:
v = CDispatch(322)
elif self._Properties__count_5 == 17:
v = CDispatch(326)
elif self._Properties__count_5 == 18:
v = CDispatch(330)
elif self._Properties__count_5 == 19:
v = CDispatch(334)
elif self._Properties__count_5 == 20:
v = CDispatch(336)
elif self._Properties__count_5 == 21:
v = CDispatch(340)
self._Properties__count_5 += 1
return v
@Properties_.setter
def Properties_(self, value):
pass
@property
def Connection(self):
if self.__id__ == 400:
v = ()
v1 =\
u'C:\\Hyper-V\\test\\instances\\openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf4\
6-ccfbbb618bf0\\openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0.vh\
d'
v += (v1,)
return v
@Connection.setter
def Connection(self, value):
pass
@property
def Version(self):
if self.__id__ == 6:
return u'6.2.9200'
@Version.setter
def Version(self, value):
pass
@property
def CreateDynamicVirtualHardDisk(self):
if self.__id__ == 12:
v = _wmi_method(13)
return v
@CreateDynamicVirtualHardDisk.setter
def CreateDynamicVirtualHardDisk(self, value):
pass
@property
def JobState(self):
if self.__id__ == 14:
return 4
elif self.__id__ == 15:
return 4
elif self.__id__ == 16:
return 4
elif self.__id__ == 17:
return 4
elif self.__id__ == 18:
return 4
elif self.__id__ == 19:
return 4
elif self.__id__ == 20:
return 4
elif self.__id__ == 21:
return 4
elif self.__id__ == 22:
return 4
elif self.__id__ == 23:
ret_value = None
if not hasattr(self, '_JobState_count_9'):
self._JobState_count_9 = 0
if self._JobState_count_9 == 0:
v = 7
elif self._JobState_count_9 == 1:
v = 7
self._JobState_count_9 += 1
return v
elif self.__id__ == 26:
return 4
elif self.__id__ == 27:
return 4
elif self.__id__ == 28:
return 4
elif self.__id__ == 29:
return 4
elif self.__id__ == 30:
return 4
elif self.__id__ == 31:
return 4
elif self.__id__ == 32:
return 4
elif self.__id__ == 33:
return 4
elif self.__id__ == 34:
ret_value = None
if not hasattr(self, '_JobState_count_18'):
self._JobState_count_18 = 0
if self._JobState_count_18 == 0:
v = 7
elif self._JobState_count_18 == 1:
v = 7
self._JobState_count_18 += 1
return v
elif self.__id__ == 355:
return 4
elif self.__id__ == 356:
return 4
elif self.__id__ == 357:
return 4
elif self.__id__ == 358:
return 4
elif self.__id__ == 359:
return 4
elif self.__id__ == 360:
ret_value = None
if not hasattr(self, '_JobState_count_24'):
self._JobState_count_24 = 0
if self._JobState_count_24 == 0:
v = 7
elif self._JobState_count_24 == 1:
v = 7
self._JobState_count_24 += 1
return v
elif self.__id__ == 364:
ret_value = None
if not hasattr(self, '_JobState_count_25'):
self._JobState_count_25 = 0
if self._JobState_count_25 == 0:
v = 7
elif self._JobState_count_25 == 1:
v = 7
self._JobState_count_25 += 1
return v
elif self.__id__ == 386:
return 4
elif self.__id__ == 387:
ret_value = None
if not hasattr(self, '_JobState_count_27'):
self._JobState_count_27 = 0
if self._JobState_count_27 == 0:
v = 7
elif self._JobState_count_27 == 1:
v = 7
self._JobState_count_27 += 1
return v
elif self.__id__ == 408:
return 4
elif self.__id__ == 409:
ret_value = None
if not hasattr(self, '_JobState_count_29'):
self._JobState_count_29 = 0
if self._JobState_count_29 == 0:
v = 7
elif self._JobState_count_29 == 1:
v = 7
self._JobState_count_29 += 1
return v
@JobState.setter
def JobState(self, value):
pass
@property
def DefineVirtualSystem(self):
if self.__id__ == 36:
v = _wmi_method(39)
return v
@DefineVirtualSystem.setter
def DefineVirtualSystem(self, value):
pass
@property
def _properties(self):
if self.__id__ == 64:
v = []
v1 = u'InstanceID'
v.append(v1)
v1 = u'ResourceSubType'
v.append(v1)
v1 = u'HostResource'
v.append(v1)
v1 = u'ElementName'
v.append(v1)
v1 = u'Description'
v.append(v1)
v1 = u'Parent'
v.append(v1)
v1 = u'VirtualQuantity'
v.append(v1)
v1 = u'AutomaticDeallocation'
v.append(v1)
v1 = u'AutomaticAllocation'
v.append(v1)
v1 = u'PoolID'
v.append(v1)
v1 = u'Reservation'
v.append(v1)
v1 = u'AllocationUnits'
v.append(v1)
v1 = u'MappingBehavior'
v.append(v1)
v1 = u'Address'
v.append(v1)
v1 = u'OtherResourceType'
v.append(v1)
v1 = u'Caption'
v.append(v1)
v1 = u'ConsumerVisibility'
v.append(v1)
v1 = u'Limit'
v.append(v1)
v1 = u'ResourceType'
v.append(v1)
v1 = u'Weight'
v.append(v1)
v1 = u'VirtualSystemIdentifiers'
v.append(v1)
v1 = u'Connection'
v.append(v1)
return v
elif self.__id__ == 158:
v = []
v1 = u'InstanceID'
v.append(v1)
v1 = u'ResourceSubType'
v.append(v1)
v1 = u'HostResource'
v.append(v1)
v1 = u'ElementName'
v.append(v1)
v1 = u'Description'
v.append(v1)
v1 = u'Parent'
v.append(v1)
v1 = u'VirtualQuantity'
v.append(v1)
v1 = u'AutomaticDeallocation'
v.append(v1)
v1 = u'AutomaticAllocation'
v.append(v1)
v1 = u'PoolID'
v.append(v1)
v1 = u'Reservation'
v.append(v1)
v1 = u'AllocationUnits'
v.append(v1)
v1 = u'MappingBehavior'
v.append(v1)
v1 = u'Address'
v.append(v1)
v1 = u'OtherResourceType'
v.append(v1)
v1 = u'Caption'
v.append(v1)
v1 = u'ConsumerVisibility'
v.append(v1)
v1 = u'Limit'
v.append(v1)
v1 = u'ResourceType'
v.append(v1)
v1 = u'Weight'
v.append(v1)
v1 = u'VirtualSystemIdentifiers'
v.append(v1)
v1 = u'Connection'
v.append(v1)
return v
elif self.__id__ == 253:
v = []
v1 = u'InstanceID'
v.append(v1)
v1 = u'ResourceSubType'
v.append(v1)
v1 = u'HostResource'
v.append(v1)
v1 = u'ElementName'
v.append(v1)
v1 = u'Description'
v.append(v1)
v1 = u'Parent'
v.append(v1)
v1 = u'VirtualQuantity'
v.append(v1)
v1 = u'AutomaticDeallocation'
v.append(v1)
v1 = u'AutomaticAllocation'
v.append(v1)
v1 = u'PoolID'
v.append(v1)
v1 = u'Reservation'
v.append(v1)
v1 = u'AllocationUnits'
v.append(v1)
v1 = u'MappingBehavior'
v.append(v1)
v1 = u'Address'
v.append(v1)
v1 = u'OtherResourceType'
v.append(v1)
v1 = u'Caption'
v.append(v1)
v1 = u'ConsumerVisibility'
v.append(v1)
v1 = u'Limit'
v.append(v1)
v1 = u'ResourceType'
v.append(v1)
v1 = u'Weight'
v.append(v1)
v1 = u'VirtualSystemIdentifiers'
v.append(v1)
v1 = u'Connection'
v.append(v1)
return v
@_properties.setter
def _properties(self, value):
pass
@property
def DestroyVirtualSystem(self):
if self.__id__ == 382:
v = _wmi_method(406)
return v
@DestroyVirtualSystem.setter
def DestroyVirtualSystem(self, value):
pass
@property
def GetSummaryInformation(self):
if self.__id__ == 371:
v = _wmi_method(374)
return v
@GetSummaryInformation.setter
def GetSummaryInformation(self, value):
pass
@property
def Delete(self):
if self.__id__ == 410:
v = _wmi_method(411)
return v
@Delete.setter
def Delete(self, value):
pass
def associators(self, wmi_association_class='', wmi_result_class=''):
if self.__id__ == 51 and wmi_result_class ==\
'MSVM_ResourceAllocationSettingData':
v = []
v1 = _wmi_object(52)
v.append(v1)
v1 = _wmi_object(53)
v.append(v1)
v1 = _wmi_object(54)
v.append(v1)
v1 = _wmi_object(55)
v.append(v1)
v1 = _wmi_object(56)
v.append(v1)
v1 = _wmi_object(57)
v.append(v1)
v1 = _wmi_object(58)
v.append(v1)
v1 = _wmi_object(59)
v.append(v1)
v1 = _wmi_object(60)
v.append(v1)
v1 = _wmi_object(61)
v.append(v1)
v1 = _wmi_object(62)
v.append(v1)
v1 = _wmi_object(63)
v.append(v1)
return v
elif self.__id__ == 50 and wmi_result_class ==\
'Msvm_VirtualSystemSettingData':
v = []
v1 = _wmi_object(51)
v.append(v1)
return v
elif self.__id__ == 42 and wmi_result_class ==\
'Msvm_MemorySettingData':
v = []
v1 = _wmi_object(43)
v.append(v1)
return v
elif self.__id__ == 42 and wmi_result_class ==\
'Msvm_ProcessorSettingData':
v = []
v1 = _wmi_object(46)
v.append(v1)
return v
elif self.__id__ == 41 and wmi_result_class ==\
'Msvm_VirtualSystemSettingData':
v = []
v1 = _wmi_object(42)
v.append(v1)
return v
elif self.__id__ == 369 and wmi_association_class ==\
'Msvm_SettingsDefineState' and wmi_result_class ==\
'Msvm_VirtualSystemSettingData':
v = []
v1 = _wmi_object(372)
v.append(v1)
return v
elif self.__id__ == 390 and wmi_result_class ==\
'MSVM_ResourceAllocationSettingData':
v = []
v1 = _wmi_object(391)
v.append(v1)
v1 = _wmi_object(392)
v.append(v1)
v1 = _wmi_object(393)
v.append(v1)
v1 = _wmi_object(394)
v.append(v1)
v1 = _wmi_object(395)
v.append(v1)
v1 = _wmi_object(396)
v.append(v1)
v1 = _wmi_object(397)
v.append(v1)
v1 = _wmi_object(398)
v.append(v1)
v1 = _wmi_object(399)
v.append(v1)
v1 = _wmi_object(400)
v.append(v1)
v1 = _wmi_object(401)
v.append(v1)
v1 = _wmi_object(402)
v.append(v1)
v1 = _wmi_object(403)
v.append(v1)
v1 = _wmi_object(404)
v.append(v1)
v1 = _wmi_object(405)
v.append(v1)
return v
elif self.__id__ == 389 and wmi_result_class ==\
'Msvm_VirtualSystemSettingData':
v = []
v1 = _wmi_object(390)
v.append(v1)
return v
    def GetText_(self, iObjectTextFormat='<PyOleMissing object at\
0x03237588>', iFlags=0, objWbemNamedValueSet=None):
        """Return the recorded XML serialization of this mock WMI object.

        Mirrors the WMI/SWbem ``GetText_`` method: for the instance ids
        captured in the recording (43, 46, 38, 66, 160 and 255) it returns
        the canned ``<INSTANCE ...>`` XML document when
        ``iObjectTextFormat == 1``; any other id/format combination falls
        through and returns ``None``.  ``iFlags`` and
        ``objWbemNamedValueSet`` exist only to match the real signature and
        are ignored.  NOTE(review): the odd default for
        ``iObjectTextFormat`` is a stringified pywin32 "missing parameter"
        sentinel emitted by the recorder; it is never compared against.
        The long string literals below use backslash line continuations and
        must not be re-wrapped, or the recorded payloads change.
        """
        # Instance 43: Msvm_MemorySettingData (VM memory settings).
        if self.__id__ == 43 and iObjectTextFormat == 1:
            return u'<INSTANCE CLASSNAME="Msvm_MemorySettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>\\\\HV12OSDEMO1\\root\\virtualization:Msvm_MemorySetting\
Data.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\4764334d-e0\
01-4176-82ee-5594ec9b530e"</VALUE></PROPERTY><PROPERTY NAME="__NAMESPACE"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>root\\virtualization</VALUE></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>HV12OSDEMO1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>26</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_MemorySettingData.InstanceID="Microsoft:3755DA23-3B\
95-4B27-BE7B-B6C617FE1F04\\\\4764334d-e001-4176-82ee-5594ec9b530e"</VALUE></PR\
OPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_MemorySettingData</VALUE></PROPERTY><PROPERTY\
NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>MB</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Memory</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="Connection" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>3</VALUE></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for Microsoft\
Virtual Machine Memory.</VALUE></PROPERTY><PROPERTY NAME="DeviceID"\
CLASSORIGIN="Msvm_MemorySettingData" TYPE="string"></PROPERTY><PROPERTY\
NAME="DeviceIDFormat" CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="DynamicMemoryEnabled"\
CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="boolean"><VALUE>FALSE</VALUE></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Memory</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="HostResource" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\4764334d\
-e001-4176-82ee-5594ec9b530e</VALUE></PROPERTY><PROPERTY NAME="IsVirtualized"\
CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>512</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>15129609-B465-4916-AA13-0CF4B109ADB0</VALUE></PROPERTY><\
PROPERTY NAME="Reservation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>512</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Virtual Machine\
Memory</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>4</VALUE></PROPERTY><PROPERTY NAME="TargetMemoryBuffer"\
CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="uint32"><VALUE>20</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>512</VALUE></PROPERTY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>5000</VALUE></PROPERTY></INSTANCE>'
        # Instance 46: Msvm_ProcessorSettingData (virtual processor settings).
        elif self.__id__ == 46 and iObjectTextFormat == 1:
            return u'<INSTANCE CLASSNAME="Msvm_ProcessorSettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ProcessorSett\
ingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\b637f346\
-6a0e-4dec-af52-bd70cb80a21d\\\\0"</VALUE></PROPERTY><PROPERTY\
NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>root\\virtualization</VALUE></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>HV12OSDEMO1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>29</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ProcessorSettingData.InstanceID="Microsoft:3755DA23\
-3B95-4B27-BE7B-B6C617FE1F04\\\\b637f346-6a0e-4dec-af52-bd70cb80a21d\\\\0"</VA\
LUE></PROPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ProcessorSettingData</VALUE></PROPERTY><PROPERTY\
NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Processor Cores</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Processor</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="Connection" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>3</VALUE></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for Microsoft\
Virtual Processor.</VALUE></PROPERTY><PROPERTY NAME="DeviceID"\
CLASSORIGIN="Msvm_ProcessorSettingData" TYPE="string"></PROPERTY><PROPERTY\
NAME="DeviceIDFormat" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Processor</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="HostResource" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\b637f346\
-6a0e-4dec-af52-bd70cb80a21d\\0</VALUE></PROPERTY><PROPERTY\
NAME="IsVirtualized" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>100000</VALUE></PROPERTY><PROPERTY NAME="LimitCPUID"\
CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="boolean"><VALUE>FALSE</VALUE></PROPERTY><PROPERTY\
NAME="LimitProcessorFeatures" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="boolean"><VALUE>FALSE</VALUE></PROPERTY><PROPERTY\
NAME="MappingBehavior" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>A4F3C4E4-5E15-4018-A713-96C2CFB4C9B8</VALUE></PROPERTY><\
PROPERTY NAME="ProcessorsPerSocket" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="uint16"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Processor</VALUE></PROPERTY><PROPERTY\
NAME="ResourceType" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>3</VALUE></PROPERTY><PROPERTY NAME="SocketCount"\
CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="uint16"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ThreadsEnabled"\
CLASSORIGIN="Msvm_ProcessorSettingData" TYPE="boolean"></PROPERTY><PROPERTY\
NAME="VirtualQuantity" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>100</VALUE></PROPERTY></INSTANCE>'
        # Instance 38: Msvm_VirtualSystemGlobalSettingData for the test VM.
        elif self.__id__ == 38 and iObjectTextFormat == 1:
            return u'<INSTANCE\
CLASSNAME="Msvm_VirtualSystemGlobalSettingData"><PROPERTY NAME="__PATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__NAMESPACE"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__SERVER"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_VirtualSystemSettingData</VALUE><VALUE>\
CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></PROPER\
TY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>23</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__DYNASTY"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_VirtualSystemSettingData</VALUE></PROPERTY><PROPERTY\
NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_VirtualSystemGlobalSettingData</VALUE></PROPERTY><P\
ROPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY\
NAME="AdditionalRecoveryInformation"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllowFullSCSICommandSet"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="boolean"></PROPERTY><PROPERTY NAME="AutoActivate"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="boolean"></PROPERTY><PROPERTY NAME="AutomaticRecoveryAction"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="AutomaticShutdownAction"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="AutomaticStartupAction"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="AutomaticStartupActionDelay"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="datetime"><VALUE>00000000000000.000000:000</VALUE></PROPERTY><PROPERTY\
NAME="Caption" CLASSORIGIN="CIM_ManagedElement" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="CreationTime"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="datetime"></PROPERTY><PROPERTY NAME="DebugChannelId"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint32"></PROPERTY><PROPERTY NAME="DebugPort"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint32"></PROPERTY><PROPERTY NAME="DebugPortEnabled"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618\
bf0</VALUE></PROPERTY><PROPERTY NAME="ExternalDataRoot"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="OtherVirtualSystemType"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="ScopeOfResidence"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="SettingType"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="SnapshotDataRoot"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="SystemName"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="Version"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="VirtualSystemType"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY></INSTANCE>'
        # Instance 66: default RASD for the synthetic disk drive
        # (ResourceSubType "Microsoft Synthetic Disk Drive").
        elif self.__id__ == 66 and iObjectTextFormat == 1:
            return u'<INSTANCE\
CLASSNAME="Msvm_ResourceAllocationSettingData"><PROPERTY NAME="__PATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__NAMESPACE"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__SERVER"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData.InstanceID="Microsoft\
:Definition\\\\118C3BE5-0D31-4804-85F0-5C6074ABEA8F\\\\Default"</VALUE></PROPE\
RTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData</VALUE></PROPERTY><PR\
OPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>0</VALUE></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData" TYPE="string"><VALUE>Hard\
Drives</VALUE></PROPERTY><PROPERTY NAME="AutomaticAllocation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard\
Drive</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Virtual Hard Drive.</VALUE></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard\
Drive</VALUE></PROPERTY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\118C3BE5-0D31-4804-85F0-5C6074ABEA\
8F\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ResourceAlloc\
ationSettingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\
\\\\83F8638B-8DCA-4152-9EDA-2CA8B33039B4\\\\0"</VALUE></PROPERTY><PROPERTY\
NAME="PoolID" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:118C3BE5-0D31-4804-85F0-5C6074ABEA8F\\Root</VA\
LUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Synthetic Disk\
Drive</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>5184445f-413e-4a36-93f1-5cd1c57e0581</VALUE\
></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>'
        # Instance 160: RASD for the virtual hard disk image attached to the
        # test VM (ResourceSubType "Microsoft Virtual Hard Disk").
        elif self.__id__ == 160 and iObjectTextFormat == 1:
            return u'<INSTANCE\
CLASSNAME="Msvm_ResourceAllocationSettingData"><PROPERTY NAME="__PATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__NAMESPACE"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__SERVER"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData.InstanceID="Microsoft\
:Definition\\\\70BB60D2-A9D3-46AA-B654-3DE53004B4F8\\\\Default"</VALUE></PROPE\
RTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData</VALUE></PROPERTY><PR\
OPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Disks</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard Disk\
Image</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>C:\\Hyper-V\\test\\instances\\openstack_uni\
t_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0\\openstack_unit_test_vm_fe88a3b\
0-6f96-4fa7-bf46-ccfbbb618bf0.vhd</VALUE></VALUE.ARRAY></PROPERTY.ARRAY><PROPE\
RTY NAME="ConsumerVisibility" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Hard Disk Image.</VALUE></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard Disk\
Image</VALUE></PROPERTY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\70BB60D2-A9D3-46AA-B654-3DE53004B4\
F8\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ResourceAlloc\
ationSettingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\
\\\\83F8638B-8DCA-4152-9EDA-2CA8B33039B4\\\\0\\\\0\\\\D"</VALUE></PROPERTY><PR\
OPERTY NAME="PoolID" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:70BB60D2-A9D3-46aa-B654-3DE53004B4F8\\Root</VA\
LUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Virtual Hard Disk</VALUE></PROPERTY><PROPERTY\
NAME="ResourceType" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>21</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>e1249ea0-ef00-4863-995c-12ff64826f12</VALUE\
></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>'
        # Instance 255: default RASD for the synthetic SCSI controller
        # (ResourceSubType "Microsoft Synthetic SCSI Controller").
        elif self.__id__ == 255 and iObjectTextFormat == 1:
            return u'<INSTANCE\
CLASSNAME="Msvm_ResourceAllocationSettingData"><PROPERTY NAME="__PATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__NAMESPACE"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__SERVER"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData.InstanceID="Microsoft\
:Definition\\\\BDE5D4D6-E450-46D2-B925-976CA3E989B4\\\\Default"</VALUE></PROPE\
RTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData</VALUE></PROPERTY><PR\
OPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Controllers</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>SCSI\
Controller</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Synthetic SCSI Controller.</VALUE></PROPERTY><PROPERTY\
NAME="ElementName" CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>SCSI\
Controller</VALUE></PROPERTY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\BDE5D4D6-E450-46D2-B925-976CA3E989\
B4\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:bde5d4d6-e450-46d2-b925-976ca3e989b4\\Root</VA\
LUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Synthetic SCSI\
Controller</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>6</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>{64d9fae9-5932-4ed0-a2dd-4e29fd6204dc}</VAL\
UE></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>'
class _wmi_method(object):
def __init__(self, instance_id=1, *args, **kwargs):
self.__instance_id__ = instance_id
@property
def __id__(self):
return self.__instance_id__
def __call__(self, *args, **kwargs):
if len(args) == 0 and self.__id__ == 411:
v = ()
v1 = 0
v += (v1,)
return v
elif len(args) == 0 and self.__id__ == 25 and kwargs.get('Path') ==\
'C:\\Hyper-V\\test\\instances\\openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46\
-ccfbbb618bf0\\openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0.vhd\
' and kwargs.get('ParentPath') == 'C:\\Hyper-V\\test\\instances\\_base\\1.vhd':
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_StorageJob.InstanceID="8D5B2476-\
1065-4F21-A487-7D72B201AFC0"'
v += (v1,)
v1 = 4096
v += (v1,)
return v
elif len(args) == 1 and self.__id__ == 354 and args[0] == 2:
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ConcreteJob.InstanceID="AED4B786\
-5F52-41FB-904D-FF0324976F5D"'
v += (v1,)
v1 = 4096
v += (v1,)
return v
elif len(args) == 1 and self.__id__ == 367 and args[0] == 32768:
v = ()
v1 = None
v += (v1,)
v1 = 32775
v += (v1,)
return v
elif len(args) == 3 and self.__id__ == 39 and str(args[0]) == '[]' and\
args[1] is None and args[2] == u'<INSTANCE\
CLASSNAME="Msvm_VirtualSystemGlobalSettingData"><PROPERTY NAME="__PATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__NAMESPACE"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__SERVER"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_VirtualSystemSettingData</VALUE><VALUE>\
CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></PROPER\
TY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>23</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY NAME="__DYNASTY"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_VirtualSystemSettingData</VALUE></PROPERTY><PROPERTY\
NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_VirtualSystemGlobalSettingData</VALUE></PROPERTY><P\
ROPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY\
NAME="AdditionalRecoveryInformation"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllowFullSCSICommandSet"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="boolean"></PROPERTY><PROPERTY NAME="AutoActivate"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="boolean"></PROPERTY><PROPERTY NAME="AutomaticRecoveryAction"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="AutomaticShutdownAction"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="AutomaticStartupAction"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="AutomaticStartupActionDelay"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="datetime"><VALUE>00000000000000.000000:000</VALUE></PROPERTY><PROPERTY\
NAME="Caption" CLASSORIGIN="CIM_ManagedElement" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="CreationTime"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="datetime"></PROPERTY><PROPERTY NAME="DebugChannelId"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint32"></PROPERTY><PROPERTY NAME="DebugPort"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint32"></PROPERTY><PROPERTY NAME="DebugPortEnabled"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>openstack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618\
bf0</VALUE></PROPERTY><PROPERTY NAME="ExternalDataRoot"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="OtherVirtualSystemType"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="ScopeOfResidence"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="SettingType"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="SnapshotDataRoot"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="SystemName"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="Version"\
CLASSORIGIN="Msvm_VirtualSystemGlobalSettingData" PROPAGATED="true"\
TYPE="string"></PROPERTY><PROPERTY NAME="VirtualSystemType"\
CLASSORIGIN="CIM_VirtualSystemSettingData" PROPAGATED="true"\
TYPE="uint16"></PROPERTY></INSTANCE>':
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
v += (v1,)
v1 = None
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 44 and args[0] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"' and\
str(args[1]) == '[u\'<INSTANCE CLASSNAME="Msvm_MemorySettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>\\\\\\\\HV12OSDEMO1\\\\root\\\\virtualization:Msvm_Memor\
ySettingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\
\\\\\\\\4764334d-e001-4176-82ee-5594ec9b530e"</VALUE></PROPERTY><PROPERTY\
NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>root\\\\virtualization</VALUE></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>HV12OSDEMO1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>26</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_MemorySettingData.InstanceID="Microsoft:3755DA23-3B\
95-4B27-BE7B-B6C617FE1F04\\\\\\\\4764334d-e001-4176-82ee-5594ec9b530e"</VALUE>\
</PROPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_MemorySettingData</VALUE></PROPERTY><PROPERTY\
NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>MB</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Memory</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="Connection" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>3</VALUE></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for Microsoft\
Virtual Machine Memory.</VALUE></PROPERTY><PROPERTY NAME="DeviceID"\
CLASSORIGIN="Msvm_MemorySettingData" TYPE="string"></PROPERTY><PROPERTY\
NAME="DeviceIDFormat" CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="DynamicMemoryEnabled"\
CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="boolean"><VALUE>FALSE</VALUE></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Memory</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="HostResource" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\476433\
4d-e001-4176-82ee-5594ec9b530e</VALUE></PROPERTY><PROPERTY\
NAME="IsVirtualized" CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>512</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>15129609-B465-4916-AA13-0CF4B109ADB0</VALUE></PROPERTY><\
PROPERTY NAME="Reservation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>512</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Virtual Machine\
Memory</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>4</VALUE></PROPERTY><PROPERTY NAME="TargetMemoryBuffer"\
CLASSORIGIN="Msvm_MemorySettingData"\
TYPE="uint32"><VALUE>20</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>512</VALUE></PROPERTY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>5000</VALUE></PROPERTY></INSTANCE>\']':
v = ()
v1 = None
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 47 and args[0] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"' and\
str(args[1]) == '[u\'<INSTANCE\
CLASSNAME="Msvm_ProcessorSettingData"><PROPERTY NAME="__PATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>\\\\\\\\HV12OSDEMO1\\\\root\\\\virtualization:Msvm_Proce\
ssorSettingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\
\\\\\\\\b637f346-6a0e-4dec-af52-bd70cb80a21d\\\\\\\\0"</VALUE></PROPERTY><PROP\
ERTY NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>root\\\\virtualization</VALUE></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>HV12OSDEMO1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="__DERIVATION" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>29</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ProcessorSettingData.InstanceID="Microsoft:3755DA23\
-3B95-4B27-BE7B-B6C617FE1F04\\\\\\\\b637f346-6a0e-4dec-af52-bd70cb80a21d\
\\\\\\\\0"</VALUE></PROPERTY><PROPERTY NAME="__DYNASTY"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ProcessorSettingData</VALUE></PROPERTY><PROPERTY\
NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Processor Cores</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Processor</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="Connection" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>3</VALUE></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for Microsoft\
Virtual Processor.</VALUE></PROPERTY><PROPERTY NAME="DeviceID"\
CLASSORIGIN="Msvm_ProcessorSettingData" TYPE="string"></PROPERTY><PROPERTY\
NAME="DeviceIDFormat" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>Processor</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="HostResource" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\b637f3\
46-6a0e-4dec-af52-bd70cb80a21d\\\\0</VALUE></PROPERTY><PROPERTY\
NAME="IsVirtualized" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>100000</VALUE></PROPERTY><PROPERTY NAME="LimitCPUID"\
CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="boolean"><VALUE>FALSE</VALUE></PROPERTY><PROPERTY\
NAME="LimitProcessorFeatures" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="boolean"><VALUE>FALSE</VALUE></PROPERTY><PROPERTY\
NAME="MappingBehavior" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>A4F3C4E4-5E15-4018-A713-96C2CFB4C9B8</VALUE></PROPERTY><\
PROPERTY NAME="ProcessorsPerSocket" CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="uint16"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Processor</VALUE></PROPERTY><PROPERTY\
NAME="ResourceType" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>3</VALUE></PROPERTY><PROPERTY NAME="SocketCount"\
CLASSORIGIN="Msvm_ProcessorSettingData"\
TYPE="uint16"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ThreadsEnabled"\
CLASSORIGIN="Msvm_ProcessorSettingData" TYPE="boolean"></PROPERTY><PROPERTY\
NAME="VirtualQuantity" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>100</VALUE></PROPERTY></INSTANCE>\']':
v = ()
v1 = None
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 156 and str(args[0]) ==\
'[u\'<INSTANCE CLASSNAME="Msvm_ResourceAllocationSettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"></PROPERTY><PROPERTY.ARRAY NAME="__DERIVATION"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData.InstanceID="Microsoft\
:Definition\\\\\\\\118C3BE5-0D31-4804-85F0-5C6074ABEA8F\\\\\\\\Default"</VALUE\
></PROPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData</VALUE></PROPERTY><PR\
OPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>0</VALUE></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData" TYPE="string"><VALUE>Hard\
Drives</VALUE></PROPERTY><PROPERTY NAME="AutomaticAllocation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard\
Drive</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Virtual Hard Drive.</VALUE></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard\
Drive</VALUE></PROPERTY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\\\118C3BE5-0D31-4804-85F0-5C6074AB\
EA8F\\\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>\\\\\\\\HV12OSDEMO1\\\\root\\\\virtualization:Msvm_Resou\
rceAllocationSettingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617F\
E1F04\\\\\\\\83F8638B-8DCA-4152-9EDA-2CA8B33039B4\\\\\\\\0"</VALUE></PROPERTY>\
<PROPERTY NAME="PoolID" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:118C3BE5-0D31-4804-85F0-5C6074ABEA8F\\\\Root</\
VALUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Synthetic Disk\
Drive</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>5184445f-413e-4a36-93f1-5cd1c57e0581</VALUE\
></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>\']' and args[1] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"':
v = ()
v1 = None
v += (v1,)
v1 = []
v2 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ResourceAllocationSettingData.In\
stanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\83F8638B-8DCA-4152\
-9EDA-2CA8B33039B4\\\\0\\\\0\\\\D"'
v1.append(v2)
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 249 and str(args[0]) ==\
'[u\'<INSTANCE CLASSNAME="Msvm_ResourceAllocationSettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"></PROPERTY><PROPERTY.ARRAY NAME="__DERIVATION"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData.InstanceID="Microsoft\
:Definition\\\\\\\\70BB60D2-A9D3-46AA-B654-3DE53004B4F8\\\\\\\\Default"</VALUE\
></PROPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData</VALUE></PROPERTY><PR\
OPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Disks</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard Disk\
Image</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>C:\\\\Hyper-V\\\\test\\\\instances\\\\opens\
tack_unit_test_vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0\\\\openstack_unit_test_\
vm_fe88a3b0-6f96-4fa7-bf46-ccfbbb618bf0.vhd</VALUE></VALUE.ARRAY></PROPERTY.AR\
RAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Hard Disk Image.</VALUE></PROPERTY><PROPERTY NAME="ElementName"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Hard Disk\
Image</VALUE></PROPERTY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\\\70BB60D2-A9D3-46AA-B654-3DE53004\
B4F8\\\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>\\\\\\\\HV12OSDEMO1\\\\root\\\\virtualization:Msvm_Resou\
rceAllocationSettingData.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617F\
E1F04\\\\\\\\83F8638B-8DCA-4152-9EDA-2CA8B33039B4\\\\\\\\0\\\\\\\\0\\\\\\\\D"<\
/VALUE></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:70BB60D2-A9D3-46aa-B654-3DE53004B4F8\\\\Root</\
VALUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Virtual Hard Disk</VALUE></PROPERTY><PROPERTY\
NAME="ResourceType" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>21</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>e1249ea0-ef00-4863-995c-12ff64826f12</VALUE\
></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>\']' and args[1] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"':
v = ()
v1 = None
v += (v1,)
v1 = []
v2 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ResourceAllocationSettingData.In\
stanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\83F8638B-8DCA-4152\
-9EDA-2CA8B33039B4\\\\0\\\\0\\\\L"'
v1.append(v2)
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 344 and str(args[0]) ==\
'[u\'<INSTANCE CLASSNAME="Msvm_ResourceAllocationSettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"></PROPERTY><PROPERTY.ARRAY NAME="__DERIVATION"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>22</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData.InstanceID="Microsoft\
:Definition\\\\\\\\BDE5D4D6-E450-46D2-B925-976CA3E989B4\\\\\\\\Default"</VALUE\
></PROPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_ResourceAllocationSettingData</VALUE></PROPERTY><PR\
OPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="AllocationUnits"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Controllers</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>SCSI\
Controller</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Synthetic SCSI Controller.</VALUE></PROPERTY><PROPERTY\
NAME="ElementName" CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>SCSI\
Controller</VALUE></PROPERTY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\\\BDE5D4D6-E450-46D2-B925-976CA3E9\
89B4\\\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:bde5d4d6-e450-46d2-b925-976ca3e989b4\\\\Root</\
VALUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Synthetic SCSI\
Controller</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>6</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_ResourceAllocationSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>{64d9fae9-5932-4ed0-a2dd-4e29fd6204dc}</VAL\
UE></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>\']' and args[1] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"':
v = ()
v1 = None
v += (v1,)
v1 = []
v2 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ResourceAllocationSettingData.In\
stanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\6749E429-5249-4F12\
-987B-C45CCB3C23B5\\\\0"'
v1.append(v2)
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 350 and str(args[0]) ==\
'[u\'<INSTANCE CLASSNAME="Msvm_SyntheticEthernetPortSettingData"><PROPERTY\
NAME="__PATH" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__NAMESPACE" CLASSORIGIN="___SYSTEM" TYPE="string"></PROPERTY><PROPERTY\
NAME="__SERVER" CLASSORIGIN="___SYSTEM"\
TYPE="string"></PROPERTY><PROPERTY.ARRAY NAME="__DERIVATION"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE.ARRAY><VALUE>CIM_ResourceAllocationSettingData</VALUE><V\
ALUE>CIM_SettingData</VALUE><VALUE>CIM_ManagedElement</VALUE></VALUE.ARRAY></P\
ROPERTY.ARRAY><PROPERTY NAME="__PROPERTY_COUNT" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>23</VALUE></PROPERTY><PROPERTY NAME="__RELPATH"\
CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_SyntheticEthernetPortSettingData.InstanceID="Micros\
oft:Definition\\\\\\\\6A45335D-4C3A-44B7-B61F-C9808BBDF8ED\\\\\\\\Default"</VA\
LUE></PROPERTY><PROPERTY NAME="__DYNASTY" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ManagedElement</VALUE></PROPERTY><PROPERTY\
NAME="__SUPERCLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>CIM_ResourceAllocationSettingData</VALUE></PROPERTY><PRO\
PERTY NAME="__CLASS" CLASSORIGIN="___SYSTEM"\
TYPE="string"><VALUE>Msvm_SyntheticEthernetPortSettingData</VALUE></PROPERTY>\
<PROPERTY NAME="__GENUS" CLASSORIGIN="___SYSTEM"\
TYPE="sint32"><VALUE>2</VALUE></PROPERTY><PROPERTY NAME="Address"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>DEADBEEF0001</VALUE></PROPERTY><PROPERTY\
NAME="AllocationUnits" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Ports</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticAllocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY\
NAME="AutomaticDeallocation" CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="Caption"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Ethernet\
Port</VALUE></PROPERTY><PROPERTY.ARRAY NAME="Connection"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="ConsumerVisibility"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="Description"\
CLASSORIGIN="CIM_ManagedElement" TYPE="string"><VALUE>Settings for the\
Microsoft Synthetic Ethernet Port.</VALUE></PROPERTY><PROPERTY\
NAME="ElementName" CLASSORIGIN="CIM_ManagedElement"\
TYPE="string"><VALUE>00000000-0000-0000-0000-0000000000000001</VALUE></PROPER\
TY><PROPERTY.ARRAY NAME="HostResource"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY.ARRAY><PROPERTY NAME="InstanceID"\
CLASSORIGIN="CIM_SettingData"\
TYPE="string"><VALUE>Microsoft:Definition\\\\6A45335D-4C3A-44B7-B61F-C9808BBD\
F8ED\\\\Default</VALUE></PROPERTY><PROPERTY NAME="Limit"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="MappingBehavior"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"></PROPERTY><PROPERTY NAME="OtherResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="Parent"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"></PROPERTY><PROPERTY NAME="PoolID"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft:6A45335D-4C3A-44b7-B61F-C9808BBDF8ED\\\\Root</\
VALUE></PROPERTY><PROPERTY NAME="Reservation"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY NAME="ResourceSubType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="string"><VALUE>Microsoft Synthetic Ethernet\
Port</VALUE></PROPERTY><PROPERTY NAME="ResourceType"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint16"><VALUE>10</VALUE></PROPERTY><PROPERTY NAME="StaticMacAddress"\
CLASSORIGIN="Msvm_SyntheticEthernetPortSettingData"\
TYPE="boolean"><VALUE>TRUE</VALUE></PROPERTY><PROPERTY NAME="VirtualQuantity"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint64"><VALUE>1</VALUE></PROPERTY><PROPERTY.ARRAY\
NAME="VirtualSystemIdentifiers"\
CLASSORIGIN="Msvm_SyntheticEthernetPortSettingData"\
TYPE="string"><VALUE.ARRAY><VALUE>{ec93f40e-bf11-4880-b47b-2983f6cef5d0}</VAL\
UE></VALUE.ARRAY></PROPERTY.ARRAY><PROPERTY NAME="Weight"\
CLASSORIGIN="CIM_ResourceAllocationSettingData"\
TYPE="uint32"><VALUE>0</VALUE></PROPERTY></INSTANCE>\']' and args[1] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"':
v = ()
v1 = None
v += (v1,)
v1 = []
v2 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_SyntheticEthernetPortSettingData\
.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\3C059607-C3F1-4\
1CE-AC31-CD6ECA76AB3D"'
v1.append(v2)
v += (v1,)
v1 = 0
v += (v1,)
return v
elif len(args) == 0 and self.__id__ == 13 and\
kwargs.get('MaxInternalSize') == 3145728 and kwargs.get('Path') ==\
'C:\\Hyper-V\\test\\instances\\_base\\1.vhd':
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_StorageJob.InstanceID="1FFFA674-\
9739-4475-806E-B8A59D599D41"'
v += (v1,)
v1 = 4096
v += (v1,)
return v
elif len(args) == 1 and self.__id__ == 363 and args[0] == 32768:
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ConcreteJob.InstanceID="8B33221D\
-9E8A-41B9-96FB-DDAC49D5E8B3"'
v += (v1,)
v1 = 4096
v += (v1,)
return v
elif len(args) == 1 and self.__id__ == 385 and args[0] == 3:
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ConcreteJob.InstanceID="D3ECBA7D\
-C750-40AB-B11F-8658FF528483"'
v += (v1,)
v1 = 4096
v += (v1,)
return v
elif len(args) == 2 and self.__id__ == 374 and str(args[0]) == '[100,\
105]' and str(args[1]) ==\
'[u\'\\\\\\\\HV12OSDEMO1\\\\root\\\\virtualization:Msvm_VirtualSystemSettingD\
ata.InstanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04"\']':
v = ()
v1 = 0
v += (v1,)
v1 = []
v2 = CDispatch(375)
v1.append(v2)
v += (v1,)
return v
elif len(args) == 1 and self.__id__ == 406 and args[0] ==\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"':
v = ()
v1 =\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ConcreteJob.InstanceID="D6B00745\
-2CCD-4A58-8B51-0AA585932392"'
v += (v1,)
v1 = 4096
v += (v1,)
return v
class CDispatch(object):
def __init__(self, instance_id=1, *args, **kwargs):
self.__instance_id__ = instance_id
@property
def __id__(self):
return self.__instance_id__
@property
def EnabledState(self):
if self.__id__ == 375:
return 32768
@EnabledState.setter
def EnabledState(self, value):
pass
@property
def Value(self):
if self.__id__ == 68:
return\
u'Microsoft:Definition\\118C3BE5-0D31-4804-85F0-5C6074ABEA8F\\Default'
elif self.__id__ == 72:
return u'Microsoft Synthetic Disk Drive'
elif self.__id__ == 76:
return None
elif self.__id__ == 80:
return u'Hard Drive'
elif self.__id__ == 84:
return u'Settings for the Microsoft Virtual Hard Drive.'
elif self.__id__ == 88:
return None
elif self.__id__ == 92:
return u'1'
elif self.__id__ == 96:
return True
elif self.__id__ == 100:
return True
elif self.__id__ == 104:
return u'Microsoft:118C3BE5-0D31-4804-85F0-5C6074ABEA8F\\Root'
elif self.__id__ == 108:
return u'1'
elif self.__id__ == 112:
return u'Hard Drives'
elif self.__id__ == 116:
return None
elif self.__id__ == 120:
return None
elif self.__id__ == 124:
return None
elif self.__id__ == 128:
return u'Hard Drive'
elif self.__id__ == 132:
return None
elif self.__id__ == 136:
return u'1'
elif self.__id__ == 140:
return 22
elif self.__id__ == 144:
return 0
elif self.__id__ == 150:
return None
elif self.__id__ == 162:
return\
u'Microsoft:Definition\\70BB60D2-A9D3-46AA-B654-3DE53004B4F8\\Default'
elif self.__id__ == 166:
return u'Microsoft Virtual Hard Disk'
elif self.__id__ == 170:
return None
elif self.__id__ == 174:
return u'Hard Disk Image'
elif self.__id__ == 178:
return u'Settings for the Microsoft Hard Disk Image.'
elif self.__id__ == 182:
return None
elif self.__id__ == 186:
return u'1'
elif self.__id__ == 190:
return True
elif self.__id__ == 194:
return True
elif self.__id__ == 198:
return u'Microsoft:70BB60D2-A9D3-46aa-B654-3DE53004B4F8\\Root'
elif self.__id__ == 202:
return u'1'
elif self.__id__ == 206:
return u'Disks'
elif self.__id__ == 210:
return None
elif self.__id__ == 214:
return None
elif self.__id__ == 218:
return None
elif self.__id__ == 222:
return u'Hard Disk Image'
elif self.__id__ == 226:
return None
elif self.__id__ == 230:
return u'1'
elif self.__id__ == 234:
return 21
elif self.__id__ == 238:
return 0
elif self.__id__ == 244:
return None
elif self.__id__ == 257:
return\
u'Microsoft:Definition\\BDE5D4D6-E450-46D2-B925-976CA3E989B4\\Default'
elif self.__id__ == 261:
return u'Microsoft Synthetic SCSI Controller'
elif self.__id__ == 265:
return None
elif self.__id__ == 269:
return u'SCSI Controller'
elif self.__id__ == 273:
return u'Settings for the Microsoft Synthetic SCSI Controller.'
elif self.__id__ == 277:
return None
elif self.__id__ == 281:
return u'1'
elif self.__id__ == 285:
return True
elif self.__id__ == 289:
return True
elif self.__id__ == 293:
return u'Microsoft:bde5d4d6-e450-46d2-b925-976ca3e989b4\\Root'
elif self.__id__ == 297:
return u'1'
elif self.__id__ == 301:
return u'Controllers'
elif self.__id__ == 305:
return None
elif self.__id__ == 309:
return None
elif self.__id__ == 313:
return None
elif self.__id__ == 317:
return u'SCSI Controller'
elif self.__id__ == 321:
return None
elif self.__id__ == 325:
return u'1'
elif self.__id__ == 329:
return 6
elif self.__id__ == 333:
return 0
elif self.__id__ == 339:
return None
@Value.setter
def Value(self, value):
pass
def __call__(self, *args):
if len(args) == 0 and self.__id__ == 153:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ResourceAllocationSettingData.In\
stanceID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04\\\\83F8638B-8DCA-4152\
-9EDA-2CA8B33039B4\\\\0"'
elif len(args) == 0 and self.__id__ == 157:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 250:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 345:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 45:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 48:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 351:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 373:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_VirtualSystemSettingData.Instanc\
eID="Microsoft:3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
elif len(args) == 0 and self.__id__ == 407:
return\
u'\\\\HV12OSDEMO1\\root\\virtualization:Msvm_ComputerSystem.CreationClassName\
="Msvm_ComputerSystem",Name="3755DA23-3B95-4B27-BE7B-B6C617FE1F04"'
def Item(self, strName='<PyOleMissing object at 0x03237588>', iFlags=0):
if self.__id__ == 67 and strName == u'InstanceID':
v = CDispatch(68)
return v
elif self.__id__ == 71 and strName == u'ResourceSubType':
v = CDispatch(72)
return v
elif self.__id__ == 75 and strName == u'HostResource':
v = CDispatch(76)
return v
elif self.__id__ == 79 and strName == u'ElementName':
v = CDispatch(80)
return v
elif self.__id__ == 83 and strName == u'Description':
v = CDispatch(84)
return v
elif self.__id__ == 87 and strName == u'Parent':
v = CDispatch(88)
return v
elif self.__id__ == 91 and strName == u'VirtualQuantity':
v = CDispatch(92)
return v
elif self.__id__ == 95 and strName == u'AutomaticDeallocation':
v = CDispatch(96)
return v
elif self.__id__ == 99 and strName == u'AutomaticAllocation':
v = CDispatch(100)
return v
elif self.__id__ == 103 and strName == u'PoolID':
v = CDispatch(104)
return v
elif self.__id__ == 107 and strName == u'Reservation':
v = CDispatch(108)
return v
elif self.__id__ == 111 and strName == u'AllocationUnits':
v = CDispatch(112)
return v
elif self.__id__ == 115 and strName == u'MappingBehavior':
v = CDispatch(116)
return v
elif self.__id__ == 119 and strName == u'Address':
v = CDispatch(120)
return v
elif self.__id__ == 123 and strName == u'OtherResourceType':
v = CDispatch(124)
return v
elif self.__id__ == 127 and strName == u'Caption':
v = CDispatch(128)
return v
elif self.__id__ == 131 and strName == u'ConsumerVisibility':
v = CDispatch(132)
return v
elif self.__id__ == 135 and strName == u'Limit':
v = CDispatch(136)
return v
elif self.__id__ == 139 and strName == u'ResourceType':
v = CDispatch(140)
return v
elif self.__id__ == 143 and strName == u'Weight':
v = CDispatch(144)
return v
elif self.__id__ == 149 and strName == u'Connection':
v = CDispatch(150)
return v
elif self.__id__ == 161 and strName == u'InstanceID':
v = CDispatch(162)
return v
elif self.__id__ == 165 and strName == u'ResourceSubType':
v = CDispatch(166)
return v
elif self.__id__ == 169 and strName == u'HostResource':
v = CDispatch(170)
return v
elif self.__id__ == 173 and strName == u'ElementName':
v = CDispatch(174)
return v
elif self.__id__ == 177 and strName == u'Description':
v = CDispatch(178)
return v
elif self.__id__ == 181 and strName == u'Parent':
v = CDispatch(182)
return v
elif self.__id__ == 185 and strName == u'VirtualQuantity':
v = CDispatch(186)
return v
elif self.__id__ == 189 and strName == u'AutomaticDeallocation':
v = CDispatch(190)
return v
elif self.__id__ == 193 and strName == u'AutomaticAllocation':
v = CDispatch(194)
return v
elif self.__id__ == 197 and strName == u'PoolID':
v = CDispatch(198)
return v
elif self.__id__ == 201 and strName == u'Reservation':
v = CDispatch(202)
return v
elif self.__id__ == 205 and strName == u'AllocationUnits':
v = CDispatch(206)
return v
elif self.__id__ == 209 and strName == u'MappingBehavior':
v = CDispatch(210)
return v
elif self.__id__ == 213 and strName == u'Address':
v = CDispatch(214)
return v
elif self.__id__ == 217 and strName == u'OtherResourceType':
v = CDispatch(218)
return v
elif self.__id__ == 221 and strName == u'Caption':
v = CDispatch(222)
return v
elif self.__id__ == 225 and strName == u'ConsumerVisibility':
v = CDispatch(226)
return v
elif self.__id__ == 229 and strName == u'Limit':
v = CDispatch(230)
return v
elif self.__id__ == 233 and strName == u'ResourceType':
v = CDispatch(234)
return v
elif self.__id__ == 237 and strName == u'Weight':
v = CDispatch(238)
return v
elif self.__id__ == 243 and strName == u'Connection':
v = CDispatch(244)
return v
elif self.__id__ == 256 and strName == u'InstanceID':
v = CDispatch(257)
return v
elif self.__id__ == 260 and strName == u'ResourceSubType':
v = CDispatch(261)
return v
elif self.__id__ == 264 and strName == u'HostResource':
v = CDispatch(265)
return v
elif self.__id__ == 268 and strName == u'ElementName':
v = CDispatch(269)
return v
elif self.__id__ == 272 and strName == u'Description':
v = CDispatch(273)
return v
elif self.__id__ == 276 and strName == u'Parent':
v = CDispatch(277)
return v
elif self.__id__ == 280 and strName == u'VirtualQuantity':
v = CDispatch(281)
return v
elif self.__id__ == 284 and strName == u'AutomaticDeallocation':
v = CDispatch(285)
return v
elif self.__id__ == 288 and strName == u'AutomaticAllocation':
v = CDispatch(289)
return v
elif self.__id__ == 292 and strName == u'PoolID':
v = CDispatch(293)
return v
elif self.__id__ == 296 and strName == u'Reservation':
v = CDispatch(297)
return v
elif self.__id__ == 300 and strName == u'AllocationUnits':
v = CDispatch(301)
return v
elif self.__id__ == 304 and strName == u'MappingBehavior':
v = CDispatch(305)
return v
elif self.__id__ == 308 and strName == u'Address':
v = CDispatch(309)
return v
elif self.__id__ == 312 and strName == u'OtherResourceType':
v = CDispatch(313)
return v
elif self.__id__ == 316 and strName == u'Caption':
v = CDispatch(317)
return v
elif self.__id__ == 320 and strName == u'ConsumerVisibility':
v = CDispatch(321)
return v
elif self.__id__ == 324 and strName == u'Limit':
v = CDispatch(325)
return v
elif self.__id__ == 328 and strName == u'ResourceType':
v = CDispatch(329)
return v
elif self.__id__ == 332 and strName == u'Weight':
v = CDispatch(333)
return v
elif self.__id__ == 338 and strName == u'Connection':
v = CDispatch(339)
return v
elif self.__id__ == 69 and strName == u'InstanceID':
v = CDispatch(70)
return v
elif self.__id__ == 73 and strName == u'ResourceSubType':
v = CDispatch(74)
return v
elif self.__id__ == 77 and strName == u'HostResource':
v = CDispatch(78)
return v
elif self.__id__ == 81 and strName == u'ElementName':
v = CDispatch(82)
return v
elif self.__id__ == 85 and strName == u'Description':
v = CDispatch(86)
return v
elif self.__id__ == 89 and strName == u'Parent':
v = CDispatch(90)
return v
elif self.__id__ == 93 and strName == u'VirtualQuantity':
v = CDispatch(94)
return v
elif self.__id__ == 97 and strName == u'AutomaticDeallocation':
v = CDispatch(98)
return v
elif self.__id__ == 101 and strName == u'AutomaticAllocation':
v = CDispatch(102)
return v
elif self.__id__ == 105 and strName == u'PoolID':
v = CDispatch(106)
return v
elif self.__id__ == 109 and strName == u'Reservation':
v = CDispatch(110)
return v
elif self.__id__ == 113 and strName == u'AllocationUnits':
v = CDispatch(114)
return v
elif self.__id__ == 117 and strName == u'MappingBehavior':
v = CDispatch(118)
return v
elif self.__id__ == 121 and strName == u'Address':
v = CDispatch(122)
return v
elif self.__id__ == 125 and strName == u'OtherResourceType':
v = CDispatch(126)
return v
elif self.__id__ == 129 and strName == u'Caption':
v = CDispatch(130)
return v
elif self.__id__ == 133 and strName == u'ConsumerVisibility':
v = CDispatch(134)
return v
elif self.__id__ == 137 and strName == u'Limit':
v = CDispatch(138)
return v
elif self.__id__ == 141 and strName == u'ResourceType':
v = CDispatch(142)
return v
elif self.__id__ == 145 and strName == u'Weight':
v = CDispatch(146)
return v
elif self.__id__ == 147 and strName == u'VirtualSystemIdentifiers':
v = CDispatch(148)
return v
elif self.__id__ == 151 and strName == u'Connection':
v = CDispatch(152)
return v
elif self.__id__ == 163 and strName == u'InstanceID':
v = CDispatch(164)
return v
elif self.__id__ == 167 and strName == u'ResourceSubType':
v = CDispatch(168)
return v
elif self.__id__ == 171 and strName == u'HostResource':
v = CDispatch(172)
return v
elif self.__id__ == 175 and strName == u'ElementName':
v = CDispatch(176)
return v
elif self.__id__ == 179 and strName == u'Description':
v = CDispatch(180)
return v
elif self.__id__ == 183 and strName == u'Parent':
v = CDispatch(184)
return v
elif self.__id__ == 187 and strName == u'VirtualQuantity':
v = CDispatch(188)
return v
elif self.__id__ == 191 and strName == u'AutomaticDeallocation':
v = CDispatch(192)
return v
elif self.__id__ == 195 and strName == u'AutomaticAllocation':
v = CDispatch(196)
return v
elif self.__id__ == 199 and strName == u'PoolID':
v = CDispatch(200)
return v
elif self.__id__ == 203 and strName == u'Reservation':
v = CDispatch(204)
return v
elif self.__id__ == 207 and strName == u'AllocationUnits':
v = CDispatch(208)
return v
elif self.__id__ == 211 and strName == u'MappingBehavior':
v = CDispatch(212)
return v
elif self.__id__ == 215 and strName == u'Address':
v = CDispatch(216)
return v
elif self.__id__ == 219 and strName == u'OtherResourceType':
v = CDispatch(220)
return v
elif self.__id__ == 223 and strName == u'Caption':
v = CDispatch(224)
return v
elif self.__id__ == 227 and strName == u'ConsumerVisibility':
v = CDispatch(228)
return v
elif self.__id__ == 231 and strName == u'Limit':
v = CDispatch(232)
return v
elif self.__id__ == 235 and strName == u'ResourceType':
v = CDispatch(236)
return v
elif self.__id__ == 239 and strName == u'Weight':
v = CDispatch(240)
return v
elif self.__id__ == 241 and strName == u'VirtualSystemIdentifiers':
v = CDispatch(242)
return v
elif self.__id__ == 245 and strName == u'Connection':
v = CDispatch(246)
return v
elif self.__id__ == 258 and strName == u'InstanceID':
v = CDispatch(259)
return v
elif self.__id__ == 262 and strName == u'ResourceSubType':
v = CDispatch(263)
return v
elif self.__id__ == 266 and strName == u'HostResource':
v = CDispatch(267)
return v
elif self.__id__ == 270 and strName == u'ElementName':
v = CDispatch(271)
return v
elif self.__id__ == 274 and strName == u'Description':
v = CDispatch(275)
return v
elif self.__id__ == 278 and strName == u'Parent':
v = CDispatch(279)
return v
elif self.__id__ == 282 and strName == u'VirtualQuantity':
v = CDispatch(283)
return v
elif self.__id__ == 286 and strName == u'AutomaticDeallocation':
v = CDispatch(287)
return v
elif self.__id__ == 290 and strName == u'AutomaticAllocation':
v = CDispatch(291)
return v
elif self.__id__ == 294 and strName == u'PoolID':
v = CDispatch(295)
return v
elif self.__id__ == 298 and strName == u'Reservation':
v = CDispatch(299)
return v
elif self.__id__ == 302 and strName == u'AllocationUnits':
v = CDispatch(303)
return v
elif self.__id__ == 306 and strName == u'MappingBehavior':
v = CDispatch(307)
return v
elif self.__id__ == 310 and strName == u'Address':
v = CDispatch(311)
return v
elif self.__id__ == 314 and strName == u'OtherResourceType':
v = CDispatch(315)
return v
elif self.__id__ == 318 and strName == u'Caption':
v = CDispatch(319)
return v
elif self.__id__ == 322 and strName == u'ConsumerVisibility':
v = CDispatch(323)
return v
elif self.__id__ == 326 and strName == u'Limit':
v = CDispatch(327)
return v
elif self.__id__ == 330 and strName == u'ResourceType':
v = CDispatch(331)
return v
elif self.__id__ == 334 and strName == u'Weight':
v = CDispatch(335)
return v
elif self.__id__ == 336 and strName == u'VirtualSystemIdentifiers':
v = CDispatch(337)
return v
elif self.__id__ == 340 and strName == u'Connection':
v = CDispatch(341)
return v
|
[
"ap@pilotti.it"
] |
ap@pilotti.it
|
e6d3c90ef5eab0ca84b77fe3f6eb1ac38666898f
|
1fa7109fa66f01c03effd7ed1aecf4526b7bb66e
|
/src/trusted/validator_ragel/def_format.py
|
7d0dfbeb737eb0fa7f5dd939193aa6561280b031
|
[
"LGPL-2.0-or-later",
"LicenseRef-scancode-philippe-de-muyter",
"LicenseRef-scancode-intel-osl-1993",
"HPND-sell-variant",
"ICU",
"LicenseRef-scancode-python-cwi",
"LicenseRef-scancode-generic-exception",
"LGPL-2.1-or-later",
"SMLNJ",
"BSD-3-Clause",
"GFDL-1.1-or-later",
"HP-1986",
"LicenseRef-scancode-free-unknown",
"CC0-1.0",
"GPL-2.0-or-later",
"LicenseRef-scancode-unicode",
"LicenseRef-scancode-nilsson-historical",
"LicenseRef-scancode-other-copyleft",
"GPL-3.0-only",
"LGPL-3.0-only",
"GFDL-1.1-only",
"LicenseRef-scancode-unknown-license-reference",
"LicenseRef-scancode-warranty-disclaimer",
"SunPro",
"NTP",
"MPL-1.1",
"W3C",
"GPL-1.0-or-later",
"CPL-1.0",
"NPL-1.1",
"PSF-2.0",
"SAX-PD",
"LicenseRef-scancode-newlib-historical",
"LicenseRef-scancode-protobuf",
"Spencer-94",
"GPL-2.0-only",
"BSL-1.0",
"LGPL-2.0-only",
"LicenseRef-scancode-ietf",
"LGPL-2.1-only",
"LicenseRef-scancode-public-domain",
"LicenseRef-scancode-other-permissive",
"BSD-2-Clause",
"Python-2.0",
"Zlib",
"Classpath-exception-2.0",
"BSD-Source-Code",
"LZMA-exception",
"bzip2-1.0.6",
"NCSA",
"LicenseRef-scancode-proprietary-license",
"GCC-exception-3.1",
"W3C-19980720",
"LicenseRef-scancode-amd-historical",
"LicenseRef-scancode-x11-hanson",
"dtoa",
"MIT",
"LicenseRef-scancode-public-domain-disclaimer"
] |
permissive
|
Lind-Project/native_client
|
2bd32d9515c561d7f60cca10d309a18059b67006
|
842a4f87bf9ba46914a6efe74a94d7168d33b4b2
|
refs/heads/main
| 2023-08-30T02:38:29.827132
| 2023-07-08T17:16:56
| 2023-07-08T17:16:56
| 11,289,145
| 5
| 4
|
BSD-3-Clause
| 2023-08-30T19:04:07
| 2013-07-09T17:38:40
|
C
|
UTF-8
|
Python
| false
| false
| 15,365
|
py
|
# Copyright (c) 2013 The Native Client Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
################################################################################
# File format:
# three columns separated by commas. Each line describes one instruction.
# Notation for argument types and sizes and for opcodes is based on
# AMD64 Architecture Programmer's Manual.
################################################################################
# First column: instruction description.
# Includes name of the instruction and arguments.
#
# Arguments consist of four parts:
# 1. Read/write attribute (optional).
# 2. Argument type.
# 3. Argument size.
# 4. Implicit argument mark (optional).
#
# Read/write attribute:
# ': Instruction does not use this argument (lea or nop).
# =: Instruction reads from this argument.
# !: Instruction writes to this argument.
# &: Instruction reads this argument and writes the result to it.
# By default one- and two-operand instructions are assumed to read all
# operands and store result to the last one, while instructions with
# three or more operands are assumed to read all operands except last one
# which is used to store the result of the execution.
# Possible argument types:
# a: Accumulator: %al/%ax/%eax/%rax/%xmm0 (depending on size).
# c: Counter register: %cl/%cx/%ecx/%rcx (depending on size).
# d: Data register: %dl/%dx/%edx/%rdx (depending on size).
# f: x87 register in opcode (3 least significant bits).
# i: Second immediate value encoded in the instruction.
# o: I/O port in %dx (used in "in"/"out" instructions).
# r: Register in opcode (3 least significant bits plus rex.B).
# t: Top of the x87 stack (%st).
# x: A memory operand addressed by the %ds:(%[er]bx). See "xlat".
# B: General purpose register specified by the VEX/XOP.vvvv field.
# C: Control register specified by the ModRM.reg field.
# D: Debug register specified by the ModRM.reg field.
# E: General purpose register or memory operand specified by the r/m
# field of the ModRM byte. For memory operands, the ModRM byte may
# be followed by a SIB byte to specify one of the indexed
# register-indirect addressing forms.
# G: General purpose register specified by the reg field of ModRM.
# H: YMM or XMM register specified by the VEX/XOP.vvvv field.
# I: Immediate value encoded in the instruction.
# J: The instruction encoding includes a relative offset that is added to
# the rIP.
# L: YMM or XMM register specified using the most-significant 4 bits of
# the last byte of the instruction. In legacy or compatibility mode
# the most significant bit is ignored.
# M: A memory operand specified by the {mod, r/m} field of the ModRM byte.
# ModRM.mod != 11b.
# N: 64-bit MMX register specified by the ModRM.r/m field. The ModRM.mod
# field must be 11b.
# O: The offset of an operand is encoded in the instruction. There is no
# ModRM byte in the instruction encoding. Indexed register-indirect
# addressing using the SIB byte is not supported.
# P: 64-bit MMX register specified by the ModRM.reg field.
# Q: 64-bit MMX-register or memory operand specified by the {mod, r/m}
# field of the ModRM byte. For memory operands, the ModRM byte may
# be followed by a SIB byte to specify one of the indexed
# register-indirect addressing forms.
# R: General purpose register specified by the ModRM.r/m field.
# The ModRM.mod field must be 11b.
# S: Segment register specified by the ModRM.reg field.
# U: YMM/XMM register specified by the ModRM.r/m field.
# The ModRM.mod field must be 11b.
# V: YMM/XMM register specified by the ModRM.reg field.
# W: YMM/XMM register or memory operand specified by the {mod, r/m} field
# of the ModRM byte. For memory operands, the ModRM byte may be
# followed by a SIB byte to specify one of the indexed
# register-indirect addressing forms.
# X: A memory operand addressed by the %ds:%[er]si registers. Used in
# string instructions.
# Y: A memory operand addressed by the %es:%[er]di registers. Used in
# string instructions.
# Possible sizes:
# (no size provided):
# A byte, word, doubleword, or quadword (in 64-bit mode),
# depending on the effective operand size.
# 2: Two bits (see VPERMIL2Px instruction).
# 7: x87 register %st(N).
# b: A byte, irrespective of the effective operand size.
# d: A doubleword (32-bit), irrespective of the effective operand size.
# do: A double octword (256 bits), irrespective of the effective operand
# size.
# dq: A double quadword (128 bits), irrespective of the effective
# operand size.
# fq: A quadra quadword (256 bits), irrespective of the effective
# operand size.
# o: An octword (128 bits), irrespective of the effective operand size.
# p: A 32-bit or 48-bit far pointer, depending on the effective operand
# size.
# pb: A Vector with byte-wide (8-bit) elements (packed byte).
# pd: A double-precision (64-bit) floating-point vector operand (packed
# double-precision).
# pdw: Vector composed of 32-bit doublewords.
# pdwx: Vector composed of 32-bit doublewords. L bit selects 256bit YMM
# registers.
# pdx: A double-precision (64-bit) floating-point vector operand (packed
# double-precision). L bit selects 256bit YMM registers.
# ph: A half-precision (16-bit) floating-point vector operand (packed
# half-precision).
# phx: A half-precision (16-bit) floating-point vector operand (packed
# half-precision). L bit selects 256bit YMM registers.
# pi: Vector composed of 16-bit integers (packed integer).
# pj: Vector composed of 32-bit integers (packed double integer).
# pjx: Vector composed of 32-bit integers (packed double integer).
# L bit selects 256bit YMM registers.
# pk: Vector composed of 8-bit integers (packed half-word integer).
# pkx: Vector composed of 8-bit integers (packed half-word integer).
# L bit selects 256bit YMM registers.
# pq: Vector composed of 64-bit integers (packed quadword integer).
# pqw: Vector composed of 64-bit quadwords (packed quadword).
# pqwx: Vector composed of 64-bit quadwords (packed quadword). L bit
# selects 256bit YMM registers.
# pqx: Vector composed of 64-bit integers (packed quadword integer).
# L bit selects 256bit YMM registers.
# ps: A single-precision floating-point vector operand (packed
# single-precision).
# psx: A single-precision floating-point vector operand (packed
# single-precision). L bit selects 256bit YMM registers.
# pw: Vector composed of 16-bit words (packed word).
# q: A quadword (64-bit), irrespective of the effective operand size.
# r: Register size (32bit in 32bit mode, 64bit in 64bit mode).
# s: Segment register (if register operand).
# s: A 6-byte or 10-byte pseudo-descriptor (if memory operand).
# sb: A scalar 10-byte packed BCD value (scalar BCD).
# sd: A scalar double-precision floating-point operand (scalar double).
# se: A 14-byte or 28-byte x87 environment.
# si: A scalar doubleword (32-bit) integer operand (scalar integer).
# sq: A scalar quadword (64-bit) integer operand (scalar integer).
# sr: A 94-byte or 108-byte x87 state.
# ss: A scalar single-precision floating-point operand (scalar single).
# st: A scalar 80bit-precision floating-point operand (scalar tenbytes).
# sw: A scalar word (16-bit) integer operand (scalar integer).
# sx: A 512-byte extended x87/MMX/XMM state.
# v: A word, doubleword, or quadword (in 64-bit mode), depending on
# the effective operand size.
# w: A word, irrespective of the effective operand size.
# x: Instruction supports both vector sizes (128 bits or 256 bits).
# Size is encoded using the VEX/XOP.L field. (L=0: 128 bits;
# L=1: 256 bits). Usually this symbol is appended to ps or pd, but
# sometimes it is used alone. For gen_dfa psx, pdx and x
# are the same.
# y: A doubleword or quadword depending on effective operand size.
# z: A word if the effective operand size is 16 bits, or a doubleword
# if the effective operand size is 32 or 64 bits.
# Implicit argument mark:
# *: This argument is implicit. It's not shown in the diassembly listing.
################################################################################
# Second column: instruction opcodes.
# Includes all opcode bytes. If first opcode bytes is 0x66/data16,
# 0xf2/repnz, or 0xf3/rep/repz then they can be moved before other prefixes
# (and will be moved before REX prefix if it's allowed). Note: data16, repnz,
# and rep/repz opcodes will set appropriate flags while 0x66, 0xf2, and 0xf3
# will not.
# If part of the opcode is stored in ModRM byte then opcode should include the
# usual "/0", "/1", ..., "/7" "bytes".
# For VEX/XOP instructions it is expected that first three opcode bytes are
# specified in the following form:
# 0xc4 (or 0x8f)
# RXB.<map_select>
# <W>.<vvvv>.<L>.<pp>
# (so they describe long form of VEX prefix; short form is deduced
# automatically when appropriate)
################################################################################
# Third column: additional instruction notes.
# Different kind of notes for the instruction: non-typical prefixes (for
# example "lock" prefix or "rep" prefix), CPUID checks, etc.
#
# Possible prefixes:
# branch_hint: branch hint prefixes are allowed (0x2E, 0x3E)
# condrep: prefixes "repnz" and "repz" are allowed for the instruction
# lock: prefix "lock" is allowed for the instruction
# rep: prefix "rep" is allowed for the instruction (it's alias of "repz")
# no_memory_access: command does not access memory in detectable way: lea,
# nop, prefetch* instructions...
# norex: "rex" prefix can not be used with this instruction (various "nop"
# instructions use this flag)
# norexw: "rex.W" can not be used with this instruction (usually used when
# instruction with "rex.W" have a different name: e.g. "movd"/"movq")
#
# Instruction enabling/disabling:
# ia32: ia32-only instruction
# amd64: amd64-only instruction
# nacl-forbidden: instruction is not supported in NaCl sandbox
# nacl-ia32-forbidden: instruction is not supported in ia32 NaCl sandbox
# nacl-amd64-forbidden: instruction is not supported in amd64 NaCl sandbox
#
# Special marks:
# nacl-amd64-zero-extends: instruction can be used to zero-extend register
# in amd64 mode
# nacl-amd64-modifiable: instruction can be modified in amd64 mode
# att-show-name-suffix-{b,l,ll,t,s,q,x,y,w}: instruction is shown with the
# given suffix by objdump in AT&T mode
#
# CPU features are defined in validator_internal.h.
################################################################################
# Technically, columns are separated with mere ',' followed by spaces for
# readability, but there are quoted instruction names that include commas
# not followed by spaces (see nops.def).
# For simplicity I choose to rely on this coincidence and use split-based parser
# instead of proper recursive descent one.
# If by accident somebody put ', ' in quoted instruction name, it will fail
# loudly, because closing quote then will fall into second or third column and
# will cause parse error.
# TODO(shcherbina): use for column separator something that is never encountered
# in columns, like semicolon?
COLUMN_SEPARATOR = ', '
SUPPORTED_ATTRIBUTES = [
# Parsing attributes.
'branch_hint',
'condrep',
'lock',
'no_memory_access',
'norex',
'norexw',
'rep',
# CPUID attributes.
'CPUFeature_3DNOW',
'CPUFeature_3DPRFTCH',
'CPUFeature_AES',
'CPUFeature_AESAVX',
'CPUFeature_ALTMOVCR8',
'CPUFeature_AVX',
'CPUFeature_BMI1',
'CPUFeature_CLFLUSH',
'CPUFeature_CLMUL',
'CPUFeature_CLMULAVX',
'CPUFeature_CMOV',
'CPUFeature_CMOVx87',
'CPUFeature_CX16',
'CPUFeature_CX8',
'CPUFeature_E3DNOW',
'CPUFeature_EMMX',
'CPUFeature_EMMXSSE',
'CPUFeature_F16C',
'CPUFeature_FMA',
'CPUFeature_FMA4',
'CPUFeature_FXSR',
'CPUFeature_LAHF',
'CPUFeature_LWP',
'CPUFeature_LZCNT',
'CPUFeature_MMX',
'CPUFeature_MON',
'CPUFeature_MOVBE',
'CPUFeature_MSR',
'CPUFeature_POPCNT',
'CPUFeature_SEP',
'CPUFeature_SFENCE',
'CPUFeature_SKINIT',
'CPUFeature_SSE',
'CPUFeature_SSE2',
'CPUFeature_SSE3',
'CPUFeature_SSE41',
'CPUFeature_SSE42',
'CPUFeature_SSE4A',
'CPUFeature_SSSE3',
'CPUFeature_SVM',
'CPUFeature_SYSCALL',
'CPUFeature_TBM',
'CPUFeature_TSC',
'CPUFeature_TSCP',
'CPUFeature_TZCNT',
'CPUFeature_x87',
'CPUFeature_XOP',
# Attributes for enabling/disabling based on architecture and validity.
'ia32',
'amd64',
'nacl-ia32-forbidden',
'nacl-amd64-forbidden',
'nacl-forbidden',
'nacl-amd64-zero-extends',
'nacl-amd64-modifiable',
# AT&T Decoder attributes.
'att-show-name-suffix-b',
'att-show-name-suffix-l',
'att-show-name-suffix-ll',
'att-show-name-suffix-t',
'att-show-name-suffix-s',
'att-show-name-suffix-q',
'att-show-name-suffix-x',
'att-show-name-suffix-y',
'att-show-name-suffix-w',
]
class OperandReadWriteMode(object):
UNUSED = '\''
READ = '='
WRITE = '!'
READ_WRITE = '&'
class OperandType(object):
AX = 'a'
CX = 'c'
DX = 'd'
IMMEDIATE = 'I'
SECOND_IMMEDIATE = 'i'
CONTROL_REGISTER = 'C' # in ModRM.reg
DEBUG_REGISTER = 'D' # in ModRM.reg
REGISTER_IN_OPCODE = 'r'
X87_REGISTER_IN_OPCODE = 'f'
X87_ST = 't' # st0 that objdump displays as 'st'
ABSOLUTE_DISP = 'O'
RELATIVE_TARGET = 'J'
REGISTER_IN_RM = 'R'
REGISTER_IN_REG = 'G'
REGISTER_OR_MEMORY = 'E' # in ModRM.mod and .r/m
MEMORY = 'M' # in ModRM.mod and .r/m
SEGMENT_REGISTER_IN_REG = 'S'
MMX_REGISTER_IN_RM = 'N'
MMX_REGISTER_IN_REG = 'P'
MMX_REGISTER_OR_MEMORY = 'Q' # in ModRM.mod and .r/m
XMM_REGISTER_IN_RM = 'U'
XMM_REGISTER_IN_REG = 'V'
XMM_REGISTER_OR_MEMORY = 'W' # in ModRM.mod and .r/m
XMM_REGISTER_IN_LAST_BYTE = 'L' # most-significant 4 bits
DS_SI = 'X'
ES_DI = 'Y'
DS_BX = 'x'
REGISTER_IN_VVVV = 'B'
XMM_REGISTER_IN_VVVV = 'H'
PORT_IN_DX = 'o'
ALL_OPERAND_TYPES = set(
v for k, v in OperandType.__dict__.items() if not k.startswith('__'))
|
[
"alexatnt@gmail.com"
] |
alexatnt@gmail.com
|
be741d8c4d02fa61714427705c1be75f720b733a
|
2a7f2c4b7b88dfcbd9829c97e5fcf60644b8e48e
|
/HackerRank/string.py
|
346bbd23a74b6936c1bf15bfc67b05f006152aea
|
[] |
no_license
|
Suprit5/synerzip_training
|
c2ce5730b99497f3a7385d1baaa8f089e8686a2f
|
9f44dd58e0694bb2d5c64fc578c5c563b09a177b
|
refs/heads/main
| 2023-08-23T03:25:50.810777
| 2021-10-22T13:37:49
| 2021-10-22T13:37:49
| 389,588,676
| 0
| 0
| null | 2021-08-30T08:18:01
| 2021-07-26T10:05:56
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 225
|
py
|
"""Without using any string methods, try to print the following: 12345 take a number as an input and generate
a string. """
if __name__ == '__main__':
n = int(input())
for i in range(1,n+1):
print(i,end="")
|
[
"suprit.salvi@excellarate.com"
] |
suprit.salvi@excellarate.com
|
3ec9e8e8374c23ac85e08d073f049884aea233ec
|
f9d564f1aa83eca45872dab7fbaa26dd48210d08
|
/huaweicloud-sdk-lts/huaweicloudsdklts/v2/model/topn_requst_body.py
|
52832c2253e608c1695e1e8662a2ae9043c51ca3
|
[
"Apache-2.0"
] |
permissive
|
huaweicloud/huaweicloud-sdk-python-v3
|
cde6d849ce5b1de05ac5ebfd6153f27803837d84
|
f69344c1dadb79067746ddf9bfde4bddc18d5ecf
|
refs/heads/master
| 2023-09-01T19:29:43.013318
| 2023-08-31T08:28:59
| 2023-08-31T08:28:59
| 262,207,814
| 103
| 44
|
NOASSERTION
| 2023-06-22T14:50:48
| 2020-05-08T02:28:43
|
Python
|
UTF-8
|
Python
| false
| false
| 9,339
|
py
|
# coding: utf-8
import six
from huaweicloudsdkcore.utils.http_utils import sanitize_for_serialization
class TopnRequstBody:
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
sensitive_list = []
openapi_types = {
'end_time': 'int',
'is_desc': 'bool',
'resource_type': 'str',
'sort_by': 'str',
'start_time': 'int',
'topn': 'int',
'filter': 'dict(str, str)',
'search_list': 'list[str]'
}
attribute_map = {
'end_time': 'end_time',
'is_desc': 'is_desc',
'resource_type': 'resource_type',
'sort_by': 'sort_by',
'start_time': 'start_time',
'topn': 'topn',
'filter': 'filter',
'search_list': 'search_list'
}
def __init__(self, end_time=None, is_desc=None, resource_type=None, sort_by=None, start_time=None, topn=None, filter=None, search_list=None):
"""TopnRequstBody
The model defined in huaweicloud sdk
:param end_time: 结束时间时间戳,毫秒时间
:type end_time: int
:param is_desc: 是否降序 true / false
:type is_desc: bool
:param resource_type: 资源类型,log_group / log_stream / tenant
:type resource_type: str
:param sort_by: 排序依据,index/write/storage,必须是search_list中存在的数据
:type sort_by: str
:param start_time: 开始时间时间戳,毫秒时间,最多支持30天范围内的查询
:type start_time: int
:param topn: 查询前多少数据,范围1~100
:type topn: int
:param filter: 过滤条件 { \"log_group_id\": \"xxxxxx\" }过滤器,为一个map结构,键为过滤属性,值为属性值,不支持模糊匹配
:type filter: dict(str, str)
:param search_list: 查询数据类型,字符串数组可多种搭配,只能在index/write/storage中选填
:type search_list: list[str]
"""
self._end_time = None
self._is_desc = None
self._resource_type = None
self._sort_by = None
self._start_time = None
self._topn = None
self._filter = None
self._search_list = None
self.discriminator = None
self.end_time = end_time
self.is_desc = is_desc
self.resource_type = resource_type
self.sort_by = sort_by
self.start_time = start_time
self.topn = topn
self.filter = filter
self.search_list = search_list
@property
def end_time(self):
"""Gets the end_time of this TopnRequstBody.
结束时间时间戳,毫秒时间
:return: The end_time of this TopnRequstBody.
:rtype: int
"""
return self._end_time
@end_time.setter
def end_time(self, end_time):
"""Sets the end_time of this TopnRequstBody.
结束时间时间戳,毫秒时间
:param end_time: The end_time of this TopnRequstBody.
:type end_time: int
"""
self._end_time = end_time
@property
def is_desc(self):
"""Gets the is_desc of this TopnRequstBody.
是否降序 true / false
:return: The is_desc of this TopnRequstBody.
:rtype: bool
"""
return self._is_desc
@is_desc.setter
def is_desc(self, is_desc):
"""Sets the is_desc of this TopnRequstBody.
是否降序 true / false
:param is_desc: The is_desc of this TopnRequstBody.
:type is_desc: bool
"""
self._is_desc = is_desc
@property
def resource_type(self):
"""Gets the resource_type of this TopnRequstBody.
资源类型,log_group / log_stream / tenant
:return: The resource_type of this TopnRequstBody.
:rtype: str
"""
return self._resource_type
@resource_type.setter
def resource_type(self, resource_type):
"""Sets the resource_type of this TopnRequstBody.
资源类型,log_group / log_stream / tenant
:param resource_type: The resource_type of this TopnRequstBody.
:type resource_type: str
"""
self._resource_type = resource_type
@property
def sort_by(self):
"""Gets the sort_by of this TopnRequstBody.
排序依据,index/write/storage,必须是search_list中存在的数据
:return: The sort_by of this TopnRequstBody.
:rtype: str
"""
return self._sort_by
@sort_by.setter
def sort_by(self, sort_by):
"""Sets the sort_by of this TopnRequstBody.
排序依据,index/write/storage,必须是search_list中存在的数据
:param sort_by: The sort_by of this TopnRequstBody.
:type sort_by: str
"""
self._sort_by = sort_by
@property
def start_time(self):
"""Gets the start_time of this TopnRequstBody.
开始时间时间戳,毫秒时间,最多支持30天范围内的查询
:return: The start_time of this TopnRequstBody.
:rtype: int
"""
return self._start_time
@start_time.setter
def start_time(self, start_time):
"""Sets the start_time of this TopnRequstBody.
开始时间时间戳,毫秒时间,最多支持30天范围内的查询
:param start_time: The start_time of this TopnRequstBody.
:type start_time: int
"""
self._start_time = start_time
@property
def topn(self):
"""Gets the topn of this TopnRequstBody.
查询前多少数据,范围1~100
:return: The topn of this TopnRequstBody.
:rtype: int
"""
return self._topn
@topn.setter
def topn(self, topn):
"""Sets the topn of this TopnRequstBody.
查询前多少数据,范围1~100
:param topn: The topn of this TopnRequstBody.
:type topn: int
"""
self._topn = topn
@property
def filter(self):
"""Gets the filter of this TopnRequstBody.
过滤条件 { \"log_group_id\": \"xxxxxx\" }过滤器,为一个map结构,键为过滤属性,值为属性值,不支持模糊匹配
:return: The filter of this TopnRequstBody.
:rtype: dict(str, str)
"""
return self._filter
@filter.setter
def filter(self, filter):
"""Sets the filter of this TopnRequstBody.
过滤条件 { \"log_group_id\": \"xxxxxx\" }过滤器,为一个map结构,键为过滤属性,值为属性值,不支持模糊匹配
:param filter: The filter of this TopnRequstBody.
:type filter: dict(str, str)
"""
self._filter = filter
@property
def search_list(self):
"""Gets the search_list of this TopnRequstBody.
查询数据类型,字符串数组可多种搭配,只能在index/write/storage中选填
:return: The search_list of this TopnRequstBody.
:rtype: list[str]
"""
return self._search_list
@search_list.setter
def search_list(self, search_list):
"""Sets the search_list of this TopnRequstBody.
查询数据类型,字符串数组可多种搭配,只能在index/write/storage中选填
:param search_list: The search_list of this TopnRequstBody.
:type search_list: list[str]
"""
self._search_list = search_list
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.openapi_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
if attr in self.sensitive_list:
result[attr] = "****"
else:
result[attr] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
import simplejson as json
if six.PY2:
import sys
reload(sys)
sys.setdefaultencoding("utf-8")
return json.dumps(sanitize_for_serialization(self), ensure_ascii=False)
def __repr__(self):
"""For `print`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, TopnRequstBody):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"hwcloudsdk@huawei.com"
] |
hwcloudsdk@huawei.com
|
2158b73ea96bba1e5fd97a6ecb874e61b8f23c14
|
9b476bc2491c132006bbd04c74e95d1924065dbd
|
/linenum.py
|
d2816d020c2da5f6c186957e99456cef78d61f0a
|
[] |
no_license
|
eoinpayne/Python
|
6b949e78f574645aa0b151eaa8abff1c8632d2ba
|
5e7fa7d30da5ace3821af8d19f9885481008dfcf
|
refs/heads/master
| 2021-01-10T12:38:19.958363
| 2016-02-12T18:38:42
| 2016-02-12T18:38:42
| 51,600,935
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 609
|
py
|
# def pickRandomWord(fname):
# wordList = []
# with open(fname) as inputfile:
# for line in inputfile:
# wordList.append(line.split())
# wordIndex = random.randint(0, len(wordList)-1)
# word = wordList[wordIndex]
# print (word)
# return word
def num_of_lines(fname):
word_list = []
with open(fname) as f:
list = f.read().split()
i = 0
for position in list:
print(i, position, sep=".")
#print(i, position, end="")
i += 1
def main():
name_of_file = "dictionary.txt"
num_of_lines(name_of_file)
main()
|
[
"eoinpayne@gmail.com"
] |
eoinpayne@gmail.com
|
18da2eb4f0e9777641e47be30bb0dc84f192d33b
|
9ba9b0ada9de96d4aa9ed554d0736e5c399eabd3
|
/restaurants/urls.py
|
c65bdc718c295cf8950fc5aeeb907f1025991994
|
[] |
no_license
|
protechsumer/Restaurant-Web-App-Django-Based-
|
91ce46e2d13266de7827ed40d7aa348be5a9cc60
|
a2bcbd0b091a8296af9f8423908b9bf12a66f3f6
|
refs/heads/master
| 2020-05-16T04:15:59.856750
| 2019-04-28T12:50:37
| 2019-04-28T12:50:37
| 182,770,430
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 497
|
py
|
from django.conf.urls import url
from restaurants.views import (
RestaurantListView,
RestaurantDetailView,
RestaurantCreateView,
RestaurantUpdateView,
)
urlpatterns = [
url(r'^create/$', RestaurantCreateView.as_view(), name='create'),
#url(r'^(?P<slug>[\w-]+)/edit/$', RestaurantUpdateView.as_view(), name='edit'),
url(r'^(?P<slug>[\w-]+)/$', RestaurantUpdateView.as_view(), name='detail'),
url(r'^$', RestaurantListView.as_view(), name='list'),
]
|
[
"noreply@github.com"
] |
protechsumer.noreply@github.com
|
d2d05c5c0a892a0e4e15f1184c828cf5d428985b
|
84f7e5597c81164f471ea652c4adbc8c3eddcce1
|
/collectors/utils/__init__.py
|
4671a13cfaf40637b57635919382e28eb0ce771d
|
[
"MIT"
] |
permissive
|
alvesmatheus/fala-camarada
|
4603e99873d1d83c3b191ef9fd45d20fdebb79ef
|
47015fe95422d5f71c279e47edacdd31ea3f71b8
|
refs/heads/main
| 2023-06-12T01:15:24.824164
| 2021-07-01T18:06:56
| 2021-07-01T18:06:56
| 338,142,237
| 12
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 79
|
py
|
from collectors.utils import constants
from collectors.utils import processors
|
[
"matheus.santos@ccc.ufcg.edu.br"
] |
matheus.santos@ccc.ufcg.edu.br
|
941ce01d679ae8301637252f93d4fb0efcc4fea7
|
a7ee846bad9f41652e56bfd62df34fe8c25d334e
|
/lab3/lab_1.py
|
014668982d7669b4f0d6a16da8d4bdd1470d3a4e
|
[] |
no_license
|
tuanlinh31/nguyentuanlinh-lab-C4E22
|
04c4650fb0ff8998121a055706c828e9f9fb8022
|
8ed9866ed0123ef2e69cf5919a42b0c8b303534f
|
refs/heads/master
| 2020-03-31T18:01:28.028161
| 2018-10-20T04:31:21
| 2018-10-20T04:31:21
| 152,443,382
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 433
|
py
|
x = int(input('your number? '))
y = int(input('your number? '))
s = x + y
output = '{0} + {1} = {2}'.format(x, y, s)
print(output)
# r = 'your result: '
# while True:
# if s == '-':
# r = x -y
# elif s == '+':
# r = x + y
# elif s == '*':
# r = x * y
# elif s == '/':
# r = x / y
# else:
# print('Error')
# print(r)
# break
|
[
"nguyentuanlinh311@gmail.com"
] |
nguyentuanlinh311@gmail.com
|
3e632daee89c023fc998b1933418819e694980a8
|
ba044caa8352b06c1e2e3aef52b80d1f30f8527d
|
/video/views.py
|
156feb798964369b44b59306f68ce2685d313592
|
[] |
no_license
|
SkylerMcCoy/twistednation
|
305b82acf10bd6a322b9c42b7852f259d3662af0
|
f9443cfc65cf6ecec8a4a19066c9d87863901ac3
|
refs/heads/master
| 2022-11-24T17:42:36.237500
| 2020-06-24T20:54:10
| 2020-06-24T20:54:10
| 171,540,252
| 0
| 1
| null | 2022-11-22T00:19:49
| 2019-02-19T19:58:13
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,950
|
py
|
# Create your views here.
from django.contrib.auth.decorators import login_required
from django.template import RequestContext
from django.shortcuts import get_object_or_404,render_to_response
from django.http import HttpResponseRedirect,HttpResponse
from video.forms import UploadVideoForm
from members.models import Member
from video.models import Video
import os
import datetime
import string
import random
@login_required
def upload(request):
userobject=get_object_or_404(Member,user=request.user)
videos=Video.objects.filter(user=userobject)
if request.method == 'POST':
form = UploadVideoForm(request.POST, request.FILES)
f=request.FILES['file']
fileext=os.path.splitext(f.name)[1]
print f.name+"<<filename with extension"
print f.content_type+"<<content type"
print os.path.splitext(f.name)[0]+"<<filename alone"
print os.path.splitext(f.name)[1]+"<<file extension alone"
filename=''.join(random.choice(string.ascii_lowercase + string.ascii_uppercase + string.digits) for x in range(80))
originalfilename=str(userobject.id)+"-"+datetime.datetime.now().strftime("%Y-%m-%d-%H-%M-%S")+"-"+filename+fileext
if form.is_valid() and (fileext==".mp4" or fileext==".flv"):
with open('ups/videos/'+originalfilename, 'wb+') as destination:
for chunk in f.chunks():
destination.write(chunk)
instance=form.save(commit=False)
instance.user=userobject
instance.filename=originalfilename
instance.timestamp=datetime.datetime.now()
instance.save()
return HttpResponseRedirect('/video/')
else:
return HttpResponse("Invalid file dude")
else:
form = UploadVideoForm()
return render_to_response('user_pages/video/allvideos.html', {'form': form,'videos':videos},context_instance=RequestContext(request))
@login_required
def viewvid(request,vid_id):
video=Video.objects.get(pk=vid_id)
return render_to_response('user_pages/video/video.html', {'video':video},context_instance=RequestContext(request))
|
[
"skylerdmccoy@gmail.com"
] |
skylerdmccoy@gmail.com
|
be1fe40dad651838e8247767372e2dbcc019ddcd
|
23f15e29dace8718d3256ea5d3fba07dc354f62e
|
/Problem Solving/factors.py
|
eb854be63fc2b7305b9a007bac85a1d1a5a68d3d
|
[] |
no_license
|
ATSivapraneeth/Problem-Solving
|
d46fcf2981d26b3ebe011e92fa77bfd399d87778
|
f6530bdcf93b9109a83ea3b1d0dbfccaa5902b5b
|
refs/heads/master
| 2022-12-31T22:35:54.816576
| 2020-10-24T17:53:56
| 2020-10-24T17:53:56
| 306,932,966
| 0
| 0
| null | 2020-10-24T17:53:57
| 2020-10-24T17:12:36
|
C++
|
UTF-8
|
Python
| false
| false
| 162
|
py
|
t=int(input())
for i in range(t):
n,k=map(int,input().split())
for i in range(k):
for i in range(2,n+1):
if(n%i==0):
n+=i
break
print(n)
|
[
"noreply@github.com"
] |
ATSivapraneeth.noreply@github.com
|
8a2f60ef854e4e5b320b224330d00405d070ddb1
|
116a4a2fcd3e9c3d216f96103006c707daa6001a
|
/HelloDjango/apps/forum/migrations/0004_facilitator_company_logo.py
|
d8e1060f29ed1ebd636086d4efd02d7f536c8b8a
|
[] |
no_license
|
Eldar1988/a_white_birds
|
22d743ed1fa651062f070c0e81b7ac665be7a72a
|
0430d5322b3a55b6f55e9541675d6670f5d8a518
|
refs/heads/master
| 2022-12-18T20:23:26.293059
| 2020-09-15T04:27:59
| 2020-09-15T04:27:59
| 283,169,602
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 454
|
py
|
# Generated by Django 3.0.6 on 2020-08-19 05:32
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('forum', '0003_auto_20200819_1117'),
]
operations = [
migrations.AddField(
model_name='facilitator',
name='company_logo',
field=models.ImageField(null=True, upload_to=None, verbose_name='Логотип компании'),
),
]
|
[
"elfarych@gmail.com"
] |
elfarych@gmail.com
|
2f6175c493b3685c1e21524ba18a7fc9c9983405
|
4d08da497335139370032bd32b1250c545d7aa10
|
/app/modules/auth/resources.py
|
bfaca75be302253d8539c00412fd9cbd514c8b13
|
[
"MIT"
] |
permissive
|
crivero7/flask-restplus-example
|
b6d4b6ee446d2676810fbeaaf35dd6181ae83f70
|
ff8754e1eff1989a61e2ba14e3f1c661d258d5d9
|
refs/heads/master
| 2022-12-04T16:28:48.573860
| 2020-08-26T15:48:36
| 2020-08-26T15:48:36
| 290,536,450
| 0
| 0
|
MIT
| 2020-08-26T15:36:38
| 2020-08-26T15:36:37
| null |
UTF-8
|
Python
| false
| false
| 2,050
|
py
|
# -*- coding: utf-8 -*-
# app/api/auth.py
from flask import request
from flask_restplus import Namespace, Resource, fields
from playhouse.shortcuts import model_to_dict
from app.extensions.api import api
from app.modules.auth import User
from app.modules.api.utils import token_required
ns = Namespace('auth', description='Namespace for authentication')
login_model = api.model("login_model", {
'username': fields.String(required=True, description='Username'),
'password': fields.String(required=True, description='Password')
})
signup_model = api.model("signup_model", {
'username': fields.String(required=True, description='Username'),
'password': fields.String(required=True, description='Password'),
'email': fields.String(required=True, description='Email')
})
@ns.route('/signup')
class AuthSignUpResource(Resource):
@ns.expect(signup_model)
def post(self):
payload = api.payload
username = payload["username"]
if User.verify_username(username):
return {"message": "User already exist"}, 401
user = User.create(**payload)
response = {
"message": "User created"
}
return response
@ns.route('/login')
class AuthLoginResource(Resource):
@ns.expect(login_model)
def post(self):
payload = api.payload
username = payload["username"]
password = payload["password"]
if not User.verify_username(username):
return {"message": "Invalid credentials"}, 401
if not User.login(username, password):
return {"message": "Invalid credentials"}, 401
key = User.get_key(username)
response = {
"api_key": key
}
return response
@ns.route('/logout')
class AuthLogoutResource(Resource):
@api.doc(security='apikey')
@token_required
def get(self):
key = request.headers['X-API-KEY']
User.delete_key(key)
return {"message": "Logout succesfully"}
|
[
"carrasquel@outlook.com"
] |
carrasquel@outlook.com
|
24e2bb977b68bbf01d06f67c83eaee8d62269f70
|
6e3cf4b7c0fdcebcbfe8fe4e96de36057004f97b
|
/tut-2/main.py
|
af9e2989357f6021ca1aab4a9b2d4b2ce6fc66fb
|
[
"MIT"
] |
permissive
|
Melikakmm/Wrapping-Cpp-using-Cython
|
7ff4264603c2147f96d9695699b1bf641084fcab
|
3ca063c75ef88515d8c888b2cf8c68f4fdb5deb9
|
refs/heads/master
| 2023-03-24T04:43:17.315118
| 2016-05-20T07:59:04
| 2016-05-20T07:59:04
| 589,746,469
| 1
| 0
|
MIT
| 2023-01-16T20:53:20
| 2023-01-16T20:53:19
| null |
UTF-8
|
Python
| false
| false
| 27
|
py
|
import test
print dir(test)
|
[
"kuldeepbb.grewal@gmail.com"
] |
kuldeepbb.grewal@gmail.com
|
6e3a74384f2f0981f274b6296e6136875248b06c
|
e4cbe721398f8757f346ca7d29fea3125343c38b
|
/py35_django/app01/urls.py
|
bb158b9a0d41de36a23922269479ff0d53869e22
|
[] |
no_license
|
Rambotien/oldboy_python
|
da68a713e6196a5d87bb1766aa86bb9bde68266f
|
756c2461736250dfbdbcb5b9ec4fa27b3bd36a7a
|
refs/heads/master
| 2020-04-05T23:14:19.406352
| 2016-08-07T06:57:37
| 2016-08-07T06:57:37
| 64,648,556
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 826
|
py
|
"""py35_django URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url
from django.contrib import admin
from app01 import views
urlpatterns = [
url(r'^home/', views.home),
url(r'^db_handle/', views.db_handle),
]
|
[
"tech@zhiyong.wiki"
] |
tech@zhiyong.wiki
|
5c0287582c4b41c560018b8557dec63482aa7ac7
|
1cbd68789158185fdeb11a7d866ede449f1359ae
|
/setup.py
|
7288987cca0b6f6aff848a1ba7b2c30aa23b9fc8
|
[
"Apache-2.0"
] |
permissive
|
markusj1201/domovoi
|
9cf368044cb3a488624810fed3e18b16d0f2fad5
|
810375c2671a8e1062e487697df885edb61b9630
|
refs/heads/master
| 2020-05-12T23:51:23.638053
| 2019-04-15T21:07:49
| 2019-04-15T21:07:49
| 181,567,182
| 1
| 0
| null | 2019-04-15T21:17:25
| 2019-04-15T21:17:24
| null |
UTF-8
|
Python
| false
| false
| 1,322
|
py
|
#!/usr/bin/env python
import glob
from setuptools import setup, find_packages
setup(
name="domovoi",
version="2.0.1",
url='https://github.com/kislyuk/domovoi',
license='Apache Software License',
author='Andrey Kislyuk',
author_email='kislyuk@gmail.com',
description='AWS Lambda event handler manager',
long_description=open('README.rst').read(),
install_requires=[
'boto3 >= 1.7.19, < 2',
'chalice >= 1.3.0, < 2'
],
extras_require={
':python_version == "2.7"': ['enum34 >= 1.1.6, < 2']
},
packages=find_packages(exclude=['test']),
scripts=glob.glob('scripts/*'),
platforms=['MacOS X', 'Posix'],
package_data={'domovoi': ['*.json']},
zip_safe=False,
include_package_data=True,
test_suite='test',
classifiers=[
'Intended Audience :: Developers',
'License :: OSI Approved :: Apache Software License',
'Operating System :: MacOS :: MacOS X',
'Operating System :: POSIX',
'Programming Language :: Python',
'Programming Language :: Python :: 2.7',
'Programming Language :: Python :: 3.3',
'Programming Language :: Python :: 3.4',
'Programming Language :: Python :: 3.5',
'Topic :: Software Development :: Libraries :: Python Modules'
]
)
|
[
"kislyuk@gmail.com"
] |
kislyuk@gmail.com
|
ac261bd8487632332052246525185c82a1810cf2
|
cc03e0d7ceb6ca5e37c239a5c354cad36c78ade6
|
/worldpop/worldpop/spiders/worldpop_spider.py
|
b639d44966e7cc9afb5519eeaba0c9f1c8381748
|
[] |
no_license
|
randypantinople/Analysis-of-Starbucks-Global-Presence
|
87235315e048baea7f0ead7f1ddfa48ecd915c38
|
5516ae5095bf07d2936c57da0704ae3af0a0bfc8
|
refs/heads/master
| 2022-11-26T09:31:03.309927
| 2020-08-01T16:23:05
| 2020-08-01T16:23:05
| 281,262,650
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,367
|
py
|
from scrapy import Spider, Request
from worldpop.items import WorldpopItem
class WorldpopSpider(Spider):
name ='worldpop_spider'
allowed_urls =["https://www.worldometers.info"]
start_urls=["https://www.worldometers.info/world-population/population-by-country/"]
def parse(self, response):
# Find all the table rows
rows = response.xpath('//*[@id="example2"]/tbody/tr')
for row in rows:
country = row.xpath('./td[2]//text()').extract_first()
population=row.xpath('./td[3]//text()').extract_first()
yearly_change = row.xpath('./td[4]//text()').extract_first()
net_change = row.xpath('./td[5]//text()').extract_first()
density = row.xpath('./td[6]//text()').extract_first()
land_area = row.xpath('./td[7]//text()').extract_first()
migrants_net = row.xpath('./td[8]//text()').extract_first()
# Initialize a new WorldpopItem instance for each country.
item = WorldpopItem()
item['country'] = country
item['population'] = population
item['yearly_change'] = yearly_change
item['net_change'] = net_change
item['density'] = density
item['land_area'] = land_area
item['migrants_net'] = migrants_net
yield item
|
[
"randypantinople@yahoo.com"
] |
randypantinople@yahoo.com
|
223ddc86fda1894da753558ed69baa6edb4c8260
|
599d6aeace70c763c792ed17f61af1771b3dcc2c
|
/verlet.py
|
d79cac01c9e6428bc77093654a8baf43318faa06
|
[] |
no_license
|
genjix/hypatia
|
b4c4dd9fce30fb267338dfdf0ceeb175da793ec9
|
f485454252be4543c1e829720c4d6a6b728c83b5
|
refs/heads/master
| 2021-01-13T02:22:46.163698
| 2010-08-14T13:59:05
| 2010-08-14T13:59:05
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
# -*- coding: utf-8 -*-
# verlet
acc = 0
vel = 0
pos = 0
for t in xrange(10):
h = 1.0
pos = pos + vel*h + acc*(h*h)/2.0
velhalfw = vel + acc*h/2.0
acc = (t + 1) * 2 / 5.0
if acc > 2:
acc = 2
vel = velhalfw + acc*h/2.0
print pos, vel, acc
# euler
acc = 0
vel = 0
pos = 0
for t in xrange(10):
oldvel = vel
for h in xrange(100):
acc = (t + 1) * 2 / 5.0
if acc > 2:
acc = 2
vel += acc*0.01
pos += oldvel*0.01
print pos, vel, acc
|
[
"aphidia@hotmail.com"
] |
aphidia@hotmail.com
|
d826c57836094719e54e91ffe320500037cc1a16
|
2736c0baf290827edf6d2a705d18d09a3c4c674b
|
/openselfsup/hooks/byol_hook.py
|
38da543619897456aabc2c05702fd408106a0d86
|
[
"Apache-2.0"
] |
permissive
|
sshaoshuai/OpenSelfSup
|
81beecb8c32aaa0ef069e117de6e38100b18d7af
|
9a579fde4e38a96fe0121b01e3a7e83492ad2217
|
refs/heads/master
| 2022-11-13T06:39:54.870887
| 2020-06-30T06:20:35
| 2020-06-30T06:20:35
| 276,158,957
| 3
| 0
|
Apache-2.0
| 2020-06-30T16:55:05
| 2020-06-30T16:55:04
| null |
UTF-8
|
Python
| false
| false
| 1,005
|
py
|
from math import cos, pi
from mmcv.runner import Hook
from .registry import HOOKS
@HOOKS.register_module
class BYOLHook(Hook):
'''Hook in BYOL
This hook including momentum adjustment in BYOL following:
m = 1 - ( 1- m_0) * (cos(pi * k / K) + 1) / 2,
k: current step, K: total steps.
'''
def __init__(self, end_momentum=1., **kwargs):
self.end_momentum = end_momentum
def before_train_iter(self, runner):
assert hasattr(runner.model.module, 'momentum'), \
"The runner must have attribute \"momentum\" in BYOLHook."
assert hasattr(runner.model.module, 'base_momentum'), \
"The runner must have attribute \"base_momentum\" in BYOLHook."
cur_iter = runner.iter
max_iter = runner.max_iters
base_m = runner.model.module.base_momentum
m = self.end_momentum - (self.end_momentum - base_m) * (
cos(pi * cur_iter / float(max_iter)) + 1) / 2
runner.model.module.momentum = m
|
[
"xiaohangzhan@outlook.com"
] |
xiaohangzhan@outlook.com
|
b8d4b673919a1f01295b56f407df7bf7811aa42f
|
85f52de727f72db30a4fc4161fc2414cd72035d8
|
/09day/03-创建能显示的游戏窗口.py
|
5aa96db723311c16838e725ab097099515b5e508
|
[] |
no_license
|
ittoyou/-2-
|
ff7ca3bfd9428ac6f3ba5332a4c62825c5d30dcd
|
2c988c2f996221e86e1bbbeb9b3e96da25fe8f92
|
refs/heads/master
| 2020-03-24T16:57:05.220482
| 2018-07-30T08:08:59
| 2018-07-30T08:08:59
| 142,844,024
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 114
|
py
|
import pygame
pygame.init()
screen = pygame.display.set_mode((480, 700))
while True:
pass
pygame.quit()
|
[
"429013601@qq.com"
] |
429013601@qq.com
|
d4347652ff9d1cefa2e6825d465cc291214c64aa
|
54a4bbb3ba3108b86490b2a857c45e9fde30cc03
|
/reducer.py
|
3da6f54b5e14420381a5a6e930947163c739a58b
|
[] |
no_license
|
vincenre/CCHWAssignment4
|
ed3c7da885bd3d0187aad08a820a026e1ab26c64
|
ad68a338b2d98c2cc2a3a6da3e6bdcde1f09e715
|
refs/heads/master
| 2020-03-08T17:53:37.030700
| 2018-04-06T12:39:17
| 2018-04-06T12:39:17
| 128,280,716
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 641
|
py
|
#!/usr/bin/env python
"""reducer.py"""
from operator import itemgetter
import sys
current_veh_type = None
current_count = 0
veh_type = None
for line in sys.stdin:
line = line.strip()
veh_type, count = line.split('\t',1)
try:
count = int(count)
except ValueError:
continue
if current_veh_type == veh_type:
current_count += count
else:
if current_veh_type:
print ( '%s\t%s' % (current_veh_type, current_count))
current_count = count
current_veh_type = veh_type
if current_veh_type == veh_type:
print( '%s\t%s' % (current_veh_type, current_count))
|
[
"noreply@github.com"
] |
vincenre.noreply@github.com
|
3874fad036e0b8b3a35df5333e6d6a49b8f96684
|
e84c8ce302edaeff4110bccb66e71512eb10592a
|
/defaultparameter.py
|
f5492f0bb11eb419abee516645e2d3f2689cacda
|
[] |
no_license
|
clumsyme/py
|
e0b2074ddcb34a85ae04ccbac5f30c6c83bf240d
|
7ad1ba11bc54973af7d9f68e9dfa814cf0a9234b
|
refs/heads/master
| 2021-01-18T01:02:04.252963
| 2016-08-08T13:35:56
| 2016-08-08T13:35:56
| 63,416,213
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,775
|
py
|
# in memoize.py we see how to store calculated value to dic for later use.
# now we see another feature----action of default-parameters.
# and we will use this for a memoize.
def foo(a=[]):
a.append(5)
return a
# >>> foo()
# [5]
# >>> foo()
# [5, 5]
# >>> foo()
# [5, 5, 5]
# >>> foo()
# [5, 5, 5, 5]
# >>> foo()
"""
Actually, this is not a design flaw, and it is not because of internals, or performance.
It comes simply from the fact that functions in Python are first-class objects, and not only a piece of code.
As soon as you get to think into this way, then it completely makes sense: a function is an object
being evaluated on its definition; default parameters are kind of "member data"
and therefore their state may change from one call to the other - exactly as in any other object.
In any case, Effbot has a very nice explanation of the reasons for this behavior in Default Parameter Values in Python.
I found it very clear, and I really suggest reading it for a better knowledge of how function objects work.
"""
"""Default parameter values are evaluated when the function definition is executed. """
# OR:
# When we define a function, at the momente it is defined, the default parameters is a ref to a certain
# position of memory(we call it the init memory).
# so
# def f(i=5)
# or
# def g(li=[])
# the i ref to a certain memory, so does the li.
# Everytime the function is called, it looks for that memory(the init memory).
# But as we see, when we operate a int object or something unmutable, as i = i+1,
# we are creating a new i ref to a new memory,so the origin memory value is unchanged.
# The next time we call the function, it looks for the init memory and the value is still unchange.
# The other way, unlike int, we all know that when we operate list, it DOES NOT creat a new object in memory
# to link the ref, it just modify the value in init memory.So, as long as we do li.append(1), the init memory
# vaule will change, the next time the function is called, the default value is nolonger the init value.
# It may lead us to mistake when we do not relize it, but we can take advantage of it also.
# The fib function below use recursion has a large cost, as there's lot's of repeat calculations.
def fib(n):
if n<2:
return n
else:
return fib(n-1) + fib(n-2)
# This can be potimized by store calculated value in a dict, so repeat calculations will be replaced as
# dict lookups.As below:
# ##### HERE IS THE MEMOIZE VERSION OF FIB: #####
def fib(n, values = {}):
if n<2:
return n
try:
result = values[n]
except:
values[n] = fib(n-1) + fib(n-2)
result = values[n]
return result
# which will be more readable by this:
def fib(n, values = {}):
if n<2:
values[n] = n
else:
if n not in values:
values[n] = fib(n-1)+fib(n-2)
return values[n]
# # THIS efficiently reduce recursion repeat calculations, but still has a dict stored in memory.
# # SO, it's still not as good as the iter one which is:
def fib(n):
a,b = 0,1
for i in range(n):
a,b = b,a+b
return a
# Here's the evidence:
# --------------------------------
def mutable(var=[]):
    """Demo: appending to a mutable default modifies it in place (id unchanged)."""
    print(id(var))   # id before the mutation
    var.append(1)    # in-place change of the (possibly shared) default list
    print(id(var))   # same id: the object was modified, not replaced
# >>> mutable()
# 58295048
# 58295048 # use default value, after modify it, the id does't change.BUT var.value changed.
# >>> mutable([2])
# 58296904 # no-use default value, id of var is not the init one.
# 58296904 # still, after modified, it's id remain the begining of the call.
# >>> mutable()
# 58295048 # we use default value to call it again, it does use the init id and remain unchanged.
# 58295048 #
# ----------------------------------
def unmutable(var=1):
    """Demo: += on an int rebinds the name to a new object (id changes)."""
    print(id(var))  # id of the original int
    var += 1        # ints are immutable: this creates a new object
    print(id(var))  # different id: the name now points elsewhere
# >>> unmutable()
# 1515979216
# 1515979248 # changed
# >>> unmatable(2)
# 1515979248 # not init one.
# 1515979280 still changed.
# >>> unmutable()
# 1515979216 # init one.
# 1515979248 # changed.
# -----------------------------------
# BUT another side, we know after
# list = list+b
# id(list) is changed,so the following is different from mutable():
def mutorno(var=[]):
    """Demo: `var = var + [1]` builds a NEW list, leaving the default intact."""
    print(id(var))    # id of the incoming (or default) list
    var = var + [1]   # + creates a fresh list and rebinds the local name
    print(id(var))    # different id: the original object is untouched
# >>> mutorno()
# 58963016
# 58963208 # id DOES CHANGED.
# >>> mutorno([2])
# 58353800
# 58963144
# >>> mutorno()
# 58963016
# 58360008
# This function ends up with var == [1] every time, instead of the growing
# [1], [1, 1], [1, 1, 1], ... seen with mutable().
# Why? Because no call ever changes the initial default object, so every
# default-argument call starts from the same value; mutable() instead mutates
# the initial object, so each default-argument call sees the accumulated state.
# In fact, inspecting the object stored at a given memory address makes this
# clearer. Luckily, the ctypes module lets us recover the object at an id.
import ctypes
def mem_value(ID):
    """Return the Python object living at address ``ID`` (CPython-specific)."""
    return ctypes.cast(ID, ctypes.py_object).value
def i(var=1):
    """Demo: += on an int default rebinds `var`; the initial object is untouched."""
    addr0 = id(var)  # remember the address of the initial object
    print("Before var+=1, var.id and value in init memory:")
    print(id(var))
    print(mem_value(addr0))
    var += 1  # rebinds the name to a brand-new int
    print("After i+=1, var.id and value in init memory:")
    print(id(var))
    print(mem_value(addr0))  # the old address still holds the old value
# >>> i()
# Before var+=1, var.id and value in init memory:
# 1537409488
# 1
# After i+=1, var.id and value in init memory:
# 1537409520 # so, id changed, which means the pointer of var changed to another memory.
# 1 # but value in init memory DOES NOT change.
def li(var=[1]):
    """Demo: += on a list default mutates it in place; the stored value changes."""
    addr0 = id(var)  # remember the address of the initial object
    print("Before var+=[1], var.id and value in init memory:")
    print(id(var))
    print(mem_value(addr0))
    var += [1]  # list += extends in place; no rebinding happens
    print("After i+=1, var.id and value in init memory:")
    print(id(var))
    print(mem_value(addr0))  # same address, mutated value
# >>> li()
# Before var+=[1], var.id and value in init memory:
# 59061512
# [1]
# After i+=1, var.id and value in init memory:
# 59061512 # so, id Does Not change, which means the pointer of var still point to init memory.
# [1, 1] # but value in init memory DID change.
|
[
"lidoyan@gmail.com"
] |
lidoyan@gmail.com
|
da96f2e53ded82352c337a7c6dea3b0d5a72cf39
|
7486b3af4d4413a96b3e0bf76f776cd8605d7c05
|
/koalakid1/Divide Conquer/bj-1629.py
|
31206203867672664def46fe72833ca7d3e46927
|
[] |
no_license
|
WonyJeong/algorithm-study
|
7146e18ec9a3d7f46910e31890768b2e37f8b9b4
|
dd659bf75c902800bed226d392d144b691d8e059
|
refs/heads/main
| 2023-03-31T14:38:47.365622
| 2021-04-02T01:35:36
| 2021-04-02T01:35:36
| 334,309,434
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 142
|
py
|
import sys

if __name__ == "__main__":
    # BOJ 1629: compute (a ** b) mod c for a very large exponent b.
    input = sys.stdin.readline  # faster than builtins.input for judge I/O
    a, b, c = map(int, input().strip().split())
    # Three-argument pow() performs modular exponentiation in O(log b).
    print(pow(a, b, c))
|
[
"koalakid154@gmail.com"
] |
koalakid154@gmail.com
|
9da77659cb6cbd397e76bbdafc84a7d38d1fae1c
|
14e09d2b694c2005e65df016e24a49571871488c
|
/lab 5/exp3/nmos_earlyv.py
|
d3cf0c04ed2c2534a9be3f82079082950d762ec1
|
[] |
no_license
|
vickymmcd/circuits-sp19
|
fb68e9d32f936e41186f0c86709e187118f90430
|
6632bf2ccceaddc1380781a431af4fda7bb99a3d
|
refs/heads/master
| 2020-04-23T03:51:31.573983
| 2019-05-06T19:58:38
| 2019-05-06T19:58:38
| 170,889,938
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 769
|
py
|
# -*- coding: utf-8 -*-
import numpy as np
import matplotlib.pyplot as plt
from nmos_drain import m, b, m2, b2, m3, b3

# Isat is b (the fitted intercept); Va is b/m (intercept over slope), i.e. the
# Early voltage extracted from each fitted drain-current line in nmos_drain.
Va = b/m
Va2 = b2/m2
Va3 = b3/m3

if __name__ == '__main__':
    # Setting up plot
    title = "nMOS Early Voltage"
    xLabel = "Saturation Current (A)"
    yLabel = "Early Voltage (V)"
    # Plotting Data: Early voltage vs. saturation current for three gate
    # biases on a log-scale x-axis. `Data1` is rebound on each call; the
    # returned line handles are never used afterwards.
    Data1 = plt.semilogx(b, Va, 'ro', markersize=3, label="Vg=5V (Strong Inversion)")
    Data1 = plt.semilogx(b2, Va2, 'bo', markersize=3, label="Vg=.8V (Moderate Inversion)")
    Data1 = plt.semilogx(b3, Va3, 'go', markersize=3, label="Vg=.7V (Weak Inversion)")
    plt.xlabel(xLabel)
    plt.ylabel(yLabel)
    plt.title(title)
    plt.legend()
    plt.savefig('nmos_earlyv.png', format='png')  # save before show() clears the figure
    plt.show()
|
[
"vickymmcd@gmail.com"
] |
vickymmcd@gmail.com
|
92515175fdca1f41cc25995d701f6f07f01146e4
|
6c981596eb382393d9c4b72203f27d1717b9b605
|
/train.py
|
7e5d1b669f29cf94fbfd906dee881343d5d5c5ab
|
[] |
no_license
|
viplix3/DL-Template
|
5b2d3528d3adc7e8ee25def14b410c61630be68e
|
3118cf6920a6aebd64fd01870284c9fb570ed62c
|
refs/heads/master
| 2020-05-15T16:24:46.643632
| 2019-04-20T10:00:05
| 2019-04-20T10:00:05
| 182,374,343
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,519
|
py
|
import argparse
import tensorflow as tf
from tqdm import tqdm
import time
import os
from utils.utils import draw_box
from dataParser import Parser
from utils import checkmate
from model import model
from utils.model_loss import compute_loss
import numpy as np
import config
def get_classes(classes_path):
    """Load dataset class names from a text file (one name per line).

    Input:
        classes_path: path to the file containing class names
    Output: list containing class names
    """
    with open(classes_path) as handle:
        return [line.strip() for line in handle.readlines()]
def read_anchors(file_path):
    """Read the anchors computed by k-means.py from *file_path*.

    Input:
        file_path: path to anchors.txt, one "width height" pair per line
    Output:
        numpy array of shape (num_anchors, 2) holding [width, height] floats
    """
    with open(file_path, 'r') as handle:
        pairs = [[float(w), float(h)]
                 for w, h in (line.split() for line in handle.read().splitlines())]
    return np.asarray(pairs)
def train(ckpt_path, log_path, class_path):
    """ Function to train the model.
    ckpt_path: string, path for saving/restoring the model
    log_path: string, path for saving the training/validation logs
    class_path: string, path for the classes of the dataset
    decay_steps: int, steps after which the learning rate is to be decayed
    decay_rate: float, rate to carrying out exponential decay
    """
    # NOTE(review): decay_steps/decay_rate are documented above but are not
    # parameters of this function; `decay_rate` is used below without ever
    # being defined anywhere in this block.
    # Getting the anchors
    anchors = read_anchors(config.anchors_path)
    if not os.path.exists(config.data_dir):
        os.mkdir(config.data_dir)
    classes = get_classes(class_path)
    # NOTE(review): `anchors`, `classes` and `grid_shapes` (below) are computed
    # but never used in this function.
    # Building the training pipeline
    graph = tf.get_default_graph()
    with graph.as_default():
        # Getting the training data
        with tf.name_scope('data_parser/'):
            train_reader = Parser('train', config.data_dir, config.anchors_path, config.output_dir,
                                  config.num_classes, input_shape=config.input_shape, max_boxes=config.max_boxes)
            train_data = train_reader.build_dataset(config.train_batch_size//config.subdivisions)
            train_iterator = train_data.make_one_shot_iterator()
            val_reader = Parser('val', config.data_dir, config.anchors_path, config.output_dir,
                                config.num_classes, input_shape=config.input_shape, max_boxes=config.max_boxes)
            val_data = val_reader.build_dataset(config.val_batch_size)
            val_iterator = val_data.make_one_shot_iterator()
            is_training = tf.placeholder(dtype=tf.bool, shape=[], name='train_flag') # Used for different behaviour of batch normalization
            mode = tf.placeholder(dtype=tf.int16, shape=[], name='mode_flag')
            def train():
                # NOTE(review): shadows the enclosing train() function name.
                return train_iterator.get_next()
            def valid():
                return val_iterator.get_next()
            # mode == 1 feeds training batches; anything else feeds validation.
            images, labels = tf.cond(pred=tf.equal(mode, 1), true_fn=train, false_fn=valid, name='train_val_data')
            grid_shapes = [config.input_shape // 32, config.input_shape // 16, config.input_shape // 8]
            images.set_shape([None, config.input_shape, config.input_shape, 3])
            labels.set_shape([None, required_shape, 5])  # NOTE(review): `required_shape` is undefined -> NameError at graph-build time.
        # image_summary = draw_box(images, bbox, file_name)
        if not os.path.exists(ckpt_path):
            os.mkdir(ckpt_path)
        model = model(images, is_training, config.num_classes, config.num_anchors_per_scale, config.weight_decay, config.norm_decay)
        # NOTE(review): rebinding `model` over the imported `model` module means
        # this function can only build the graph once per process.
        output, model_layers = model.forward()
        print('Summary of the created model.......\n')
        for layer in model_layers:
            print(layer)
        # Declaring the parameters for GT
        with tf.name_scope('Targets'):
            ### GT PROCESSING ###
            # NOTE(review): ground-truth processing is missing; the nesting of
            # the scope below is reconstructed so this scope is non-empty.
            # Compute Loss
            with tf.name_scope('Loss_and_Detect'):
                loss_scale,summaries = compute_loss(output, y_true, config.num_classes, ignore_threshold=config.ignore_thresh)
                # NOTE(review): `y_true` is undefined; `loss_scale` is unused.
                exponential_moving_average_op = tf.train.ExponentialMovingAverage(config.weight_decay).apply(var_list=tf.trainable_variables())
                loss = model_loss
                # NOTE(review): `model_loss` is undefined — presumably loss_scale was meant.
                model_loss_summary = tf.summary.scalar('model_loss', summaries, family='Losses')
                # NOTE(review): passing `summaries` as the scalar value looks wrong.
        # Declaring the parameters for training the model
        with tf.name_scope('train_parameters'):
            global_step = tf.Variable(0, trainable=False, name='global_step')
        # Declaring the parameters for training the model
        # NOTE(review): this scope duplicates the one above verbatim; the second
        # tf.Variable is silently uniquified to 'global_step_1'.
        with tf.name_scope('train_parameters'):
            global_step = tf.Variable(0, trainable=False, name='global_step')
            def learning_rate_scheduler(learning_rate, scheduler_name, global_step, decay_steps=100):
                # Maps a scheduler name to a decayed learning-rate tensor,
                # clamped below at config.learning_rate_lower_bound.
                if scheduler_name == 'exponential':
                    lr = tf.train.exponential_decay(learning_rate, global_step,
                                                    decay_steps, decay_rate, staircase=True, name='exponential_learning_rate')
                    # NOTE(review): `decay_rate` is undefined in this scope.
                    return tf.maximum(lr, config.learning_rate_lower_bound)
                elif scheduler_name == 'polynomial':
                    lr = tf.train.polynomial_decay(learning_rate, global_step,
                                                   decay_steps, config.learning_rate_lower_bound, power=0.8, cycle=True, name='polynomial_learning_rate')
                    return tf.maximum(lr, config.learning_rate_lower_bound)
                elif scheduler_name == 'cosine':
                    lr = tf.train.cosine_decay(learning_rate, global_step,
                                               decay_steps, alpha=0.5, name='cosine_learning_rate')
                    return tf.maximum(lr, config.learning_rate_lower_bound)
                elif scheduler_name == 'linear':
                    return tf.convert_to_tensor(learning_rate, name='linear_learning_rate')
                else:
                    raise ValueError('Unsupported learning rate scheduler\n[supported types: exponential, polynomial, linear]')
            if config.use_warm_up:
                # Warm-up: use the warm-up scheduler until burn_in_epochs of steps elapse.
                learning_rate = tf.cond(pred=tf.less(global_step, config.burn_in_epochs * (config.train_num // config.train_batch_size)),
                                        true_fn=lambda: learning_rate_scheduler(config.init_learning_rate, config.warm_up_lr_scheduler, global_step),
                                        false_fn=lambda: learning_rate_scheduler(config.learning_rate, config.lr_scheduler, global_step, decay_steps=2000))
            else:
                learning_rate = learning_rate_scheduler(config.learning_rate, config.lr_scheduler, global_step=global_step, decay_steps=2000)
            tf.summary.scalar('learning rate', learning_rate, family='Train_Parameters')
        # Define optimizer for minimizing the computed loss
        with tf.name_scope('Optimizer'):
            optimizer = tf.train.MomentumOptimizer(learning_rate=learning_rate, momentum=config.momentum)
            # optimizer = tf.train.AdamOptimizer(learning_rate=learning_rate)
            # optimizer = tf.train.RMSPropOptimizer(learning_rate=learning_rate, momentum=config.momentum)
            update_ops = tf.get_collection(tf.GraphKeys.UPDATE_OPS)
            # Run batch-norm statistic updates before each optimizer step.
            with tf.control_dependencies(update_ops):
                # grads = optimizer.compute_gradients(loss=loss)
                # gradients = [(tf.placeholder(dtype=tf.float32, shape=grad[1].get_shape()), grad[1]) for grad in grads]
                # train_step = optimizer.apply_gradients(grads_and_vars=gradients, global_step=global_step)
                optimizing_op = optimizer.minimize(loss=loss, global_step=global_step)
            # Chain the exponential-moving-average update after the optimizer step.
            with tf.control_dependencies([optimizing_op]):
                with tf.control_dependencies([exponential_moving_average_op]):
                    train_op_with_mve = tf.no_op()
            train_op = train_op_with_mve
        #################################### Training loop ############################################################
        # A saver object for saving the model
        best_ckpt_saver_train = checkmate.BestCheckpointSaver(save_dir=ckpt_path+'train/', num_to_keep=5)
        best_ckpt_saver_valid = checkmate.BestCheckpointSaver(save_dir=ckpt_path+'valid/', num_to_keep=5)
        summary_op = tf.summary.merge_all()
        summary_op_valid = tf.summary.merge([model_loss_summary_without_learning_rate])
        # NOTE(review): `model_loss_summary_without_learning_rate` is undefined;
        # probably `model_loss_summary` was intended.
        init_op = tf.global_variables_initializer()
        # Defining some train loop dependencies
        gpu_config = tf.ConfigProto(log_device_placement=False)
        gpu_config.gpu_options.allow_growth = True
        sess = tf.Session(config=gpu_config)
        tf.logging.set_verbosity(tf.logging.ERROR)
        train_summary_writer = tf.summary.FileWriter(os.path.join(log_path, 'train'), sess.graph)
        val_summary_writer = tf.summary.FileWriter(os.path.join(log_path, 'val'), sess.graph)
        print(sess.run(receptive_field))
        # NOTE(review): `receptive_field` is undefined anywhere in this function.
        # Restoring the model
        ckpt = tf.train.get_checkpoint_state(ckpt_path+'train/')
        if ckpt and tf.train.checkpoint_exists(ckpt.model_checkpoint_path):
            print('Restoring model ', checkmate.get_best_checkpoint(ckpt_path+'train/'))
            tf.train.Saver().restore(sess, checkmate.get_best_checkpoint(ckpt_path+'train/'))
            print('Model Loaded!')
        else:
            sess.run(init_op)
        print('Uninitialized variables: ', sess.run(tf.report_uninitialized_variables()))
        epochbar = tqdm(range(config.Epoch))
        for epoch in epochbar:
            epochbar.set_description('Epoch %s of %s' % (epoch, config.Epoch))
            mean_loss_train = []
            mean_loss_valid = []
            trainbar = tqdm(range(config.train_num//config.train_batch_size))
            for k in trainbar:
                num_steps, train_summary, loss_train, _ = sess.run([global_step, summary_op, loss,
                                                                    train_op], feed_dict={is_training: True, mode: 1})
                train_summary_writer.add_summary(train_summary, epoch)
                train_summary_writer.flush()
                mean_loss_train.append(loss_train)
                trainbar.set_description('Train loss: %s' %str(loss_train))
            print('Validating.....')
            valbar = tqdm(range(config.val_num//config.val_batch_size))
            for k in valbar:
                val_summary, loss_valid = sess.run([summary_op_valid, loss], feed_dict={is_training: False, mode: 0})
                val_summary_writer.add_summary(val_summary, epoch)
                val_summary_writer.flush()
                mean_loss_valid.append(loss_valid)
                valbar.set_description('Validation loss: %s' %str(loss_valid))
            mean_loss_train = np.mean(mean_loss_train)
            mean_loss_valid = np.mean(mean_loss_valid)
            print('\n')
            print('Train loss after %d epochs is: %f' %(epoch+1, mean_loss_train))
            print('Validation loss after %d epochs is: %f' %(epoch+1, mean_loss_valid))
            print('\n\n')
            if (config.use_warm_up):
                # Only start tracking "best" checkpoints once warm-up is over.
                if (num_steps > config.burn_in_epochs * (config.train_num // config.train_batch_size)):
                    best_ckpt_saver_train.handle(mean_loss_train, sess, global_step)
                    best_ckpt_saver_valid.handle(mean_loss_valid, sess, global_step)
                else:
                    continue
            else:
                best_ckpt_saver_train.handle(mean_loss_train, sess, global_step)
                best_ckpt_saver_valid.handle(mean_loss_valid, sess, global_step)
        print('Tuning Completed!!')
        train_summary_writer.close()
        val_summary_writer.close()
        sess.close()
def main():
    """ main function which calls all the other required functions for training """
    os.environ["CUDA_DEVICE_ORDER"] = "PCI_BUS_ID"
    os.environ["CUDA_VISIBLE_DEVICES"] = str(config.gpu_num)
    # Silence TensorFlow's C++ logging. This must be set BEFORE TensorFlow
    # starts doing work; the original set it after train() returned, where it
    # had no effect at all.
    os.environ['TF_CPP_MIN_LOG_LEVEL'] = '3'
    train(config.model_dir, config.logs_dir, config.classes_path)

if __name__ == '__main__':
    main()
|
[
"vipin.sharma2024@gmail.com"
] |
vipin.sharma2024@gmail.com
|
6387dceef4ec0ae48b1922383fc1c74cd3f5e604
|
f429cce77b9e03a9bca64e0eb35b5de629b4a1d6
|
/data_env/bin/f2py
|
96f0bc6fd20632b0f1d3f50129f227db301f6fac
|
[] |
no_license
|
g2ransom/data_projects
|
f0fbe45c6e505d06c5af220eefe696fc1b8f5a75
|
b815dd8a1e6882f7519ca26e532acc78faeeb349
|
refs/heads/master
| 2021-01-24T12:19:13.606364
| 2018-06-13T05:01:00
| 2018-06-13T05:01:00
| 123,127,941
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 796
|
#!/Users/garrettransom/data_projects/data_env/bin/python
# See http://cens.ioc.ee/projects/f2py2e/
from __future__ import division, print_function
import os
import sys

# Select the f2py backend from a --<mode> command-line flag, removing the flag
# from argv so the chosen backend never sees it. The last mode whose flag is
# found wins; with no flag present, the loop leaves mode == "2e-numpy".
for mode in ["g3-numpy", "2e-numeric", "2e-numarray", "2e-numpy"]:
    try:
        i = sys.argv.index("--" + mode)
        del sys.argv[i]
        break
    except ValueError:
        pass
os.environ["NO_SCIPY_IMPORT"] = "f2py"  # tell f2py not to pull in scipy
if mode == "g3-numpy":
    sys.stderr.write("G3 f2py support is not implemented, yet.\\n")
    sys.exit(1)
elif mode == "2e-numeric":
    from f2py2e import main
elif mode == "2e-numarray":
    sys.argv.append("-DNUMARRAY")
    from f2py2e import main
elif mode == "2e-numpy":
    from numpy.f2py import main
else:
    sys.stderr.write("Unknown mode: " + repr(mode) + "\\n")
    sys.exit(1)
main()
|
[
"garrettransom@Garretts-MacBook-Pro-2.local"
] |
garrettransom@Garretts-MacBook-Pro-2.local
|
|
07151f0a3f30969ce8fcd55f38e023a25c833152
|
a58ac1484099049a412264e0278b4712cb4c60b9
|
/dy_setting.py
|
56d3117ea6a73c700feed8c36784c31a193a72e9
|
[] |
no_license
|
ongbe/douyin_spider
|
4018cf2c40b06c89267df5e0432d6f7912643ec9
|
37455b7a2c0f82b558da2cfa5d46bec238350bfd
|
refs/heads/master
| 2022-02-27T01:33:30.610386
| 2019-10-16T05:26:17
| 2019-10-16T05:26:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,493
|
py
|
# !/usr/bin/env python
# coding:utf-8
# Time:2019/10/2 10:42
# write_by:QiFuMin
# script_name:setting.py
import random

# ADB serial number of the target Android device.
device_name = "LE67A06340179566"

# MySQL connection settings.
# NOTE(review): credentials are hard-coded in source; consider moving them to
# environment variables or a config file kept out of version control.
host = '127.0.0.1'
port = 3306
user = 'root'
password = '369852'
database = 'douyin'
charset = 'utf8'
# Fetch the users this account follows
follow_get = False
# Fetch more videos
video_get = False
# Whether to (re)initialize the device
init_devices = True
# Page/iteration counts for the crawler loops.
persion_center_num = 2
follow_page_num = 2
search_page_num = 30
# Maximum number of upward swipes on the follow page
follow_up_slide_num = 50
# Maximum number of upward swipes on the video page
video_up_slide_num = 5
# Rest time after backing out, in seconds
back_rest_time = 50
# Pool of desktop browser User-Agent strings to rotate through.
user_agent_list = [
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/74.0.3729.131 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; WOW64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/63.0.3239.132 Safari/537.36',
    'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/64.0.3282.140 Safari/537.36 Edge/18.17763',
    'Mozilla/5.0 (Windows NT 10.0; WOW64; Trident/7.0; rv:11.0) like Gecko'
]


def headers():
    """Build request headers with a User-Agent picked at random from the pool."""
    return {'User-Agent': random.choice(user_agent_list)}
"""
一些特殊的账号
001003070209jst 这个没有作品
迪丽热巴 没有关注
0219040WYH 这个是私密账号
03180219.46520 搜索不到
"""
|
[
"noreply@github.com"
] |
ongbe.noreply@github.com
|
7c3f1c588687229ef881f5a119932c076a578cb9
|
5298cab4df7f3e37311f61ba408e0a5450f9aa87
|
/pykin/utils/urdf_utils.py
|
12499601876f3e5663e2b16af03eda7d4fc1bb38
|
[
"Python-2.0",
"MIT"
] |
permissive
|
junha-lee/pykin
|
d12a2263caecfdcaf1024576f96f40e0f05b2c09
|
8db4fa4c198f663453bca8dcbee87bf4ece0438e
|
refs/heads/main
| 2023-07-31T11:19:25.586162
| 2021-09-08T03:44:58
| 2021-09-08T03:44:58
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,062
|
py
|
from pykin.utils.kin_utils import convert_string_to_narray, LINK_TYPES
class URDF_Link:
    """
    Class for parsing <link> elements of a URDF document into a frame's link
    description: visual/collision origins, geometry and colors.
    """
    @staticmethod
    def set_visual(elem_link, link_frame):
        """
        Populate `link_frame` from every <visual> child of `elem_link`.
        """
        for elem_visual in elem_link.findall('visual'):
            URDF_Link.set_visual_origin(elem_visual, link_frame)
            URDF_Link.set_visual_geometry(elem_visual, link_frame)
            URDF_Link.set_visual_color(elem_visual, link_frame)

    @staticmethod
    def set_collision(elem_link, link_frame):
        """
        Populate `link_frame` from every <collision> child of `elem_link`.
        """
        for elem_collision in elem_link.findall('collision'):
            URDF_Link.set_collision_origin(elem_collision, link_frame)
            URDF_Link.set_collision_geometry(elem_collision, link_frame)

    @staticmethod
    def set_visual_origin(elem_visual, frame):
        """
        Copy the <origin> xyz/rpy of a <visual> element into the frame's
        visual offset. If several <origin> tags exist, the last one wins.
        """
        for elem_origin in elem_visual.findall('origin'):
            frame.link.visual.offset.pos = convert_string_to_narray(elem_origin.attrib.get('xyz'))
            frame.link.visual.offset.rot = convert_string_to_narray(elem_origin.attrib.get('rpy'))

    @staticmethod
    def set_visual_geometry(elem_visual, frame):
        """
        Copy the <geometry> shape (box/cylinder/sphere/mesh) of a <visual>
        element into frame.link.visual.gtype / gparam.
        """
        def _set_link_visual_geom(shapes, frame):
            # Translate one shape tag into (gtype, gparam); unknown tags reset both.
            if shapes.tag == "box":
                frame.link.visual.gtype = shapes.tag
                frame.link.visual.gparam = {"size" : convert_string_to_narray(shapes.attrib.get('size', None))}
            elif shapes.tag == "cylinder":
                frame.link.visual.gtype = shapes.tag
                frame.link.visual.gparam = {"length" : shapes.attrib.get('length', 0),
                                            "radius" : shapes.attrib.get('radius', 0)}
            elif shapes.tag == "sphere":
                frame.link.visual.gtype = shapes.tag
                frame.link.visual.gparam = {"radius" : shapes.attrib.get('radius', 0)}
            elif shapes.tag == "mesh":
                frame.link.visual.gtype = shapes.tag
                frame.link.visual.gparam = {"filename" : shapes.attrib.get('filename', None)}
            else:
                frame.link.visual.gtype = None
                frame.link.visual.gparam = None

        for elem_geometry in elem_visual.findall('geometry'):
            for shape_type in LINK_TYPES:
                for shapes in elem_geometry.findall(shape_type):
                    _set_link_visual_geom(shapes, frame)

    @staticmethod
    def set_visual_color(elem_visual, frame):
        """
        Copy the <material><color rgba="..."> of a <visual> element into
        frame.link.visual.gparam['color'], keyed by the material name.
        """
        for elem_matrial in elem_visual.findall('material'):
            for elem_color in elem_matrial.findall('color'):
                rgba = convert_string_to_narray(elem_color.attrib.get('rgba'))
                frame.link.visual.gparam['color'] = {elem_matrial.get('name') : rgba}

    @staticmethod
    def set_collision_origin(elem_collision, frame):
        """
        Copy the <origin> xyz/rpy of a <collision> element into the frame's
        collision offset.
        """
        for elem_origin in elem_collision.findall('origin'):
            frame.link.collision.offset.pos = convert_string_to_narray(elem_origin.attrib.get('xyz'))
            frame.link.collision.offset.rot = convert_string_to_narray(elem_origin.attrib.get('rpy'))

    @staticmethod
    def set_collision_geometry(elem_collision, frame):
        """
        Copy the <geometry> shape of a <collision> element into
        frame.link.collision.gtype / gparam.
        """
        def _set_link_collision_geom(shapes, frame):
            # Same shape translation as the visual variant, for collision data.
            if shapes.tag == "box":
                frame.link.collision.gtype = shapes.tag
                frame.link.collision.gparam = {"size" : convert_string_to_narray(shapes.attrib.get('size', None))}
            elif shapes.tag == "cylinder":
                frame.link.collision.gtype = shapes.tag
                frame.link.collision.gparam = {"length" : shapes.attrib.get('length', 0),
                                               "radius" : shapes.attrib.get('radius', 0)}
            elif shapes.tag == "sphere":
                frame.link.collision.gtype = shapes.tag
                frame.link.collision.gparam = {"radius" : shapes.attrib.get('radius', 0)}
            elif shapes.tag == "mesh":
                frame.link.collision.gtype = shapes.tag
                frame.link.collision.gparam = {"filename" : shapes.attrib.get('filename', None)}
            else:
                frame.link.collision.gtype = None
                frame.link.collision.gparam = None

        # NOTE(review): unlike set_visual_geometry, only the FIRST <geometry>
        # child is considered here (find vs findall) — confirm the asymmetry
        # is intentional.
        elem_geometry = elem_collision.find('geometry')
        for shape_type in LINK_TYPES:
            for shapes in elem_geometry.findall(shape_type):
                _set_link_collision_geom(shapes, frame)
class URDF_Joint:
    """
    Class for parsing <joint> elements of a URDF document into a frame's
    joint description (origin, axis and limits).
    """
    @staticmethod
    def set_origin(elem_joint, frame):
        """Copy the joint's <origin> xyz/rpy into frame.joint.offset."""
        origin = elem_joint.find('origin')
        if origin is None:
            return
        frame.joint.offset.pos = convert_string_to_narray(origin.attrib.get('xyz'))
        frame.joint.offset.rot = convert_string_to_narray(origin.attrib.get('rpy'))

    @staticmethod
    def set_axis(elem_joint, frame):
        """Copy the joint's <axis> xyz into frame.joint.axis."""
        axis = elem_joint.find('axis')
        if axis is None:
            return
        frame.joint.axis = convert_string_to_narray(axis.attrib.get('xyz'))

    @staticmethod
    def set_limit(elem_joint, frame):
        """Copy the joint's <limit> lower/upper bounds into frame.joint.limit."""
        limit = elem_joint.find('limit')
        if limit is None:
            return
        attribs = limit.attrib
        if "lower" in attribs:
            frame.joint.limit[0] = float(attribs["lower"])
        if "upper" in attribs:
            frame.joint.limit[1] = float(attribs["upper"])
|
[
"wlseowhd12@naver.com"
] |
wlseowhd12@naver.com
|
fa334da85eec071a23fd5a0882f13d3a076436a1
|
26e82c1dbe24ccec0f8e9d9bf273ffdedd3fa7bd
|
/Tensorrt_yolov3/test_np2cbyte.py
|
989af71d85c4d1d9b01c73b38269b0f831e4f42a
|
[
"MIT"
] |
permissive
|
KTOC/hd_acceleration
|
5c74ca31ee852ace9b9e2cba83b7fa605310429f
|
e62c2a555c320e474c7caf16c32c34184df6f6b0
|
refs/heads/master
| 2020-09-30T08:36:03.164035
| 2019-05-17T03:57:14
| 2019-05-17T03:57:14
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,997
|
py
|
#!/usr/bin/python3
# *_* coding: utf-8 *_*
# @Author: samon
# @Email: samonsix@163.com
# @IDE: PyCharm
# @File: test_np2cbyte.py
# @Date: 19-5-16 16:38
# @Descr: Smoke-test passing numpy image buffers to the TensorRT YOLOv3
#         shared library via ctypes, then time repeated inference calls.
from ctypes import *
import ctypes
import numpy
import cv2

# Load the compiled YOLOv3 shared library (relative to the working directory).
dll = cdll.LoadLibrary('install/libyolov3.so')

# Test method 1: build a synthetic n x n RGB image by hand.
n = 100
one_R = [1 for r in range(n)]
R = [one_R for rr in range(n)]
one_G = [128 for g in range(n)]
G = [one_G for gg in range(n)]
one_B = [256 for b in range(n)]
# NOTE(review): 256 does not fit in c_uint8 (max 255); it wraps to 0 below.
B = [one_B for bb in range(n)]

RGB = numpy.zeros((n, n, 3), dtype=c_uint8)
RGB[:, :, 0] = B # B
RGB[:, :, 1] = G # G
RGB[:, :, 2] = R # R
img = RGB
# rows, cols, channel = img.shape
# dataptr = img.ctypes.data_as(ctypes.c_char_p)
# dll.test(dataptr, n, n, 3)

# Test method 2: get the numpy array directly from an image file.
img = cv2.imread('08.jpg')
# img = cv2.cvtColor(img, cv2.COLOR_RGB2GRAY)
rows, cols, chanel = img.shape
dataptr = img.ctypes.data_as(ctypes.c_char_p)  # raw pointer to the pixel buffer

class StructPointer(ctypes.Structure):
    # num: number of detected boxes; location: flattened (x1, y1, x2, y2)
    # coordinates, 4 consecutive ints per detection (up to 100 boxes).
    _fields_ = [("num", ctypes.c_int),
                ("location", ctypes.c_int * 400)]

dll.yolov3.restype = ctypes.POINTER(StructPointer)

# Arguments: data pointer, rows, cols, channels (1 = grayscale, 3 = RGB).
import time
print("start")
dll.test(dataptr, rows, cols, chanel)
time.sleep(1)
dll.yolov3(dataptr, rows, cols, chanel)  # warm-up call, excluded from timing
st = time.time()
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
dll.yolov3(dataptr, rows, cols, chanel)
p = dll.yolov3(dataptr, rows, cols, chanel)

# Unpack the returned boxes: 4 consecutive ints per detection.
all_point = []
index = 0
for i in range(p.contents.num):
    point = [p.contents.location[index], p.contents.location[index+1],
             p.contents.location[index + 2], p.contents.location[index+3]]
    index += 4
    all_point.append(point)
print(all_point)
print(time.time()-st)
|
[
"scuteezxz@163.com"
] |
scuteezxz@163.com
|
5fc45b173bbdd6e91f220b1216e3c0155e86ce4b
|
fe5bdd27f69f9920ccc6a620425279e6e41d5cd1
|
/versatileimagefield/registry.py
|
ac4c5316adaec4bfe2cd527cff5de151047cc652
|
[
"MIT"
] |
permissive
|
derenio/django-versatileimagefield
|
961586546b06f9fd3614d37d1f942f236380a757
|
67dbd6130bb75e45fdbfdacd5a57b777f8793185
|
refs/heads/master
| 2021-01-14T14:15:31.209450
| 2015-07-08T13:46:07
| 2015-07-08T13:46:07
| 38,746,389
| 0
| 0
| null | 2015-07-08T10:00:34
| 2015-07-08T10:00:34
| null |
UTF-8
|
Python
| false
| false
| 7,062
|
py
|
from __future__ import unicode_literals
from .datastructures import FilteredImage, SizedImage
class AlreadyRegistered(Exception):
    """An attribute name is already bound to a sizer or filter."""
    pass

class InvalidSizedImageSubclass(Exception):
    """The object passed to register_sizer is not a SizedImage subclass."""
    pass

class InvalidFilteredImageSubclass(Exception):
    """The object passed to register_filter is not a FilteredImage subclass."""
    pass

class NotRegistered(Exception):
    """No sizer/filter is registered under the requested attribute name."""
    pass

class UnallowedSizerName(Exception):
    """The sizer attribute name is reserved or starts with an underscore."""
    pass

class UnallowedFilterName(Exception):
    """The filter attribute name starts with an underscore."""
    pass
class VersatileImageFieldRegistry(object):
    """
    A VersatileImageFieldRegistry object allows new SizedImage & FilteredImage
    subclasses to be dynamically added to all SizedImageFileField instances
    at runtime. New SizedImage subclasses are registered with the
    register_sizer method. New ProcessedImage subclasses are registered
    with the register_filter method.
    """
    # Attribute names that would shadow members of the underlying file/field
    # objects and therefore may not be used as sizer names.
    unallowed_sizer_names = (
        'build_filters_and_sizers',
        'chunks',
        'close',
        'closed',
        'create_on_demand',
        'delete',
        'encoding',
        'field',
        'file',
        'fileno',
        'filters',
        'flush',
        'height',
        'instance',
        'isatty',
        'multiple_chunks',
        'name',
        'newlines',
        'open',
        'path',
        'ppoi',
        'read',
        'readinto',
        'readline',
        'readlines',
        'save',
        'seek',
        'size',
        'softspace',
        'storage',
        'tell',
        'truncate',
        'url',
        'validate_ppoi',
        'width',
        'write',
        'writelines',
        'xreadlines'
    )

    def __init__(self, name='versatileimage_registry'):
        self._sizedimage_registry = {}  # attr_name -> sizedimage_cls
        self._filter_registry = {}  # attr_name -> filter_cls
        self.name = name

    def register_sizer(self, attr_name, sizedimage_cls):
        """
        Register a new SizedImage subclass (`sizedimage_cls`) to be used
        via the attribute (`attr_name`).

        Raises:
            UnallowedSizerName: if `attr_name` is reserved or starts with '_'.
            InvalidSizedImageSubclass: if `sizedimage_cls` isn't a SizedImage.
            AlreadyRegistered: if `attr_name` is already taken.
        """
        # Fixed the malformed `)or` spacing of the original and dropped the
        # redundant list comprehension inside str.join.
        if attr_name.startswith('_') or attr_name in self.unallowed_sizer_names:
            raise UnallowedSizerName(
                "`%s` is an unallowed Sizer name. Sizer names cannot begin "
                "with an underscore or be named any of the "
                "following: %s." % (
                    attr_name,
                    ', '.join(self.unallowed_sizer_names)
                )
            )
        if not issubclass(sizedimage_cls, SizedImage):
            raise InvalidSizedImageSubclass(
                'Only subclasses of versatileimagefield.datastructures.'
                'SizedImage may be registered with register_sizer'
            )
        if attr_name in self._sizedimage_registry:
            raise AlreadyRegistered(
                'A SizedImage class is already registered to the `%s` '
                'attribute. If you would like to override this attribute, '
                'use the unregister method' % attr_name
            )
        self._sizedimage_registry[attr_name] = sizedimage_cls

    def unregister_sizer(self, attr_name):
        """
        Unregister the SizedImage subclass currently assigned to `attr_name`.

        Raises:
            NotRegistered: if no SizedImage subclass is registered there.
        """
        if attr_name not in self._sizedimage_registry:
            raise NotRegistered(
                'No SizedImage subclass is registered to %s' % attr_name
            )
        del self._sizedimage_registry[attr_name]

    def register_filter(self, attr_name, filterimage_cls):
        """
        Register a new FilteredImage subclass (`filterimage_cls`) to be used
        via the attribute (filters.`attr_name`).

        Raises:
            UnallowedFilterName: if `attr_name` starts with an underscore.
            InvalidFilteredImageSubclass: if `filterimage_cls` isn't a
                FilteredImage.
            AlreadyRegistered: if `attr_name` is already taken.
        """
        if attr_name.startswith('_'):
            raise UnallowedFilterName(
                '`%s` is an unallowed Filter name. Filter names cannot begin '
                'with an underscore.' % attr_name
            )
        if not issubclass(filterimage_cls, FilteredImage):
            raise InvalidFilteredImageSubclass(
                'Only subclasses of FilteredImage may be registered as '
                'filters with VersatileImageFieldRegistry'
            )
        if attr_name in self._filter_registry:
            raise AlreadyRegistered(
                'A ProcessedImageMixIn class is already registered to the `%s`'
                ' attribute. If you would like to override this attribute, '
                'use the unregister method' % attr_name
            )
        self._filter_registry[attr_name] = filterimage_cls

    def unregister_filter(self, attr_name):
        """
        Unregister the FilteredImage subclass currently assigned to
        filters.`attr_name`.

        Raises:
            NotRegistered: if no FilteredImage subclass is registered there.
        """
        if attr_name not in self._filter_registry:
            raise NotRegistered(
                'No FilteredImage subclass is registered to %s' % attr_name
            )
        del self._filter_registry[attr_name]
versatileimagefield_registry = VersatileImageFieldRegistry()

def autodiscover():
    """
    Auto-discover INSTALLED_APPS versatileimagefield modules and fail silently
    when not present. This forces an import on them to register any
    versatileimagefield bits they may want.
    This is a near 1-to-1 copy of how django's admin application registers
    models.
    """
    import copy
    from django.conf import settings
    from django.utils.importlib import import_module
    from django.utils.module_loading import module_has_submodule
    for app in settings.INSTALLED_APPS:
        mod = import_module(app)
        # Snapshot the registries so a failed import can be rolled back.
        before_import_sizedimage_registry = copy.copy(
            versatileimagefield_registry._sizedimage_registry
        )
        before_import_filter_registry = copy.copy(
            versatileimagefield_registry._filter_registry
        )
        try:
            # Attempt to import the app's versatileimagefield module.
            import_module('%s.versatileimagefield' % app)
        except Exception:
            # Was a bare `except:`, which also swallowed SystemExit and
            # KeyboardInterrupt for apps without a versatileimagefield module.
            # Reset the registry to the state before the last import, as this
            # import will have to reoccur on the next request and could raise
            # NotRegistered / AlreadyRegistered (see django ticket #8245).
            versatileimagefield_registry._sizedimage_registry = \
                before_import_sizedimage_registry
            versatileimagefield_registry._filter_registry = \
                before_import_filter_registry
            # Decide whether to bubble up this error. If the app just
            # doesn't have a versatileimagefield module, we can ignore the
            # error attempting to import it; otherwise it must bubble up.
            if module_has_submodule(mod, 'versatileimagefield'):
                raise
|
[
"jonathan_ellenberger@wgbh.org"
] |
jonathan_ellenberger@wgbh.org
|
d47697d3aeb18c73cd9c38bffe4ee1743e777652
|
289e17a9b3d0cf187f403894ebfb1007dcb1b3dc
|
/easy/reverse-linked-list.py
|
0b51f91f6ca2b9583185b136e6b108b5d7edd1d8
|
[] |
no_license
|
congyingTech/Basic-Algorithm
|
7ddb376e49ef3b1c0d989fb1d4a4949d2d121d63
|
18c06a96bb14688e4a1d5fb6baf235a6b53bd3ae
|
refs/heads/master
| 2021-11-27T07:01:05.474609
| 2021-11-15T07:16:31
| 2021-11-15T07:16:31
| 224,206,231
| 10
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 988
|
py
|
# encoding:utf-8
"""
Problem: reverse a singly linked list.

Approach: walk the list with three pointers (previous, current, next),
re-pointing each node's `next` at its predecessor.
"""

class ListNode(object):
    """A singly linked list node holding a value and a next pointer."""
    def __init__(self, x):
        self.val = x      # payload
        self.next = None  # successor node (None marks the tail)
class Solution(object):
def reverseList(self, head):
"""
:type head: ListNode
:rtype: ListNode
"""
if not head:
return None
pre = head
cur = pre.next
pre.next = None
while cur:
next_node = cur.next
cur.next = pre
pre = cur
cur = next_node
return pre
def printLinkList(self, head):
while head:
print(head.val)
head = head.next
if __name__ == "__main__":
s = Solution()
head = ListNode(3)
head.next = ListNode(4)
head.next.next = ListNode(5)
head.next.next.next = ListNode(6)
head.next.next.next.next = ListNode(7)
pre = s.reverseList(head)
s.printLinkList(pre)
|
[
"congyingTech@163.com"
] |
congyingTech@163.com
|
800d633cac00277a169c3f3f6d2c89358f811ea0
|
b250ec914ed079b297d9dd19d53304b4c61f150b
|
/HW1/Q3_prokudin_gorskii.py
|
f398bc916765cf94831f6d5029b799e2c6bb22f0
|
[] |
no_license
|
sahelyiyi/ImageProcessingCourse
|
512081f84ca7a4cbaa69fb40eca34a8c78a8f11e
|
bf69c8c7de6425ba2dd33cadd6448647cb0386e7
|
refs/heads/master
| 2020-09-29T01:46:43.729810
| 2020-03-17T10:56:04
| 2020-03-17T10:56:04
| 226,917,907
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,774
|
py
|
import cv2
import numpy as np
from scipy import misc
def _match_imgs(img1, img2, img_name=None):
# img2 is fixed and we move img1
selected_sample = 500
center = 1000
common2 = img2[center:center+selected_sample, center:center+selected_sample]
min_value = None
min_diff_y = None
min_diff_x = None
for y in range(-100, 100):
for x in range(-100, 100):
common1 = img1[center+y:center+y+selected_sample, center+x:center+x+selected_sample]
s = np.sum(abs(common1 - common2))
if min_value is None or s < min_value:
min_value = s
min_diff_y = y
min_diff_x = x
if img_name:
best_common1 = img1[center+min_diff_y:center+min_diff_y+selected_sample, center+min_diff_x:center+min_diff_x+selected_sample]
small = misc.imresize(np.concatenate((common2, best_common1), axis=1), 0.2)
cv2.imwrite(img_name, small)
return min_diff_y, min_diff_x
aggregated_imgs = cv2.imread('BW.tif', 0)
img_size = int(aggregated_imgs.shape[0]/3)
imgs = []
for i in range(1, 4):
imgs.append(aggregated_imgs[(i-1)*img_size:i*img_size, :])
dif_y1, dif_x1 = _match_imgs(np.copy(imgs[0]), np.copy(imgs[1]), 'best_match10.jpg')
dif_y2, dif_x2 = _match_imgs(np.copy(imgs[2]), np.copy(imgs[1]), 'best_match12.jpg')
# 41 -16
# -60 -3
center = max(abs(dif_y1), abs(dif_x1), abs(dif_y2), abs(dif_x2))
h, w = imgs[0].shape
new_h, new_w = h-2*center, w-2*center
b = imgs[0][center+dif_y1:center+dif_y1+new_h, center+dif_x1:center+dif_x1+new_w]
g = imgs[1][center:center+new_h, center:center+new_w]
r = imgs[2][center+dif_y2:center+dif_y2+new_h, center+dif_x2:center+dif_x2+new_w]
new_img = cv2.merge((b, g, r))
cv2.imwrite('im03.jpg', new_img)
|
[
"sarcheshmehpours@gmail.com"
] |
sarcheshmehpours@gmail.com
|
8c96cc9669ce9bb2b02175d643e01c8ae8f83602
|
697a04e866b2ac7259e912c15243fd4349b9529e
|
/quagga/utils/CustomDefaultDict.py
|
117e1940cb56dad468955c40cd36c8c1daedac30
|
[
"Apache-2.0"
] |
permissive
|
sfdcmahi/quagga
|
23379612d50b6274af4e1bc65ab84b764e360ada
|
c1c50e891034742a59f0c712b5978c9f54348d1e
|
refs/heads/master
| 2021-01-18T16:08:48.731021
| 2016-03-02T21:32:15
| 2016-03-02T21:32:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 280
|
py
|
from collections import defaultdict
class CustomDefaultDict(defaultdict):
def __missing__(self, key):
if self.default_factory:
self[key] = self.default_factory(key)
return self[key]
else:
defaultdict.__missing__(self, key)
|
[
"sergii.gavrylov@grammarly.com"
] |
sergii.gavrylov@grammarly.com
|
5ceb8c735fff3ee882d9df8f7f7d9db063a3fa53
|
addddf160a5042c199af21bcd6553a65dc56a9ce
|
/client/client.py
|
9935addc73ea8d24ddd8d4ee6c6c17f1d83cdf3d
|
[] |
no_license
|
shirongzuo/cloud-code
|
fc122f374b49a9a7316b058bfa0cbe402c16e025
|
2c972cab20f273115847393d4cd5199f02e7c158
|
refs/heads/master
| 2022-12-23T16:55:38.464571
| 2019-05-31T01:44:54
| 2019-05-31T01:44:54
| 189,500,292
| 0
| 0
| null | 2022-12-22T04:59:35
| 2019-05-31T00:26:35
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 415
|
py
|
from requests.exceptions import ConnectionError
from socketIO_client import SocketIO
def on_connect():
print 'connect'
def on_new_response(*args):
print args
try:
socket = SocketIO('127.0.0.1', 5000, wait_for_connection=False)
socket.on('connect', on_connect)
socket.on('new_scan', on_new_response)
socket.wait()
except ConnectionError:
print('The server is down. Try again later.')
|
[
"shilingggg@gmail.com"
] |
shilingggg@gmail.com
|
51ca4ac37db82fa3b82ce5951cc8b949f65b2b5d
|
9644567f9cd3415e6c8b1470fde72ab178bb8eb0
|
/flask/lib/python2.7/site-packages/whoosh/matching/mcore.py
|
0b61c7da0d27c7b94f4eb3280f37451c55134bde
|
[
"Apache-2.0"
] |
permissive
|
Ahmad31/Web_Flask_Cassandra
|
01d44ee03fcb457ea3a01629f6fd29870663b8ff
|
76acb074fce521e904f3b2a41e6ab69571f4369e
|
refs/heads/master
| 2021-06-10T02:42:53.494515
| 2019-11-27T16:22:48
| 2019-11-27T16:22:48
| 88,625,344
| 3
| 1
|
Apache-2.0
| 2021-03-19T22:23:05
| 2017-04-18T12:59:21
|
Python
|
UTF-8
|
Python
| false
| false
| 18,841
|
py
|
# Copyright 2010 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
"""
This module contains "matcher" classes. Matchers deal with posting lists. The
most basic matcher, which reads the list of postings for a term, will be
provided by the backend implementation (for example,
:class:`whoosh.filedb.filepostings.FilePostingReader`). The classes in this
module provide additional functionality, such as combining the results of two
matchers, or modifying the results of a matcher.
You do not need to deal with the classes in this module unless you need to
write your own Matcher implementation to provide some new functionality. These
classes are not instantiated by the user. They are usually created by a
:class:`~whoosh.query.Query` object's :meth:`~whoosh.query.Query.matcher()`
method, which returns the appropriate matcher to implement the query (for
example, the :class:`~whoosh.query.Or` query's
:meth:`~whoosh.query.Or.matcher()` method returns a
:py:class:`~whoosh.matching.UnionMatcher` object).
Certain backends support "quality" optimizations. These backends have the
ability to skip ahead if it knows the current block of postings can't
contribute to the top N documents. If the matcher tree and backend support
these optimizations, the matcher's :meth:`Matcher.supports_block_quality()`
method will return ``True``.
"""
import sys
from itertools import repeat
from whoosh.compat import izip, xrange
from whoosh.compat import abstractmethod
# Exceptions
class ReadTooFar(Exception):
"""Raised when :meth:`~whoosh.matching.Matcher.next()` or
:meth:`~whoosh.matching.Matcher.skip_to()` are called on an inactive
matcher.
"""
class NoQualityAvailable(Exception):
"""Raised when quality methods are called on a matcher that does not
support block quality optimizations.
"""
# Classes
class Matcher(object):
"""Base class for all matchers.
"""
@abstractmethod
def is_active(self):
"""Returns True if this matcher is still "active", that is, it has not
yet reached the end of the posting list.
"""
raise NotImplementedError
@abstractmethod
def reset(self):
"""Returns to the start of the posting list.
Note that reset() may not do what you expect after you call
:meth:`Matcher.replace()`, since this can mean calling reset() not on
the original matcher, but on an optimized replacement.
"""
raise NotImplementedError
def term(self):
"""Returns a ``("fieldname", "termtext")`` tuple for the term this
matcher matches, or None if this matcher is not a term matcher.
"""
return None
def term_matchers(self):
"""Returns an iterator of term matchers in this tree.
"""
if self.term() is not None:
yield self
else:
for cm in self.children():
for m in cm.term_matchers():
yield m
def matching_terms(self, id=None):
"""Returns an iterator of ``("fieldname", "termtext")`` tuples for the
**currently matching** term matchers in this tree.
"""
if not self.is_active():
return
if id is None:
id = self.id()
elif id != self.id():
return
t = self.term()
if t is None:
for c in self.children():
for t in c.matching_terms(id):
yield t
else:
yield t
def is_leaf(self):
return not bool(self.children())
def children(self):
"""Returns an (possibly empty) list of the submatchers of this
matcher.
"""
return []
def replace(self, minquality=0):
"""Returns a possibly-simplified version of this matcher. For example,
if one of the children of a UnionMatcher is no longer active, calling
this method on the UnionMatcher will return the other child.
"""
return self
@abstractmethod
def copy(self):
"""Returns a copy of this matcher.
"""
raise NotImplementedError
def depth(self):
"""Returns the depth of the tree under this matcher, or 0 if this
matcher does not have any children.
"""
return 0
def supports_block_quality(self):
"""Returns True if this matcher supports the use of ``quality`` and
``block_quality``.
"""
return False
def max_quality(self):
"""Returns the maximum possible quality measurement for this matcher,
according to the current weighting algorithm. Raises
``NoQualityAvailable`` if the matcher or weighting do not support
quality measurements.
"""
raise NoQualityAvailable(self.__class__)
def block_quality(self):
"""Returns a quality measurement of the current block of postings,
according to the current weighting algorithm. Raises
``NoQualityAvailable`` if the matcher or weighting do not support
quality measurements.
"""
raise NoQualityAvailable(self.__class__)
@abstractmethod
def id(self):
"""Returns the ID of the current posting.
"""
raise NotImplementedError
def all_ids(self):
"""Returns a generator of all IDs in the matcher.
What this method returns for a matcher that has already read some
postings (whether it only yields the remaining postings or all postings
from the beginning) is undefined, so it's best to only use this method
on fresh matchers.
"""
i = 0
m = self
while m.is_active():
yield m.id()
m.next()
i += 1
if i == 10:
m = m.replace()
i = 0
def all_items(self):
"""Returns a generator of all (ID, encoded value) pairs in the matcher.
What this method returns for a matcher that has already read some
postings (whether it only yields the remaining postings or all postings
from the beginning) is undefined, so it's best to only use this method
on fresh matchers.
"""
i = 0
m = self
while self.is_active():
yield (m.id(), m.value())
m.next()
i += 1
if i == 10:
m = m.replace()
i = 0
def items_as(self, astype):
"""Returns a generator of all (ID, decoded value) pairs in the matcher.
What this method returns for a matcher that has already read some
postings (whether it only yields the remaining postings or all postings
from the beginning) is undefined, so it's best to only use this method
on fresh matchers.
"""
while self.is_active():
yield (self.id(), self.value_as(astype))
self.next()
@abstractmethod
def value(self):
"""Returns the encoded value of the current posting.
"""
raise NotImplementedError
@abstractmethod
def supports(self, astype):
"""Returns True if the field's format supports the named data type,
for example 'frequency' or 'characters'.
"""
raise NotImplementedError("supports not implemented in %s"
% self.__class__)
@abstractmethod
def value_as(self, astype):
"""Returns the value(s) of the current posting as the given type.
"""
raise NotImplementedError("value_as not implemented in %s"
% self.__class__)
def spans(self):
"""Returns a list of :class:`~whoosh.query.spans.Span` objects for the
matches in this document. Raises an exception if the field being
searched does not store positions.
"""
from whoosh.query.spans import Span
if self.supports("characters"):
return [Span(pos, startchar=startchar, endchar=endchar)
for pos, startchar, endchar in self.value_as("characters")]
elif self.supports("positions"):
return [Span(pos) for pos in self.value_as("positions")]
else:
raise Exception("Field does not support spans")
def skip_to(self, id):
"""Moves this matcher to the first posting with an ID equal to or
greater than the given ID.
"""
while self.is_active() and self.id() < id:
self.next()
def skip_to_quality(self, minquality):
"""Moves this matcher to the next block with greater than the given
minimum quality value.
"""
raise NotImplementedError(self.__class__.__name__)
@abstractmethod
def next(self):
"""Moves this matcher to the next posting.
"""
raise NotImplementedError(self.__class__.__name__)
def weight(self):
"""Returns the weight of the current posting.
"""
return self.value_as("weight")
@abstractmethod
def score(self):
"""Returns the score of the current posting.
"""
raise NotImplementedError(self.__class__.__name__)
def __eq__(self, other):
return self.__class__ is type(other)
def __lt__(self, other):
return type(other) is self.__class__
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__eq__(other) or self.__lt__(other)
def __ge__(self, other):
return self.__eq__(other) or self.__gt__(other)
# Simple intermediate classes
class ConstantScoreMatcher(Matcher):
def __init__(self, score=1.0):
self._score = score
def supports_block_quality(self):
return True
def max_quality(self):
return self._score
def block_quality(self):
return self._score
def skip_to_quality(self, minquality):
if minquality >= self._score:
self.go_inactive()
def score(self):
return self._score
# Null matcher
class NullMatcherClass(Matcher):
"""Matcher with no postings which is never active.
"""
def __call__(self):
return self
def __repr__(self):
return "<NullMatcher>"
def supports_block_quality(self):
return True
def max_quality(self):
return 0
def block_quality(self):
return 0
def skip_to_quality(self, minquality):
return 0
def is_active(self):
return False
def reset(self):
pass
def all_ids(self):
return []
def copy(self):
return self
# Singleton instance
NullMatcher = NullMatcherClass()
class ListMatcher(Matcher):
"""Synthetic matcher backed by a list of IDs.
"""
def __init__(self, ids, weights=None, values=None, format=None,
scorer=None, position=0, all_weights=None, term=None,
terminfo=None):
"""
:param ids: a list of doc IDs.
:param weights: a list of weights corresponding to the list of IDs.
If this argument is not supplied, a list of 1.0 values is used.
:param values: a list of encoded values corresponding to the list of
IDs.
:param format: a :class:`whoosh.formats.Format` object representing the
format of the field.
:param scorer: a :class:`whoosh.scoring.BaseScorer` object for scoring
the postings.
:param term: a ``("fieldname", "text")`` tuple, or None if this is not
a term matcher.
"""
self._ids = ids
self._weights = weights
self._all_weights = all_weights
self._values = values
self._i = position
self._format = format
self._scorer = scorer
self._term = term
self._terminfo = terminfo
def __repr__(self):
return "<%s>" % self.__class__.__name__
def is_active(self):
return self._i < len(self._ids)
def reset(self):
self._i = 0
def skip_to(self, id):
if not self.is_active():
raise ReadTooFar
if id < self.id():
return
while self._i < len(self._ids) and self._ids[self._i] < id:
self._i += 1
def term(self):
return self._term
def copy(self):
return self.__class__(self._ids, self._weights, self._values,
self._format, self._scorer, self._i,
self._all_weights)
def replace(self, minquality=0):
if not self.is_active():
return NullMatcher()
elif minquality and self.max_quality() < minquality:
return NullMatcher()
else:
return self
def supports_block_quality(self):
return (self._scorer is not None
and self._scorer.supports_block_quality())
def max_quality(self):
# This matcher treats all postings in the list as one "block", so the
# block quality is the same as the quality of the entire list
if self._scorer:
return self._scorer.block_quality(self)
else:
return self.block_max_weight()
def block_quality(self):
return self._scorer.block_quality(self)
def skip_to_quality(self, minquality):
while self._i < len(self._ids) and self.block_quality() <= minquality:
self._i += 1
return 0
def id(self):
return self._ids[self._i]
def all_ids(self):
return iter(self._ids)
def all_items(self):
values = self._values
if values is None:
values = repeat('')
return izip(self._ids, values)
def value(self):
if self._values:
v = self._values[self._i]
if isinstance(v, list):
# This object supports "values" that are actually lists of
# value strings. This is to support combining the results of
# several different matchers into a single ListMatcher (see the
# TOO_MANY_CLAUSES functionality of MultiTerm). We combine the
# values here instead of combining them first and then making
# the ListMatcher to avoid wasting time combining values if the
# consumer never asks for them.
assert len(v) > 0
if len(v) == 1:
v = v[0]
else:
v = self._format.combine(v)
# Replace the list with the computed value string
self._values[self._i] = v
return v
else:
return ''
def value_as(self, astype):
decoder = self._format.decoder(astype)
return decoder(self.value())
def supports(self, astype):
return self._format.supports(astype)
def next(self):
self._i += 1
def weight(self):
if self._all_weights:
return self._all_weights
elif self._weights:
return self._weights[self._i]
else:
return 1.0
def block_min_length(self):
return self._terminfo.min_length()
def block_max_length(self):
return self._terminfo.max_length()
def block_max_weight(self):
if self._all_weights:
return self._all_weights
elif self._weights:
return max(self._weights)
elif self._terminfo is not None:
return self._terminfo.max_weight()
else:
return 1.0
def score(self):
if self._scorer:
return self._scorer.score(self)
else:
return self.weight()
# Term/vector leaf posting matcher middleware
class LeafMatcher(Matcher):
# Subclasses need to set
# self.scorer -- a Scorer object or None
# self.format -- Format object for the posting values
def __repr__(self):
return "%s(%r, %s)" % (self.__class__.__name__, self.term(),
self.is_active())
def term(self):
return self._term
def items_as(self, astype):
decoder = self.format.decoder(astype)
for id, value in self.all_items():
yield (id, decoder(value))
def supports(self, astype):
return self.format.supports(astype)
def value_as(self, astype):
decoder = self.format.decoder(astype)
return decoder(self.value())
def spans(self):
from whoosh.query.spans import Span
if self.supports("characters"):
return [Span(pos, startchar=startchar, endchar=endchar)
for pos, startchar, endchar in self.value_as("characters")]
elif self.supports("positions"):
return [Span(pos) for pos in self.value_as("positions")]
else:
raise Exception("Field does not support positions (%r)"
% self.term())
def supports_block_quality(self):
return self.scorer and self.scorer.supports_block_quality()
def max_quality(self):
return self.scorer.max_quality()
def block_quality(self):
return self.scorer.block_quality(self)
def score(self):
return self.scorer.score(self)
|
[
"aku.anwar.aan@gmail.com"
] |
aku.anwar.aan@gmail.com
|
3646440113ac103a53ca422f0e1dcabcff3bbf0e
|
a0be404b882c9dd8b23d08052d568785b854710f
|
/draw.py
|
213f4523f056a1f4f2d00dd6e20b7c96d1acb0b8
|
[
"MIT"
] |
permissive
|
mncoppola/ws30
|
fa3d0bf0b688e2347c2818c68c9644679d8f343a
|
c80f5a1ae19a13871c1fcf051cbce1e67761ec36
|
refs/heads/master
| 2020-04-14T01:09:38.110523
| 2015-10-19T13:16:10
| 2015-10-19T13:16:10
| 13,512,586
| 59
| 14
| null | 2015-10-19T13:16:11
| 2013-10-12T00:20:06
|
Python
|
UTF-8
|
Python
| false
| false
| 3,404
|
py
|
import hashlib
import Image
import json
import sys
import time
import urllib
import urllib2
from urlgrabber.keepalive import HTTPHandler
MARGIN = 3000
urlbase = "http://scalews.withings.net/cgi-bin/"
urlonce = urlbase + "once"
urlsess = urlbase + "session"
urlmaint = urlbase + "maint"
urlassoc = urlbase + "association"
urlmeas = urlbase + "measure"
def craft_params(params):
return urllib.unquote(urllib.urlencode(params))
def do_request(opener, url, data):
req = urllib2.Request(url, data)
opener.addheaders = headers
return opener.open(req).read()
if __name__ == "__main__":
if len(sys.argv) != 4:
print "%s <mac_addr> <secret> <image>" % sys.argv[0]
exit()
mac_addr = sys.argv[1]
secret = sys.argv[2]
headers = [
("User-Agent", "Withings UserAgent"),
("Accept", "*/*"),
]
opener = urllib2.build_opener(HTTPHandler()) # Keep-alive
##
# Step 1: /cgi-bin/once
##
params = {
"action": "get"
}
data = craft_params(params)
resp = do_request(opener, urlonce, data)
print resp
once = json.loads(resp)["body"]["once"]
##
# Step 2: /cgi-bin/session
##
md5 = hashlib.md5()
md5.update("%s:%s:%s" % (mac_addr, secret, once))
hash_calc = md5.digest().encode("hex")
params = {
"action": "new",
"auth": mac_addr,
"hash": hash_calc,
"mfgid": "262151",
"currentfw": "200",
"batterylvl": "1337",
"duration": "30",
"zreboot": "1"
}
data = craft_params(params)
resp = do_request(opener, urlsess, data)
print resp
tmp = json.loads(resp)
sessionid = tmp["body"]["sessionid"]
user_id = tmp["body"]["sp"]["users"][0]["id"]
##
# Step 3: /cgi-bin/measure
##
curtime = int(time.time())
print "Current time: %d" % curtime
im = Image.open(sys.argv[3])
width, height = im.size
print "Image width: %d" % width
print "Image height: %d" % height
print "Iterating pixels..."
pix = im.load()
count = 0
for x in xrange(width):
points = {
"measures": [
]
}
for y in xrange(height):
a, b, c, d = pix[x,y]
if a < 128:
point = {
"value": 20000 + ((height - y) * MARGIN),
"type": 1,
"unit": -3
}
points["measures"].append(point)
count += 1
print "total points = %d" % count
if len(points["measures"]) == 0:
print "Blank column, skipping request"
continue
params = {
"action": "store",
"sessionid": sessionid,
"macaddress": mac_addr,
"userid": user_id,
"meastime": curtime - ((width - x) * MARGIN),
"devtype": 1,
"attribstatus": 0,
"measures": urllib.quote_plus(json.dumps(points, separators=(",", ":")))
}
data = craft_params(params)
print data
resp = do_request(opener, urlmeas, data)
print resp
##
# Step 4: /cgi-bin/session
##
params = {
"action": "delete",
"sessionid": sessionid
}
data = craft_params(params)
print data
resp = do_request(opener, urlsess, data)
print resp
|
[
"michael.n.coppola@gmail.com"
] |
michael.n.coppola@gmail.com
|
b8880ceb3d542eb55dcfb0215fbabbb155893947
|
c195c6dfe6f9d270babe4641bea558b4427b6ce2
|
/base.py
|
c2d6f6d7c31bc1e0fdd97e2be020ef43bde53e51
|
[] |
no_license
|
umknow/TechnicalIndex
|
427e9bb2184ae7eb72b52ca4796703e603db496a
|
b910ca3a2d652002335ed5ca90e80e7a375c7859
|
refs/heads/master
| 2020-09-01T13:16:45.781338
| 2019-11-01T08:00:52
| 2019-11-01T08:00:52
| 218,966,313
| 1
| 0
| null | 2019-11-01T10:41:29
| 2019-11-01T10:41:29
| null |
UTF-8
|
Python
| false
| false
| 5,698
|
py
|
#!/usr/bin/env python3
# -*- coding:utf-8 -*-
"""
Author: zero
Email: 13256937698@163.com
Date: 2018-09-20
"""
from decimal import Decimal, getcontext
from TechnicalIndex import WindowEvent
class EMAEventWindow(WindowEvent):
def __init__(self, maxLen):
super(EMAEventWindow, self).__init__(maxLen)
self.__value = 0
self.__close = 0
self.__multiplier = (2.0 / (maxLen + 1))
self.__lasterFact = (maxLen - 1) / (maxLen + 1)
def oneNewValue(self, value):
super(EMAEventWindow, self).oneNewValue(value)
if self.getValueLen() == 1:
self.__value = value
else:
self.__value = (self.__value * self.__lasterFact) + (value * self.__multiplier)
def onLastValue(self, value):
super(EMAEventWindow, self).oneNewValue(value)
self.__value = value
def getValue(self):
value = float(Decimal(str(self.__value)).quantize(Decimal('0.000')))
return value
class KDJEvenWindow(WindowEvent):
def __init__(self, maxLen):
super(KDJEvenWindow, self).__init__(maxLen)
self.__value = 50
self.__multiplier = (maxLen-1) / (maxLen)
self.__lasterFact = 1 / maxLen
def oneNewValue(self, value):
self.__value = (self.__value * self.__multiplier) + (value * self.__lasterFact)
def oneLastValue(self, value):
self.setValues(value)
self.__value = value[-1]
def getValue(self):
value = float(Decimal(str(self.__value)).quantize(Decimal('0.000')))
return value
class RSVEvenWindows(WindowEvent):
def __init__(self, maxLen):
super(RSVEvenWindows, self).__init__(maxLen)
self.__value = 0
def oneNewValue(self, value):
super(RSVEvenWindows, self).oneNewValue(value)
if not self.windowfull():
self.__value = 0
else:
values = self.getValues()
C = values[-1][0]
L = min([low_list[1] for low_list in values])
H = max([low_list[2] for low_list in values])
self.__value = (C-L)/(H-L)*100
def oneLastValue(self, value):
self.setValues(value)
def getValue(self):
value = float(Decimal(str(self.__value)).quantize(Decimal('0.000')))
return value
class SMAEventWindow(WindowEvent):
def __init__(self, period):
assert(period > 0)
super(SMAEventWindow, self).__init__(period)
self.__value = None
def oneNewValue(self, value):
firstValue = None
if len(self.getValues()) > 0:
firstValue = self.getValues()[0]
assert(firstValue is not None)
super(SMAEventWindow, self).oneNewValue(value)
if value is not None and self.windowfull():
if self.__value is None:
self.__value = sum(self.getValues())/self.getValueLen()
else:
self.__value = self.__value + value / float(self.getValueLen()) - firstValue / float(self.getValueLen())
def onLastValue(self, MA, values):
self.__value = MA
self.setValues(values)
def getValue(self):
return self.__value
def getFirstValue(self):
return self.getValues()[0]
class RSEventWindow(WindowEvent):
def __init__(self, period):
assert(period > 0)
super(RSEventWindow, self).__init__(period)
self.__value = 0
self.__prevGain = None
self.__prevLoss = None
self.__period = period
def oneNewValue(self, value):
super(RSEventWindow, self).oneNewValue(value)
if value is not None and self.windowfull():
if self.__prevGain is None:
assert (self.__prevLoss is None)
avgGain, avgLoss = self.avgGainLoss(self.getValues(), 0, len(self.getValues()))
else:
assert (self.__prevLoss is not None)
prevValue = self.getValues()[-2]
currValue = self.getValues()[-1]
currGain, currLoss = self.gainLossOne(prevValue, currValue)
avgGain = (self.__prevGain * (self.__period - 1) + currGain) / float(self.__period)
avgLoss = (self.__prevLoss * (self.__period - 1) + currLoss) / float(self.__period)
if avgLoss == 0:
self.__value = 100
else:
rs = avgGain / avgLoss
self.__value = 100 - 100 / (1 + rs)
self.__prevGain = avgGain
self.__prevLoss = avgLoss
def oneLastValue(self, avgGain, avgLoss, value):
self.setValues(value)
self.__prevLoss = avgLoss
self.__prevGain = avgGain
def avgGainLoss(self, values, begin, end):
rangeLen = end - begin
if rangeLen < 2:
return None
gain = 0
loss = 0
for i in range(begin + 1, end):
currGain, currLoss = self.gainLossOne(values[i - 1], values[i])
gain += currGain
loss += currLoss
return gain / float(rangeLen), loss / float(rangeLen)
def gainLossOne(self, prevValue, nextValue):
change = nextValue - prevValue
if change < 0:
gain = 0
loss = abs(change)
else:
gain = change
loss = 0
return gain, loss
def getValue(self):
value = self.changeValue(self.__value)
avgGain = self.changeValue(self.__prevGain)
avgLoss = self.changeValue(self.__prevLoss)
return value, avgGain, avgLoss
def changeValue(self, value):
if value is None:
return 0.0000
else:
return float(Decimal(str(value)).quantize(Decimal('0.000000')))
|
[
"zb@liangplus.com"
] |
zb@liangplus.com
|
b604234cc7badec12fbeb8430b01b3ad746d0afe
|
a7d8b809c96badb19c4405604a132218c0428458
|
/suppliers/urls.py
|
376d353b460ee08531eeeb3923ea5909b7607ce9
|
[] |
no_license
|
nlanaro/providers
|
d1b29765dec71f7c2af3e43d239eb5eb2f9d4811
|
ec288b80ecb0aeba5c0e8662f05de8185c8cb097
|
refs/heads/master
| 2021-01-10T05:42:22.443751
| 2016-02-14T18:20:35
| 2016-02-14T18:20:35
| 51,680,974
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,016
|
py
|
"""suppliers URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/1.9/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: url(r'^$', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: url(r'^$', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.conf.urls import url, include
2. Add a URL to urlpatterns: url(r'^blog/', include('blog.urls'))
"""
from django.conf.urls import url, include
from django.contrib import admin
from tastypie.api import Api
from serviceareas.api import ProviderResource, ServiceAreaResource
v1_api = Api(api_name='v1')
v1_api.register(ProviderResource())
v1_api.register(ServiceAreaResource())
urlpatterns = [
url(r'^admin/', admin.site.urls),
url(r'^api/', include(v1_api.urls)),
]
|
[
"nlanaro@medallia.com"
] |
nlanaro@medallia.com
|
cd04153e6967fb5fc6fed3dd5d14f9d6cfd9d578
|
2fdbb6ddcbad9d7d9bb28a080ca82db978f8d034
|
/karthus/karthus_stack.py
|
39904ab3952b9a5d6224840202cceeca1ea3511e
|
[] |
no_license
|
jairov4/karthus
|
b8a36a73404d38afddb02f69dc0f382b6c1da4dc
|
df8298b4e6ece7949ea8d6d349a4b4494c7ac2d8
|
refs/heads/master
| 2020-11-24T15:10:26.520038
| 2019-12-16T18:17:29
| 2019-12-16T18:17:29
| 228,209,418
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,574
|
py
|
from aws_cdk import core, aws_ec2 as ec2, aws_autoscaling as autoscaling, aws_logs as logs, aws_iam as iam, aws_s3 as s3
from aws_cdk import aws_autoscaling_hooktargets as as_ht, aws_sqs as sqs
class KarthusStack(core.Stack):
    """CDK stack provisioning a Docker Swarm cluster on EC2.

    Creates a VPC, a 3-node manager auto-scaling group (public subnets)
    and a 2-node Linux worker auto-scaling group (private subnets),
    plus the S3 state bucket, SQS lifecycle-notification queue,
    CloudWatch log group, security group and IAM instance role the
    swarm nodes need.  Nodes bootstrap via the ``init.sh`` user-data
    script; instance tags carry the cluster wiring (log group, state
    bucket, notification queue) so nodes can self-discover.
    """

    def __init__(self, scope: core.Construct, id: str, **kwargs) -> None:
        super().__init__(scope, id, **kwargs)
        # Hard-coded single-environment parameters.
        cidr = '10.10.10.0/24'
        ssh_key = 'katarina-test'
        vpc = ec2.Vpc(self, "Cluster", cidr=cidr, max_azs=3, enable_dns_support=True, enable_dns_hostnames=True)
        log_group = logs.LogGroup(self, "LogGroup", retention=logs.RetentionDays.THREE_DAYS)
        # FIX: close the bootstrap script's file handle; the original
        # open('init.sh', 'rb').read() leaked the descriptor until GC.
        with open('init.sh', 'rb') as script_file:
            script = script_file.read()
        user_data = ec2.UserData.for_linux()
        user_data.add_commands(str(script, 'utf-8'))
        # Bucket holding swarm join tokens / shared cluster state.
        swarm_bucket = s3.Bucket(self, "SwarmBucket")
        # Single security group for all nodes: swarm control/data plane.
        sg = ec2.SecurityGroup(self, "Swarm", description='Swarm nodes', vpc=vpc)
        sg.add_ingress_rule(sg, ec2.Port.icmp_ping(), description='ICMP ping')
        sg.add_ingress_rule(sg, ec2.Port.tcp(2377), description='Swarm management')
        sg.add_ingress_rule(sg, ec2.Port.tcp(7946), description='Swarm Communication')
        sg.add_ingress_rule(sg, ec2.Port.udp(7946), description='Swarm Communication')
        sg.add_ingress_rule(sg, ec2.Port.udp(4789), description='Swarm Overlay Network')
        # NOTE(review): SSH is open to the world on IPv4 and IPv6 —
        # confirm this is intended (test cluster?).
        sg.add_ingress_rule(ec2.Peer.any_ipv4(), ec2.Port.tcp(22), description='SSH')
        sg.add_ingress_rule(ec2.Peer.any_ipv6(), ec2.Port.tcp(22), description='SSH')
        # Queue receiving auto-scaling lifecycle-hook notifications.
        queue = sqs.Queue(self, "SwarmNodeNotifications")
        instance_policy = iam.PolicyDocument(statements=[
            # S3 Swarm Bucket
            iam.PolicyStatement(
                actions=['s3:GetObject', 's3:PutObject', 's3:DeleteObject', 's3:ListObjects'],
                resources=[swarm_bucket.arn_for_objects('*')]),
            iam.PolicyStatement(
                actions=['s3:ListBucket'],
                resources=[swarm_bucket.bucket_arn]),
            # CloudWatch cluster logs
            iam.PolicyStatement(
                actions=["logs:CreateLogStream", "logs:PutLogEvents", "logs:DescribeLogStreams", "logs:GetLogEvents"],
                resources=[log_group.log_group_arn]),
            # Auto discovery capabilities
            iam.PolicyStatement(
                actions=["ec2:DescribeInstances", "ec2:DescribeTags", "ec2:CreateTags"],
                resources=['*']),
            # CloudFormation instance signaling
            iam.PolicyStatement(
                actions=["cloudformation:SignalResource"],
                resources=["*"]),
            # Swarm Node notifications
            iam.PolicyStatement(
                actions=[
                    "sqs:ChangeMessageVisibility",
                    "sqs:ChangeMessageVisibilityBatch",
                    "sqs:DeleteMessage",
                    "sqs:DeleteMessageBatch",
                    "sqs:ReceiveMessage",
                    "sqs:SendMessage"
                ],
                resources=[queue.queue_arn]),
            # LifeCycle hooks response
            iam.PolicyStatement(
                actions=['autoscaling:CompleteLifecycleAction'],
                resources=['*']),
            # RexRay EBS volume driver permissions
            iam.PolicyStatement(
                actions=[
                    "ec2:AttachVolume",
                    "ec2:CreateVolume",
                    "ec2:CreateSnapshot",
                    "ec2:CreateTags",
                    "ec2:DeleteVolume",
                    "ec2:DeleteSnapshot",
                    "ec2:DescribeAvailabilityZones",
                    "ec2:DescribeInstances",
                    "ec2:DescribeVolumes",
                    "ec2:DescribeVolumeAttribute",
                    "ec2:DescribeVolumeStatus",
                    "ec2:DescribeSnapshots",
                    "ec2:CopySnapshot",
                    "ec2:DescribeSnapshotAttribute",
                    "ec2:DetachVolume",
                    "ec2:ModifySnapshotAttribute",
                    "ec2:ModifyVolumeAttribute",
                    "ec2:DescribeTags"
                ],
                resources=["*"])
        ])
        role = iam.Role(
            self, "InstanceRole", path='/', assumed_by=iam.ServicePrincipal('ec2.amazonaws.com'),
            managed_policies=[iam.ManagedPolicy.from_aws_managed_policy_name('AmazonSSMManagedInstanceCore')],
            inline_policies={'Policy': instance_policy})
        # Rolling updates: one instance at a time, keep at least one in
        # service, and wait for cfn-signal before continuing.
        rolling_update_config = autoscaling.RollingUpdateConfiguration(
            min_instances_in_service=1, max_batch_size=1, pause_time=core.Duration.minutes(5),
            wait_on_resource_signals=True, suspend_processes=[
                autoscaling.ScalingProcess.HEALTH_CHECK,
                autoscaling.ScalingProcess.REPLACE_UNHEALTHY,
                autoscaling.ScalingProcess.AZ_REBALANCE,
                autoscaling.ScalingProcess.ALARM_NOTIFICATION,
                autoscaling.ScalingProcess.SCHEDULED_ACTIONS
            ])
        # Managers: 3 public nano instances (odd count for raft quorum).
        asg_managers = autoscaling.AutoScalingGroup(
            self,
            "Managers",
            vpc=vpc,
            instance_type=ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE3_AMD, ec2.InstanceSize.NANO),
            machine_image=ec2.AmazonLinuxImage(generation=ec2.AmazonLinuxGeneration.AMAZON_LINUX_2),
            key_name=ssh_key,
            user_data=user_data,
            min_capacity=3,
            vpc_subnets=ec2.SubnetSelection(subnets=vpc.public_subnets),
            role=role,
            update_type=autoscaling.UpdateType.ROLLING_UPDATE,
            rolling_update_configuration=rolling_update_config,
            associate_public_ip_address=True)
        asg_managers.add_security_group(sg)
        # Termination lifecycle hook: notify the queue so the node can be
        # drained/demoted before the instance goes away.
        notification_target = as_ht.QueueHook(queue)
        asg_managers.add_lifecycle_hook(
            "Managers", lifecycle_transition=autoscaling.LifecycleTransition.INSTANCE_TERMINATING,
            default_result=autoscaling.DefaultResult.ABANDON, notification_target=notification_target)
        core.Tag.add(asg_managers, 'swarm-node-type', 'Manager')
        core.Tag.add(asg_managers, 'LogGroup', log_group.log_group_name)
        core.Tag.add(asg_managers, 'swarm-state-bucket', swarm_bucket.bucket_name)
        core.Tag.add(asg_managers, 'swarm-notification-queue', queue.queue_url)
        # Workers: 2 private micro instances, no public IPs.
        asg_workers = autoscaling.AutoScalingGroup(
            self,
            "WorkersLinux",
            vpc=vpc,
            instance_type=ec2.InstanceType.of(ec2.InstanceClass.BURSTABLE3_AMD, ec2.InstanceSize.MICRO),
            machine_image=ec2.AmazonLinuxImage(generation=ec2.AmazonLinuxGeneration.AMAZON_LINUX_2),
            key_name=ssh_key,
            user_data=user_data,
            min_capacity=2,
            vpc_subnets=ec2.SubnetSelection(subnets=vpc.private_subnets),
            role=role,
            update_type=autoscaling.UpdateType.ROLLING_UPDATE,
            rolling_update_configuration=rolling_update_config,
            associate_public_ip_address=False)
        asg_workers.add_security_group(sg)
        asg_workers.add_lifecycle_hook(
            "Workers", lifecycle_transition=autoscaling.LifecycleTransition.INSTANCE_TERMINATING,
            default_result=autoscaling.DefaultResult.ABANDON, notification_target=notification_target)
        core.Tag.add(asg_workers, 'swarm-node-type', 'Worker')
        core.Tag.add(asg_workers, 'LogGroup', log_group.log_group_name)
        core.Tag.add(asg_workers, 'swarm-state-bucket', swarm_bucket.bucket_name)
        core.Tag.add(asg_workers, 'swarm-notification-queue', queue.queue_url)
|
[
"1904410+jairov4@users.noreply.github.com"
] |
1904410+jairov4@users.noreply.github.com
|
1e7c11b83b084bfa789ff92f565d1102b7b56ae2
|
4a2b846b39a930edc39462aa6b57444de1f69c65
|
/__init__.py
|
20db3a97a96d8188c27f4cfc38ab06e6d6a988ed
|
[] |
no_license
|
ivi81/eml_parser
|
c8b9f134dae67d0c26f45c691c6edc4484f5e6b0
|
3ab481d690c8923fb1add4d6acd51736237c2a4a
|
refs/heads/master
| 2020-04-01T13:05:52.573875
| 2018-11-21T13:23:09
| 2018-11-21T13:23:09
| 153,236,723
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 170
|
py
|
#!/usr/bin/python3
# -*- coding: utf8 -*-
"""Package init: re-export everything from the SMTP processor.

Falls back to an absolute import so the code also works when run
outside the package (e.g. directly from its own directory).
"""
# FIX: a shebang only takes effect on line 1 (it was previously on
# line 2, after the coding declaration, where it was inert).
import sys
try:
    from ._smtp_processor import *
except ImportError:
    # FIX: narrowed from a blanket `except Exception`, which hid real
    # errors raised while importing _smtp_processor itself.
    sys.path.append('./')
    from _smtp_processor import *
|
[
"ppp00@inbox.ru"
] |
ppp00@inbox.ru
|
a5a442ee8c34ff3789e5156e1baf76508b04f52f
|
8d65af8d08ac39d29bd899c809c6028f40a7871c
|
/movingFiles/main.py
|
32a49d2195f0a177c91e2a1b98153b122c94afe9
|
[] |
no_license
|
tonyechen/python-codes
|
860691129da58bc6f6d125ae16de89a0ffbb617c
|
ab457c611806e572d155fc956ff99a55997b545d
|
refs/heads/main
| 2023-07-08T12:48:52.044742
| 2021-08-08T02:45:00
| 2021-08-08T02:45:00
| 394,035,660
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 318
|
py
|
import os

# Move `source` into the destination folder, keeping its file name.
source = "text.txt"
# Raw string: the original "C:\\Users\\fchen\Desktop\\folder" relied on
# the invalid escape '\D' happening to survive verbatim.
destination_dir = r"C:\Users\fchen\Desktop\folder"
# FIX: os.replace() needs a full destination *file* path.  The original
# passed the folder itself, so the exists() check tested the folder
# (always true once created) and the move could never succeed.
destination = os.path.join(destination_dir, os.path.basename(source))
try:
    if os.path.exists(destination):
        print("There is already a file there")
    else:
        os.replace(source, destination)
        print(source + " was moved")
except FileNotFoundError:
    print(source + " was not found")
|
[
"anchen082019@gmail.com"
] |
anchen082019@gmail.com
|
bcfdd3cfd8012d73ae2debe9783cc96a1cbfd130
|
d149ef59f3d810dbae135e32cb0f8c6844a7b32c
|
/home/urls.py
|
3cef4f784a043530745f471a0dae52af572b30be
|
[] |
no_license
|
salimdev04/schoolbook_django
|
915c0b6184827f838751f6a7ff32618405015966
|
f8aaaa69d9d2a881219ee7b87b97387f951402af
|
refs/heads/main
| 2023-02-10T08:45:40.455326
| 2021-01-03T12:15:27
| 2021-01-03T12:15:27
| 326,391,779
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 301
|
py
|
from django.urls import path
from . import views

# Campus-browsing routes for the "home" app.
urlpatterns = [
    # Landing page.
    path("", views.index, name="index"),
    # Campus detail addressed by a date plus slug (presumably a publish
    # date — TODO confirm against views.campus_detail).
    path('<int:year>/<int:month>/<int:day>/<slug:slug>',
         views.campus_detail, name='campus_detail'),
    # All campuses of one country ("pays"), addressed by id + slug.
    path('<int:id>/<slug:slug>/', views.campus_per_pays, name='campus_per_pays'),
]
|
[
"salimdev@salimdevs-MacBook-Pro.local"
] |
salimdev@salimdevs-MacBook-Pro.local
|
3e895e849c58729bfc3662d24e122fa94de6457f
|
1711b5ca224856b4a4dcbe08f8caaa4ba28848e3
|
/server/book/insert_action.py
|
32b88a5a6b56819a52e2984e6f9527efa29196af
|
[] |
no_license
|
ktisha/ebook-service
|
ef041c715cc02acac9e6774965dd30baa4eacd3c
|
703137643c30757dac039bb56841ac87015b7daf
|
refs/heads/master
| 2016-09-10T17:19:39.942529
| 2013-07-03T09:20:24
| 2013-07-03T09:20:24
| 11,146,567
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,972
|
py
|
"Insert action handler for analyzer interface"
try:
from hashlib import md5
except ImportError:
from md5 import new as md5
import logging
from django.db.models import Q
from django.db import transaction
from django.core.exceptions import ObjectDoesNotExist
from book.models import Book, Author, AuthorAlias, BookFile, Annotation, Language
from spec.exception import InputDataServerException
from spec.utils import get_language
analyzer_log = logging.getLogger("analyser_logger")
MAIN_LOG = logging.getLogger("main_logger")
def strip_str(tstr):
    """Return *tstr* with surrounding whitespace removed.

    A falsy value (None or the empty string) yields ''.
    """
    if not tstr:
        return ''
    return tstr.strip()
def set_if_notempty(value, key):
    """Return *key* if it is non-empty, otherwise *value*.

    BUG (historic): the original rebound the local name ``value``, which
    never affected the caller's object — every call was a silent no-op.
    The function now returns the chosen value; callers that want the
    update must assign the result (existing callers that ignore the
    return value behave exactly as before, i.e. unchanged).
    """
    if key:
        return key
    return value
def get_authors(node, messages=None):
    '''
    Creates or finds authors, returns authors list.

    Walks the <authors> element: each child may carry <name> and
    <alias> sub-elements.  Authors with an empty name are skipped and a
    ('warning', ...) tuple is appended to *messages*.

    Returns a tuple ``(authors, is_created)`` where *is_created* is
    True when at least one Author row was newly created.
    '''
    if messages is None:
        messages = []
    authors = []
    # FIX: this flag used to be (re)initialised inside the loop, which
    # raised UnboundLocalError for an empty <authors> node and reported
    # only the *last* author's creation status.
    is_created_global = False
    for author_node in node.getchildren():
        author = None
        # TODO add by id
        for details_node in author_node.getchildren():
            # create or find author with name from tag
            if details_node.tag == 'name':
                author_name = strip_str(details_node.text)
                if author_name:
                    (author, is_created) = \
                        Author.objects.get_or_create(name=author_name)
                    # remember whether any author was newly created
                    is_created_global |= is_created
            # aliases are only attached once the author itself exists
            if details_node.tag == 'alias' and author:
                alias_name = strip_str(details_node.text)
                if alias_name:
                    alias = AuthorAlias.objects.get_or_create(name=alias_name)[0]
                    author.alias.add(alias)
        # add author to list, if it was created or found
        if author:
            authors.append(author)
        else:
            analyzer_log.warning("One author is not added. (Empty name)")
            messages.append(('warning',
                "One author is not added. (Empty name)"))
    return (authors, is_created_global)
def get_files(node, messages=None):
    '''Creates or finds files, returns files list.

    Walks the <files> element: each child <file> may carry <link>,
    <size>, <type>, <more_info> and <img_link> sub-elements.  A BookFile
    row is created (or fetched) only once a non-empty <link> has been
    seen; details appearing before the link are silently dropped.
    Warnings for skipped files are appended to *messages*.
    '''
    if messages == None:
        messages = []
    book_files = []
    for file_node in node.getchildren():
        book_file = None
        # TODO add by id
        # look in tag file
        for details_node in file_node.getchildren():
            # create or find file with link from tag
            if details_node.tag == 'link':
                link = strip_str(details_node.text)
                if link:
                    # NOTE(review): md5() requires bytes on Python 3;
                    # this call suggests Python 2 — verify the runtime.
                    link_hash = md5(link).hexdigest()
                    book_file = BookFile.objects.get_or_create(link=link,
                        link_hash=link_hash)[0]
            # add size filed if exists book_file and size is int
            if details_node.tag == 'size' and book_file:
                size = strip_str(details_node.text)
                try:
                    size = int(size)
                    book_file.size = size
                except ValueError:
                    # non-numeric size: log and keep the file without it
                    analyzer_log.warning("size is not integer. \
                        book_file.link='%s'" % (book_file.link))
            if details_node.tag == 'type' and book_file:
                file_type = strip_str(details_node.text)
                # TODO check type
                # should I check type, or maybe i should believe
                if file_type:
                    book_file.type = file_type
            # NOTE(review): set_if_notempty rebinds its first argument
            # locally, so these two calls are effectively no-ops.
            if details_node.tag == 'more_info' and book_file:
                set_if_notempty(book_file.more_info,
                    strip_str(details_node.text))
            if details_node.tag == 'img_link' and book_file:
                set_if_notempty(book_file.img_link,
                    strip_str(details_node.text))
        # add book_file to list, if it is created or found
        if book_file:
            book_file.save()
            book_files.append(book_file)
        else:
            analyzer_log.warning("One book_file is not added. (Empty link)")
            messages.append(('warning',
                "One book_file is not added. (Empty link)"))
    return book_files
def get_book_inf(xml, messages=None):
    '''
    Reads information from xml, finds or creates the book author,
    the book files, annotations, etc.

    Returns a tuple of
    (NOT saved models.Book with information filled in its fields,
    list of authors, list of book files, list of annotations)
    or raises InputDataServerException if the book title or the book
    files are missing.

    NOTE: child-element order matters — <authors>, <files> and
    <annotation> are only processed once a non-empty <title> has
    already been seen.
    '''
    if messages == None:
        messages = []
    book = Book(title='', lang='')
    authors = []
    book_files = []
    annotations = []
    # unused flag
    #is_author_created = False
    for node in xml.getchildren():
        if node.tag == 'title':
            book.title = strip_str(node.text)
        if node.tag == 'lang':
            # maps the raw text onto a Language model instance
            book.language = get_language(node.text)
        if node.tag == 'authors' and book.title:
            (authors, is_author_created) = get_authors(node, messages)
        if node.tag == 'files' and book.title:
            # NOTE(review): *messages* is not forwarded here, so file
            # warnings are collected locally and lost — confirm intent.
            book_files = get_files(node)
        if node.tag == 'annotation' and book.title:
            annotation_txt = strip_str(node.text)
            if annotation_txt:
                annotation = \
                    Annotation.objects.get_or_create(name=annotation_txt)[0]
                annotations.append(annotation)
    # if there is not the title of the book, raise exception
    if not book.title:
        analyzer_log.warning('In request there is not the title of the book')
        raise InputDataServerException(
            'In request there is not the title of the book')
    # if there is not the book_file, raise exception
    elif not book_files:
        analyzer_log.warning('In request there is not the book_file')
        raise InputDataServerException('In request there is not the book_file')
    return (book, authors, book_files, annotations)
def save_book_inf(book, authors, book_files, annotations):
    '''
    Tries to find the book by title and its authors.
    Creates the book, if not found.
    Adds all related objects to the book and saves it to the database.

    Returns a list of ('info', ...) message tuples describing whether
    the book was created or updated.
    '''
    messages = []
    # try to find the book by this title (and language, when known)
    found_books = Book.objects.filter(title=book.title)
    if book.lang:
        found_books = found_books.filter(lang__in=[book.lang, ''])
    # narrow down to books having every one of the incoming authors
    for author in authors:
        found_books = found_books.filter(author=author)
    found_book_in_db = None
    # find a book with exactly the same number of authors
    for found_book in found_books:
        if found_book.author.count() == len(authors):
            found_book_in_db = found_book
    if found_book_in_db:
        # existing book: update its language when provided
        # NOTE(review): save() appears scoped to the lang branch, so an
        # unchanged-language match is not re-saved — confirm intent.
        if book.lang:
            found_book_in_db.lang = book.lang
            found_book_in_db.save()
        book = found_book_in_db
        messages.append(('info', 'Book updated'))
    else:
        # not found the book in database, then save it
        # TODO do something with language
        # TODO remove it temporary solution
        try:
            language = Language.objects.get(short=book.lang)
        except ObjectDoesNotExist:
            # fall back to the catch-all "unknown" language row
            language = Language.objects.get(short='?')
        book.language = language
        book.save()
        messages.append(('info', 'Book created'))
    messages.append(('info', 'book.id=%i' % (book.id)))
    # attach authors, book_files, annotations to the (saved) book
    for author in authors:
        book.author.add(author)
    for book_file in book_files:
        book.book_file.add(book_file)
    for annotation in annotations:
        book.annotation.add(annotation)
    return messages
def xml_exec_insert_unsafe(xml):
"Insert xml request to database, returns list of warning strings"
messages = []
#if xml.tag != 'book':
# raise InputDataServerException("Not found root tag 'book'")
# get infromation about the book from the request
(book, authors, book_files, annotations) = get_book_inf(xml, messages)
# save infomation to database
save_messages = save_book_inf(book, authors, book_files, annotations)
for msg in save_messages:
messages.append(msg)
MAIN_LOG.info("Added book " + book.title)
return messages
@transaction.commit_manually
def xml_exec_insert(xml):
    '''
    Executes xml request for insert to database.
    Returns list of warning, error messages.

    Runs under Django's manual transaction management: the whole
    insert is rolled back if anything raises, committed otherwise.
    '''
    try:
        messages = xml_exec_insert_unsafe(xml)
    except:
        # undo any partial writes, then let the original error propagate
        transaction.rollback()
        raise
    transaction.commit()
    messages.append(('DEBUG', "transaction.commit()"))
    return messages
|
[
"Ekaterina.Tuzova@jetbrains.com"
] |
Ekaterina.Tuzova@jetbrains.com
|
1bd1312ffdf3178c08caa60c547fc57c244b9015
|
24779d6a953061a56f357e2667e5c165f2f440c4
|
/src/app/controllers/dm_controllers/update_dm_message_controller.py
|
a705bc00afcb0987ab73f0355b04e0197cff2262
|
[] |
no_license
|
jack-y-wang/slack-backend
|
b3ec564c6a8aaec78858b118eb5a1b88f327ba3a
|
9ef4bf366027c3f7f0cc63def4ab9b9816e1bde9
|
refs/heads/master
| 2023-02-10T18:26:26.726432
| 2021-01-12T05:35:49
| 2021-01-12T05:35:49
| 325,659,550
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 602
|
py
|
from app.controllers import *
from flask import request
from app.dao import dms_dao
class UpdateDmMessageController(Controller):
    """POST endpoint that edits the content of a direct message.

    Routed at ``/dm-messages/<message_id>/``.  Requires an authorized
    user, injected by the ``@authorize_user`` decorator via
    ``kwargs["user"]``.
    """

    def get_name(self):
        # Unique controller name used for route registration.
        return "update-dm-message"

    def get_path(self):
        # URL pattern; <int:message_id> is passed into content().
        return "/dm-messages/<int:message_id>/"

    def get_methods(self):
        # This endpoint only accepts POST.
        return ["POST"]

    @authorize_user
    def content(self, message_id, **kwargs):
        # Authenticated user supplied by the decorator.
        user = kwargs.get("user")
        data = request.get_json()
        # New message body; presumably validated downstream — TODO
        # confirm dms_dao handles a missing/None "content" key.
        content = data.get("content")
        dm = dms_dao.update_dm_message(message_id, user.id, content)
        return dm.serialize()
|
[
"jack.y.wang@berkeley.edu"
] |
jack.y.wang@berkeley.edu
|
485bb1976ef3b511d0be2b4a2dfba7f4be12e3e5
|
974c5a4f101d0e6f4dfa5fc2f7c641c9d2bd8184
|
/sdk/containerservice/azure-mgmt-containerservice/azure/mgmt/containerservice/v2022_05_02_preview/aio/operations/_trusted_access_role_bindings_operations.py
|
94bb973f5c4c88ec447012d30012c415b6a64ba6
|
[
"MIT",
"LicenseRef-scancode-generic-cla",
"LGPL-2.1-or-later"
] |
permissive
|
gaoyp830/azure-sdk-for-python
|
4816f04c554dcffb7510a6b7044b0c86a2dd32e1
|
1c66defa502b754abcc9e5afa444ca03c609342f
|
refs/heads/master
| 2022-10-20T21:33:44.281041
| 2022-09-29T17:03:13
| 2022-09-29T17:03:13
| 250,355,505
| 0
| 0
|
MIT
| 2020-03-26T19:42:13
| 2020-03-26T19:42:12
| null |
UTF-8
|
Python
| false
| false
| 19,837
|
py
|
# pylint: disable=too-many-lines
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, IO, Optional, TypeVar, Union, overload
from urllib.parse import parse_qs, urljoin, urlparse
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import (
ClientAuthenticationError,
HttpResponseError,
ResourceExistsError,
ResourceNotFoundError,
ResourceNotModifiedError,
map_error,
)
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse
from azure.core.rest import HttpRequest
from azure.core.tracing.decorator import distributed_trace
from azure.core.tracing.decorator_async import distributed_trace_async
from azure.core.utils import case_insensitive_dict
from azure.mgmt.core.exceptions import ARMErrorFormat
from ... import models as _models
from ..._vendor import _convert_request
from ...operations._trusted_access_role_bindings_operations import (
build_create_or_update_request,
build_delete_request,
build_get_request,
build_list_request,
)
T = TypeVar("T")
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class TrustedAccessRoleBindingsOperations:
"""
.. warning::
**DO NOT** instantiate this class directly.
Instead, you should access the following operations through
:class:`~azure.mgmt.containerservice.v2022_05_02_preview.aio.ContainerServiceClient`'s
:attr:`trusted_access_role_bindings` attribute.
"""
models = _models
def __init__(self, *args, **kwargs) -> None:
input_args = list(args)
self._client = input_args.pop(0) if input_args else kwargs.pop("client")
self._config = input_args.pop(0) if input_args else kwargs.pop("config")
self._serialize = input_args.pop(0) if input_args else kwargs.pop("serializer")
self._deserialize = input_args.pop(0) if input_args else kwargs.pop("deserializer")
    @distributed_trace
    def list(
        self, resource_group_name: str, resource_name: str, **kwargs: Any
    ) -> AsyncIterable["_models.TrustedAccessRoleBinding"]:
        """List trusted access role bindings.

        List trusted access role bindings.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: An iterator like instance of either TrustedAccessRoleBinding or the result of
         cls(response)
        :rtype:
         ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding]
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-02-preview"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.TrustedAccessRoleBindingListResult]

        # HTTP status -> SDK exception mapping; callers may extend it
        # via the "error_map" keyword argument.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        def prepare_request(next_link=None):
            # First page: build the canonical list request; subsequent
            # pages: follow the service-provided next_link.
            if not next_link:
                request = build_list_request(
                    resource_group_name=resource_group_name,
                    resource_name=resource_name,
                    subscription_id=self._config.subscription_id,
                    api_version=api_version,
                    template_url=self.list.metadata["url"],
                    headers=_headers,
                    params=_params,
                )
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore

            else:
                # make call to next link with the client's api-version
                _parsed_next_link = urlparse(next_link)
                _next_request_params = case_insensitive_dict(parse_qs(_parsed_next_link.query))
                _next_request_params["api-version"] = self._config.api_version
                request = HttpRequest("GET", urljoin(next_link, _parsed_next_link.path), params=_next_request_params)
                request = _convert_request(request)
                request.url = self._client.format_url(request.url)  # type: ignore
                request.method = "GET"
            return request

        async def extract_data(pipeline_response):
            # Deserialize one page and hand back (next_link, items).
            deserialized = self._deserialize("TrustedAccessRoleBindingListResult", pipeline_response)
            list_of_elem = deserialized.value
            if cls:
                list_of_elem = cls(list_of_elem)
            return deserialized.next_link or None, AsyncList(list_of_elem)

        async def get_next(next_link=None):
            request = prepare_request(next_link)

            pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
                request, stream=False, **kwargs
            )
            response = pipeline_response.http_response

            if response.status_code not in [200]:
                map_error(status_code=response.status_code, response=response, error_map=error_map)
                raise HttpResponseError(response=response, error_format=ARMErrorFormat)

            return pipeline_response

        return AsyncItemPaged(get_next, extract_data)

    list.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/trustedAccessRoleBindings"}  # type: ignore
    @distributed_trace_async
    async def get(
        self, resource_group_name: str, resource_name: str, trusted_access_role_binding_name: str, **kwargs: Any
    ) -> _models.TrustedAccessRoleBinding:
        """Get a trusted access role binding.

        Get a trusted access role binding.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param trusted_access_role_binding_name: The name of trusted access role binding. Required.
        :type trusted_access_role_binding_name: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TrustedAccessRoleBinding or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP status -> SDK exception mapping; callers may extend it
        # via the "error_map" keyword argument.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = kwargs.pop("headers", {}) or {}
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-02-preview"))  # type: str
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.TrustedAccessRoleBinding]

        request = build_get_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            trusted_access_role_binding_name=trusted_access_role_binding_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            template_url=self.get.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        # Run the request through the client pipeline (auth, retry, …).
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("TrustedAccessRoleBinding", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    get.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/trustedAccessRoleBindings/{trustedAccessRoleBindingName}"}  # type: ignore
    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        trusted_access_role_binding_name: str,
        trusted_access_role_binding: _models.TrustedAccessRoleBinding,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TrustedAccessRoleBinding:
        """Create or update a trusted access role binding.

        Create or update a trusted access role binding.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param trusted_access_role_binding_name: The name of trusted access role binding. Required.
        :type trusted_access_role_binding_name: str
        :param trusted_access_role_binding: A trusted access role binding. Required.
        :type trusted_access_role_binding:
         ~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding
        :keyword content_type: Body Parameter content-type. Content type parameter for JSON body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TrustedAccessRoleBinding or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @overload
    async def create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        trusted_access_role_binding_name: str,
        trusted_access_role_binding: IO,
        *,
        content_type: str = "application/json",
        **kwargs: Any
    ) -> _models.TrustedAccessRoleBinding:
        """Create or update a trusted access role binding.

        Create or update a trusted access role binding.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param trusted_access_role_binding_name: The name of trusted access role binding. Required.
        :type trusted_access_role_binding_name: str
        :param trusted_access_role_binding: A trusted access role binding. Required.
        :type trusted_access_role_binding: IO
        :keyword content_type: Body Parameter content-type. Content type parameter for binary body.
         Default value is "application/json".
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TrustedAccessRoleBinding or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding
        :raises ~azure.core.exceptions.HttpResponseError:
        """

    @distributed_trace_async
    async def create_or_update(
        self,
        resource_group_name: str,
        resource_name: str,
        trusted_access_role_binding_name: str,
        trusted_access_role_binding: Union[_models.TrustedAccessRoleBinding, IO],
        **kwargs: Any
    ) -> _models.TrustedAccessRoleBinding:
        """Create or update a trusted access role binding.

        Create or update a trusted access role binding.

        :param resource_group_name: The name of the resource group. The name is case insensitive.
         Required.
        :type resource_group_name: str
        :param resource_name: The name of the managed cluster resource. Required.
        :type resource_name: str
        :param trusted_access_role_binding_name: The name of trusted access role binding. Required.
        :type trusted_access_role_binding_name: str
        :param trusted_access_role_binding: A trusted access role binding. Is either a model type or a
         IO type. Required.
        :type trusted_access_role_binding:
         ~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding or IO
        :keyword content_type: Body Parameter content-type. Known values are: 'application/json'.
         Default value is None.
        :paramtype content_type: str
        :keyword callable cls: A custom type or function that will be passed the direct response
        :return: TrustedAccessRoleBinding or the result of cls(response)
        :rtype: ~azure.mgmt.containerservice.v2022_05_02_preview.models.TrustedAccessRoleBinding
        :raises ~azure.core.exceptions.HttpResponseError:
        """
        # HTTP status -> SDK exception mapping; callers may extend it
        # via the "error_map" keyword argument.
        error_map = {
            401: ClientAuthenticationError,
            404: ResourceNotFoundError,
            409: ResourceExistsError,
            304: ResourceNotModifiedError,
        }
        error_map.update(kwargs.pop("error_map", {}) or {})

        _headers = case_insensitive_dict(kwargs.pop("headers", {}) or {})
        _params = case_insensitive_dict(kwargs.pop("params", {}) or {})

        api_version = kwargs.pop("api_version", _params.pop("api-version", "2022-05-02-preview"))  # type: str
        content_type = kwargs.pop("content_type", _headers.pop("Content-Type", None))  # type: Optional[str]
        cls = kwargs.pop("cls", None)  # type: ClsType[_models.TrustedAccessRoleBinding]

        content_type = content_type or "application/json"
        _json = None
        _content = None
        # Raw stream/bytes bodies are sent as-is; model instances are
        # serialized to JSON through the SDK serializer.
        if isinstance(trusted_access_role_binding, (IO, bytes)):
            _content = trusted_access_role_binding
        else:
            _json = self._serialize.body(trusted_access_role_binding, "TrustedAccessRoleBinding")

        request = build_create_or_update_request(
            resource_group_name=resource_group_name,
            resource_name=resource_name,
            trusted_access_role_binding_name=trusted_access_role_binding_name,
            subscription_id=self._config.subscription_id,
            api_version=api_version,
            content_type=content_type,
            json=_json,
            content=_content,
            template_url=self.create_or_update.metadata["url"],
            headers=_headers,
            params=_params,
        )
        request = _convert_request(request)
        request.url = self._client.format_url(request.url)  # type: ignore

        # Run the request through the client pipeline (auth, retry, …).
        pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
            request, stream=False, **kwargs
        )

        response = pipeline_response.http_response

        if response.status_code not in [200]:
            map_error(status_code=response.status_code, response=response, error_map=error_map)
            raise HttpResponseError(response=response, error_format=ARMErrorFormat)

        deserialized = self._deserialize("TrustedAccessRoleBinding", pipeline_response)

        if cls:
            return cls(pipeline_response, deserialized, {})

        return deserialized

    create_or_update.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/trustedAccessRoleBindings/{trustedAccessRoleBindingName}"}  # type: ignore
@distributed_trace_async
async def delete(  # pylint: disable=inconsistent-return-statements
    self, resource_group_name: str, resource_name: str, trusted_access_role_binding_name: str, **kwargs: Any
) -> None:
    """Delete a trusted access role binding.

    :param resource_group_name: The name of the resource group. The name is case insensitive.
     Required.
    :type resource_group_name: str
    :param resource_name: The name of the managed cluster resource. Required.
    :type resource_name: str
    :param trusted_access_role_binding_name: The name of trusted access role binding. Required.
    :type trusted_access_role_binding_name: str
    :keyword callable cls: A custom type or function that will be passed the direct response
    :return: None or the result of cls(response)
    :rtype: None
    :raises ~azure.core.exceptions.HttpResponseError:
    """
    # Map well-known HTTP statuses onto the richer azure-core exception types;
    # callers may extend/override the mapping through kwargs.
    error_map = {
        401: ClientAuthenticationError,
        404: ResourceNotFoundError,
        409: ResourceExistsError,
        304: ResourceNotModifiedError,
    }
    error_map.update(kwargs.pop("error_map", {}) or {})

    header_parameters = kwargs.pop("headers", {}) or {}
    query_parameters = case_insensitive_dict(kwargs.pop("params", {}) or {})

    api_version = kwargs.pop("api_version", query_parameters.pop("api-version", "2022-05-02-preview"))  # type: str
    cls = kwargs.pop("cls", None)  # type: ClsType[None]

    # Build the HTTP request and normalise its URL before running the pipeline.
    request = build_delete_request(
        resource_group_name=resource_group_name,
        resource_name=resource_name,
        trusted_access_role_binding_name=trusted_access_role_binding_name,
        subscription_id=self._config.subscription_id,
        api_version=api_version,
        template_url=self.delete.metadata["url"],
        headers=header_parameters,
        params=query_parameters,
    )
    request = _convert_request(request)
    request.url = self._client.format_url(request.url)  # type: ignore

    pipeline_response = await self._client._pipeline.run(  # type: ignore # pylint: disable=protected-access
        request, stream=False, **kwargs
    )
    response = pipeline_response.http_response

    if response.status_code not in (200, 204):
        map_error(status_code=response.status_code, response=response, error_map=error_map)
        raise HttpResponseError(response=response, error_format=ARMErrorFormat)

    if cls:
        return cls(pipeline_response, None, {})

delete.metadata = {"url": "/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.ContainerService/managedClusters/{resourceName}/trustedAccessRoleBindings/{trustedAccessRoleBindingName}"}  # type: ignore
|
[
"noreply@github.com"
] |
gaoyp830.noreply@github.com
|
146fd114a0390327155648dcecd12e452a0ef164
|
4c4d60d3ffc3f0832a53ce51aa35807c905566ba
|
/question/migrations/0004_auto_20191028_1338.py
|
6fdcc41186bc3be386e8120dec5b960ef2880dab
|
[] |
no_license
|
bulletmys/Techpark-Web
|
5a86435e42dd4367d728570b466fc99d116829af
|
5d081130f26ca7e50a8e0234495fc1262b40fbec
|
refs/heads/develop
| 2020-09-03T23:29:49.453358
| 2019-11-04T22:09:14
| 2019-11-04T22:09:14
| 219,601,440
| 0
| 1
| null | 2020-07-07T20:43:11
| 2019-11-04T21:37:23
| null |
UTF-8
|
Python
| false
| false
| 847
|
py
|
# Generated by Django 2.2.6 on 2019-10-28 13:38
import datetime
from django.db import migrations, models
class Migration(migrations.Migration):
    # Auto-generated migration: adds like/dislike counters to Question and
    # re-declares the default for User.birthday.

    dependencies = [
        ('question', '0003_auto_20191028_1249'),
    ]

    operations = [
        migrations.AddField(
            model_name='question',
            name='thumbs_down',
            field=models.IntegerField(default=0, verbose_name='Num_of_dislikes'),
        ),
        migrations.AddField(
            model_name='question',
            name='thumbs_up',
            field=models.IntegerField(default=0, verbose_name='Num_of_likes'),
        ),
        migrations.AlterField(
            model_name='user',
            name='birthday',
            # NOTE(review): this default is the fixed timestamp captured when
            # makemigrations ran, not "today" — a common artifact of passing a
            # call result (datetime.datetime.now()) as a model field default.
            field=models.DateField(default=datetime.datetime(2019, 10, 28, 13, 38, 10, 467440), verbose_name='Birthday'),
        ),
    ]
|
[
"bulletmys@yandex.ru"
] |
bulletmys@yandex.ru
|
62a5cae3b4c092a1ae2f91940148f82ac26bcb3d
|
a3aeb4575855d828e9b6d1f0c445f07e3548dae5
|
/exercises/22_oop_basics/test_task_22_1c.py
|
4e6f0c9daacca13a927bce384b3b0c55fc8b618b
|
[] |
no_license
|
Dimirs/python-Junior
|
40e70e8218d04e6e88e3d18447eaa7258949a4f0
|
b46001549250a312b855be108dadd8128a26fe4a
|
refs/heads/master
| 2023-08-13T08:30:14.605401
| 2021-10-03T17:11:57
| 2021-10-03T17:11:57
| 413,067,835
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,302
|
py
|
import pytest
import warnings
import task_22_1c
import sys
sys.path.append("..")
from pyneng_common_functions import (
check_class_exists,
check_attr_or_method,
stdout_incorrect_warning,
unify_topology_dict,
)
# Verify the tests were launched via ``pytest ...`` rather than ``python ...``:
# only pytest's AssertionRewritingHook loader rewrites the bare asserts below
# into informative failures.
from _pytest.assertion.rewrite import AssertionRewritingHook

if not isinstance(__loader__, AssertionRewritingHook):
    print(f"Тесты нужно вызывать используя такое выражение:\npytest {__file__}\n\n")
def test_class_created():
    """The task module must define a ``Topology`` class."""
    check_class_exists(task_22_1c, "Topology")
def test_attr_topology(topology_with_dupl_links):
    """A Topology instance must expose a ``topology`` attribute."""
    instance = task_22_1c.Topology(topology_with_dupl_links)
    check_attr_or_method(instance, attr="topology")
def test_topology_normalization(topology_with_dupl_links, normalized_topology_example):
    """Creating a Topology must remove duplicate links from the source dict."""
    correct_topology = unify_topology_dict(normalized_topology_example)
    return_value = task_22_1c.Topology(topology_with_dupl_links)
    # Normalised copy of the instance's topology (currently unused; kept for
    # parity with the other tests in this module).
    return_topology = unify_topology_dict(return_value.topology)
    assert (
        type(return_value.topology) == dict
        # Fix: the failure message referenced the undefined name
        # ``top_with_data``, so a failing assert raised NameError instead of
        # showing the intended message.
    ), f"По заданию в переменной topology должен быть словарь, а не {type(return_value.topology).__name__}"
    assert len(correct_topology) == len(
        return_value.topology
    ), "После создания экземпляра, в переменной topology должна находиться топология без дублей"
def test_method_delete_node_created(
    topology_with_dupl_links, normalized_topology_example
):
    """A Topology instance must provide a ``delete_node`` method."""
    topo = task_22_1c.Topology(normalized_topology_example)
    check_attr_or_method(topo, method="delete_node")
def test_method_delete_node(normalized_topology_example, capsys):
    """delete_node must drop every link touching the node and warn on repeats."""
    topo = task_22_1c.Topology(normalized_topology_example)
    node = "SW1"

    result = topo.delete_node(node)
    assert None == result, "Метод delete_node не должен ничего возвращать"

    links_left_with_node = [
        src for src, dst in topo.topology.items() if node in src or node in dst
    ]
    assert 0 == len(links_left_with_node), "Соединения с хостом SW1 не были удалены"
    assert 3 == len(
        topo.topology
    ), "В топологии должны остаться только три соединения"

    # Deleting a device that is already gone must print a warning, not raise.
    topo.delete_node(node)
    out, err = capsys.readouterr()
    assert (
        "Такого устройства нет" in out
    ), "При удалении несуществующего устройства, не было выведено сообщение 'Такого устройства нет'"
|
[
"91830963+Dimirs@users.noreply.github.com"
] |
91830963+Dimirs@users.noreply.github.com
|
9f25c7df88e816e6c9198132c896b412b51e3d19
|
d38e71c3f1652e7f90464b76fe72e068e2dde299
|
/app/mail.py
|
0358e165ba8643c332de3f86851879aa886c06dd
|
[] |
no_license
|
aswinashok44/mailbot
|
c2bf509e2627a34ab4fdb174142bd706436607c6
|
35e81f96532c3d093f3c33e761fdb95c2d84c105
|
refs/heads/master
| 2022-01-05T01:40:31.234792
| 2019-04-28T17:55:37
| 2019-04-28T17:55:37
| 179,039,873
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,177
|
py
|
import smtplib
from config import Config
from threading import Thread
from email.mime.multipart import MIMEMultipart
from email.mime.text import MIMEText
import datetime
from flask import render_template
from app.models import User, Courier
def send_mail(sub, htmlbody, recipient):
    """Send a single HTML email through the configured SMTP server.

    :param sub: subject line of the message
    :param htmlbody: HTML body of the message
    :param recipient: destination email address
    """
    # Build the message first so a construction problem never opens a socket.
    msg = MIMEMultipart('alternative')
    msg['Subject'] = sub
    msg['From'] = Config.MAIL_DEFAULT_SENDER
    msg['To'] = recipient
    msg.attach(MIMEText(htmlbody, 'html'))

    smtpserver = smtplib.SMTP(Config.MAIL_SERVER, Config.MAIL_PORT)
    try:
        smtpserver.ehlo()
        smtpserver.starttls()
        smtpserver.login(Config.MAIL_USERNAME, Config.MAIL_PASSWORD)
        smtpserver.sendmail(Config.MAIL_DEFAULT_SENDER, recipient, msg.as_string())
    finally:
        # Always release the connection — the original leaked the socket
        # whenever starttls/login/sendmail raised.
        smtpserver.close()
def email_new(user, courier):
    """Notify *user* by email about a newly registered courier."""
    body = render_template('emails/new.html', user=user, courier=courier)
    send_mail("You have a new Courier - Mailbot", htmlbody=body, recipient=user.email)
    return "success"
def email_collected(user, courier):
    """Notify *user* by email that a courier has been collected."""
    body = render_template('emails/collected.html', user=user, courier=courier)
    send_mail("Courier Collected - Mailbot", htmlbody=body, recipient=user.email)
    return "success"
def email_returned(user, courier):
    """Notify *user* by email that a courier has been returned."""
    body = render_template('emails/returned.html', user=user, courier=courier)
    send_mail("Courier Returned - Mailbot", htmlbody=body, recipient=user.email)
    return "success"
def email_new_user(user):
    """Send the welcome email to a newly registered *user*."""
    body = render_template('emails/new_user.html', user=user)
    send_mail("Welcome - Mailbot", htmlbody=body, recipient=user.email)
    return "success"
def email_new_cod(user, courier):
    """Notify *user* by email that their COD courier request was recorded."""
    body = render_template('emails/codnew.html', user=user, courier=courier)
    send_mail("You Requested a COD Courier - Mailbot", htmlbody=body, recipient=user.email)
    return "success"
def email_cod_approved(user, courier):
    """Notify *user* by email that their COD request was approved.

    NOTE(review): the subject reads "You COD Approved" — probably meant
    "Your COD Approved"; kept verbatim since changing it could break callers
    or tests matching the exact string.
    """
    body = render_template('emails/codapproved.html', user=user, courier=courier)
    send_mail("You COD Approved - Mailbot", htmlbody=body, recipient=user.email)
    return "success"
|
[
"aswin@am.students.amrita.edu"
] |
aswin@am.students.amrita.edu
|
5085696c9c60e922bd5245bb522c3d1c94710bb9
|
90dbd6556944bc6dbafaa79aa69b794bb19f0a16
|
/BasicCodes/kap.py
|
e5a4eba716b89ee68e52c7828f12c76b0ac070c4
|
[] |
no_license
|
prasadovhal/Python-Codes
|
e5f325818f52562462e9223ad8d04c68b9ff7022
|
14c42ef133d8f55de46c88d63d8a6a5657895c1c
|
refs/heads/master
| 2020-03-29T16:51:35.829910
| 2018-09-24T16:09:13
| 2018-09-24T16:09:13
| 149,887,454
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 418
|
py
|
def kap(n):
    """Run Kaprekar's routine on *n* and return the fixed point 6174.

    Repeatedly forms big - small from n's digits (descending/ascending)
    and prints each intermediate difference.
    """
    if n == 6174:
        return n
    # Treat n as a 4-digit number with leading zeros (e.g. 999 -> "0999").
    # Fix: without zfill, a difference with fewer than 4 digits crashed with
    # IndexError on lis[3] in the original digit arithmetic.
    digits = sorted(str(n).zfill(4))
    small = int("".join(digits))
    big = int("".join(reversed(digits)))
    diff = big - small
    print(diff)
    return kap(diff)
# NOTE(review): Python 2 script — uses print statements and eval-style input().
n = input("enter a number : ")  # Python 2 input() eval()s the text; an int is expected
if(len(str(n)) ==4):
    if(n%1111 != 0):  # repdigits (1111, 2222, ...) never converge, so reject them
        kap(n)
    else:
        print "ny hou sakat"  # Marathi: "won't work" (repdigit input)
else:
    print "4 anki number tak ki"  # Marathi: "enter a 4-digit number"
|
[
"prasadovhal99@gmail.com"
] |
prasadovhal99@gmail.com
|
64cb3ed7a5a0361bebb47dbec37d5482848d8607
|
16385e10f6ad05b8147517daf2f40dbdda02617c
|
/etc/power_reports.conf
|
6593b39b0c799cebaf717078b701abafae07a532
|
[] |
no_license
|
prachipainuly-rbei/devops-poc
|
308d6cab02c14ffd23a0998ff88d9ed0420f513a
|
6bc932c67bc8d93b873838ae6d9fb8d33c72234d
|
refs/heads/master
| 2020-04-18T01:26:10.152844
| 2019-02-01T12:25:19
| 2019-02-01T12:25:19
| 167,118,611
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,529
|
conf
|
# -*- mode: python; coding: utf-8 -*-
# $Id: power_reports.conf 181863 2018-08-08 14:17:59Z kbu $
import os

# Timeout in seconds (default is 600 = 10 mins; 0 = deactivated)
REPORT_SERVER_TIMEOUT = 600

# Sender information (email) for report-queue notifications.
REPORT_QUEUE_SENDER_EMAIL_ADDRESS = "NoReply@contact.de"
REPORT_QUEUE_SENDER_ORGANIZATION = "CONTACT"

# Message shown to users when report generation fails.
REPORT_QUEUE_FAILURE_MESSAGE = \
    "An error occurred while generating the PowerReport. " \
    "Please contact your CONTACT Elements administrator."
# Append the actual error message at the end of the failure message (0 = off).
REPORT_QUEUE_FAILURE_MESSAGE_APPEND_ERROR = 0

REPORT_QUEUE_TEMPFOLDER = "report_queue_payload"

# In some cases Excel might deactivate OfficeLink which ends up in empty reports.
# Setting this to True tries automatically re-activating OfficeLink before opening/filling a report.
REPORT_RESTORE_OFFICELINK = True

# Leaving following variable empty will search for an OfficeLink installation only in the default
# installation directories ('%PROGRAMFILES%\*OfficeLink*' and '%PROGRAMFILES(X86)%\*OfficeLink*')
REPORT_OFFICELINK_INSTALLDIR = None

# The (proxied) Apache URL of the CDB application server
# (only required when displaying PowerReports as E-Link).
# Simplified: os.environ.get() replaces the equivalent conditional expression.
REPORT_ELINK_SERVER_URL = os.environ.get("CADDOK_WWWCMSG_URL_PREFIX", "http://contact.de")

# Note:
# The logging for both PowerReports services can't be activated
# in this CONF file. See the notes in "report_queue.conf".
|
[
"PPR4COB@rbeigcn.com"
] |
PPR4COB@rbeigcn.com
|
e336ddb6a4b6e2d055a56a59b5a4d985b61146e3
|
dd2df0fbb8b1ebfa674344db668f5a057f53af8b
|
/azbankgateways/readers/bases.py
|
0abc56641d8c910206a92fa8ef0df94c7968f3d0
|
[
"MIT",
"LicenseRef-scancode-unknown-license-reference"
] |
permissive
|
joejoe-am/az-iranian-bank-gateways
|
cfbf7719790e85af901dd6610c026477fecc22f4
|
7ba5cde9b446b72da8004253e4dc67b16f681983
|
refs/heads/master
| 2023-08-18T15:25:48.398436
| 2021-08-14T09:46:06
| 2021-08-14T09:46:06
| 398,999,623
| 1
| 0
|
MIT
| 2021-08-23T06:41:27
| 2021-08-23T06:41:26
| null |
UTF-8
|
Python
| false
| false
| 935
|
py
|
import abc
import six
from azbankgateways.models import BankType
from azbankgateways import default_settings as settings
@six.add_metaclass(abc.ABCMeta)
class Reader:
    """Abstract base class for bank-gateway configuration readers.

    Concrete readers supply per-``identifier`` gateway credentials,
    bank priorities, the default bank and the currency.
    """

    @abc.abstractmethod
    def read(self, bank_type: BankType, identifier: str) -> dict:
        """Return the credential dict for *bank_type* / *identifier*.

        The shape depends on the bank type; for example for BMI:
        {
            'MERCHANT_CODE': '<YOUR INFO>',
            'TERMINAL_CODE': '<YOUR INFO>',
            'SECRET_KEY': '<YOUR INFO>',
        }
        """
        pass

    def klass(self, bank_type: BankType, identifier: str) -> dict:
        # Looks up the gateway implementation registered for *bank_type*.
        # NOTE(review): the ``-> dict`` annotation looks wrong — this returns
        # settings.BANK_CLASS[bank_type], presumably a class; confirm and fix.
        # ``identifier`` is unused here.
        return settings.BANK_CLASS[bank_type]

    @abc.abstractmethod
    def get_bank_priorities(self, identifier: str) -> list:
        """Return the bank priorities (a list) for *identifier*."""
        pass

    @abc.abstractmethod
    def default(self, identifier: str):
        """Return the default bank selection for *identifier*."""
        pass

    @abc.abstractmethod
    def currency(self, identifier: str):
        """Return the currency configured for *identifier*."""
        pass
|
[
"ali.zahedigol@gmail.com"
] |
ali.zahedigol@gmail.com
|
5508ec98711c97111b4e5798efb2810d44e1a1d3
|
3d19e1a316de4d6d96471c64332fff7acfaf1308
|
/Users/K/klausz/test_53.py
|
0d2daaa54ecb4c73b9d8576abdb5b37ac0820e98
|
[] |
no_license
|
BerilBBJ/scraperwiki-scraper-vault
|
4e98837ac3b1cc3a3edb01b8954ed00f341c8fcc
|
65ea6a943cc348a9caf3782b900b36446f7e137d
|
refs/heads/master
| 2021-12-02T23:55:58.481210
| 2013-09-30T17:02:59
| 2013-09-30T17:02:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,712
|
py
|
# from @andymboyle - Andy Boyle / Pythoninquiries
# NOTE(review): Python 2 scraper draft in a visibly broken state — several
# undefined names (see inline notes) mean the loop body can never complete.
# Indentation below is reconstructed; the archived copy had none.
import scraperwiki
import simplejson

# retrieve a page
#base_url = 'http://search.twitter.com/search.json?q='
#q = '%3A('
#options = '&rpp=100&page='
#page = 1
base_url = 'http://www.50hertz-transmission.net/cps/rde/papp/SID-60C46786-AB1F4138/apc_nextgen_inter_trm-prod/http://miniapp-internet.corp.transmission-it.de:8081/ma-trm-eegdata/Report.action?prepare=&_sourcePage=%2FWEB-INF%2Fpages%2Freport.jsp&reportType=masterDataEEG&eegYear=&filter.allEnergySources=true&kunde.id=&bundesland.id=&pagingDescriptor.currentPage='
# q = 'to:andymboyle+python'
# q = 'to:klausz'
options = '&spannungsebene.id='
page = 1

while 1:
    try:
        url = base_url + str(page) + options
        html = scraperwiki.scrape(url)
        print html
        soup = BeautifulSoup(html)  # NOTE(review): BeautifulSoup is never imported -> NameError
        for ldiv in soup.findAll('table'):
            if ldiv.find('th').text == 'Netzbetreiber':
                div = ldiv
        cells = susp_row.findAll('td')  # NOTE(review): susp_row is undefined -> NameError
        soup = simplejson.loads(html)  # NOTE(review): rebinding 'soup' shadows the parsed HTML above
        for result in soup['Einzelwerte']:
            data = {}
            print susp_row # this helped to see what was coming from the scraper -> then correction start from Line 2 instead of Line 1.
            #Seems to be used when there is content in Line
            # Netzbetreiber = cells[0].text
            # Anlagenschluessel = cells[1].text
            # Energietraeger = cells[2].text
            # Ort = cells[3].text
            # Plz = cells[4].text
            # StrasseFlst = cells[5].text
            # Bundesland = cells[6].text
            # InstallierteLeistung = cells[7].text
            # KWKAnteil = cells[8].text
            # Technologie = cells[9].text
            # Inbetriebnahmejahr = cells[10].text
            # EinspeiseSpannungsebene = cells[11].text
            # save records to the datastore
            # scraperwiki.datastore.save(["id"], data)
        page = page + 1
    except:
        print str  # NOTE(review): bare except + printing the builtin 'str' hides every error

# NOTE(review): everything below is an exact duplicate of the scraper above —
# almost certainly an accidental paste; consider deleting one copy.
# from @andymboyle - Andy Boyle / Pythoninquiries
import scraperwiki
import simplejson

# retrieve a page
#base_url = 'http://search.twitter.com/search.json?q='
#q = '%3A('
#options = '&rpp=100&page='
#page = 1
base_url = 'http://www.50hertz-transmission.net/cps/rde/papp/SID-60C46786-AB1F4138/apc_nextgen_inter_trm-prod/http://miniapp-internet.corp.transmission-it.de:8081/ma-trm-eegdata/Report.action?prepare=&_sourcePage=%2FWEB-INF%2Fpages%2Freport.jsp&reportType=masterDataEEG&eegYear=&filter.allEnergySources=true&kunde.id=&bundesland.id=&pagingDescriptor.currentPage='
# q = 'to:andymboyle+python'
# q = 'to:klausz'
options = '&spannungsebene.id='
page = 1

while 1:
    try:
        url = base_url + str(page) + options
        html = scraperwiki.scrape(url)
        print html
        soup = BeautifulSoup(html)  # NOTE(review): undefined, as above
        for ldiv in soup.findAll('table'):
            if ldiv.find('th').text == 'Netzbetreiber':
                div = ldiv
        cells = susp_row.findAll('td')  # NOTE(review): undefined, as above
        soup = simplejson.loads(html)
        for result in soup['Einzelwerte']:
            data = {}
            print susp_row # this helped to see what was coming from the scraper -> then correction start from Line 2 instead of Line 1.
            #Seems to be used when there is content in Line
            # Netzbetreiber = cells[0].text
            # Anlagenschluessel = cells[1].text
            # Energietraeger = cells[2].text
            # Ort = cells[3].text
            # Plz = cells[4].text
            # StrasseFlst = cells[5].text
            # Bundesland = cells[6].text
            # InstallierteLeistung = cells[7].text
            # KWKAnteil = cells[8].text
            # Technologie = cells[9].text
            # Inbetriebnahmejahr = cells[10].text
            # EinspeiseSpannungsebene = cells[11].text
            # save records to the datastore
            # scraperwiki.datastore.save(["id"], data)
        page = page + 1
    except:
        print str
|
[
"pallih@kaninka.net"
] |
pallih@kaninka.net
|
514aafeef973d88ddf5d9fbe1fa84175d6edc409
|
76fa9f306b3f13bdf33efa53035196c341fbed8b
|
/framework/urls.py
|
01f0287df403fb834ade4fc177bc5354218a988f
|
[] |
no_license
|
lohitha02/Blood-Bank-Management
|
52c0c4b6844b8c4f0413ead6118bb2fce1101515
|
a37edfabba40472dcb906e3d399bd02fe3ec2eb8
|
refs/heads/master
| 2020-08-18T12:39:17.408650
| 2019-10-17T12:03:54
| 2019-10-17T12:13:49
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 751
|
py
|
"""framework URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path
urlpatterns = [
    # Only the admin site is routed; no app URLs have been wired up yet.
    path('admin/', admin.site.urls),
]
|
[
"pabbatiharshith@gmail.com"
] |
pabbatiharshith@gmail.com
|
20c242ff67b4e638444744b807ad8ee971a5416d
|
51e8dbc7d5195586ccbf789957eaa602ae6c2d8b
|
/handlers.py
|
3741eb6b19f4801e8c776f658e1627a23ba4c7eb
|
[
"Apache-2.0"
] |
permissive
|
tdudgeon/jupyter-operator
|
ccf7947bd7d8ab3510278d9b2f49a52efc0b3019
|
7bc96f69c7d55ce55fc3e594d7aeed1cfbfba707
|
refs/heads/main
| 2023-03-28T13:05:51.165164
| 2021-03-29T15:40:57
| 2021-03-29T15:40:57
| 331,370,421
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,767
|
py
|
import hashlib
import os
import random
import secrets
import string

import kopf
import kubernetes
notebook_startup = """#!/bin/bash
conda init
source $HOME/.bashrc
if [ ! -f $HOME/.condarc ]; then
cat > $HOME/.condarc << EOF
envs_dirs:
- $HOME/.conda/envs
EOF
fi
if [ -d $HOME/.conda/envs/workspace ]; then
echo "Activate virtual environment 'workspace'."
conda activate workspace
fi
if [ ! -f $HOME/.jupyter/jupyter_notebook_config.json ]; then
mkdir -p $HOME/.jupyter
cat > $HOME/.jupyter/jupyter_notebook_config.json << EOF
{
"NotebookApp": {
"token": "%(token)s"
}
}
EOF
fi
"""
@kopf.on.create("squonk.it", "v1alpha1", "jupyternotebooks", id="jupyter")
def create(name, uid, namespace, spec, logger, **_):
    """Provision all Kubernetes children for a new JupyterNotebook resource.

    Creates, in order: a ConfigMap carrying the startup script (with a fresh
    access token), an optional PersistentVolumeClaim, the Deployment, a
    ClusterIP Service and an Ingress.  Every child is adopted by the custom
    resource so it is garbage-collected together with it.

    Returns a status dict with the notebook URL/token and the effective
    deployment/storage settings.
    """
    characters = string.ascii_letters + string.digits
    # Security fix: access tokens must come from a CSPRNG — random.sample is
    # not cryptographically secure, and sampling without replacement also
    # needlessly shrank the token space.
    token = "".join(secrets.choice(characters) for _ in range(16))

    config_map_body = {
        "apiVersion": "v1",
        "kind": "ConfigMap",
        "metadata": {
            "name": name,
            "labels": {
                "app": name
            }
        },
        "data": {
            "setup-environment.sh": notebook_startup % dict(token=token)
        }
    }

    kopf.adopt(config_map_body)

    core_api = kubernetes.client.CoreV1Api()
    core_api.create_namespaced_config_map(namespace, config_map_body)
    logger.debug("Created configmap")

    # Pull tunables from the CR spec, falling back to sane defaults.
    notebook_interface = spec.get("notebook", {}).get("interface", "lab")
    image = spec.get("deployment", {}).get("image", "jupyter/minimal-notebook:latest")
    service_account = spec.get("deployment", {}).get("serviceAccountName", "default")
    memory_limit = spec.get("deployment", {}).get("resources", {}).get("limits", {}).get("memory", "512Mi")
    # Request defaults to the limit when not given explicitly.
    memory_request = spec.get("deployment", {}).get("resources", {}).get("requests", {}).get("memory", memory_limit)

    deployment_body = {
        "apiVersion": "apps/v1",
        "kind": "Deployment",
        "metadata": {
            "name": name,
            "labels": {
                "app": name
            }
        },
        "spec": {
            "replicas": 1,
            "selector": {
                "matchLabels": {
                    "deployment": name
                }
            },
            "strategy": {
                "type": "Recreate"
            },
            "template": {
                "metadata": {
                    "labels": {
                        "deployment": name
                    }
                },
                "spec": {
                    "serviceAccountName": service_account,
                    "containers": [
                        {
                            "name": "notebook",
                            "image": image,
                            "imagePullPolicy": "Always",
                            "resources": {
                                "requests": {
                                    "memory": memory_request
                                },
                                "limits": {
                                    "memory": memory_limit
                                }
                            },
                            "ports": [
                                {
                                    "name": "8888-tcp",
                                    "containerPort": 8888,
                                    "protocol": "TCP",
                                }
                            ],
                            "env": [],
                            "volumeMounts": [
                                {
                                    "name": "startup",
                                    "mountPath": "/usr/local/bin/before-notebook.d"
                                }
                            ]
                        }
                    ],
                    "securityContext": {
                        "fsGroup": 0
                    },
                    "volumes": [
                        {
                            "name": "startup",
                            "configMap": {
                                # Fix: reference the ConfigMap created above
                                # (named after the CR); the hard-coded value
                                # "notebook" broke any CR with another name.
                                "name": name
                            }
                        }
                    ]
                },
            },
        },
    }

    if notebook_interface != "classic":
        deployment_body["spec"]["template"]["spec"]["containers"][0]["env"].append(
            {"name": "JUPYTER_ENABLE_LAB", "value": "true"})

    storage_request = ""
    storage_limit = ""

    storage_claim_name = spec.get("storage", {}).get("claimName", "")
    storage_sub_path = spec.get("storage", {}).get("subPath", "")

    if not storage_claim_name:
        # No pre-existing claim named in the spec: create a PVC when sizes are given.
        storage_request = spec.get("deployment", {}).get("resources", {}).get("requests", {}).get("storage", "")
        storage_limit = spec.get("deployment", {}).get("resources", {}).get("limits", {}).get("storage", "")

        if storage_request or storage_limit:
            # Fix: claimName must match the PVC created below (named after the
            # CR); it was hard-coded to "notebook".
            volume = {"name": "data", "persistentVolumeClaim": {"claimName": name}}
            deployment_body["spec"]["template"]["spec"]["volumes"].append(volume)

            storage_mount = {"name": "data", "mountPath": "/home/jovyan"}
            deployment_body["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(storage_mount)

            persistent_volume_claim_body = {
                "apiVersion": "v1",
                "kind": "PersistentVolumeClaim",
                "metadata": {
                    "name": name,
                    "labels": {
                        "app": name
                    }
                },
                "spec": {
                    "accessModes": ["ReadWriteOnce"],
                    "resources": {
                        "requests": {},
                        "limits": {}
                    }
                }
            }

            if storage_request:
                persistent_volume_claim_body["spec"]["resources"]["requests"]["storage"] = storage_request
            if storage_limit:
                persistent_volume_claim_body["spec"]["resources"]["limits"]["storage"] = storage_limit

            kopf.adopt(persistent_volume_claim_body)
            core_api.create_namespaced_persistent_volume_claim(namespace, persistent_volume_claim_body)
            logger.debug("Created pvc")
    else:
        # Mount the user-supplied claim (optionally under a sub path).
        volume = {"name": "data", "persistentVolumeClaim": {"claimName": storage_claim_name}}
        deployment_body["spec"]["template"]["spec"]["volumes"].append(volume)

        storage_mount = {"name": "data", "mountPath": "/home/jovyan"}
        if storage_sub_path:
            storage_mount["subPath"] = storage_sub_path
        deployment_body["spec"]["template"]["spec"]["containers"][0]["volumeMounts"].append(storage_mount)

    kopf.adopt(deployment_body)

    apps_api = kubernetes.client.AppsV1Api()
    apps_api.create_namespaced_deployment(namespace, deployment_body)
    logger.debug("Created deployment")

    service_body = {
        "apiVersion": "v1",
        "kind": "Service",
        "metadata": {
            "name": name,
            "labels": {
                "app": name
            }
        },
        "spec": {
            "type": "ClusterIP",
            "ports": [
                {
                    "name": "8888-tcp",
                    "port": 8888,
                    "protocol": "TCP",
                    "targetPort": 8888,
                }
            ],
            "selector": {
                "deployment": name
            },
        },
    }

    kopf.adopt(service_body)
    core_api.create_namespaced_service(namespace, service_body)
    logger.debug("Created service")

    ingress_domain = os.environ.get("INGRESS_DOMAIN")
    ingress_hostname = f"notebook-{namespace}.{ingress_domain}"

    # NOTE(review): extensions/v1beta1 Ingress is removed in Kubernetes 1.22+;
    # migrate to networking.k8s.io/v1 when the cluster baseline allows.
    ingress_body = {
        "apiVersion": "extensions/v1beta1",
        "kind": "Ingress",
        "metadata": {
            "name": name,
            "labels": {
                "app": name
            },
            "annotations": {
                "projectcontour.io/websocket-routes": "/"
            }
        },
        "spec": {
            "rules": [
                {
                    "host": ingress_hostname,
                    "http": {
                        "paths": [
                            {
                                "path": "/",
                                "backend": {
                                    "serviceName": name,
                                    "servicePort": 8888,
                                },
                            }
                        ]
                    }
                }
            ]
        }
    }

    kopf.adopt(ingress_body)
    ext_api = kubernetes.client.ExtensionsV1beta1Api()
    ext_api.create_namespaced_ingress(namespace, ingress_body)
    logger.debug("Created ingress")

    # Status reported back onto the custom resource.
    return {
        "notebook": {
            "url": f"http://{ingress_hostname}/?token={token}",
            "token": token,
            "interface": notebook_interface,
        }
        ,
        "deployment": {
            "image": image,
            "serviceAccountName": service_account,
            "resources": {
                "requests": {
                    "memory": memory_request,
                    "storage": storage_request
                },
                "limits": {
                    "memory": memory_limit,
                    "storage": storage_limit
                }
            }
        },
        "storage": {
            "claimName": storage_claim_name,
            "subPath": storage_sub_path
        }
    }
@kopf.on.delete("squonk.it", "v1alpha1", "jupyternotebooks")
def delete(body, **kwargs):
    """Log-only delete handler; adopted children are garbage-collected via owner references."""
    resource_name = body['metadata']['name']
    return {'message': f"Jupyter notebook {resource_name} and its Pod/Service/Ingress children deleted"}
|
[
"tdudgeon@informaticsmatters.com"
] |
tdudgeon@informaticsmatters.com
|
109fbbf58b3ea28aba19fa9b0a7ee49c1fbcba50
|
fc3d5f0b29c2b9d9dee3ef358f6add12dc8c7a09
|
/airmozilla/comments/tests/test_views.py
|
98657b0393e6f981eee92062950be986ef51978a
|
[] |
no_license
|
imclab/airmozilla
|
80ba52c8e9a4b66e44becbf92da3cce91abaff1e
|
b3dfa4349141be19683ab885519c0f795efd43c1
|
refs/heads/master
| 2020-04-01T17:43:24.325177
| 2014-05-05T20:39:44
| 2014-05-05T20:39:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 21,582
|
py
|
import calendar
import json
import re
import uuid
import mock
from django.core.cache import cache
from django.test import TestCase
from django.contrib.auth.models import User
from django.core import mail
from funfactory.urlresolvers import reverse
from nose.tools import eq_, ok_
from airmozilla.main.models import Event
from airmozilla.comments.views import (
can_manage_comments,
get_latest_comment
)
from airmozilla.comments.models import (
Discussion,
Comment,
Unsubscription
)
MOZILLIAN_USER = """
{
"meta": {
"previous": null,
"total_count": 1,
"offset": 0,
"limit": 20,
"next": null
},
"objects": [
{
"website": "",
"bio": "",
"resource_uri": "/api/v1/users/2429/",
"last_updated": "2012-11-06T14:41:47",
"groups": [
"ugly tuna"
],
"city": "Casino",
"skills": [],
"country": "Albania",
"region": "Bush",
"id": "2429",
"languages": [],
"allows_mozilla_sites": true,
"photo": "http://www.gravatar.com/avatar/0409b497734934400822bb33...",
"is_vouched": true,
"email": "peterbe@mozilla.com",
"ircname": "",
"allows_community_sites": true,
"full_name": "Peter Bengtsson"
}
]
}
"""
class Response(object):
    """Tiny stub mimicking the interface of an HTTP response object
    (just ``content`` and ``status_code``) for mocked network calls."""

    def __init__(self, content=None, status_code=200):
        # Store exactly what the test hands in; no validation needed for a stub.
        self.status_code = status_code
        self.content = content
class TestComments(TestCase):
fixtures = ['airmozilla/manage/tests/main_testdata.json']
def _create_discussion(self, event, enabled=True, moderate_all=True,
                       notify_all=True):
    """Create and return a Discussion row for *event* with the given flags."""
    return Discussion.objects.create(
        event=event,
        enabled=enabled,
        moderate_all=moderate_all,
        notify_all=notify_all,
    )
def test_can_manage_comments(self):
    """Moderators and superusers can manage a discussion; others cannot."""
    event = Event.objects.get(title='Test event')
    moderator = User.objects.create(username='jay', email='jay@mozilla.com')
    outsider = User.objects.create(username='bob', email='bob@mozilla.com')
    superuser = User.objects.create(username='richard',
                                    email='richard@mozilla.com',
                                    is_superuser=True)
    discussion = self._create_discussion(event)
    discussion.moderators.add(moderator)

    ok_(not can_manage_comments(outsider, discussion))
    ok_(can_manage_comments(moderator, discussion))
    ok_(can_manage_comments(superuser, discussion))
def test_get_latest_comment(self):
    """get_latest_comment ignores unapproved posts unless include_posted is set."""
    event = Event.objects.get(title='Test event')
    eq_(get_latest_comment(event), None)

    author = User.objects.create(username='bob', email='bob@mozilla.com')
    posted = Comment.objects.create(
        event=event,
        user=author,
        comment="Hi, it's Bob",
        status=Comment.STATUS_POSTED,
    )

    # A merely "posted" (unmoderated) comment is invisible by default...
    eq_(get_latest_comment(event), None)

    # ...but counts when posted comments are included; the returned value is
    # the comment's modification time as a UTC unix timestamp.
    newest = get_latest_comment(event, include_posted=True)
    stamp = calendar.timegm(posted.modified.utctimetuple())
    eq_(newest, stamp)

    # Nothing is newer than that timestamp itself.
    eq_(get_latest_comment(event, include_posted=True, since=stamp), None)
def test_basic_event_data(self):
    """End-to-end flow of the comments JSON endpoint: disabled discussion,
    enabled discussion, anonymous posting rejected, and a signed-in posting
    that lands in moderation and notifies the moderator."""
    event = Event.objects.get(title='Test event')
    # render the event and there should be no comments
    url = reverse('main:event', args=(event.slug,))
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('Comments' not in response.content)
    # if not enabled you get that back in JSON
    comments_url = reverse('comments:event_data', args=(event.pk,))
    response = self.client.get(comments_url)
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure['discussion']['enabled'], False)
    # also, trying to post a comment when it's not enable
    # should cause an error
    response = self.client.post(comments_url, {
        'name': 'Peter',
        'comment': 'Bla bla'
    })
    eq_(response.status_code, 400)
    # enable discussion
    discussion = self._create_discussion(event)
    jay = User.objects.create(username='jay', email='jay@mozilla.com')
    discussion.moderators.add(jay)
    response = self.client.get(url)
    eq_(response.status_code, 200)
    ok_('Comments' in response.content)
    comments_url = reverse('comments:event_data', args=(event.pk,))
    response = self.client.get(comments_url)
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure['discussion']['enabled'], True)
    eq_(structure['discussion']['closed'], False)
    ok_('No comments posted' in structure['html'])
    # even though it's enabled, it should reject postings
    # because we're not signed in
    response = self.client.post(comments_url, {
        'name': 'Peter',
        'comment': 'Bla bla'
    })
    eq_(response.status_code, 403)
    # so, let's sign in and try again
    User.objects.create_user('richard', password='secret')
    # but it should be ok if self.user had the add_event permission
    assert self.client.login(username='richard', password='secret')
    response = self.client.post(comments_url, {
        'name': 'Richard',
        'comment': 'Bla bla'
    })
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    ok_('No comments posted' not in structure['html'])
    ok_('Bla bla' in structure['html'])
    comment = Comment.objects.get(comment='Bla bla')
    ok_(comment)
    # comments from non-moderators start in STATUS_POSTED (pending moderation)
    eq_(comment.status, Comment.STATUS_POSTED)
    # the moderator should now have received an email
    email_sent = mail.outbox[-1]
    ok_(event.title in email_sent.subject)
    ok_('requires moderation' in email_sent.subject)
    ok_(url in email_sent.body)
    # the notification deep-links straight to the new comment's anchor
    ok_(url + '#comment-%d' % comment.pk in email_sent.body)
def test_moderation_immediately(self):
    """when you post a comment that needs moderation, the moderator
    can click a link in the email notification that immediately
    approves the comment without being signed in"""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    jay = User.objects.create(username='jay', email='jay@mozilla.com')
    bob = User.objects.create(username='bob', email='bob@mozilla.com')
    discussion.moderators.add(jay)
    comment = Comment.objects.create(
        event=event,
        user=bob,
        comment='Bla bla',
        status=Comment.STATUS_POSTED
    )
    # The identifiers mirror the one-time tokens embedded in the moderation
    # email links; the views look them up in the cache before acting.
    identifier = uuid.uuid4().hex[:10]
    cache.set('approve-%s' % identifier, comment.pk, 60)
    cache.set('remove-%s' % identifier, comment.pk, 60)
    approve_url = reverse(
        'comments:approve_immediately',
        args=(identifier, comment.pk)
    )
    remove_url = reverse(
        'comments:remove_immediately',
        args=(identifier, comment.pk)
    )
    response = self.client.get(approve_url)
    eq_(response.status_code, 200)
    ok_('Comment Approved' in response.content)
    # reload
    comment = Comment.objects.get(pk=comment.pk)
    eq_(comment.status, Comment.STATUS_APPROVED)
    response = self.client.get(remove_url)
    eq_(response.status_code, 200)
    ok_('Comment Removed' in response.content)
    # reload
    comment = Comment.objects.get(pk=comment.pk)
    eq_(comment.status, Comment.STATUS_REMOVED)
    # try with identifiers that aren't in the cache
    bogus_identifier = uuid.uuid4().hex[:10]
    bogus_approve_url = reverse(
        'comments:approve_immediately',
        args=(bogus_identifier, comment.pk)
    )
    bogus_remove_url = reverse(
        'comments:remove_immediately',
        args=(bogus_identifier, comment.pk)
    )
    # unknown identifiers must fail gracefully with an explanation page
    response = self.client.get(bogus_approve_url)
    eq_(response.status_code, 200)
    ok_('Comment Approved' not in response.content)
    ok_('Unable to Approve Comment' in response.content)
    response = self.client.get(bogus_remove_url)
    eq_(response.status_code, 200)
    ok_('Comment Removed' not in response.content)
    ok_('Unable to Remove Comment' in response.content)
def test_unsubscribe_on_reply_notifications(self):
    """Reply-notification emails carry working unsubscribe links, both
    per-discussion and global (all discussions)."""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    jay = User.objects.create(username='jay', email='jay@mozilla.com')
    bob = User.objects.create(username='bob', email='bob@mozilla.com')
    discussion.moderators.add(jay)
    comment = Comment.objects.create(
        event=event,
        user=bob,
        comment='Bla bla',
        status=Comment.STATUS_POSTED
    )
    jay.set_password('secret')
    jay.save()
    assert self.client.login(username='jay', password='secret')
    # post a reply
    url = reverse('comments:event_data', args=(event.pk,))
    response = self.client.post(url, {
        'comment': 'I think this',
        'name': 'Jay',
        'reply_to': comment.pk,
    })
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    ok_('Bla bla' in structure['html'])
    ok_('I think this' in structure['html'])
    # now, we must approve this comment
    new_comment = Comment.objects.get(
        comment='I think this',
        user=jay
    )
    response = self.client.post(url, {
        'approve': new_comment.pk,
    })
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure, {'ok': True})
    # Approving the reply sends a notification email to Bob, the
    # author of the parent comment.
    email_sent = mail.outbox[-1]
    ok_('Reply' in email_sent.subject)
    ok_(event.title in email_sent.subject)
    eq_(email_sent.to, ['bob@mozilla.com'])
    # expect there to be two unsubscribe links in there
    # The per-discussion link ends with a numeric discussion id...
    url_unsubscribe = re.findall(
        '/comments/unsubscribe/\w{10}/\d+/',
        email_sent.body
    )[0]
    # ...while the global link is token-only. This pattern also matches
    # a prefix of the per-discussion link, hence the filter loop below.
    urls_unsubscribe_all = re.findall(
        '/comments/unsubscribe/\w{10}/',
        email_sent.body
    )
    # Pick the match that is NOT a prefix of the per-discussion link.
    # NOTE(review): this rebinds both `url` and `url_unsubscribe_all`;
    # if every match happened to be a prefix, `url_unsubscribe_all`
    # would still be the list — presumably the email body always
    # contains a distinct global link. Verify against the template.
    for url in urls_unsubscribe_all:
        if not url_unsubscribe.startswith(url):
            url_unsubscribe_all = url
    self.client.logout()
    # now let's visit these
    response = self.client.get(url_unsubscribe)
    eq_(response.status_code, 200)
    ok_('Are you sure' in response.content)
    # Confirming creates a per-discussion Unsubscription...
    response = self.client.post(url_unsubscribe, {})
    eq_(response.status_code, 302)
    Unsubscription.objects.get(
        user=bob,
        discussion=discussion
    )
    unsubscribed_url = reverse(
        'comments:unsubscribed',
        args=(discussion.pk,)
    )
    ok_(unsubscribed_url in response['location'])
    response = self.client.get(unsubscribed_url)
    eq_(response.status_code, 200)
    ok_('Unsubscribed' in response.content)
    ok_(event.title in response.content)
    # ...and the global link creates one without a discussion.
    response = self.client.post(url_unsubscribe_all, {})
    eq_(response.status_code, 302)
    Unsubscription.objects.get(
        user=bob,
        discussion__isnull=True
    )
    unsubscribed_url = reverse('comments:unsubscribed_all')
    ok_(unsubscribed_url in response['location'])
def test_unsubscribed_reply_notifications_discussion(self):
    """A per-discussion unsubscription suppresses reply notifications."""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    moderator = User.objects.create(username='jay', email='jay@mozilla.com')
    author = User.objects.create(username='bob', email='bob@mozilla.com')
    discussion.moderators.add(moderator)
    parent = Comment.objects.create(
        event=event,
        user=author,
        comment='Bla bla',
        status=Comment.STATUS_APPROVED
    )
    # Bob has opted out of notifications for this particular discussion.
    Unsubscription.objects.create(
        user=author,
        discussion=discussion
    )
    moderator.set_password('secret')
    moderator.save()
    assert self.client.login(username='jay', password='secret')
    # Post a reply to Bob's comment.
    url = reverse('comments:event_data', args=(event.pk,))
    response = self.client.post(url, {
        'comment': 'I think this',
        'reply_to': parent.pk,
    })
    eq_(response.status_code, 200)
    # Reply notifications are only attempted once the reply is approved.
    reply = Comment.objects.get(comment='I think this')
    eq_(reply.reply_to.user, author)
    response = self.client.post(url, {
        'approve': reply.pk,
    })
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure, {'ok': True})
    # No email because of the unsubscription.
    ok_(not mail.outbox)
def test_unsubscribed_reply_notifications_all(self):
    """A global unsubscription (no discussion) suppresses reply
    notifications for every discussion."""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    jay = User.objects.create(username='jay', email='jay@mozilla.com')
    bob = User.objects.create(username='bob', email='bob@mozilla.com')
    discussion.moderators.add(jay)
    comment = Comment.objects.create(
        event=event,
        user=bob,
        comment='Bla bla',
        status=Comment.STATUS_APPROVED
    )
    # No discussion set: Bob is unsubscribed from everything.
    Unsubscription.objects.create(
        user=bob,
    )
    jay.set_password('secret')
    jay.save()
    assert self.client.login(username='jay', password='secret')
    # post a reply
    url = reverse('comments:event_data', args=(event.pk,))
    response = self.client.post(url, {
        'comment': 'I think this',
        'reply_to': comment.pk,
    })
    eq_(response.status_code, 200)
    # But it needs to be approved for reply notifications to
    # even be attempted.
    new_comment = Comment.objects.get(comment='I think this')
    eq_(new_comment.reply_to.user, bob)
    response = self.client.post(url, {
        'approve': new_comment.pk,
    })
    # Assert the approve POST succeeded (previously unchecked;
    # made consistent with the `_discussion` variant of this test).
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure, {'ok': True})
    # No email because Bob is unsubscribed globally.
    ok_(not mail.outbox)
def test_invalid_reply_to(self):
    """Replying to a non-existent comment id is rejected with a 400."""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    moderator = User.objects.create(username='jay', email='jay@mozilla.com')
    author = User.objects.create(username='bob', email='bob@mozilla.com')
    discussion.moderators.add(moderator)
    Comment.objects.create(
        event=event,
        user=author,
        comment='Bla bla',
        status=Comment.STATUS_APPROVED
    )
    moderator.set_password('secret')
    moderator.save()
    assert self.client.login(username='jay', password='secret')
    # Post a reply referencing a bogus parent comment pk.
    url = reverse('comments:event_data', args=(event.pk,))
    response = self.client.post(url, {
        'comment': 'I think this',
        'reply_to': '999999999',
    })
    eq_(response.status_code, 400)
@mock.patch('logging.error')
@mock.patch('requests.get')
def test_fetch_user_name(self, rget, rlogging):
    """The user-name endpoint resolves the signed-in user's full name
    via a (mocked) mozillians lookup, keyed on their email address."""
    cache.clear()

    def mocked_get(url, **options):
        # Only the lookup for 'peterbe' is expected to be made.
        if 'peterbe' in url:
            return Response(MOZILLIAN_USER)
        raise NotImplementedError(url)

    rget.side_effect = mocked_get
    url = reverse('comments:user_name')

    # Anonymous: empty name.
    response = self.client.get(url)
    eq_(response.status_code, 200)
    eq_(json.loads(response.content)['name'], '')

    user = User.objects.create_user(
        username='peterbe', password='secret'
    )
    assert self.client.login(username='peterbe', password='secret')
    # Signed in but without an email address: nothing to look up.
    response = self.client.get(url)
    eq_(response.status_code, 200)
    eq_(json.loads(response.content)['name'], '')

    # With an email address the lookup kicks in.
    user.email = 'peterbe@mozilla.com'
    user.save()
    response = self.client.get(url)
    eq_(response.status_code, 200)
    eq_(json.loads(response.content)['name'], 'Peter Bengtsson')
def test_modify_comment_without_permission(self):
    """Moderation actions require moderator status; flagging does not."""
    event = Event.objects.get(title='Test event')
    self._create_discussion(event)
    author = User.objects.create(username='bob', email='bob@mozilla.com')
    comment = Comment.objects.create(
        event=event,
        user=author,
        comment='Bla bla',
        status=Comment.STATUS_POSTED
    )
    url = reverse('comments:event_data', args=(event.pk,))

    # Anonymous: no moderating and no posting.
    response = self.client.post(url, {
        'approve': comment.pk,
    })
    eq_(response.status_code, 403)
    response = self.client.post(url, {
        'comment': "My opinion",
    })
    eq_(response.status_code, 403)

    # Signed in but not a moderator: still no moderation actions...
    User.objects.create_user(username='jay', password='secret')
    assert self.client.login(username='jay', password='secret')
    for action in ('approve', 'unapprove', 'remove'):
        response = self.client.post(url, {action: comment.pk})
        eq_(response.status_code, 403)
    # ...but flagging is allowed,
    response = self.client.post(url, {
        'flag': comment.pk,
    })
    eq_(response.status_code, 200)
    # while unflagging is not.
    response = self.client.post(url, {
        'unflag': comment.pk,
    })
    eq_(response.status_code, 403)
def test_modify_comment_with_permission(self):
    """A discussion moderator can approve, unapprove, remove and unflag."""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    author = User.objects.create(username='bob', email='bob@mozilla.com')
    moderator = User.objects.create_user(username='jay', password='secret')
    discussion.moderators.add(moderator)
    comment = Comment.objects.create(
        event=event,
        user=author,
        comment='Bla bla',
        status=Comment.STATUS_POSTED,
        flagged=1
    )
    url = reverse('comments:event_data', args=(event.pk,))
    assert self.client.login(username='jay', password='secret')

    # Each action is checked by re-querying for the expected state.
    response = self.client.post(url, {
        'approve': comment.pk,
    })
    eq_(response.status_code, 200)
    ok_(Comment.objects.get(status=Comment.STATUS_APPROVED))

    response = self.client.post(url, {
        'unapprove': comment.pk,
    })
    eq_(response.status_code, 200)
    ok_(Comment.objects.get(status=Comment.STATUS_POSTED))

    response = self.client.post(url, {
        'remove': comment.pk,
    })
    eq_(response.status_code, 200)
    ok_(Comment.objects.get(status=Comment.STATUS_REMOVED))

    response = self.client.post(url, {
        'unflag': comment.pk,
    })
    eq_(response.status_code, 200)
    ok_(Comment.objects.get(flagged=0))
def test_event_data_latest_400(self):
    """The latest-comment endpoint 400s without an enabled discussion."""
    event = Event.objects.get(title='Test event')
    url = reverse('comments:event_data_latest', args=(event.pk,))
    # No discussion exists at all.
    response = self.client.get(url)
    eq_(response.status_code, 400)
    # A discussion exists but is disabled.
    discussion = self._create_discussion(event)
    discussion.enabled = False
    discussion.save()
    response = self.client.get(url)
    eq_(response.status_code, 400)
def test_event_data_latest(self):
    """The latest-comment timestamp is only revealed for comments the
    requesting user may see (moderators see unapproved ones)."""
    event = Event.objects.get(title='Test event')
    discussion = self._create_discussion(event)
    url = reverse('comments:event_data_latest', args=(event.pk,))
    # No comments at all yet.
    response = self.client.get(url)
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure['latest_comment'], None)
    jay = User.objects.create(username='jay', email='jay@mozilla.com')
    bob = User.objects.create(username='bob', email='bob@mozilla.com')
    discussion.moderators.add(jay)
    comment = Comment.objects.create(
        user=bob,
        event=event,
        comment="Hi, it's Bob",
        status=Comment.STATUS_POSTED
    )
    # Anonymous users don't see merely-posted (unapproved) comments.
    response = self.client.get(url)
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure['latest_comment'], None)
    # different if jay checks it
    jay.set_password('secret')
    jay.save()
    assert self.client.login(username='jay', password='secret')
    response = self.client.get(url)
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    # The timestamp is exchanged as UTC epoch seconds.
    modified = calendar.timegm(comment.modified.utctimetuple())
    eq_(structure['latest_comment'], modified)
    # A malformed 'since' parameter is rejected.
    response = self.client.get(url, {'since': 'xxx'})
    eq_(response.status_code, 400)
    # Nothing is newer than the comment's own timestamp.
    response = self.client.get(url, {'since': str(modified)})
    eq_(response.status_code, 200)
    structure = json.loads(response.content)
    eq_(structure['latest_comment'], None)
|
[
"mail@peterbe.com"
] |
mail@peterbe.com
|
ec9edf0f549af074cbca6fa62c5e4e1f8ea38e5f
|
869aa267b451071ad9a0982cd536c5a98d1772b8
|
/hooks/background_fba.py
|
6c0ebd45baf24ff54c1cec2f9062c4b94b47ac5b
|
[
"Apache-2.0"
] |
permissive
|
kibernetika-ai/demo-zoo
|
d66382b063306760b87cea11331ee3e6653ce5fb
|
e3ff6010dc2636a44beff2a0333270a824996ed0
|
refs/heads/master
| 2021-07-24T21:10:37.780731
| 2020-05-07T04:20:27
| 2020-05-07T04:20:27
| 166,981,700
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,194
|
py
|
import logging
import numpy as np
import cv2
import fbamatting.serving as fba
from ml_serving.utils.helpers import get_param, load_image, boolean_string, predict_grpc
import glob
import os
LOG = logging.getLogger(__name__)
# Named background images for compositing, keyed by file stem (RGB arrays
# loaded in init_hook); the 'None' entry means "no replacement background".
backgrounds = {'None': None}
# Default background selected via the 'background' init param (or None).
glob_background = None
# Address of the style-transfer serving endpoint; overridable in init_hook.
style_srv = 'styles:9000'
def init_hook(**params):
    """One-time serving initialization.

    Reads the style-server address, loads any background .jpg images
    from the configured directory, optionally picks a default
    background, and loads the FBA matting model. Returns the serving
    context dict holding the loaded model under 'fba'.
    """
    global style_srv
    global backgrounds
    global glob_background

    style_srv = params.get('style_srv', 'styles:9000')

    backgrounds_dir = params.get('backgrounds', None)
    if backgrounds_dir is not None:
        for path in glob.glob(backgrounds_dir + '/*.jpg'):
            name = os.path.basename(path)[:-4]
            LOG.info('Load: {}'.format(name))
            # cv2 loads BGR; flip the channel order to RGB.
            backgrounds[name] = cv2.imread(path)[:, :, ::-1]

    default_name = params.get('background', None)
    if default_name is not None:
        glob_background = backgrounds.get(default_name, None)

    LOG.info('Loaded.')
    gpu = int(os.environ.get('GPU_COUNT', 0)) > 0
    logging.info("Use gpu: {}. load model from: {}".format(gpu, params.get('fba_model')))
    return {'fba': fba.load_model(gpu, params.get('fba_model'))}
# Maps the request's 'object_class' parameter to the detector's numeric
# class id; only 'Person' is supported at the moment.
obj_classes = {
    'Person': 1
}
def limit(v, l, r, d):
    """Return *v* if it lies inside the inclusive range [l, r],
    otherwise the fallback default *d* (not a clamp to the boundary)."""
    return v if l <= v <= r else d
def apply_style(img, style):
    """Run the remote style-transfer service on *img* and return the
    styled image with the channel order flipped back (BGR -> RGB)."""
    request = {'image': img.astype(np.uint8),
               'style': style}
    response = predict_grpc(request, style_srv)
    return response['output'][:, :, ::-1]
def process(inputs, ct_x, **kwargs):
    """Serving entry point: segment people out of ``inputs['inputs']``
    and apply the requested effect (background replacement, mask, blur
    or grey background).

    ``ct_x`` is the serving context: ``ct_x.drivers[0]`` is the person
    detector and ``ct_x.global_ctx['fba']`` the FBA matting model loaded
    in ``init_hook``.  Returns ``{'output': ..., 'encoding': ...}``.
    """
    original_image, is_video = load_image(inputs, 'inputs')
    if original_image is None:
        raise RuntimeError('Missing "inputs" key in inputs. Provide an image in "inputs" key')

    def _return(result):
        # Encode still images for transport: JPEG for 3-channel RGB,
        # PNG when there is an alpha channel. Video frames pass through
        # as raw arrays.
        encoding = ''
        if not is_video:
            if result.shape[2] == 3:
                result = result[:, :, ::-1]
                # tobytes() replaces the deprecated ndarray.tostring().
                result = cv2.imencode('.jpg', result)[1].tobytes()
                encoding = 'jpeg'
            else:
                result = cv2.imencode('.png', result)[1].tobytes()
                encoding = 'png'
        return {'output': result, 'encoding': encoding}

    # Downscale so the longest side is at most 1024 px.
    ratio = 1.0
    w = float(original_image.shape[1])
    h = float(original_image.shape[0])
    if w > h:
        if w > 1024:
            ratio = w / 1024.0
    else:
        if h > 1024:
            ratio = h / 1024.0
    if ratio > 1:
        image = cv2.resize(original_image, (int(w / ratio), int(h / ratio)))
    else:
        image = original_image
    if not boolean_string(get_param(inputs, 'return_origin_size', False)):
        # Compose the result at the (possibly downscaled) working size.
        original_image = image

    # Request parameters, clamped to sane ranges; malformed values fall
    # back to defaults instead of failing the whole request. The bare
    # `except:` clauses were narrowed so real errors still propagate.
    try:
        area_threshold = int(get_param(inputs, 'area_threshold', 0))
    except (TypeError, ValueError):
        area_threshold = 0
    area_threshold = limit(area_threshold, 0, 100, 0)
    try:
        max_objects = int(get_param(inputs, 'max_objects', 1))
    except (TypeError, ValueError):
        max_objects = 1
    max_objects = limit(max_objects, 1, 10, 1)
    try:
        pixel_threshold = int(float(get_param(inputs, 'pixel_threshold', 0.5)) * 255)
    except (TypeError, ValueError):
        pixel_threshold = int(0.5 * 255)
    pixel_threshold = limit(pixel_threshold, 1, 254, int(0.5 * 255))
    object_classes = [obj_classes.get(get_param(inputs, 'object_class', 'Person'), 1)]
    effect = get_param(inputs, 'effect', 'Remove background')  # Remove background,Mask,Blur
    try:
        blur_radius = int(get_param(inputs, 'blur_radius', 2))
    except (TypeError, ValueError):
        blur_radius = 2
    blur_radius = limit(blur_radius, 1, 10, 2)

    # The detector expects a fixed 320x320 input.
    image = cv2.resize(image, (320, 320))
    outputs = ct_x.drivers[0].predict({'inputs': np.expand_dims(image, axis=0)})
    num_detection = int(outputs['num_detections'][0])
    if num_detection < 1:
        return _return(original_image)
    process_width = image.shape[1]
    process_height = image.shape[0]
    image_area = process_width * process_height
    detection_boxes = outputs["detection_boxes"][0][:num_detection]
    # Boxes are normalized [ymin, xmin, ymax, xmax]; scale to pixels.
    detection_boxes = detection_boxes * [process_height, process_width, process_height, process_width]
    detection_boxes = detection_boxes.astype(np.int32)
    detection_classes = outputs["detection_classes"][0][:num_detection]
    detection_masks = outputs["detection_masks"][0][:num_detection]
    masks = []
    for i in range(num_detection):
        if int(detection_classes[i]) not in object_classes:
            continue
        box = detection_boxes[i]
        # Resize the per-box mask to box size, then pad it out to a
        # slightly enlarged (50 px margin) crop of the frame.
        mask_image = cv2.resize(detection_masks[i], (box[3] - box[1], box[2] - box[0]), interpolation=cv2.INTER_LINEAR)
        left = max(0, box[1] - 50)
        right = min(process_width, box[3] + 50)
        upper = max(0, box[0] - 50)
        lower = min(process_height, box[2] + 50)
        box_mask = np.pad(mask_image, ((box[0] - upper, lower - box[2]), (box[1] - left, right - box[3])), 'constant')
        area = int(np.sum(np.greater_equal(box_mask, 0.5).astype(np.int32)))
        # Drop detections covering less than area_threshold % of the frame.
        if area * 100 / image_area < area_threshold:
            continue
        masks.append((area, box_mask, [upper, left, lower, right]))
    if len(masks) < 1:
        return _return(original_image)
    # Keep only the max_objects largest detections, merged into one mask.
    masks = sorted(masks, key=lambda row: -row[0])
    total_mask = np.zeros((process_height, process_width), np.float32)
    min_left = process_width
    min_upper = process_height
    max_right = 0
    max_lower = 0
    for i in range(min(len(masks), max_objects)):
        pre_mask = masks[i][1]
        box = masks[i][2]
        left = max(0, box[1])
        right = min(process_width, box[3])
        upper = max(0, box[0])
        lower = min(process_height, box[2])
        box_mask = np.pad(pre_mask, ((upper, process_height - lower), (left, process_width - right)), 'constant')
        total_mask = np.maximum(total_mask, box_mask)
        if left < min_left:
            min_left = left
        if right > max_right:
            max_right = right
        if upper < min_upper:
            min_upper = upper
        if lower > max_lower:
            max_lower = lower
    mask = np.uint8(total_mask * 255)
    if len(mask.shape) > 2:
        logging.warning('Mask shape is {}'.format(mask.shape))
        mask = mask[:, :, 0]
    image = image.astype(np.float32)
    # Binarize the coarse mask, then refine with FBA matting to get a
    # soft alpha matte (presumably float in [0, 1] — see fba.pred).
    mask[np.less_equal(mask, pixel_threshold)] = 0
    mask[np.greater(mask, pixel_threshold)] = 255
    mask = fba.pred(ct_x.global_ctx['fba'], image / 255, mask)
    logging.info(mask.shape)
    logging.info(image.shape)
    mask = cv2.resize(mask, (original_image.shape[1], original_image.shape[0]))
    if effect == 'Remove background':
        # Pick the replacement background: explicit image in the
        # request > named background > global default > 'None'.
        background = None
        if 'background_img' in inputs:
            background, _ = load_image(inputs, 'background_img')
        if background is None:
            back_name = get_param(inputs, 'background', None)
            if back_name is not None:
                background = backgrounds.get(back_name)
            else:
                if glob_background is not None:
                    background = glob_background
                else:
                    background = backgrounds.get('None')
        add_style = get_param(inputs, 'style', '')
        if len(add_style) > 0:
            image = apply_style(original_image, add_style).astype(np.float32)
        else:
            image = original_image.astype(np.float32)
        mask = np.expand_dims(mask, 2)
        if background is not None:
            # Alpha-composite foreground over the chosen background.
            image = image * mask
            background = cv2.resize(background, (image.shape[1], image.shape[0]))
            background = background.astype(np.float32)
            background = background * (1 - mask)
            image = background + image
            image = image.astype(np.uint8)
        else:
            if not is_video:
                # No background: return BGR + alpha so it encodes as PNG.
                mask = (mask * 255).astype(np.uint8)
                image = image[:, :, ::-1].astype(np.uint8)
                image = np.concatenate([image, mask], axis=2)
            else:
                image = image * mask
                image = image.astype(np.uint8)
    elif effect == "Mask":
        mask = mask * 255
        image = mask.astype(np.uint8)
    else:
        # Blur / Grey: keep the foreground sharp, alter the background.
        image = original_image.astype(np.float32)
        mask = np.expand_dims(mask, 2)
        foreground = mask * image
        radius = min(max(blur_radius, 2), 10)
        # cv2.GaussianBlur requires an odd kernel size; even values
        # (the previous code allowed 2, 4, ...) raise an OpenCV error,
        # so bump even radii up by one.
        if radius % 2 == 0:
            radius += 1
        if effect == 'Grey':
            background = rgb2gray(original_image)
        else:
            background = cv2.GaussianBlur(original_image, (radius, radius), 10)
        background = (1.0 - mask) * background.astype(np.float32)
        image = foreground + background
        image = image.astype(np.uint8)
    return _return(image)
def rgb2gray(rgb):
    """Convert an RGB image to a 3-channel greyscale image.

    Uses the ITU-R BT.601 luma weights (0.299, 0.587, 0.114); the
    luminance plane is replicated across three channels so the result
    keeps an RGB-compatible shape.
    """
    luma = rgb[..., 0] * 0.299 + rgb[..., 1] * 0.587 + rgb[..., 2] * 0.114
    return np.stack((luma, luma, luma), axis=2)
|
[
"agunin@kibernetika.ai"
] |
agunin@kibernetika.ai
|
2eb909ede4f87fb08a7d9d859c74ced6f87433c8
|
33e036f0a2f30cbb32434f5e1b2ae60b84d98814
|
/inventory/migrations/0003_auto_20210929_2050.py
|
d33a444ad385e0104f4dd6960d8cd1162d04595c
|
[] |
no_license
|
RandyBrilliant/django-delight
|
ce78eced7afbcbc91876b0e1961de9d4abd576af
|
c68f756f1d18d34b0460a3af912e34c602856284
|
refs/heads/main
| 2023-07-31T08:36:49.771811
| 2021-10-08T14:36:02
| 2021-10-08T14:36:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 537
|
py
|
# Generated by Django 3.2.7 on 2021-09-29 13:50
from django.db import migrations, models
class Migration(migrations.Migration):
    """Add a ``slug`` field to both Ingredient and MenuItem.

    The 'test' default only exists to back-fill existing rows; real
    slugs are presumably assigned by application code afterwards.
    """

    dependencies = [
        ('inventory', '0002_menuitem_image'),
    ]

    operations = [
        migrations.AddField(
            model_name='ingredient',
            name='slug',
            field=models.SlugField(default='test'),
        ),
        migrations.AddField(
            model_name='menuitem',
            name='slug',
            field=models.SlugField(default='test'),
        ),
    ]
|
[
"randybrilliant68@gmail.com"
] |
randybrilliant68@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.