blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
⌀ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
09394de8bc6e7a32980628857b93e839de19e4ff
|
de0775338f6eb4df7f8ef8c61df9cf3b563c0a28
|
/problems/fashion/fashion.py
|
4d5493648f28584ca558e0f7cd4d2a79eb4237ca
|
[] |
no_license
|
sefakilic/spoj
|
c5b37e9cd2413e57f117b0ba83ca4b4f509868da
|
17c54cad2e87d1f79841038b6537ad9eee263727
|
refs/heads/master
| 2021-01-15T14:03:18.393394
| 2015-04-15T15:52:57
| 2015-04-15T15:52:57
| 32,475,833
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 302
|
py
|
def fashion(A, B):
    """Return the maximum total "hotness bond" (SPOJ FASHION).

    Both lists are sorted ascending and the pairwise products are summed,
    which maximizes the sum of products by the rearrangement inequality.

    :param A: hotness levels of the men
    :param B: hotness levels of the women
    :return: the maximal sum of pairwise products (0 for empty input)
    """
    # The original used a Python-2-only tuple-parameter lambda
    # (``lambda (x, y): x*y``), removed in Python 3 (PEP 3113).
    return sum(x * y for x, y in zip(sorted(A), sorted(B)))


if __name__ == '__main__':
    # Python 3 I/O: input()/range()/print() replace the Python-2
    # raw_input()/xrange()/print-statement forms.
    t = int(input())
    for _ in range(t):
        n = int(input())  # declared list length; the lists are read whole below
        A = list(map(int, input().split()))
        B = list(map(int, input().split()))
        print(fashion(A, B))
|
[
"sefakilic@gmail.com"
] |
sefakilic@gmail.com
|
9a89b7e67b317746539d46ebe88544dd8119db69
|
d718c3ca1ced8e2d44c4ce0f18af8a183d9afaf2
|
/rahul/migrations/0002_auto_20190202_1320.py
|
922b85f2505d743a116057b604fc5c13be7b66f8
|
[] |
no_license
|
iamrraj/Django_RealEstate
|
c9030177688bf5500f82d86f381a82471adba321
|
0370d0ed6d95dd751cb7889afd0dc328d0ea259c
|
refs/heads/master
| 2022-12-11T10:12:06.626546
| 2019-03-04T16:08:54
| 2019-03-04T16:08:54
| 173,445,088
| 1
| 1
| null | 2022-12-08T01:39:53
| 2019-03-02T12:26:27
|
HTML
|
UTF-8
|
Python
| false
| false
| 735
|
py
|
# Generated by Django 2.0.4 on 2019-02-02 13:20
from django.db import migrations
class Migration(migrations.Migration):
    """Drop the pricing/stock columns from the ``product`` model."""

    dependencies = [
        ('rahul', '0001_initial'),
    ]

    # One RemoveField operation per dropped column; generated from a single
    # tuple so the removed fields are easy to scan.
    operations = [
        migrations.RemoveField(model_name='product', name=dropped)
        for dropped in ('acprice', 'discount', 'disprice', 'offer', 'stock')
    ]
|
[
"rajr97555@gmail.com"
] |
rajr97555@gmail.com
|
72c60892c0eca29753c90a81f153f351c0cd43a6
|
591cfb754e27bd090dfb4cf23259708601984c67
|
/IO/LSM_reader_plugin.py
|
db2f2aaf56b23c8a837e4085dd4298bebfef98a4
|
[] |
no_license
|
luckylara/lasagna
|
69b934d43bbe232aa0322d91cf637bc4a5a7a856
|
e88f5c79e603dbfcb1d05fbd81684e1d444679ae
|
refs/heads/master
| 2021-01-20T12:28:47.979819
| 2016-11-15T15:39:33
| 2016-11-15T15:39:33
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,523
|
py
|
"""
Load an LSM stack into Lasagna
"""
import os
from lasagna_plugin import lasagna_plugin
import tifffile
from PyQt4 import QtGui
import lasagna_helperFunctions as lasHelp # Module the provides a variety of import functions (e.g. preference file handling)
class loaderClass(lasagna_plugin):
    """Lasagna loader plugin that reads Zeiss LSM stacks via ``tifffile``.

    Registers a "Load LSM stack" entry in Lasagna's Load menu; triggering it
    opens a file dialog and adds each channel of the chosen stack to the
    viewer as a separate image-stack ingredient.
    """
    def __init__(self,lasagna):
        super(loaderClass,self).__init__(lasagna)
        self.lasagna = lasagna
        self.objectName = 'LSM_reader'   # identifier for this plugin instance
        self.kind = 'imagestack'         # ingredient kind this loader produces
        #Construct the QActions and other stuff required to integrate the load dialog into the menu
        self.loadAction = QtGui.QAction(self.lasagna) #Instantiate the menu action
        #Add an icon to the action
        iconLoadOverlay = QtGui.QIcon()
        iconLoadOverlay.addPixmap(QtGui.QPixmap(":/actions/icons/overlay.png"), QtGui.QIcon.Normal, QtGui.QIcon.Off)
        self.loadAction.setIcon(iconLoadOverlay)
        #Insert the action into the menu
        self.loadAction.setObjectName("LSMread")
        self.lasagna.menuLoad_ingredient.addAction(self.loadAction)
        self.loadAction.setText("Load LSM stack")
        self.loadAction.triggered.connect(self.showLoadDialog) #Link the action to the slot

    #Slots follow
    def showLoadDialog(self):
        """
        This slot brings up the load dialog and retrieves the file name.
        If the file name is valid, it loads the base stack using the load method.
        """
        fname = self.lasagna.showFileLoadDialog(fileFilter="LSM (*.lsm)")
        if fname is None:
            # User cancelled the dialog; nothing to do.
            return
        colorOrder = lasHelp.readPreference('colorOrder')
        if os.path.isfile(fname):
            im=tifffile.imread(str(fname))
            # Python 2 print statements: this module targets Python 2 / PyQt4.
            print "Found LSM stack with dimensions:"
            print im.shape
            # NOTE(review): axis 2 is treated as the channel axis and the
            # first index of axis 0 is taken (im[0,:,ii,:,:]) — presumably
            # timepoint 0 of a (t, z, c, y, x) LSM layout; confirm for
            # multi-timepoint stacks.
            for ii in range(im.shape[2]):
                stack=im[0,:,ii,:,:]
                objName="layer_%d" % (ii+1)
                self.lasagna.addIngredient(objectName=objName,
                                           kind='imagestack',
                                           data=stack,
                                           fname=fname
                                           )
                self.lasagna.returnIngredientByName(objName).addToPlots() #Add item to all three 2D plots
                print "Adding '%s' layer" % colorOrder[ii]
                self.lasagna.returnIngredientByName(objName).lut=colorOrder[ii]
            self.lasagna.initialiseAxes()
        else:
            self.lasagna.statusBar.showMessage("Unable to find " + str(fname))
|
[
"git@raacampbell.com"
] |
git@raacampbell.com
|
ac51599caf2e31e3859f0fde36a58d6cf4ef01f4
|
98814ccf333d1bace4e2d9811ac778aa94e464d5
|
/watcher_dashboard/test/selenium.py
|
189326858758b1a889144bf1c6a3d8b5f720c08b
|
[
"Apache-2.0"
] |
permissive
|
openstack/watcher-dashboard
|
5c19632c591db4ff83dc9e7f22b9ed7f7f8e4039
|
2114b979df67873f9251101b5351fbf5a5ab56a6
|
refs/heads/master
| 2023-09-01T14:54:29.644531
| 2023-02-28T13:27:59
| 2023-02-28T13:27:59
| 51,065,767
| 15
| 3
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,979
|
py
|
# Copyright (c) 2016 b<>com
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import logging
from horizon.test import helpers as test
from selenium.common import exceptions as selenium_exceptions
class BrowserTests(test.SeleniumTestCase):
    """Run the bundled Jasmine JavaScript suite in a real browser.

    The runner page is polled until Jasmine reports completion; failures are
    logged in detail and then surfaced through a single assertion.
    """

    def test_jasmine(self):
        # Open the Jasmine runner served by the live test server.
        page = "%s%s" % (self.live_server_url, "/jasmine/")
        self.selenium.get(page)

        def suite_finished(driver):
            # Jasmine writes its summary into this element when the run ends.
            summary = driver.find_element_by_id("jasmine-testresult").text
            return "Tests completed" in summary

        self.ui.WebDriverWait(self.selenium, 10).until(suite_finished)

        failure_count = int(
            self.selenium.find_element_by_class_name("failed").text)
        if failure_count:
            self.log_failure_messages()
        self.assertEqual(failure_count, 0)

    def log_failure_messages(self):
        """Log module, message and source for every failed Jasmine spec."""
        logger = logging.getLogger('selenium')
        logger.error("Errors found during jasmine test:")
        for node in self.selenium.find_elements_by_class_name("fail"):
            try:
                module = node.find_element_by_class_name("module-name").text
            except selenium_exceptions.NoSuchElementException:
                # Not a spec-failure node; skip it.
                continue
            logger.error("Module: %s, message: %s, source: %s" % (
                module,
                node.find_element_by_class_name("test-message").text,
                node.find_element_by_tag_name("pre").text))
|
[
"David.TARDIVEL@b-com.com"
] |
David.TARDIVEL@b-com.com
|
027c052b540274afe6e91d1e2dd08279bfb2d51e
|
3c20f49820d53ed781cdc6f66985c1b5a3b293ba
|
/registration.py
|
f995c3ad81db31176ea3d5b5e3cf4fdc6a01208b
|
[] |
no_license
|
joekrom/docker_test-
|
f225ac93ae7193b9a69839f1ea8ae18cc071563e
|
adb113c682c853bd477667681e642ca7524179e0
|
refs/heads/master
| 2020-12-29T13:30:56.293817
| 2020-02-06T07:28:01
| 2020-02-06T07:28:01
| 238,623,196
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 260
|
py
|
from flask import Flask

# WSGI application object for the registration micro-service.
app = Flask(__name__)


@app.route("/")
def hello_world():
    """Root endpoint: return the static status/greeting string."""
    return "that is the service for registration first version herve likes energy drink,first modif to github"


if __name__ == "__main__":
    # Bind on 0.0.0.0 so the service is reachable from outside its container.
    app.run(host='0.0.0.0', debug=True)
|
[
"joel.dfankam@yahoo.fr"
] |
joel.dfankam@yahoo.fr
|
8346055c465d62e9899a51ef44eaeba777d40e3c
|
d3c561a6ce2f82ca3cf79a01ec941b669f6ccf57
|
/application.py
|
00e9415d81cce631cb5c2be702f194637f8f4e45
|
[] |
no_license
|
Simratpreet/cherry_app
|
c3f845a2666b91df1dfb13aace2dc2cddd5af17f
|
41bb301f2498aa91c4e90ff2cb2f0f4cf39a856d
|
refs/heads/master
| 2022-12-14T03:37:38.925668
| 2019-01-27T18:22:48
| 2019-01-27T18:22:48
| 167,792,096
| 0
| 0
| null | 2022-12-08T01:34:01
| 2019-01-27T09:38:39
|
Python
|
UTF-8
|
Python
| false
| false
| 903
|
py
|
from flask import (g, request, render_template, Flask)
from constants import *
import pandas as pd
import redis
import json
application = Flask(__name__)
# connection creator with redis
def init_db():
    """Open a StrictRedis connection configured from ``constants``."""
    connection = redis.StrictRedis(
        host=DB_HOST, port=DB_PORT, db=DB_NO, password=PASSWORD)
    return connection
@application.before_request
def before_request():
    """Attach a Redis connection to Flask's request context ``g``."""
    # NOTE(review): a new client is built for every request; redis-py clients
    # pool connections, so a module-level client could likely be shared.
    g.db = init_db()
@application.route('/')
def view_bhav():
    """Render the "bhav" table cached in Redis as an HTML page."""
    # these columns are shown on the UI
    columns = ['SC_CODE', 'SC_NAME', 'OPEN', 'CLOSE', 'PREVCLOSE', 'LAST', 'LOW', 'HIGH', 'NET_TURNOV']
    # use msgpack to read redis data into pandas df
    # NOTE(review): pd.read_msgpack was deprecated in pandas 0.25 and removed
    # in 1.0 — this only works with an older pandas pin; verify before
    # upgrading pandas.
    bhav_df = pd.read_msgpack(g.db.get("bhav"))
    bhav_df = bhav_df[columns]
    # json conversion
    bhav_records = json.loads(bhav_df.to_json(orient='records'))
    return render_template('bhav_list.html', bhav_records=bhav_records, columns=columns)
if __name__ == "__main__":
    # Local development entry point; a WSGI server would import `application`.
    application.run(debug=True)
|
[
"ec2-user@ip-172-31-19-18.eu-west-1.compute.internal"
] |
ec2-user@ip-172-31-19-18.eu-west-1.compute.internal
|
0b55ba7f00b4e64e8c7d5203073beaeefc5cc9bf
|
8e7fd94fabf66c143de0105d1007bac6b46105b5
|
/extras/build.py
|
36a28486925f1cc3acba76aa0b337ae748801b7e
|
[
"BSD-3-Clause",
"LicenseRef-scancode-unknown-license-reference",
"MIT",
"LicenseRef-scancode-public-domain"
] |
permissive
|
davidgiven/wordgrinder
|
9420fe20994294d6c6f3be6514e15719ed062420
|
62ec2319bb2e839df2f888ec2b49d645955d75ae
|
refs/heads/master
| 2023-08-16T20:09:03.230267
| 2023-06-06T22:44:40
| 2023-06-06T22:44:40
| 37,010,967
| 830
| 79
| null | 2023-06-06T22:44:41
| 2015-06-07T09:27:07
|
Lua
|
UTF-8
|
Python
| false
| false
| 703
|
py
|
from build.ab2 import normalrule, Rule, Target
from config import DATE, VERSION
@Rule
def manpage(self, name, outfile, date, version, src: Target):
    """Build rule: substitute @@@DATE@@@ / @@@VERSION@@@ in a man-page source."""
    # sed script with the date/version spliced in; {ins}/{outs} are expanded
    # later by the build system.
    substitution = (
        "sed 's/@@@DATE@@@/" + date
        + "/g; s/@@@VERSION@@@/" + version
        + "/g' {ins} > {outs}"
    )
    normalrule(
        replaces=self,
        ins=[src],
        outs=[outfile],
        commands=[substitution],
        label="MANPAGE",
    )
# NOTE(review): the name/outfile vs. src pairings look crossed —
# "xwordgrinder.1" is generated from "wordgrinder.man" and "wordgrinder.1"
# from "xwordgrinder.man". Confirm whether this is intentional before
# changing it.
manpage(
    name="xwordgrinder.1",
    outfile="xwordgrinder.1",
    date=DATE,
    version=VERSION,
    src="wordgrinder.man",
)
manpage(
    name="wordgrinder.1",
    outfile="wordgrinder.1",
    date=DATE,
    version=VERSION,
    src="xwordgrinder.man",
)
|
[
"dg@cowlark.com"
] |
dg@cowlark.com
|
80a7aed86e2c3a0d638eeac86aa7695677224737
|
3dae43ca1d3eb85ea499e7d244ba2cfdbf4707f5
|
/couchdb/design.py
|
08340108a99b0497fcab37b52928c644f04f6d7e
|
[
"Apache-2.0"
] |
permissive
|
rmaiko/pyvsim
|
5da6340331df6818a9bf4cc2ed0c3e8268f249d5
|
18d51d8fc3678ffcb08fd0939dc72c1a8834327d
|
refs/heads/master
| 2021-01-17T07:24:40.573216
| 2016-05-25T15:18:39
| 2016-05-25T15:18:39
| 33,446,279
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,124
|
py
|
# -*- coding: utf-8 -*-
#
# Copyright (C) 2008-2009 Christopher Lenz
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution.
"""Utility code for managing design documents."""
from copy import deepcopy
from inspect import getsource
from itertools import groupby
from operator import attrgetter
from textwrap import dedent
from types import FunctionType
__all__ = ['ViewDefinition']
__docformat__ = 'restructuredtext en'
class ViewDefinition(object):
    r"""Definition of a view stored in a specific design document.

    An instance of this class can be used to access the results of the view,
    as well as to keep the view definition in the design document up to date
    with the definition in the application code.

    >>> from couchdb import Server
    >>> server = Server()
    >>> db = server.create('python-tests')

    >>> view = ViewDefinition('tests', 'all', '''function(doc) {
    ...     emit(doc._id, null);
    ... }''')
    >>> view.get_doc(db)

    The view is not yet stored in the database, in fact, design doc doesn't
    even exist yet. That can be fixed using the `sync` method:

    >>> view.sync(db) #doctest: +ELLIPSIS
    [(True, '_design/tests', ...)]
    >>> design_doc = view.get_doc(db)
    >>> design_doc #doctest: +ELLIPSIS
    <Document '_design/tests'@'...' {...}>
    >>> print design_doc['views']['all']['map']
    function(doc) {
        emit(doc._id, null);
    }

    If you use a Python view server, you can also use Python functions instead
    of code embedded in strings:

    >>> def my_map(doc):
    ...     yield doc['somekey'], doc['somevalue']
    >>> view = ViewDefinition('test2', 'somename', my_map, language='python')
    >>> view.sync(db) #doctest: +ELLIPSIS
    [(True, '_design/test2', ...)]
    >>> design_doc = view.get_doc(db)
    >>> design_doc #doctest: +ELLIPSIS
    <Document '_design/test2'@'...' {...}>
    >>> print design_doc['views']['somename']['map']
    def my_map(doc):
        yield doc['somekey'], doc['somevalue']

    Use the static `sync_many()` method to create or update a collection of
    views in the database in an atomic and efficient manner, even across
    different design documents.

    >>> del server['python-tests']
    """

    def __init__(self, design, name, map_fun, reduce_fun=None,
                 language='javascript', wrapper=None, options=None,
                 **defaults):
        """Initialize the view definition.

        Note that the code in `map_fun` and `reduce_fun` is automatically
        dedented, that is, any common leading whitespace is removed from each
        line.

        :param design: the name of the design document
        :param name: the name of the view
        :param map_fun: the map function code
        :param reduce_fun: the reduce function code (optional)
        :param language: the name of the language used
        :param wrapper: an optional callable that should be used to wrap the
                        result rows
        :param options: view specific options (e.g. {'collation':'raw'})
        """
        # Accept both 'foo' and '_design/foo' for the design document name.
        if design.startswith('_design/'):
            design = design[8:]
        self.design = design
        self.name = name
        # Python functions are converted to their (decorator-stripped,
        # dedented) source text so they can be stored in the design doc.
        if isinstance(map_fun, FunctionType):
            map_fun = _strip_decorators(getsource(map_fun).rstrip())
        self.map_fun = dedent(map_fun.lstrip('\n'))
        if isinstance(reduce_fun, FunctionType):
            reduce_fun = _strip_decorators(getsource(reduce_fun).rstrip())
        if reduce_fun:
            reduce_fun = dedent(reduce_fun.lstrip('\n'))
        self.reduce_fun = reduce_fun
        self.language = language
        self.wrapper = wrapper
        self.options = options
        self.defaults = defaults

    def __call__(self, db, **options):
        """Execute the view in the given database.

        :param db: the `Database` instance
        :param options: optional query string parameters
        :return: the view results
        :rtype: `ViewResults`
        """
        # Per-call options override the defaults captured at construction.
        merged_options = self.defaults.copy()
        merged_options.update(options)
        return db.view('/'.join([self.design, self.name]),
                       wrapper=self.wrapper, **merged_options)

    def __repr__(self):
        return '<%s %r>' % (type(self).__name__, '/'.join([
            '_design', self.design, '_view', self.name
        ]))

    def get_doc(self, db):
        """Retrieve and return the design document corresponding to this view
        definition from the given database.

        :param db: the `Database` instance
        :return: a `client.Document` instance, or `None` if the design document
                 does not exist in the database
        :rtype: `Document`
        """
        return db.get('_design/%s' % self.design)

    def sync(self, db):
        """Ensure that the view stored in the database matches the view defined
        by this instance.

        :param db: the `Database` instance
        """
        return type(self).sync_many(db, [self])

    @staticmethod
    def sync_many(db, views, remove_missing=False, callback=None):
        """Ensure that the views stored in the database that correspond to a
        given list of `ViewDefinition` instances match the code defined in
        those instances.

        This function might update more than one design document. This is done
        using the CouchDB bulk update feature to ensure atomicity of the
        operation.

        :param db: the `Database` instance
        :param views: a sequence of `ViewDefinition` instances
        :param remove_missing: whether views found in a design document that
                               are not found in the list of `ViewDefinition`
                               instances should be removed
        :param callback: a callback function that is invoked when a design
                         document gets updated; the callback gets passed the
                         design document as only parameter, before that doc
                         has actually been saved back to the database
        """
        docs = []
        # Group the definitions by design document so each doc is fetched and
        # written at most once (groupby requires the pre-sort).
        views = sorted(views, key=attrgetter('design'))
        for design, views in groupby(views, key=attrgetter('design')):
            doc_id = '_design/%s' % design
            doc = db.get(doc_id, {'_id': doc_id})
            orig_doc = deepcopy(doc)
            languages = set()
            # Views present in the stored doc but absent from `views`.
            missing = list(doc.get('views', {}).keys())
            for view in views:
                funcs = {'map': view.map_fun}
                if view.reduce_fun:
                    funcs['reduce'] = view.reduce_fun
                if view.options:
                    funcs['options'] = view.options
                doc.setdefault('views', {})[view.name] = funcs
                languages.add(view.language)
                if view.name in missing:
                    missing.remove(view.name)
            if remove_missing and missing:
                for name in missing:
                    del doc['views'][name]
            elif missing and 'language' in doc:
                # Views we are keeping must count toward the language check.
                languages.add(doc['language'])
            if len(languages) > 1:
                # BUG FIX: the message was previously passed logging-style as
                # ValueError(fmt, args), so "%r" was never actually formatted.
                raise ValueError('Found different language views in one '
                                 'design document (%r)' % list(languages))
            doc['language'] = list(languages)[0]
            # Only queue the doc for the bulk update if something changed.
            if doc != orig_doc:
                if callback is not None:
                    callback(doc)
                docs.append(doc)
        return db.update(docs)
def _strip_decorators(code):
retval = []
beginning = True
for line in code.splitlines():
if beginning and not line.isspace():
if line.lstrip().startswith('@'):
continue
beginning = False
retval.append(line)
return '\n'.join(retval)
|
[
"ricardo.entz@dlr.de"
] |
ricardo.entz@dlr.de
|
4eaaded3c75290e39e59843708a75c730052c7e7
|
2356d304de017e7a11b53af17b20dce2d66c647b
|
/LnProtocol/RaspBerry_Prev/Source/Functions/DisplayRawData.py
|
7217d54616cb606bb61e1758f9afe17709a49a6c
|
[] |
no_license
|
Stevetdr/TOR
|
5c71ff0c620ad8b90584fd828f1f0524c1b9e6b9
|
3206c91c8fe55ac76028b2e734d725932c86aab1
|
refs/heads/master
| 2021-10-18T22:02:26.168917
| 2019-02-14T17:03:20
| 2019-02-14T17:03:20
| 118,484,021
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 972
|
py
|
#!/usr/bin/python
# -*- coding: iso-8859-1 -*-
def displayRawData(rawData):
    """Print a received frame as hex bytes, then as printable ASCII."""
    if len(rawData):
        # TX - data the command needs for correct execution / RX - response data
        COMMAND_DATA = 7
        # First pass: the whole frame as space-separated hex bytes.
        print (' full data - len: [{0:03}] - '.format(len(rawData)), end="")
        for byte in rawData: print ('{0:02X} '.format(byte), end="")
        print ()
        print ()
        # Payload after the header: each header byte printed as two hex
        # chars above, hence the *2 offset.
        commandData = rawData[COMMAND_DATA*2:]
        print (' raw data - len: [{0:03}] - '.format(len(commandData)), end="")
        print (' '*COMMAND_DATA, end="")
        print ('[', end="")
        # Printable ASCII range plus CR and LF.
        printableChars = list(range(31,126))
        printableChars.append(13)
        printableChars.append(10)
        # NOTE(review): this loop iterates rawData although the label above
        # reports len(commandData) — presumably it should iterate
        # commandData; also the opening '[' is never closed with ']'.
        # Confirm intent before changing.
        for byte in rawData:
            if byte in printableChars: # Handle only printable ASCII
                print(chr(byte), end="")
            else:
                print(' ', end="")
    else:
        print('No data received!')
|
[
"stevetdr@gmail.com"
] |
stevetdr@gmail.com
|
140287a99f4d2e64e977c72c6d24df7c2e0ca38a
|
d7f43ee7b91c216b1740dead4cc348f3704d2f5a
|
/.metadata/.plugins/org.eclipse.core.resources/.history/b7/80250ecc9ea7001711a5cc76fdb6fd7b
|
4b4fd72ea13464b26888689bd6c7068e2d6a70c7
|
[] |
no_license
|
capslockqq/catkin_ws
|
26f734cf45cb5fe15301f5448a6005f2b21073b5
|
a0989427e42988f36ae9e4d83ba7eb871a56b64e
|
refs/heads/master
| 2021-08-24T07:04:07.551220
| 2017-12-08T14:42:19
| 2017-12-08T14:42:19
| 113,569,359
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,661
|
#!/usr/bin/env python
import rospy
import actionlib
from control_msgs.msg import FollowJointTrajectoryAction
from control_msgs.msg import FollowJointTrajectoryFeedback
from control_msgs.msg import FollowJointTrajectoryResult
from control_msgs.msg import FollowJointTrajectoryGoal
from trajectory_msgs.msg import JointTrajectoryPoint
from trajectory_msgs.msg import JointTrajectory
import math
class ActionExampleNode:
    """Send a fixed 4-joint trajectory to a FollowJointTrajectory action server."""
    # Number of controlled joints; used to size the velocity list.
    N_JOINTS = 4
    def __init__(self,server_name):
        self.client = actionlib.SimpleActionClient(server_name, FollowJointTrajectoryAction)
        self.joint_positions = []
        self.names =["joint1",
                     "joint2",
                     "joint3",
                     "joint4"
                     ]
        # the list of joint positions
        joint_positions = [
            #[0.0, 1.44, -1.17, 0],
            #[0.38, 1.13, -0.75, 0],
            [-3.14, -3.14/2+3.14/2, -3.14/2+3.14/2, 0]
        ]
        # initial duration
        dur = rospy.Duration(1)
        # construct a list of joint positions, each point 5 s after the last
        for p in joint_positions:
            jtp = JointTrajectoryPoint(positions=p,velocities=[0.5]*self.N_JOINTS ,time_from_start=dur)
            dur += rospy.Duration(5)
            self.joint_positions.append(jtp)
        self.jt = JointTrajectory(joint_names=self.names, points=self.joint_positions)
        self.goal = FollowJointTrajectoryGoal( trajectory=self.jt, goal_time_tolerance=dur+rospy.Duration(2) )
    def send_command(self):
        """Block until the server is up, send the goal, and wait for the result."""
        self.client.wait_for_server()
        # NOTE: Python 2 print statements — this module targets Python 2 ROS.
        print self.goal
        self.client.send_goal(self.goal)
        self.client.wait_for_result()
        print self.client.get_result()
        # NOTE(review): leftover debug output below ('hej din idiot', b);
        # candidate for removal.
        print('hej din idiot')
        b = 10;
        print(b)
if __name__ == "__main__":
    # Standalone entry point: start a ROS node and send the trajectory once.
    rospy.init_node("au_dynamixel_test_node")
    node= ActionExampleNode("/arm_controller/follow_joint_trajectory")
    node.send_command()
|
[
"ubuntu@ubuntu.(none)"
] |
ubuntu@ubuntu.(none)
|
|
30ccfa146013bf4f3d0235cf2601d081adf8e96f
|
49371b759260632d95a5b75d60ccc1ec3449e4ce
|
/nipyapi/nifi/apis/processgroups_api.py
|
3fdec47015bc424e7895991e29c9981a71c80b6a
|
[
"Apache-2.0"
] |
permissive
|
rkarthik29/nipyapi
|
05112cdb1412cc3a747ea9615e3c74a5ea4ffa97
|
78a9d8e711b3e0a2cc682e523630069a2338cd73
|
refs/heads/master
| 2021-05-04T23:53:08.658101
| 2018-01-26T16:56:28
| 2018-01-26T16:56:28
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 146,162
|
py
|
# coding: utf-8
"""
NiFi Rest Api
The Rest Api provides programmatic access to command and control a NiFi instance in real time. Start and stop processors, monitor queues, query provenance data, and more. Each endpoint below includes a description, definitions of the expected input and output, potential response codes, and the authorizations required to invoke each service.
OpenAPI spec version: 1.5.0
Contact: dev@nifi.apache.org
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from __future__ import absolute_import
import sys
import os
import re
# python 2 and python 3 compatibility library
from six import iteritems
from ..configuration import Configuration
from ..api_client import ApiClient
class ProcessgroupsApi(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
Ref: https://github.com/swagger-api/swagger-codegen
"""
    def __init__(self, api_client=None):
        """Create the API wrapper.

        :param api_client: optional ApiClient; when omitted, the shared
            client held on Configuration is used (and lazily created).
        """
        config = Configuration()
        if api_client:
            self.api_client = api_client
        else:
            # Fall back to (and memoize) the globally configured client.
            if not config.api_client:
                config.api_client = ApiClient()
            self.api_client = config.api_client
def copy_snippet(self, id, body, **kwargs):
"""
Copies a snippet and discards it.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.copy_snippet(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param CopySnippetRequestEntity body: The copy snippet request. (required)
:return: FlowEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.copy_snippet_with_http_info(id, body, **kwargs)
else:
(data) = self.copy_snippet_with_http_info(id, body, **kwargs)
return data
    def copy_snippet_with_http_info(self, id, body, **kwargs):
        """
        Copies a snippet and discards it.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.copy_snippet_with_http_info(id, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param CopySnippetRequestEntity body: The copy snippet request. (required)
        :return: FlowEntity
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject unknown keyword arguments early with a clear error.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method copy_snippet" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `copy_snippet`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `copy_snippet`")
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the actual HTTP call to the shared client.
        return self.api_client.call_api('/process-groups/{id}/snippet-instance', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='FlowEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def create_connection(self, id, body, **kwargs):
"""
Creates a connection
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_connection(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param ConnectionEntity body: The connection configuration details. (required)
:return: ConnectionEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_connection_with_http_info(id, body, **kwargs)
else:
(data) = self.create_connection_with_http_info(id, body, **kwargs)
return data
    def create_connection_with_http_info(self, id, body, **kwargs):
        """
        Creates a connection
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.create_connection_with_http_info(id, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param ConnectionEntity body: The connection configuration details. (required)
        :return: ConnectionEntity
            If the method is called asynchronously,
            returns the request thread.
        """
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        # Reject unknown keyword arguments early with a clear error.
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method create_connection" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `create_connection`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `create_connection`")
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting
        auth_settings = []
        # Delegate the actual HTTP call to the shared client.
        return self.api_client.call_api('/process-groups/{id}/connections', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ConnectionEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def create_controller_service(self, id, body, **kwargs):
"""
Creates a new controller service
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.create_controller_service(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param ControllerServiceEntity body: The controller service configuration details. (required)
:return: ControllerServiceEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.create_controller_service_with_http_info(id, body, **kwargs)
else:
(data) = self.create_controller_service_with_http_info(id, body, **kwargs)
return data
def create_controller_service_with_http_info(self, id, body, **kwargs):
    """
    Creates a new controller service under the given process group.

    POSTs the controller service configuration to
    ``/process-groups/{id}/controller-services`` and deserializes the
    response into a ControllerServiceEntity.

    :param str id: The process group id. (required)
    :param ControllerServiceEntity body: The controller service configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: ControllerServiceEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_controller_service" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_controller_service`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_controller_service`")

    # The endpoint speaks JSON in both directions; no authentication
    # scheme is attached at the client level.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/controller-services', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='ControllerServiceEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_funnel(self, id, body, **kwargs):
    """
    Creates a funnel in the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param FunnelEntity body: The funnel configuration details. (required)
    :return: FunnelEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_funnel_with_http_info(id, body, **kwargs)
def create_funnel_with_http_info(self, id, body, **kwargs):
    """
    Creates a funnel in the given process group.

    POSTs the funnel configuration to ``/process-groups/{id}/funnels``
    and deserializes the response into a FunnelEntity.

    :param str id: The process group id. (required)
    :param FunnelEntity body: The funnel configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: FunnelEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_funnel" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_funnel`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_funnel`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/funnels', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='FunnelEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_input_port(self, id, body, **kwargs):
    """
    Creates an input port on the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param PortEntity body: The input port configuration details. (required)
    :return: PortEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_input_port_with_http_info(id, body, **kwargs)
def create_input_port_with_http_info(self, id, body, **kwargs):
    """
    Creates an input port on the given process group.

    POSTs the port configuration to ``/process-groups/{id}/input-ports``
    and deserializes the response into a PortEntity.

    :param str id: The process group id. (required)
    :param PortEntity body: The input port configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: PortEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_input_port" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_input_port`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_input_port`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/input-ports', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='PortEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_label(self, id, body, **kwargs):
    """
    Creates a label in the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param LabelEntity body: The label configuration details. (required)
    :return: LabelEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_label_with_http_info(id, body, **kwargs)
def create_label_with_http_info(self, id, body, **kwargs):
    """
    Creates a label in the given process group.

    POSTs the label configuration to ``/process-groups/{id}/labels``
    and deserializes the response into a LabelEntity.

    :param str id: The process group id. (required)
    :param LabelEntity body: The label configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: LabelEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_label" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_label`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_label`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/labels', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='LabelEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_output_port(self, id, body, **kwargs):
    """
    Creates an output port on the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param PortEntity body: The output port configuration. (required)
    :return: PortEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_output_port_with_http_info(id, body, **kwargs)
def create_output_port_with_http_info(self, id, body, **kwargs):
    """
    Creates an output port on the given process group.

    POSTs the port configuration to ``/process-groups/{id}/output-ports``
    and deserializes the response into a PortEntity.

    :param str id: The process group id. (required)
    :param PortEntity body: The output port configuration. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: PortEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_output_port" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_output_port`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_output_port`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/output-ports', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='PortEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_process_group(self, id, body, **kwargs):
    """
    Creates a child process group under the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param ProcessGroupEntity body: The process group configuration details. (required)
    :return: ProcessGroupEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_process_group_with_http_info(id, body, **kwargs)
def create_process_group_with_http_info(self, id, body, **kwargs):
    """
    Creates a child process group under the given process group.

    POSTs the configuration to ``/process-groups/{id}/process-groups``
    and deserializes the response into a ProcessGroupEntity.

    :param str id: The process group id. (required)
    :param ProcessGroupEntity body: The process group configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: ProcessGroupEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_process_group" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_process_group`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_process_group`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/process-groups', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='ProcessGroupEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_processor(self, id, body, **kwargs):
    """
    Creates a new processor in the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param ProcessorEntity body: The processor configuration details. (required)
    :return: ProcessorEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_processor_with_http_info(id, body, **kwargs)
def create_processor_with_http_info(self, id, body, **kwargs):
    """
    Creates a new processor in the given process group.

    POSTs the processor configuration to
    ``/process-groups/{id}/processors`` and deserializes the response
    into a ProcessorEntity.

    :param str id: The process group id. (required)
    :param ProcessorEntity body: The processor configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: ProcessorEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_processor" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_processor`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_processor`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/processors', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='ProcessorEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_remote_process_group(self, id, body, **kwargs):
    """
    Creates a new remote process group in the given process group.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param RemoteProcessGroupEntity body: The remote process group configuration details. (required)
    :return: RemoteProcessGroupEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_remote_process_group_with_http_info(id, body, **kwargs)
def create_remote_process_group_with_http_info(self, id, body, **kwargs):
    """
    Creates a new remote process group in the given process group.

    POSTs the configuration to
    ``/process-groups/{id}/remote-process-groups`` and deserializes the
    response into a RemoteProcessGroupEntity.

    :param str id: The process group id. (required)
    :param RemoteProcessGroupEntity body: The remote process group configuration details. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: RemoteProcessGroupEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_remote_process_group" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_remote_process_group`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_remote_process_group`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/remote-process-groups', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='RemoteProcessGroupEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def create_template(self, id, body, **kwargs):
    """
    Creates a template from a snippet and discards the snippet.

    Synchronous by default; pass a ``callback`` keyword to be invoked
    with the response asynchronously (the request thread is returned
    instead of the entity in that case).

    :param str id: The process group id. (required)
    :param CreateTemplateRequestEntity body: The create template request. (required)
    :return: TemplateEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.create_template_with_http_info(id, body, **kwargs)
def create_template_with_http_info(self, id, body, **kwargs):
    """
    Creates a template from a snippet and discards the snippet.

    POSTs the create-template request to
    ``/process-groups/{id}/templates`` and deserializes the response
    into a TemplateEntity.

    :param str id: The process group id. (required)
    :param CreateTemplateRequestEntity body: The create template request. (required)
    :param callback function: Invoked with the response for an
        asynchronous request; the request thread is then returned. (optional)
    :return: TemplateEntity
    """
    # Reject keyword arguments this endpoint does not understand.
    accepted = ('id', 'body', 'callback', '_return_http_data_only',
                '_preload_content', '_request_timeout')
    for name in kwargs:
        if name not in accepted:
            raise TypeError(
                "Got an unexpected keyword argument '%s'"
                " to method create_template" % name
            )

    # Both positional arguments are mandatory and may not be None.
    if id is None:
        raise ValueError("Missing the required parameter `id` when calling `create_template`")
    if body is None:
        raise ValueError("Missing the required parameter `body` when calling `create_template`")

    # JSON in both directions; no client-level authentication scheme.
    header_params = {
        'Accept': self.api_client.select_header_accept(['application/json']),
        'Content-Type': self.api_client.select_header_content_type(['application/json']),
    }

    return self.api_client.call_api(
        '/process-groups/{id}/templates', 'POST',
        {'id': id},
        [],
        header_params,
        body=body,
        post_params=[],
        files={},
        response_type='TemplateEntity',
        auth_settings=[],
        callback=kwargs.get('callback'),
        _return_http_data_only=kwargs.get('_return_http_data_only'),
        _preload_content=kwargs.get('_preload_content', True),
        _request_timeout=kwargs.get('_request_timeout'),
        collection_formats={})
def delete_variable_registry_update_request(self, group_id, update_id, **kwargs):
    """
    Deletes an update request for a process group's variable registry.

    If the request is not yet complete, it will automatically be
    cancelled. Note: this endpoint is subject to change as NiFi and its
    REST API evolve. Synchronous by default; pass a ``callback`` keyword
    to be invoked with the response asynchronously (the request thread
    is returned instead of the entity in that case).

    :param str group_id: The process group id. (required)
    :param str update_id: The ID of the Variable Registry Update Request (required)
    :return: VariableRegistryUpdateRequestEntity
    """
    # Unwrap the HTTP response down to the deserialized payload.
    kwargs['_return_http_data_only'] = True
    return self.delete_variable_registry_update_request_with_http_info(
        group_id, update_id, **kwargs)
def delete_variable_registry_update_request_with_http_info(self, group_id, update_id, **kwargs):
"""
Deletes an update request for a process group's variable registry. If the request is not yet complete, it will automatically be cancelled.
Note: This endpoint is subject to change as NiFi and it's REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.delete_variable_registry_update_request_with_http_info(group_id, update_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str group_id: The process group id. (required)
:param str update_id: The ID of the Variable Registry Update Request (required)
:return: VariableRegistryUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['group_id', 'update_id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method delete_variable_registry_update_request" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'group_id' is set
if ('group_id' not in params) or (params['group_id'] is None):
raise ValueError("Missing the required parameter `group_id` when calling `delete_variable_registry_update_request`")
# verify the required parameter 'update_id' is set
if ('update_id' not in params) or (params['update_id'] is None):
raise ValueError("Missing the required parameter `update_id` when calling `delete_variable_registry_update_request`")
collection_formats = {}
path_params = {}
if 'group_id' in params:
path_params['groupId'] = params['group_id']
if 'update_id' in params:
path_params['updateId'] = params['update_id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{groupId}/variable-registry/update-requests/{updateId}', 'DELETE',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='VariableRegistryUpdateRequestEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_connections(self, id, **kwargs):
"""
Gets all connections
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_connections(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: ConnectionsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_connections_with_http_info(id, **kwargs)
else:
(data) = self.get_connections_with_http_info(id, **kwargs)
return data
def get_connections_with_http_info(self, id, **kwargs):
"""
Gets all connections
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_connections_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: ConnectionsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_connections" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_connections`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/connections', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ConnectionsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_funnels(self, id, **kwargs):
"""
Gets all funnels
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_funnels(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: FunnelsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_funnels_with_http_info(id, **kwargs)
else:
(data) = self.get_funnels_with_http_info(id, **kwargs)
return data
def get_funnels_with_http_info(self, id, **kwargs):
"""
Gets all funnels
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_funnels_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: FunnelsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_funnels" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_funnels`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/funnels', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FunnelsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_input_ports(self, id, **kwargs):
"""
Gets all input ports
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_input_ports(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: InputPortsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_input_ports_with_http_info(id, **kwargs)
else:
(data) = self.get_input_ports_with_http_info(id, **kwargs)
return data
def get_input_ports_with_http_info(self, id, **kwargs):
"""
Gets all input ports
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_input_ports_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: InputPortsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_input_ports" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_input_ports`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/input-ports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='InputPortsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_labels(self, id, **kwargs):
"""
Gets all labels
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_labels(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: LabelsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_labels_with_http_info(id, **kwargs)
else:
(data) = self.get_labels_with_http_info(id, **kwargs)
return data
def get_labels_with_http_info(self, id, **kwargs):
"""
Gets all labels
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_labels_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: LabelsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_labels" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_labels`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/labels', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='LabelsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_local_modifications(self, id, **kwargs):
"""
Gets a list of local modifications to the Process Group since it was last synchronized with the Flow Registry
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_local_modifications(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: FlowComparisonEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_local_modifications_with_http_info(id, **kwargs)
else:
(data) = self.get_local_modifications_with_http_info(id, **kwargs)
return data
def get_local_modifications_with_http_info(self, id, **kwargs):
"""
Gets a list of local modifications to the Process Group since it was last synchronized with the Flow Registry
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_local_modifications_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: FlowComparisonEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_local_modifications" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_local_modifications`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/local-modifications', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='FlowComparisonEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_output_ports(self, id, **kwargs):
"""
Gets all output ports
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_output_ports(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: OutputPortsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_output_ports_with_http_info(id, **kwargs)
else:
(data) = self.get_output_ports_with_http_info(id, **kwargs)
return data
def get_output_ports_with_http_info(self, id, **kwargs):
"""
Gets all output ports
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_output_ports_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: OutputPortsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_output_ports" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_output_ports`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/output-ports', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='OutputPortsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_process_group(self, id, **kwargs):
"""
Gets a process group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_process_group(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: ProcessGroupEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_process_group_with_http_info(id, **kwargs)
else:
(data) = self.get_process_group_with_http_info(id, **kwargs)
return data
def get_process_group_with_http_info(self, id, **kwargs):
"""
Gets a process group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_process_group_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: ProcessGroupEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_process_group" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_process_group`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProcessGroupEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_process_groups(self, id, **kwargs):
"""
Gets all process groups
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_process_groups(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: ProcessGroupsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_process_groups_with_http_info(id, **kwargs)
else:
(data) = self.get_process_groups_with_http_info(id, **kwargs)
return data
def get_process_groups_with_http_info(self, id, **kwargs):
"""
Gets all process groups
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_process_groups_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: ProcessGroupsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_process_groups" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_process_groups`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/process-groups', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProcessGroupsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_processors(self, id, **kwargs):
"""
Gets all processors
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_processors(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param bool include_descendant_groups: Whether or not to include processors from descendant process groups
:return: ProcessorsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_processors_with_http_info(id, **kwargs)
else:
(data) = self.get_processors_with_http_info(id, **kwargs)
return data
def get_processors_with_http_info(self, id, **kwargs):
"""
Gets all processors
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_processors_with_http_info(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param bool include_descendant_groups: Whether or not to include processors from descendant process groups
:return: ProcessorsEntity
If the method is called asynchronously,
returns the request thread.
"""
all_params = ['id', 'include_descendant_groups']
all_params.append('callback')
all_params.append('_return_http_data_only')
all_params.append('_preload_content')
all_params.append('_request_timeout')
params = locals()
for key, val in iteritems(params['kwargs']):
if key not in all_params:
raise TypeError(
"Got an unexpected keyword argument '%s'"
" to method get_processors" % key
)
params[key] = val
del params['kwargs']
# verify the required parameter 'id' is set
if ('id' not in params) or (params['id'] is None):
raise ValueError("Missing the required parameter `id` when calling `get_processors`")
collection_formats = {}
path_params = {}
if 'id' in params:
path_params['id'] = params['id']
query_params = []
if 'include_descendant_groups' in params:
query_params.append(('includeDescendantGroups', params['include_descendant_groups']))
header_params = {}
form_params = []
local_var_files = {}
body_params = None
# HTTP header `Accept`
header_params['Accept'] = self.api_client.\
select_header_accept(['application/json'])
# HTTP header `Content-Type`
header_params['Content-Type'] = self.api_client.\
select_header_content_type(['*/*'])
# Authentication setting
auth_settings = []
return self.api_client.call_api('/process-groups/{id}/processors', 'GET',
path_params,
query_params,
header_params,
body=body_params,
post_params=form_params,
files=local_var_files,
response_type='ProcessorsEntity',
auth_settings=auth_settings,
callback=params.get('callback'),
_return_http_data_only=params.get('_return_http_data_only'),
_preload_content=params.get('_preload_content', True),
_request_timeout=params.get('_request_timeout'),
collection_formats=collection_formats)
def get_remote_process_groups(self, id, **kwargs):
"""
Gets all remote process groups
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_remote_process_groups(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: RemoteProcessGroupsEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_remote_process_groups_with_http_info(id, **kwargs)
else:
(data) = self.get_remote_process_groups_with_http_info(id, **kwargs)
return data
    def get_remote_process_groups_with_http_info(self, id, **kwargs):
        """
        Gets all remote process groups
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_remote_process_groups_with_http_info(id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :return: RemoteProcessGroupsEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if the required ``id`` parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals (self, id, kwargs, all_params), then fold
        # the validated kwargs entries into the same dict and drop the raw
        # 'kwargs' key, leaving a flat name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_remote_process_groups" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_remote_process_groups`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}/remote-process-groups', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='RemoteProcessGroupsEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_variable_registry(self, id, **kwargs):
"""
Gets a process group's variable registry
Note: This endpoint is subject to change as NiFi and it's REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_variable_registry(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param bool include_ancestor_groups: Whether or not to include ancestor groups
:return: VariableRegistryEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_variable_registry_with_http_info(id, **kwargs)
else:
(data) = self.get_variable_registry_with_http_info(id, **kwargs)
return data
    def get_variable_registry_with_http_info(self, id, **kwargs):
        """
        Gets a process group's variable registry
        Note: This endpoint is subject to change as NiFi and it's REST API evolve.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_variable_registry_with_http_info(id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param bool include_ancestor_groups: Whether or not to include ancestor groups
        :return: VariableRegistryEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if the required ``id`` parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id', 'include_ancestor_groups']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_variable_registry" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `get_variable_registry`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional query parameter: snake_case kwarg maps to the REST API's
        # camelCase name.
        query_params = []
        if 'include_ancestor_groups' in params:
            query_params.append(('includeAncestorGroups', params['include_ancestor_groups']))
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}/variable-registry', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VariableRegistryEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def get_variable_registry_update_request(self, group_id, update_id, **kwargs):
"""
Gets a process group's variable registry
Note: This endpoint is subject to change as NiFi and it's REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.get_variable_registry_update_request(group_id, update_id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str group_id: The process group id. (required)
:param str update_id: The ID of the Variable Registry Update Request (required)
:return: VariableRegistryUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.get_variable_registry_update_request_with_http_info(group_id, update_id, **kwargs)
else:
(data) = self.get_variable_registry_update_request_with_http_info(group_id, update_id, **kwargs)
return data
    def get_variable_registry_update_request_with_http_info(self, group_id, update_id, **kwargs):
        """
        Gets a process group's variable registry
        Note: This endpoint is subject to change as NiFi and it's REST API evolve.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.get_variable_registry_update_request_with_http_info(group_id, update_id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str group_id: The process group id. (required)
        :param str update_id: The ID of the Variable Registry Update Request (required)
        :return: VariableRegistryUpdateRequestEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if a required parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['group_id', 'update_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method get_variable_registry_update_request" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'group_id' is set
        if ('group_id' not in params) or (params['group_id'] is None):
            raise ValueError("Missing the required parameter `group_id` when calling `get_variable_registry_update_request`")
        # verify the required parameter 'update_id' is set
        if ('update_id' not in params) or (params['update_id'] is None):
            raise ValueError("Missing the required parameter `update_id` when calling `get_variable_registry_update_request`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind both path placeholders; snake_case kwargs map to the REST
        # API's camelCase path names.
        path_params = {}
        if 'group_id' in params:
            path_params['groupId'] = params['group_id']
        if 'update_id' in params:
            path_params['updateId'] = params['update_id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # GET request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{groupId}/variable-registry/update-requests/{updateId}', 'GET',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VariableRegistryUpdateRequestEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def import_template(self, id, **kwargs):
"""
Imports a template
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.import_template(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:return: TemplateEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.import_template_with_http_info(id, **kwargs)
else:
(data) = self.import_template_with_http_info(id, **kwargs)
return data
    def import_template_with_http_info(self, id, **kwargs):
        """
        Imports a template
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.import_template_with_http_info(id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :return: TemplateEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if the required ``id`` parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method import_template" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `import_template`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # NOTE(review): this is a POST "import" endpoint, yet no request body
        # is wired up here (body_params stays None). Presumably the XML
        # template payload must reach the server some other way (see
        # upload_template for the multipart variant) — verify before relying
        # on this method.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/xml'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/xml'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}/templates/import', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='TemplateEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def instantiate_template(self, id, body, **kwargs):
"""
Instantiates a template
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.instantiate_template(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param InstantiateTemplateRequestEntity body: The instantiate template request. (required)
:return: FlowEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.instantiate_template_with_http_info(id, body, **kwargs)
else:
(data) = self.instantiate_template_with_http_info(id, body, **kwargs)
return data
    def instantiate_template_with_http_info(self, id, body, **kwargs):
        """
        Instantiates a template
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.instantiate_template_with_http_info(id, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param InstantiateTemplateRequestEntity body: The instantiate template request. (required)
        :return: FlowEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if a required parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method instantiate_template" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `instantiate_template`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `instantiate_template`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The request entity is sent as the JSON request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}/template-instance', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='FlowEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def remove_process_group(self, id, **kwargs):
"""
Deletes a process group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.remove_process_group(id, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param str version: The revision is used to verify the client is working with the latest version of the flow.
:param str client_id: If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.
:return: ProcessGroupEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.remove_process_group_with_http_info(id, **kwargs)
else:
(data) = self.remove_process_group_with_http_info(id, **kwargs)
return data
    def remove_process_group_with_http_info(self, id, **kwargs):
        """
        Deletes a process group
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.remove_process_group_with_http_info(id, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param str version: The revision is used to verify the client is working with the latest version of the flow.
        :param str client_id: If the client id is not specified, new one will be generated. This value (whether specified or generated) is included in the response.
        :return: ProcessGroupEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if the required ``id`` parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id', 'version', 'client_id']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method remove_process_group" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `remove_process_group`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        # Optional revision/client query parameters; snake_case kwargs map to
        # the REST API's camelCase names.
        query_params = []
        if 'version' in params:
            query_params.append(('version', params['version']))
        if 'client_id' in params:
            query_params.append(('clientId', params['client_id']))
        header_params = {}
        form_params = []
        local_var_files = {}
        # DELETE request: no request body is sent.
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['*/*'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}', 'DELETE',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProcessGroupEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def submit_update_variable_registry_request(self, id, body, **kwargs):
"""
Submits a request to update a process group's variable registry
Note: This endpoint is subject to change as NiFi and it's REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.submit_update_variable_registry_request(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param VariableRegistryEntity body: The variable registry configuration details. (required)
:return: VariableRegistryUpdateRequestEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.submit_update_variable_registry_request_with_http_info(id, body, **kwargs)
else:
(data) = self.submit_update_variable_registry_request_with_http_info(id, body, **kwargs)
return data
    def submit_update_variable_registry_request_with_http_info(self, id, body, **kwargs):
        """
        Submits a request to update a process group's variable registry
        Note: This endpoint is subject to change as NiFi and it's REST API evolve.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.submit_update_variable_registry_request_with_http_info(id, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param VariableRegistryEntity body: The variable registry configuration details. (required)
        :return: VariableRegistryUpdateRequestEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if a required parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method submit_update_variable_registry_request" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `submit_update_variable_registry_request`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `submit_update_variable_registry_request`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The registry entity is sent as the JSON request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}/variable-registry/update-requests', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VariableRegistryUpdateRequestEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def update_process_group(self, id, body, **kwargs):
"""
Updates a process group
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_process_group(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param ProcessGroupEntity body: The process group configuration details. (required)
:return: ProcessGroupEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_process_group_with_http_info(id, body, **kwargs)
else:
(data) = self.update_process_group_with_http_info(id, body, **kwargs)
return data
    def update_process_group_with_http_info(self, id, body, **kwargs):
        """
        Updates a process group
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.update_process_group_with_http_info(id, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param ProcessGroupEntity body: The process group configuration details. (required)
        :return: ProcessGroupEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if a required parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_process_group" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `update_process_group`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_process_group`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The process-group entity is sent as the JSON request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='ProcessGroupEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def update_variable_registry(self, id, body, **kwargs):
"""
Updates the contents of a Process Group's variable Registry
Note: This endpoint is subject to change as NiFi and it's REST API evolve.
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.update_variable_registry(id, body, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param VariableRegistryEntity body: The variable registry configuration details. (required)
:return: VariableRegistryEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.update_variable_registry_with_http_info(id, body, **kwargs)
else:
(data) = self.update_variable_registry_with_http_info(id, body, **kwargs)
return data
    def update_variable_registry_with_http_info(self, id, body, **kwargs):
        """
        Updates the contents of a Process Group's variable Registry
        Note: This endpoint is subject to change as NiFi and it's REST API evolve.
        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.update_variable_registry_with_http_info(id, body, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param VariableRegistryEntity body: The variable registry configuration details. (required)
        :return: VariableRegistryEntity
                 If the method is called asynchronously,
                 returns the request thread.
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if a required parameter is missing/None.
        """
        # Whitelist of keyword arguments accepted by this endpoint wrapper.
        all_params = ['id', 'body']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        # Snapshot the current locals, then fold the validated kwargs entries
        # into the same dict and drop the raw 'kwargs' key, leaving a flat
        # name -> value parameter map.
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method update_variable_registry" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `update_variable_registry`")
        # verify the required parameter 'body' is set
        if ('body' not in params) or (params['body'] is None):
            raise ValueError("Missing the required parameter `body` when calling `update_variable_registry`")
        # No multi-valued (collection) parameters for this endpoint.
        collection_formats = {}
        # Bind the path template's {id} placeholder.
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The registry entity is sent as the JSON request body.
        body_params = None
        if 'body' in params:
            body_params = params['body']
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/json'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['application/json'])
        # Authentication setting (none configured by the generated client).
        auth_settings = []
        # Delegate the HTTP exchange and response deserialization to the
        # shared ApiClient.
        return self.api_client.call_api('/process-groups/{id}/variable-registry', 'PUT',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='VariableRegistryEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
def upload_template(self, id, template, **kwargs):
"""
Uploads a template
This method makes a synchronous HTTP request by default. To make an
asynchronous HTTP request, please define a `callback` function
to be invoked when receiving the response.
>>> def callback_function(response):
>>> pprint(response)
>>>
>>> thread = api.upload_template(id, template, callback=callback_function)
:param callback function: The callback function
for asynchronous request. (optional)
:param str id: The process group id. (required)
:param file template: The binary content of the template file being uploaded. (required)
:return: TemplateEntity
If the method is called asynchronously,
returns the request thread.
"""
kwargs['_return_http_data_only'] = True
if kwargs.get('callback'):
return self.upload_template_with_http_info(id, template, **kwargs)
else:
(data) = self.upload_template_with_http_info(id, template, **kwargs)
return data
    def upload_template_with_http_info(self, id, template, **kwargs):
        """
        Uploads a template.

        POSTs the template file as multipart/form-data to
        /process-groups/{id}/templates/upload.

        This method makes a synchronous HTTP request by default. To make an
        asynchronous HTTP request, please define a `callback` function
        to be invoked when receiving the response.
        >>> def callback_function(response):
        >>>     pprint(response)
        >>>
        >>> thread = api.upload_template_with_http_info(id, template, callback=callback_function)

        :param callback function: The callback function
            for asynchronous request. (optional)
        :param str id: The process group id. (required)
        :param file template: The binary content of the template file being uploaded. (required)
        :raises TypeError: if an unexpected keyword argument is supplied.
        :raises ValueError: if `id` or `template` is missing or None.
        :return: TemplateEntity
                 If the method is called asynchronously,
                 returns the request thread.
        """
        # Whitelist of accepted keyword arguments; anything else is a caller bug.
        all_params = ['id', 'template']
        all_params.append('callback')
        all_params.append('_return_http_data_only')
        all_params.append('_preload_content')
        all_params.append('_request_timeout')
        params = locals()
        for key, val in iteritems(params['kwargs']):
            if key not in all_params:
                raise TypeError(
                    "Got an unexpected keyword argument '%s'"
                    " to method upload_template" % key
                )
            params[key] = val
        del params['kwargs']
        # verify the required parameter 'id' is set
        if ('id' not in params) or (params['id'] is None):
            raise ValueError("Missing the required parameter `id` when calling `upload_template`")
        # verify the required parameter 'template' is set
        if ('template' not in params) or (params['template'] is None):
            raise ValueError("Missing the required parameter `template` when calling `upload_template`")
        collection_formats = {}
        path_params = {}
        if 'id' in params:
            path_params['id'] = params['id']
        query_params = []
        header_params = {}
        form_params = []
        local_var_files = {}
        # The template file travels as a multipart file field, not as the body.
        if 'template' in params:
            local_var_files['template'] = params['template']
        body_params = None
        # HTTP header `Accept`
        header_params['Accept'] = self.api_client.\
            select_header_accept(['application/xml'])
        # HTTP header `Content-Type`
        header_params['Content-Type'] = self.api_client.\
            select_header_content_type(['multipart/form-data'])
        # Authentication setting
        auth_settings = []
        return self.api_client.call_api('/process-groups/{id}/templates/upload', 'POST',
                                        path_params,
                                        query_params,
                                        header_params,
                                        body=body_params,
                                        post_params=form_params,
                                        files=local_var_files,
                                        response_type='TemplateEntity',
                                        auth_settings=auth_settings,
                                        callback=params.get('callback'),
                                        _return_http_data_only=params.get('_return_http_data_only'),
                                        _preload_content=params.get('_preload_content', True),
                                        _request_timeout=params.get('_request_timeout'),
                                        collection_formats=collection_formats)
|
[
"dchaffey@hortonworks.com"
] |
dchaffey@hortonworks.com
|
9bbaa8e1635b43d3264fc411bfe7512431436848
|
f2d7575c2a19455c0bb6d7e344fb628df330b610
|
/tests/test_utils/test_plotting.py
|
b4a85fd9c3fcdd01b833a07ec2b993d12621db3b
|
[
"MIT"
] |
permissive
|
GenevieveBuckley/2019-organizing-documenting-distributing
|
1c40a63d774a2574021e7fd4b07253b6ae15e9c5
|
6aa28c927a8287b1643fc1ec22bee26a2458861e
|
refs/heads/master
| 2020-04-17T21:10:25.428094
| 2019-01-22T05:58:50
| 2019-01-22T05:58:50
| 166,937,140
| 1
| 0
|
MIT
| 2019-01-22T06:07:41
| 2019-01-22T06:07:40
| null |
UTF-8
|
Python
| false
| false
| 704
|
py
|
# -*- coding: utf-8 -*-
"""Test the plotting functions in mypack
"""
import os
import matplotlib.pyplot as plt
import numpy as np
import mypack.utils.io as my_io
from mypack.utils.plotting import plot_airfoil
def test_plot_airfoil():
    """plot_airfoil must draw the airfoil's x/y columns onto the current axes."""
    # given: the demo Selig-format airfoil bundled next to this test module
    module_dir = os.path.dirname(os.path.abspath(__file__))
    airfoil_path = os.path.join(module_dir, 'files', 'demo_selig.dat')
    airfoil = my_io.read_selig(airfoil_path)
    fig, ax = plt.subplots()
    # when: plotting (plot_airfoil draws onto the current axes created above)
    plot_airfoil(airfoil)
    x_plot, y_plot = ax.lines[0].get_xydata().T
    # then: the plotted coordinates match the dataframe columns exactly
    np.testing.assert_array_equal(x_plot, airfoil.x)
    np.testing.assert_array_equal(y_plot, airfoil.y)
|
[
"student@localhost"
] |
student@localhost
|
2494d7d015d8f5d3ea8709b3e109f085d902b831
|
86d6d56343f7872ad9abfe1301fcfd962374f61d
|
/api_manager/apps.py
|
638de1c79807a6363beb747b341d10673986e26a
|
[] |
no_license
|
TheWeirdDev/API-Manager
|
c63c3e68f4fa4f611026cd5f85d7e3074c443eed
|
b290ff8361daad43d1e58b6bac7b7febd00d719e
|
refs/heads/master
| 2023-05-31T09:09:41.906179
| 2021-06-22T05:42:18
| 2021-06-22T05:42:18
| 377,508,398
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 153
|
py
|
from django.apps import AppConfig
class ApiManagerConfig(AppConfig):
    """Django application configuration for the api_manager app."""
    # Use 64-bit auto-incrementing primary keys for models in this app.
    default_auto_field = 'django.db.models.BigAutoField'
    name = 'api_manager'
|
[
"alireza6677@gmail.com"
] |
alireza6677@gmail.com
|
7bf04d25d8c8c1fa5af749227ed1586f9840cbd1
|
a5fabc6d6341925b587fecb082dc70c0d1b95619
|
/FLM_stageI.py
|
8afcebd9e4ef64845fb780e99f78f59988b02671
|
[] |
no_license
|
illuminous/pythonScripts
|
fcfef9c0fb9bd3155edcf02b56bbec563ff08b2a
|
1bb69a1bb1e10f6041274c027cc0ab06c7a9efed
|
refs/heads/master
| 2022-10-22T09:35:48.088907
| 2022-09-28T22:35:32
| 2022-09-28T22:35:32
| 120,963,663
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,109
|
py
|
#!/usr/bin/env python
"""FLM_stageI.py builds a list of zones (buildZones) creates Folders based on a directory structure,
and unzips files to a location.
****MUST BE RUN IN PYTHON 2.6 OR GREATER BECAUSE OF THE EXTRACTALL FUNCTION****
"""
__author__ = "Jason M. Herynk"
__copyright__ = "Copyright 2011, SEM Systems for Environmental Management"
__credits__ = [""]
__license__ = "GPL"
__version__ = "1.0.1"
__maintainer__ = "Jason M. Herynk"
__email__ = "jherynk.sem@gmail.com"
__status__ = "Prototype"
import sys
import os
import zipfile
# Product codes whose zips get extracted (presumably existing vegetation
# type / cover / height, 2008 vintage — TODO confirm).
products = ['evt08', 'evc08', 'evh08']
# Populated by buildZones() with zone labels such as 'z01'.
zoneres = []
# Geographic area code used to build source and workspace paths.
geoarea = 'SW'
rootdata = 'G:/data/' + geoarea
wsdata = 'G:/LF_Refreash/FLM30_Refreash/c08/' + geoarea
# Path templates: %s is the zone label; second template takes zone + product.
structure1 = '/%s'
structure2 = '/veg/%s%s.zip'
def buildZones(zone_number_lower, zone_number_upper):
    """Append zone labels for the half-open range
    [zone_number_lower, zone_number_upper) to the module-level `zoneres` list.

    Labels are zero-padded to two digits ('z01', ..., 'z09', 'z10', ...),
    matching the original two-branch formatting exactly.
    """
    for zone in range(zone_number_lower, zone_number_upper):
        # '%02d' left-pads single digits with a zero and leaves larger
        # numbers untouched, replacing the duplicated if/else branches.
        zoneres.append('z%02d' % zone)
def createFoldersUnzip():
    """For each zone label in `zoneres`, mirror the source zone folder under
    the workspace and extract every product zip found under the zone's
    veg/ directory.

    NOTE: Python 2 only (print statements); extractall needs Python >= 2.6.
    """
    for r in zoneres:
        rootfolder = rootdata + structure1 %(r)
        wsfolder = wsdata + structure1 %(r)
        print rootfolder
        print wsfolder
        # Only process zones that actually exist in the source tree.
        if os.path.exists(rootfolder):
            try:
                os.makedirs(wsfolder)
                print wsfolder
                print 'created'
            except:
                # makedirs raises when the directory already exists; note the
                # bare except also swallows any other OS error (pre-existing
                # behaviour, kept as-is).
                print wsfolder
                print 'Directory Already Exists'
            for p in products:
                vegdata = rootfolder + structure2 %(r, p)
                print vegdata
                if os.path.exists(vegdata):
                    os.chdir(wsfolder)
                    destination = os.getcwd()  # NOTE(review): assigned but never used
                    unzipitem = zipfile.ZipFile(vegdata)
                    print unzipitem
##                    zipitems = unzipitem.namelist()
##                    print zipitems
                    unzipitem.extractall(wsfolder)
                else:
                    print 'file isnt here'
##buildZones(1,100)
##createFoldersUnzip()
|
[
"noreply@github.com"
] |
illuminous.noreply@github.com
|
2d6de542a35946939f11579335df3c94dc93e66f
|
9bf79a6edfd916b4932f763876cdd5a8459a0ebe
|
/scripts/change_namespace.py
|
ddee71a50e6a72606dae0c852e597273114ae3ed
|
[
"MIT"
] |
permissive
|
hseom/brunet-1
|
46103314870bcc9eff0b27fbe5cf6ea712c4b9a9
|
26e6e75250a8ac05569e711fa77b86d0e9976c79
|
refs/heads/master
| 2021-01-18T02:58:44.794881
| 2011-07-16T17:41:32
| 2011-07-16T17:41:32
| 3,490,583
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,497
|
py
|
#!/usr/bin/env python
import sys, tempfile, os, re, os.path
from functools import partial
filestorename = sys.argv[3:]  # NOTE(review): module-level argv slice, never read afterwards — confirm before removing
def get_namespace(sourcef):
    """Return the first namespace name declared in the lines of *sourcef*,
    or None if no namespace declaration is found."""
    pattern = re.compile(r"namespace ([^{;]+)\b")
    for line in sourcef:
        found = pattern.search(line)
        if found:
            return found.group(1)
def fixnamespace(oldns, newns, sourcef):
    """Copy the lines of *sourcef* to a fresh temp file, rewriting
    `namespace oldns` declarations to `namespace newns`; return the temp
    file's path.

    NOTE: Python 2 — os.write() is fed str objects; Python 3 would need bytes.
    """
    (tempf, tempn) = tempfile.mkstemp()
    # The two groups preserve everything around the namespace keyword.
    oldstr = r"(.*)namespace %s\b(.*)" % oldns
    newstr = r"\1namespace %s\2" % newns
    for line in sourcef:
        newline = re.sub(oldstr, newstr, line)
        os.write(tempf, newline)
    os.close(tempf)
    return tempn
def is_using(sourcef, ns):
    """Return True if any line of *sourcef* contains a `using <ns>;` directive."""
    needle = "using %s;" % ns
    for line in sourcef:
        if needle in line:
            return True
    return False
def add_using(fn, ns):
    """Insert `using <ns>;` immediately before the first line containing
    `namespace` in file *fn*, rewriting the file in place via a temp file."""
    (tempf, tempn) = tempfile.mkstemp()
    sourcef = open(fn)
    # Only insert before the FIRST namespace line.
    not_done = True
    for line in sourcef:
        if not_done and "namespace" in line:
            #write using just before:
            os.write(tempf, "using %s;\n" % ns)
            not_done = False
        os.write(tempf,line)
    os.close(tempf)
    #print "rename %s -> %s" % (tempn, fn)
    os.rename(tempn, fn)
def change_ns(fn, old, new):
    """Rewrite namespace *old* to *new* in file *fn*, in place."""
    temp = fixnamespace(old, new, open(fn))
    os.rename(temp, fn)
def fixns_main(argv):
    """CLI handler: argv = [prog, old_ns, new_ns, file1, file2, ...]."""
    for fn in argv[3:]:
        change_ns(fn, argv[1], argv[2])
def printns_main(argv):
    """CLI handler: print the namespace declared in each listed file (Python 2)."""
    for fn in argv[1:]:
        print get_namespace(open(fn))
def add_using_main(argv):
    """CLI handler: argv = [prog, namespace, file1, ...]; add the using
    directive to each file that does not already have it."""
    namespace = argv[1]
    for fn in argv[2:]:
        if not is_using(open(fn), namespace):
            add_using(fn, namespace)
def rec_split(dirstr, list):
    """Split a relative path into its components, prepending them onto *list*
    and returning it (e.g. 'a/b/c' -> ['a', 'b', 'c'])."""
    # Iterative form of the original tail recursion: peel off the last
    # component each round until the path is exhausted.
    while dirstr != '':
        dirstr, tail = os.path.split(dirstr)
        list.insert(0, tail)
    return list
def print_error(fn, actual_ns, expected_ns):
    """Report a file whose declared namespace does not match its directory (Python 2)."""
    print "%s ns=%s should be %s" % (fn, actual_ns, expected_ns)
def check_tree_main(on_err, args):
    """Walk the current directory tree and call on_err(path, actual_ns,
    expected_ns) for every .cs file whose namespace does not match its
    directory path rooted at args[1]."""
    base = args[1]
    ftype = ".cs"
    for (dpath, dnames, fnames) in os.walk(os.curdir):
        # Expected namespace: base name, then one component per subdirectory.
        nslist = rec_split(dpath, [])
        nslist[0] = base
        ns = ".".join(nslist)
        for fn in fnames:
            if not fn.endswith(ftype):
                continue
            fullnm = os.path.join(dpath, fn)
            this_ns = get_namespace(open(fullnm))
            if this_ns != ns:
                on_err(fullnm, this_ns, ns)
if __name__ == "__main__":
    # Dispatch table: the first CLI argument selects the sub-command;
    # check_tree reports mismatches, change_tree rewrites them in place.
    modes = { 'change_ns_f' : fixns_main,
              'print_ns_f' : printns_main,
              'check_tree' : partial(check_tree_main, print_error),
              'change_tree' : partial(check_tree_main, change_ns),
              'add_using' : add_using_main,
            }
    # Pop the mode name so each handler sees its own argv layout.
    mode = sys.argv.pop(1)
    modes[mode](sys.argv)
|
[
"boykin@pobox.com"
] |
boykin@pobox.com
|
83129e98f9964caf026af2e1134ea22429062362
|
a476bf64643ab9e294157c6b673d3187e307256c
|
/petro/multi/conc.py
|
ac5d994fb536784fd955e54ea1b3dc6c785f7efa
|
[] |
no_license
|
snytav/Sudents
|
5484751441fe558bf624dd2ba17e318735b8503f
|
81eb6fe38ea47ff9c62555fbe443b6d4ad23c744
|
refs/heads/master
| 2022-07-07T00:33:25.496098
| 2020-05-15T16:49:57
| 2020-05-15T16:49:57
| 264,201,203
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 411
|
py
|
import concurrent.futures
import time
start = time.perf_counter()  # wall-clock reference for the elapsed-time report below
def do_something(seconds):
    """Announce the sleep, pause for *seconds* seconds, then return a status string."""
    message = f'Sleeping {seconds} second(s)... '
    print(message)
    time.sleep(seconds)
    return 'Done Sleeping... '
# NOTE(review): this executor runs at import time without an
# `if __name__ == '__main__':` guard; on platforms that spawn workers
# (e.g. Windows) the child's re-import would re-execute it — confirm intent.
with concurrent.futures.ProcessPoolExecutor() as executor:
    f1 = executor.submit(do_something,1)
    # result() blocks until the worker finishes.
    print(f1.result())
finish = time.perf_counter()
print(f'Finished in {round(finish-start,2)} second(s)')
|
[
"snytav@gmail.com"
] |
snytav@gmail.com
|
d0d0a70fc66944f731d306b57470b3e230b98ff7
|
dc3d6f88067b27cd0ddb012dafdf13b260ca6d64
|
/houses/models.py
|
11135365d70f4b8bee994146f32663a4745c72bc
|
[] |
no_license
|
AronForero/RestFramework_Test
|
1f0efdfd194279b818e2690dbb27e189663c5da5
|
d0e31db64c13e40dca093f43beed70989f898a1d
|
refs/heads/master
| 2022-12-23T09:45:39.019601
| 2019-01-10T22:12:37
| 2019-01-10T22:12:37
| 164,375,024
| 0
| 0
| null | 2021-06-10T21:10:55
| 2019-01-07T03:48:12
|
Python
|
UTF-8
|
Python
| false
| false
| 1,482
|
py
|
from django.db import models
# Create your models here.
class general(models.Model):
    """
    The most general information about the property (address and contact).
    """
    direccion = models.CharField(max_length=50)  # street address
    ciudad = models.CharField(max_length = 30)  # city
    departamento = models.CharField(max_length = 30)  # department/state
    pais = models.CharField(max_length = 30)  # country
    telefono = models.CharField(max_length = 10)  # phone number
class interior(models.Model):
    """
    Information about the interior of the property.
    """
    cuartos = models.IntegerField()  # number of rooms
    baños = models.IntegerField()  # number of bathrooms
    closets = models.IntegerField()  # number of closets
    calentador = models.BooleanField(null = True, blank = True)  # has a water heater
class exterior(models.Model):
    """
    Information about the place (or its surroundings) where the property is located.
    """
    vigilancia = models.BooleanField(null = True, blank = True)  # security service
    parqueadero = models.BooleanField(null = True, blank = True)  # parking
    salon_social = models.BooleanField(null = True, blank = True)  # common/social room
    numero_pisos = models.IntegerField()  # number of floors
class inmueble(models.Model):
    """
    Complete information for a property: ties together its general,
    interior and exterior records.
    """
    tipo = models.CharField(max_length=50)
    subtipo = models.CharField(max_length=50)
    # FIX: on_delete must be a callable such as models.CASCADE; the original
    # passed the string 'CASCADE', which fails when Django invokes it during
    # a cascading delete.
    gen = models.ForeignKey(general, on_delete=models.CASCADE)
    inte = models.ForeignKey(interior, on_delete=models.CASCADE)
    ext = models.ForeignKey(exterior, on_delete=models.CASCADE)

    class Meta:
        ordering = ['tipo', 'subtipo',]  # order records by tipo, then subtipo
|
[
"Foreroo965@gmail.com"
] |
Foreroo965@gmail.com
|
0f60c2091d27ba1bac009139d2b7ed4e9f7e4c5f
|
fe19d2fac4580d463132e61509bd6e3cc2cf958d
|
/toontown/hood/ZoneUtil.py
|
1216bf85f94b68f99ca71e81390a1460878b6f17
|
[] |
no_license
|
t00nt0wn1dk/c0d3
|
3e6db6dd42c3aa36ad77709cf9016176a3f3a44f
|
7de105d7f3de0f8704b020e32fd063ee2fad8d0d
|
refs/heads/master
| 2021-01-01T16:00:15.367822
| 2015-03-21T21:25:52
| 2015-03-21T21:25:55
| 32,647,654
| 3
| 5
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,106
|
py
|
# 2013.08.22 22:21:01 Pacific Daylight Time
# Embedded file name: toontown.hood.ZoneUtil
from toontown.toonbase.ToontownGlobals import *
from direct.directnotify import DirectNotifyGlobal
zoneUtilNotify = DirectNotifyGlobal.directNotify.newCategory('ZoneUtil')
tutorialDict = None
def isGoofySpeedwayZone(zoneId):
return zoneId == 8000
def isCogHQZone(zoneId):
return zoneId >= 10000 and zoneId < 15000
def isMintInteriorZone(zoneId):
return zoneId in (CashbotMintIntA, CashbotMintIntB, CashbotMintIntC)
def isDynamicZone(zoneId):
return zoneId >= DynamicZonesBegin and zoneId < DynamicZonesEnd
def getStreetName(branchId):
global tutorialDict
if tutorialDict:
return StreetNames[20000][-1]
else:
return StreetNames[branchId][-1]
def getLoaderName(zoneId):
if tutorialDict:
if zoneId == ToontownCentral:
loaderName = 'safeZoneLoader'
else:
loaderName = 'townLoader'
else:
suffix = zoneId % 1000
if suffix >= 500:
suffix -= 500
if isCogHQZone(zoneId):
loaderName = 'cogHQLoader'
elif suffix < 100:
loaderName = 'safeZoneLoader'
else:
loaderName = 'townLoader'
return loaderName
def getBranchLoaderName(zoneId):
return getLoaderName(getBranchZone(zoneId))
def getSuitWhereName(zoneId):
where = getWhereName(zoneId, 0)
return where
def getToonWhereName(zoneId):
where = getWhereName(zoneId, 1)
return where
def isPlayground(zoneId):
whereName = getWhereName(zoneId, False)
if whereName == 'cogHQExterior':
return True
else:
return zoneId % 1000 == 0 and zoneId < DynamicZonesBegin
def isPetshop(zoneId):
if zoneId == 2522 or zoneId == 1510 or zoneId == 3511 or zoneId == 4508 or zoneId == 5505 or zoneId == 9508:
return True
return False
def getWhereName(zoneId, isToon):
if tutorialDict:
if zoneId in tutorialDict['interiors']:
where = 'toonInterior'
elif zoneId in tutorialDict['exteriors']:
where = 'street'
elif zoneId == ToontownCentral or zoneId == WelcomeValleyToken:
where = 'playground'
else:
zoneUtilNotify.error('No known zone: ' + str(zoneId))
else:
suffix = zoneId % 1000
suffix = suffix - suffix % 100
if isCogHQZone(zoneId):
if suffix == 0:
where = 'cogHQExterior'
elif suffix == 100:
where = 'cogHQLobby'
elif suffix == 200:
where = 'factoryExterior'
elif getHoodId(zoneId) == LawbotHQ and suffix in (300, 400, 500, 600):
where = 'stageInterior'
elif getHoodId(zoneId) == BossbotHQ and suffix in (500, 600, 700):
where = 'countryClubInterior'
elif suffix >= 500:
if getHoodId(zoneId) == SellbotHQ:
where = 'factoryInterior'
elif getHoodId(zoneId) == CashbotHQ:
where = 'mintInterior'
else:
zoneUtilNotify.error('unknown cogHQ interior for hood: ' + str(getHoodId(zoneId)))
else:
zoneUtilNotify.error('unknown cogHQ where: ' + str(zoneId))
elif suffix == 0:
where = 'playground'
elif suffix >= 500:
if isToon:
where = 'toonInterior'
else:
where = 'suitInterior'
else:
where = 'street'
return where
def getBranchZone(zoneId):
if tutorialDict:
branchId = tutorialDict['branch']
else:
branchId = zoneId - zoneId % 100
if not isCogHQZone(zoneId):
if zoneId % 1000 >= 500:
branchId -= 500
return branchId
def getCanonicalBranchZone(zoneId):
return getBranchZone(getCanonicalZoneId(zoneId))
def isWelcomeValley(zoneId):
return zoneId == WelcomeValleyToken or zoneId >= WelcomeValleyBegin and zoneId < WelcomeValleyEnd
def getCanonicalZoneId(zoneId):
if zoneId == WelcomeValleyToken:
zoneId = ToontownCentral
elif zoneId >= WelcomeValleyBegin and zoneId < WelcomeValleyEnd:
zoneId = zoneId % 2000
if zoneId < 1000:
zoneId = zoneId + ToontownCentral
else:
zoneId = zoneId - 1000 + GoofySpeedway
return zoneId
def getTrueZoneId(zoneId, currentZoneId):
if zoneId >= WelcomeValleyBegin and zoneId < WelcomeValleyEnd or zoneId == WelcomeValleyToken:
zoneId = getCanonicalZoneId(zoneId)
if currentZoneId >= WelcomeValleyBegin and currentZoneId < WelcomeValleyEnd:
hoodId = getHoodId(zoneId)
offset = currentZoneId - currentZoneId % 2000
if hoodId == ToontownCentral:
return zoneId - ToontownCentral + offset
elif hoodId == GoofySpeedway:
return zoneId - GoofySpeedway + offset + 1000
return zoneId
def getHoodId(zoneId):
if tutorialDict:
hoodId = Tutorial
else:
hoodId = zoneId - zoneId % 1000
return hoodId
def getSafeZoneId(zoneId):
hoodId = getHoodId(zoneId)
if hoodId in HQToSafezone:
hoodId = HQToSafezone[hoodId]
return hoodId
def getCanonicalHoodId(zoneId):
return getHoodId(getCanonicalZoneId(zoneId))
def getCanonicalSafeZoneId(zoneId):
return getSafeZoneId(getCanonicalZoneId(zoneId))
def isInterior(zoneId):
if tutorialDict:
if zoneId in tutorialDict['interiors']:
r = 1
else:
r = 0
else:
r = zoneId % 1000 >= 500
return r
def overrideOn(branch, exteriorList, interiorList):
global tutorialDict
if tutorialDict:
zoneUtilNotify.warning('setTutorialDict: tutorialDict is already set!')
tutorialDict = {'branch': branch,
'exteriors': exteriorList,
'interiors': interiorList}
def overrideOff():
global tutorialDict
tutorialDict = None
return
def getWakeInfo(hoodId = None, zoneId = None):
wakeWaterHeight = 0
showWake = 0
try:
if hoodId is None:
hoodId = base.cr.playGame.getPlaceId()
if zoneId is None:
zoneId = base.cr.playGame.getPlace().getZoneId()
canonicalZoneId = getCanonicalZoneId(zoneId)
if canonicalZoneId == DonaldsDock:
wakeWaterHeight = DDWakeWaterHeight
showWake = 1
elif canonicalZoneId == ToontownCentral:
wakeWaterHeight = TTWakeWaterHeight
showWake = 1
elif canonicalZoneId == OutdoorZone:
wakeWaterHeight = OZWakeWaterHeight
showWake = 1
elif hoodId == MyEstate:
wakeWaterHeight = EstateWakeWaterHeight
showWake = 1
except AttributeError:
pass
return (showWake, wakeWaterHeight)
# okay decompyling C:\Users\Maverick\Documents\Visual Studio 2010\Projects\Unfreezer\py2\toontown\hood\ZoneUtil.pyc
# decompiled 1 files: 1 okay, 0 failed, 0 verify failed
# 2013.08.22 22:21:01 Pacific Daylight Time
|
[
"anonymoustoontown@gmail.com"
] |
anonymoustoontown@gmail.com
|
730f99277e997cb391569a24d6d0aa2d3fe4ca86
|
814984dc0ce294540ccff09342acd77607023e2f
|
/booking/migrations/0005_auto_20160426_0604.py
|
49fa864ac1b5bc7866ac5e37f242617a84fc0bd4
|
[] |
no_license
|
msrshahrukh100/legistify
|
371e20d5b4497e8483375e31f904a763acd2e185
|
35606710458665f5e030fa8c8730c0fac3bbcd65
|
refs/heads/master
| 2021-01-01T05:10:36.098150
| 2017-08-14T11:10:56
| 2017-08-14T11:10:56
| 56,976,176
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 472
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-04-26 06:04
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
    """Auto-generated: make users.email unique with a 50-character limit."""
    dependencies = [
        ('booking', '0004_auto_20160426_0603'),
    ]
    operations = [
        migrations.AlterField(
            model_name='users',
            name='email',
            field=models.EmailField(default='', max_length=50, unique=True),
        ),
    ]
|
[
"msr.concordfly@gmail.com"
] |
msr.concordfly@gmail.com
|
2f06223f1778e0f95fac7186a878be7de9b9567e
|
46dae0ee9a8a3a00721cfc77e0b825a8c3b03dac
|
/kaldi-plda/nnet-emotion/converter/training/expand_mfccs_and_pitch_features_with_target_emotion.py
|
6153fcc76221b675d6cdbadeb06dfcce1cf81aa3
|
[] |
no_license
|
kailashkarthik9/MultiModalEmotionDetection
|
970b34182f7d0731bccfefc77c959ce867038247
|
896cbb492d084f00efac844933fb50f6acab094e
|
refs/heads/master
| 2022-12-13T14:23:11.374925
| 2020-08-14T18:44:20
| 2020-08-14T18:44:20
| 227,515,273
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,141
|
py
|
#!/usr/bin/env python
# author: aa4461
import glob
import os.path
import sys
import kaldiio
import numpy as np
def get_target_emotion(utt):
    """The target emotion label is encoded as the first character of the
    utterance id; return it as an int."""
    leading_char = utt[0]
    return int(leading_char)
def write_expanded_feature(raw_mfcc_and_pitch_file, output_data_dir):
    """Append a constant target-emotion column to each utterance's feature
    matrix from a Kaldi ark file and write the result (ark + scp pair) into
    *output_data_dir*, preserving the input file's split number."""
    expanded_features = {}
    for utt, features in kaldiio.load_ark(raw_mfcc_and_pitch_file):
        num_frames = len(features)
        # One extra column with the same value for every frame: the emotion
        # encoded in the utterance id's first character.
        target_emotion_column = np.full((num_frames, 1), get_target_emotion(utt))
        expanded_feature = np.append(features, target_emotion_column, 1)
        expanded_features[utt] = expanded_feature
    # Input names look like '<name>.<split>.ark'; reuse the split number.
    (_, split, _) = raw_mfcc_and_pitch_file.split('.', 2)
    kaldiio.save_ark(
        os.path.join(output_data_dir, 'mfcc_pitch_and_target_emotion.%s.ark' % (split)),
        expanded_features,
        scp=os.path.join(output_data_dir, 'mfcc_pitch_and_target_emotion.%s.scp' % (split))
    )
def main():
    """Expand every *.ark file found in argv[1] and write results into argv[2]."""
    input_data_dir = sys.argv[1]
    output_data_dir = sys.argv[2]
    for raw_mfcc_and_pitch_file in glob.glob('%s/*ark' % (input_data_dir)):
        write_expanded_feature(raw_mfcc_and_pitch_file, output_data_dir)
# Script entry point.
if __name__ == "__main__":
    main()
|
[
"kailashkarthik9@gmail.com"
] |
kailashkarthik9@gmail.com
|
db7fd9f978f798c76ad63fdc488634c38020171a
|
adf2b606ba2feab9fe9aa580a238b07c61521ef6
|
/post/urls.py
|
f8bf912c66c84d6116a9105bdade1bcaaed172a0
|
[] |
no_license
|
Avlayev/mysite
|
f97b5b387f50df2e23155fe513d5b7ee50ff750b
|
1b7e4cfc44f1ebe64435b020f7d4dca761461ff1
|
refs/heads/main
| 2023-01-19T00:24:42.982007
| 2020-11-27T13:57:31
| 2020-11-27T13:57:31
| 316,501,331
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 183
|
py
|
from django.urls import path
from . import views
urlpatterns = [
    # Post list / landing page.
    path('', views.index, name='index'),
    # Add a comment to the post identified by `id`.
    path('addcomment/<int:id>/', views.addcomment, name='addcomment'),
]
|
[
"avlayev@gmail.com"
] |
avlayev@gmail.com
|
297f0b4bdc0767a93886314e9c09e2efc56559fa
|
55087121730dd61dca43b56e3d468846e221ccda
|
/03wk_homework.py
|
b02cf6badcfe2118c3e5277182db3bdbfde91060
|
[] |
no_license
|
sangmokang/spartacoding7th_homework
|
c0d90711b5694332b97ad3584935fccfe73525ce
|
d0f39e45b16c120d44fed8111b9af81f69bb50dd
|
refs/heads/master
| 2021-04-09T21:23:26.655571
| 2020-03-29T07:48:57
| 2020-03-29T07:48:57
| 248,881,418
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 963
|
py
|
import requests
from bs4 import BeautifulSoup
# 타겟 URL을 읽어서 HTML를 받아오고,
# Browser-like User-Agent so the request is not rejected as a bot.
headers = {'User-Agent' : 'Mozilla/5.0 (Windows NT 10.0; Win64; x64)AppleWebKit/537.36 (KHTML, like Gecko) Chrome/73.0.3683.86 Safari/537.36'}
# Fetch the target URL (Naver movie ranking by rating, 2020-03-03) as HTML.
data = requests.get('https://movie.naver.com/movie/sdb/rank/rmovie.nhn?sel=pnt&date=20200303',headers=headers)
# Parse the HTML with the BeautifulSoup library into a searchable tree;
# `soup` now holds the parsed document ready for CSS selection.
soup = BeautifulSoup(data.text, 'html.parser')
print (soup.select(".ac.title.tit5"))
# Each table row is one ranked movie entry.
movies = soup.select("#old_content > table > tbody > tr ")
rank = 1
for movie in movies:
    title_tag = movie.select_one(".tit5")
    point_tag = movie.select_one(".point")
    # Separator rows have no title cell; skip them without advancing the rank.
    # NOTE(review): `is not None` would be the idiomatic test here.
    if title_tag != None:
        print(rank, title_tag.text.strip(), point_tag.text)
        rank += 1
|
[
"rmrm.help@gmail.com"
] |
rmrm.help@gmail.com
|
4238c2cac8654bd0b0cd968250da9060c77816a3
|
e084f81eceb362114a2fe04ab1a5a320720be4e2
|
/2/2.2/Spyder/2.2.3/2.2.3.py
|
091e7d471a200692a5002fade57551aa3b456aaf
|
[] |
no_license
|
mygithubhas/Work
|
8b54e81128dcada96308edd59a9ba27a5a0472a1
|
5b70c43f4678edd3892d11c930437a62c8ff9816
|
refs/heads/master
| 2020-05-17T14:04:43.559816
| 2019-05-17T01:12:47
| 2019-05-17T01:12:47
| 183,754,553
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 752
|
py
|
# -*- coding: utf-8 -*-
import sift
from numpy import *
from PIL import Image
from pylab import *
"""
imname = 'qaz.jpg'
im1 = array(Image.open(imname).convert('L'))
sift.process_image(imname,'empire.sift')
l1,d1 = sift.read_features_from_file('empire.sift')
figure()
gray()
sift.plot_features(im1,l1,circle=ture)
show()"""
if __name__ == '__main__':
    imname = "qaz.jpg" # path of the image to process
    im=Image.open(imname)
    sift.process_image(imname,'test.sift')
    # l1 holds the interest points' coordinates, scale and orientation;
    # d1 holds the corresponding 128-dimensional descriptors.
    l1,d1 = sift.read_features_from_file('test.sift')
    figure()
    gray()
    # NOTE(review): `plot_features` is not defined in this scope — the
    # commented example above calls sift.plot_features and passes an image
    # array (not a PIL Image); this line likely raises NameError. Confirm.
    plot_features(im,l1,circle = True)
    title('sift-features')
    show()
|
[
"noreply@github.com"
] |
mygithubhas.noreply@github.com
|
869e7e65527f1128d4c5e8f08e06198bdb2be16e
|
0e170a0e638f1f9af00d2f75550271efc287c3d7
|
/Tools/Scripts/webkitpy/port/darwin_testcase.py
|
1e8da6b3b9aa765659f74ee3843bccc76894e9c0
|
[] |
no_license
|
JoeyLi-1/webkit
|
b55cfa8bb11760f98c22b12b96b146a3414f323d
|
a0da9778cb6d06865a146b9161ce6b83dc857161
|
refs/heads/master
| 2023-01-13T22:39:49.158658
| 2017-05-10T21:11:31
| 2017-05-10T21:11:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,439
|
py
|
# Copyright (C) 2014-2016 Apple Inc. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions
# are met:
# 1. Redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY APPLE INC. AND ITS CONTRIBUTORS ``AS IS'' AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL APPLE INC. OR ITS CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
import time
from webkitpy.port import port_testcase
from webkitpy.common.system.outputcapture import OutputCapture
from webkitpy.tool.mocktool import MockOptions
from webkitpy.common.system.executive_mock import MockExecutive, MockExecutive2, MockProcess, ScriptError
from webkitpy.common.system.systemhost_mock import MockSystemHost
class DarwinTest(port_testcase.PortTestCase):
    """Shared test cases for Darwin (macOS/iOS) WebKit ports: skipped-test
    search paths, the helper process lifecycle, and process sampling.

    NOTE: Python 2 source (`print args` statements below).
    """
    def assert_skipped_file_search_paths(self, port_name, expected_paths, use_webkit2=False):
        # Helper assertion used by subclasses, not a test method itself.
        port = self.make_port(port_name=port_name, options=MockOptions(webkit_test_runner=use_webkit2))
        self.assertEqual(port._skipped_file_search_paths(), expected_paths)
    def test_default_timeout_ms(self):
        super(DarwinTest, self).test_default_timeout_ms()
        # Guard Malloc slows execution, so the timeout is raised to 350s.
        self.assertEqual(self.make_port(options=MockOptions(guard_malloc=True)).default_timeout_ms(), 350000)
    def assert_name(self, port_name, os_version_string, expected):
        # Helper: the port's computed name must match `expected` for the
        # given OS version string.
        host = MockSystemHost(os_name=self.os_name, os_version=os_version_string)
        port = self.make_port(host=host, port_name=port_name)
        self.assertEqual(expected, port.name())
    def test_show_results_html_file(self):
        port = self.make_port()
        # Delay setting a should_log executive to avoid logging from MacPort.__init__.
        port._executive = MockExecutive(should_log=True)
        expected_logs = "MOCK popen: ['Tools/Scripts/run-safari', '--release', '--no-saved-state', '-NSOpen', 'test.html'], cwd=/mock-checkout\n"
        OutputCapture().assert_outputs(self, port.show_results_html_file, ["test.html"], expected_logs=expected_logs)
    def test_helper_starts(self):
        host = MockSystemHost(MockExecutive())
        port = self.make_port(host)
        oc = OutputCapture()
        oc.capture_output()
        # start_helper waits for a 'ready' line from the child process.
        host.executive._proc = MockProcess('ready\n')
        port.start_helper()
        port.stop_helper()
        oc.restore_output()
        # make sure trying to stop the helper twice is safe.
        port.stop_helper()
    def test_helper_fails_to_start(self):
        host = MockSystemHost(MockExecutive())
        port = self.make_port(host)
        oc = OutputCapture()
        oc.capture_output()
        # No mock child process is installed, so starting cannot succeed;
        # both calls must still return without raising.
        port.start_helper()
        port.stop_helper()
        oc.restore_output()
    def test_helper_fails_to_stop(self):
        host = MockSystemHost(MockExecutive())
        host.executive._proc = MockProcess()
        def bad_waiter():
            raise IOError('failed to wait')
        # Simulate wait() failing; stop_helper must tolerate the error.
        host.executive._proc.wait = bad_waiter
        port = self.make_port(host)
        oc = OutputCapture()
        oc.capture_output()
        port.start_helper()
        port.stop_helper()
        oc.restore_output()
    def test_spindump(self):
        def logging_run_command(args):
            print args
        port = self.make_port()
        port.host.filesystem.files['/__im_tmp/tmp_0_/test-42-spindump.txt'] = 'Spindump file'
        port.host.executive = MockExecutive2(run_command_fn=logging_run_command)
        expected_stdout = "['/usr/bin/sudo', '-n', '/usr/sbin/spindump', 42, 10, 10, '-file', '/__im_tmp/tmp_0_/test-42-spindump.txt']\n"
        OutputCapture().assert_outputs(self, port.sample_process, args=['test', 42], expected_stdout=expected_stdout)
        # The spindump output is moved into layout-test-results and the
        # temporary copy removed.
        self.assertEqual(port.host.filesystem.files['/mock-build/layout-test-results/test-42-spindump.txt'], 'Spindump file')
        self.assertIsNone(port.host.filesystem.files['/__im_tmp/tmp_0_/test-42-spindump.txt'])
    def test_sample_process(self):
        def logging_run_command(args):
            # Pretend sudo (needed for spindump) is unavailable so the port
            # falls back to /usr/bin/sample.
            if args[0] == '/usr/bin/sudo':
                return 1
            print args
            return 0
        port = self.make_port()
        port.host.filesystem.files['/__im_tmp/tmp_0_/test-42-sample.txt'] = 'Sample file'
        port.host.executive = MockExecutive2(run_command_fn=logging_run_command)
        expected_stdout = "['/usr/bin/sample', 42, 10, 10, '-file', '/__im_tmp/tmp_0_/test-42-sample.txt']\n"
        OutputCapture().assert_outputs(self, port.sample_process, args=['test', 42], expected_stdout=expected_stdout)
        self.assertEqual(port.host.filesystem.files['/mock-build/layout-test-results/test-42-sample.txt'], 'Sample file')
        self.assertIsNone(port.host.filesystem.files['/__im_tmp/tmp_0_/test-42-sample.txt'])
    def test_sample_process_exception(self):
        def throwing_run_command(args):
            if args[0] == '/usr/bin/sudo':
                return 1
            raise ScriptError("MOCK script error")
        port = self.make_port()
        port.host.executive = MockExecutive2(run_command_fn=throwing_run_command)
        # sample_process must not propagate the sampler's ScriptError.
        OutputCapture().assert_outputs(self, port.sample_process, args=['test', 42])
    def test_get_crash_log(self):
        # Darwin crash logs are tested elsewhere, so here we just make sure we don't crash.
        def fake_time_cb():
            times = [0, 20, 40]
            return lambda: times.pop(0)
        port = self.make_port(port_name=self.port_name)
        port._get_crash_log('DumpRenderTree', 1234, None, None, time.time(), wait_for_log=False)
|
[
"jbedard@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc"
] |
jbedard@apple.com@268f45cc-cd09-0410-ab3c-d52691b4dbfc
|
2c4d549249da38ceb8da0305322c72917bf990a5
|
e03abe6096389b10ea9b89ee0f61985e7b717234
|
/src/blogs/views.py
|
981343fe30244915856103eee1eb94782998a0b3
|
[] |
no_license
|
JoseAntpr/teckium-public-front
|
ed2f8a030b5640e37daad81de0905e743beba21c
|
0df6c75717434ae1619ce16c060f6c0ab22cf0ff
|
refs/heads/master
| 2021-07-23T03:50:06.814967
| 2017-09-21T11:55:37
| 2017-09-21T11:55:37
| 104,091,439
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 175
|
py
|
from django.shortcuts import render
def index(request):
    """Render the blog list page."""
    return render(request, "blogs/list.html")
def detail(request):
    """Render the blog detail page."""
    return render(request, "blogs/detail.html")
|
[
"joseantpalaciosramirez@gmail.com"
] |
joseantpalaciosramirez@gmail.com
|
c300768a0c9fb2fa6b2e0fddd71c78f1ae4b9a8c
|
15f321878face2af9317363c5f6de1e5ddd9b749
|
/solutions_python/Problem_120/798.py
|
c329dc15e7b4a261218cbfd6ca4c9d71add4828b
|
[] |
no_license
|
dr-dos-ok/Code_Jam_Webscraper
|
c06fd59870842664cd79c41eb460a09553e1c80a
|
26a35bf114a3aa30fc4c677ef069d95f41665cc0
|
refs/heads/master
| 2020-04-06T08:17:40.938460
| 2018-10-14T10:12:47
| 2018-10-14T10:12:47
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 537
|
py
|
#!/usr/bin/python
import sys
def getNLines(r, t):
result = 0
havePaint = True
t = t - (2 * r + 1)
r += 2
while t >= 0:
result += 1
t = t - (2 * r + 1)
r += 2
return result
fin = open(sys.argv[1], 'r')
fout = open(sys.argv[2], 'w')
ncases = int(fin.readline().strip())
i = 0
for line in fin:
[r, t] = line.strip().split()
r, t = int(r), int(t)
result = getNLines(r, t)
i += 1
fout.write("Case #" + str(i) + ": " + str(result) + "\n")
fin.close()
fout.close()
|
[
"miliar1732@gmail.com"
] |
miliar1732@gmail.com
|
96234ec593ec6c4db45f6e68070d9aa149f5faf2
|
03f771d745927cdd03029d8e92ab38b52e9cce26
|
/web/oddeye/products/migrations_bk/0008_auto_20200612_1331.py
|
9f68f887536005b9759b4191527e5c64df0a8b63
|
[] |
no_license
|
soothingni/Personal_Shopper-KJH-KIDS
|
524704dcc5729c910bc316108b9d519837bb5031
|
ec8761cc3e563f9af4e0303c972a5ad664b96361
|
refs/heads/master
| 2022-11-05T00:07:31.393271
| 2020-06-23T01:44:09
| 2020-06-23T01:44:09
| 266,282,880
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 505
|
py
|
# Generated by Django 3.0.6 on 2020-06-12 04:31
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('products', '0007_products_productsembedding'),
]
operations = [
migrations.RemoveField(
model_name='productsembedding',
name='product_ID',
),
migrations.DeleteModel(
name='Products',
),
migrations.DeleteModel(
name='ProductsEmbedding',
),
]
|
[
"root@ip-172-31-40-147.ap-northeast-2.compute.internal"
] |
root@ip-172-31-40-147.ap-northeast-2.compute.internal
|
ab8874afa9de48da98722c3415e004160d50031b
|
e9fb0e4871123ed365f250b0dbc9c8330bcbb1a9
|
/ModulesFunctions/11.3StandardPythonlibrary.py
|
5f69868813b756603106757ed1b32405da3f77db
|
[] |
no_license
|
jerin17/PythonProgrammingMasterclass
|
783c5aca9d4aa745a304489b17194a6a4a3094b2
|
6627d1c2f000d707beef6d4d47d552115bf007a8
|
refs/heads/master
| 2020-07-26T00:39:57.235701
| 2019-10-21T17:59:14
| 2019-10-21T17:59:14
| 208,473,283
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 179
|
py
|
print(dir())
print(dir(__builtins__))
import shelve
print(dir())
print()
print(dir(shelve))
for i in dir(shelve.Shelf):
if i[0] != '_':
print(i)
# help
help(shelve)
|
[
"jerinthomas17@gamil.com"
] |
jerinthomas17@gamil.com
|
ee753229c62f0431748b6798d01d48c923e507fd
|
a248e50edb7fb61192e2c341008585e0b37e4f01
|
/util/pfind/pfind.py
|
9d41944d6f3a088706bdaad9e89c7e96bcbc49e5
|
[] |
no_license
|
akipta/hobbyutil
|
48508351a86a8f3cebcac97ede8b17097bf408e3
|
1f9bffe7f3c2b3655177e5f8e1916c476344b9c8
|
refs/heads/master
| 2021-01-18T17:19:05.074492
| 2014-08-28T03:37:28
| 2014-08-28T03:37:28
| 39,736,637
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 17,030
|
py
|
'''
File finding utility (similar to UNIX find). It's not especially
fast, but the usage is more convenient than find and the output is
colorized to see the matches.
Run the script with no arguments to get a help message.
---------------------------------------------------------------------------
Copyright (C) 2008, 2012 Don Peterson
Contact: gmail.com@someonesdad1
The Wide Open License (WOL)
Permission to use, copy, modify, distribute and sell this software and
its documentation for any purpose is hereby granted without fee,
provided that the above copyright notice and this license appear in
all source copies. THIS SOFTWARE IS PROVIDED "AS IS" WITHOUT EXPRESS
OR IMPLIED WARRANTY OF ANY KIND. See
http://www.dspguru.com/wide-open-license for more information.
'''
import sys
import re
import getopt
import os
import fnmatch
import subprocess
from collections import OrderedDict as odict
import color as c
out = sys.stdout.write
nl = "\n"
# If you're using cygwin, set the following variable to point to the
# cygpath utility. Otherwise, set it to None or the empty string.
# This tool allows UNIX-style path conversions so that command line
# directory arguments like /home/myname work correctly.
cygwin = "c:/cygwin/bin/cygpath.exe"
# If you have some version control system directories you'd rather
# ignore, create a regexp for them and add a suitable command line
# option.
rcs = re.compile("/RCS$|/RCS")
hg = re.compile("/.hg$|/.hg/")
# The following variable, if True, causes a leading './' to be removed
# from found files and directories. This shortens things up a bit.
# However, when the -s option is used, leaving rm_dir_tag False causes
# the current directory's entries to be printed last and sorted in
# alphabetical order. This is how I prefer to see things, as
# sometimes the matches can be quite long and scroll off the top of
# the page. Usually, I'm only interested in stuff in the current
# directory.
rm_dir_tag = False
# Colors for output; colors available are:
# black gray
# blue lblue
# green lgreen
# cyan lcyan
# red lred
# magenta lmagenta
# brown yellow
# white lwhite
(black, blue, green, cyan, red, magenta, brown, white, gray, lblue,
lgreen, lcyan, lred, lmagenta, yellow, lwhite) = ( c.black, c.blue,
c.green, c.cyan, c.red, c.magenta, c.brown, c.white, c.gray, c.lblue,
c.lgreen, c.lcyan, c.lred, c.lmagenta, c.yellow, c.lwhite)
c_norm = (white, black) # Color when finished
c_plain = (white, black)
# The following variable can be used to choose different color styles
colorstyle = 0
if colorstyle == 0:
c_dir = (lred, black)
c_match = (yellow, black)
elif colorstyle == 1:
c_dir = (lred, black)
c_match = (lwhite, blue)
elif colorstyle == 2:
c_dir = (lgreen, black)
c_match = (lred, black)
elif colorstyle == 3:
c_dir = (lmagenta, black)
c_match = (yellow, black)
elif colorstyle == 4:
c_dir = (lgreen, black)
c_match = (lwhite, magenta)
elif colorstyle == 5:
c_dir = (lred, black)
c_match = (black, yellow)
# Glob patterns for source code files
source_code_files = [
"*.a",
"*.asm",
"*.awk",
"*.bas",
"*.bash",
"*.bcc",
"*.bsh",
"*.c",
"*.c++",
"*.cc",
"*.cgi",
"*.cob",
"*.cobol",
"*.cpp",
"*.cxx",
"*.dtd",
"*.f",
"*.f90",
"*.h",
"*.hh",
"*.hxx",
"*.java",
"*.js",
"*.ksh",
"*.lisp",
"*.lua",
"*.m4",
"*.mac",
"*.mp",
"*.pas",
"*.perl",
"*.php",
"*.pl",
"*.py",
"*.rb",
"*.rst",
"*.sed",
"*.sh",
"*.sql",
"*.src",
"*.tcl",
"*.vim",
"*.xml",
"*.zsh",
"[Mm]akefile",
"*.f95",
"*.tk",
"*.csh",
"*.v",
"*.ada",
"*.jav",
"*.c__",
"*.d",
"*.f77",
"*.lex",
"*.yacc",
]
# Glob patterns for documentation files
documentation_files = [
"*.doc", "*.odg", "*.ods", "*.odt", "*.pdf", "*.xls",
]
# Glob patterns for picture files
picture_files = [
"*.bmp", "*.clp", "*.dib", "*.emf", "*.eps", "*.gif", "*.img",
"*.jpeg", "*.jpg", "*.pbm", "*.pcx", "*.pgm", "*.png", "*.ppm",
"*.ps", "*.psd", "*.psp", "*.pspimage", "*.raw", "*.tga", "*.tif",
"*.tiff", "*.wmf", "*.xbm", "*.xpm",
]
class Swallow(): # Swallow calls to color module when color output is off
def fg(self, *p, **kw):
pass
def Usage(d, status=2):
d["name"] = os.path.split(sys.argv[0])[1]
d["-s"] = "Don't sort" if d["-s"] else "Sort"
d["-c"] = "Color" if not d["-c"] else "Don't color"
out('''Usage: {name} [options] regex [dir1 [dir2...]]
Finds files using python regular expressions. If no directories are
given on the command line, searches at and below the current
directory. Mercurial, git, RCS, and hidden directories are not
searched by default.
Options:
-C str Globbing pattern separation string (defaults to space)
-D Show documentation files
-G Show git hidden directories and files
-M Show Mercurial hidden directories and files
-P Show picture files
-R Show RCS directories and files
-S Show source code files
-c {-c} code the output
-d Show directories only
-f Show files only
-h Show hidden files/directories that begin with '.'
-i Case-sensitive search
-l n Limit depth to n levels
-m patt Show only files that match glob pattern (can be multiples)
-r Not recursive; search indicated directories only
-s {-s} the output directories and files
-x patt Igore files that match glob pattern (can be multiples)
Examples:
* Find files at and below the current directory containing
the string "rational" (case-insensitive search) excluding *.bak and
*.o:
python {name} -f -x "*.bak *.o" rational
* Find any directories named TMP (case-sensitive search) in or below
the current directory, but exclude any with 'cygwin' in the name:
python {name} -d -i -x "*cygwin*" TMP
* Find all documentation and source code files starting with 't' in
the directory foo
python {name} -DS /t foo
Note this will also find such files in directories that begin with
't' also.
'''.format(**d))
exit(status)
def ParseCommandLine(d):
d["-C"] = " " # Separation string for glob patterns
d["-D"] = False # Print documentation files
d["-M"] = False # Show Mercurial hidden directories
d["-P"] = False # Print picture files
d["-R"] = False # Show RCS directories
d["-S"] = False # Print source code files
d["-c"] = False # Color code the output
d["-d"] = False # Show directories only
d["-f"] = False # Show files only
d["-h"] = False # Show hidden files/directories
d["-i"] = False # Case-sensitive search
d["-m"] = [] # Only list files with these glob patterns
d["-l"] = -1 # Limit to this number of levels (-1 is no limit)
d["-r"] = False # Don't recurse into directories
d["-s"] = False # Sort the output directories and files
d["-x"] = [] # Ignore files with these glob patterns
if len(sys.argv) < 2: Usage(d)
try:
optlist, args = getopt.getopt(sys.argv[1:], "aC:DMPRScdfhim:l:rsx:")
except getopt.GetoptError as str:
msg, option = str
out(msg + nl)
exit(1)
for opt in optlist:
if opt[0] == "-C":
d["-C"] = opt[1]
if opt[0] == "-D":
d["-D"] = True
d["-m"] += documentation_files
if opt[0] == "-h":
d["-h"] = True
if opt[0] == "-i":
d["-i"] = True
if opt[0] == "-M":
d["-M"] = not d["-M"]
if opt[0] == "-P":
d["-P"] = True
d["-m"] += picture_files
if opt[0] == "-R":
d["-R"] = not d["-R"]
if opt[0] == "-S":
d["-S"] = True
d["-m"] += source_code_files
if opt[0] == "-c":
d["-c"] = not d["-c"]
if opt[0] == "-d":
d["-d"] = not d["-d"]
if opt[0] == "-f":
d["-f"] = not d["-f"]
if opt[0] == "-m":
d["-m"] += opt[1].split(d["-C"])
if opt[0] == "-l":
n = int(opt[1])
if n < 0:
raise ValueError("-l option must include number >= 0")
d["-l"] = n
if opt[0] == "-r":
d["-r"] = not d["-r"]
if opt[0] == "-s":
d["-s"] = not d["-s"]
if opt[0] == "-x":
s, c = opt[1], d["-C"]
d["-x"] += opt[1].split(d["-C"])
if len(args) < 1:
Usage(d)
if d["-i"]:
d["regex"] = re.compile(args[0])
else:
d["regex"] = re.compile(args[0], re.I)
args = args[1:]
if len(args) == 0:
args = ["."]
# Store search information in order it was found
d["search"] = odict()
return args
def Normalize(x):
return x.replace("\\", "/")
def TranslatePath(path, to_DOS=True):
'''Translates an absolute cygwin (a UNIX-style path on Windows) to
an absolute DOS path with forward slashes and returns it. Use
to_DOS set to True to translate from cygwin to DOS; set it to
False to translate the other direction.
'''
direction = "-w" if to_DOS else "-u"
if to_DOS and path[0] != "/":
raise ValueError("path is not an absolute cygwin path")
if "\\" in path:
# Normalize path (cypath works with either form, but let's not
# borrow trouble).
path = path.replace("\\", "/")
msg = ["Could not translate path '%s'" % path]
s = subprocess.Popen((cygwin, direction, path),
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
errlines = s.stderr.readlines()
if errlines:
# Had an error, so raise an exception with the error details
msg.append(" Error message sent to stderr:")
for i in errlines:
msg.append(" " + i)
raise ValueError(nl.join(msg))
lines = [i.strip() for i in s.stdout.readlines()]
if len(lines) != 1:
msg.append(" More than one line returned by cygpath command")
raise ValueError(nl.join(msg))
return lines[0].replace("\\", "/")
def Ignored(s, d):
'''s is a file name. If s matches any of the glob patterns in
d["-x"], return True.
'''
for pattern in d["-x"]:
if d["-i"]:
if fnmatchcase(s, pattern):
return True
else:
if fnmatch.fnmatch(s, pattern):
return True
return False
def Included(s, d):
'''s is a file name. If s matches any of the glob patterns in
d["-m"], return True.
'''
for pattern in d["-m"]:
if d["-i"]:
if fnmatchcase(s, pattern):
return True
else:
if fnmatch.fnmatch(s, pattern):
return True
return False
def PrintMatch(s, d, start, end, isdir=False):
'''For the match in s, print things out in the appropriate colors.
'''
if isdir:
c.fg(c_dir)
else:
c.fg(c_plain)
out(s[:start])
c.fg(c_match)
out(s[start:end])
if isdir:
c.fg(c_dir)
else:
c.fg(c_plain)
def PrintMatches(s, d, isdir=False):
'''Print the string s and show the matches in appropriate
colors. Note that s can end in '/' if it's a directory.
We handle this case specially by leaving off the trailing
'/'.
'''
if d["-f"] and not d["-d"]:
# Files only -- don't print any matches in directory
dir, file = os.path.split(s)
out(dir)
if dir and dir[:-1] != "/":
out("/")
s = file
while s:
if isdir and s[-1] == "/":
mo = d["regex"].search(s[:-1])
else:
mo = d["regex"].search(s)
if mo and d["-c"]:
PrintMatch(s, d, mo.start(), mo.end(), isdir=isdir)
s = s[mo.end():]
else:
# If the last character is a '/', we'll print it in color
# to make it easier to see directories.
if s[-1] == "/":
out(s[:-1])
c.fg(c_dir)
out("/")
else:
try:
out(s)
except IOError:
# Caused by broken pipe error when used with less
exit(0)
s = ""
c.fg(c_plain)
out(nl)
def PrintReport(d):
'''Note we'll put a '/' after directories to flag them as such.
'''
D = d["search"]
if d["-s"]:
# Print things in sorted form, directories first.
dirs, files = [], []
# Organize by directories and files. Note you need to use keys()
# to get the original insertion order
for i in D.keys():
if D[i]:
dirs.append(i)
else:
files.append(i)
c.fg(c_plain)
dirs.sort()
files.sort()
if not d["-d"] and not d["-f"]:
# Both directories and files
for i in dirs:
PrintMatches(i + "/", d, isdir=True)
for i in files:
PrintMatches(i, d)
else:
if d["-d"]: # Directories only
for i in dirs:
PrintMatches(i + "/", d, isdir=True)
else: # Files only
for i in files:
PrintMatches(i, d)
else:
# Print things as encountered by os.walk
for i in D.keys():
if (d["-f"] and D[i]) or (d["-d"] and not D[i]):
continue
PrintMatches(i + "/" if D[i] else i, d, isdir=D[i])
c.fg(c_norm)
def Join(root, name, d, isdir=False):
'''Join the given root directory and the file name and store
appropriately in the d["search"] odict. isdir will be True if
this is a directory. Note we use UNIX notation for the file
system's files, regardless of what system we're on.
'''
# Note we check both the path and the filename with the glob
# patterns to see if they should be included or excluded.
is_ignored = Ignored(name, d) or Ignored(root, d)
is_included = Included(name, d) or Included(root, d)
if is_ignored:
return
if d["-m"] and not is_included:
return
root, name = Normalize(root), Normalize(name)
if not d["-R"]: # Ignore RCS directories
mo = rcs.search(root)
if mo or name == "RCS":
return
if not d["-M"]: # Ignore Mercurial directories
mo = hg.search(root)
if mo or name == ".hg":
return
# Check if we're too many levels deep. We do this by counting '/'
# characters. If root starts with '.', then that's the number of
# levels deep; otherwise, subtract 1. Note if isdir is True, then
# name is another directory name, so we add 1 for that.
lvl = root.count("/") + isdir
if root[0] == ".":
lvl -= 1
if d["-l"] != -1 and lvl >= d["-l"]:
return
if root == ".":
root = ""
elif rm_dir_tag and len(root) > 2 and root[:2] == "./":
root = root[2:]
s = Normalize(os.path.join(root, name))
d["search"][s] = isdir
def Find(dir, d):
def RemoveHidden(names):
'''Unless d["-h"] is set, remove any name that begins with
'.'.
'''
if not d["-h"]:
names = [i for i in names if i[0] != "."]
return names
contains = d["regex"].search
J = lambda root, name: Normalize(os.path.join(root, name))
find_files = d["-f"] & ~ d["-d"]
find_dirs = d["-d"] & ~ d["-f"]
for root, dirs, files in os.walk(dir):
# If any component of root begins with '.', ignore unless
# d["-h"] is set.
has_dot = any([i.startswith(".") and len(i) > 1
for i in root.split("/")])
if not d["-h"] and has_dot:
continue
files = RemoveHidden(files)
dirs = RemoveHidden(dirs)
if find_files:
[Join(root, name, d) for name in files if contains(name)]
elif find_dirs:
[Join(root, dir, d, isdir=True) for dir in dirs
if contains(J(root, dir))]
else:
[Join(root, name, d, isdir=True) for name in dirs
if contains(J(root, name))]
[Join(root, name, d) for name in files if contains(J(root, name))]
if d["-r"]: # Not recursive
# This works because the search is top-down
break
def main():
d = {} # Settings dictionary
directories = ParseCommandLine(d)
if not d["-c"]:
global c
c = Swallow()
for dir in directories:
# Following needed on cygwin
#if dir and dir[0] == "/":
# dir = TranslatePath(dir)
Find(dir, d)
PrintReport(d)
main()
|
[
"donp@localhost"
] |
donp@localhost
|
598771da6eacd1ad23c8329681d90b014b4dc13b
|
bf74f9c2ffc3be249919978d68030f71318d302a
|
/Python/Assignment/Assignment.py
|
2c141fe730d17d7a4fb15188f9b7737a0f96467e
|
[] |
no_license
|
stondiki/Software_Engineering_Assignmets
|
845fbb3e1a3b5df1ac026bdaabba34d13dab9063
|
8e62f9871a23a98a2986e4a8e0dea45faa0360e6
|
refs/heads/master
| 2020-04-02T04:42:59.249408
| 2018-11-05T16:43:47
| 2018-11-05T16:43:47
| 154,030,707
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 126
|
py
|
# Terence Amunga
a = [1, 1, 2, 3, 5, 8, 13, 21, 34, 55, 89]
for i in range (0, len(a)):
if a[i] < 5:
print(a[i])
|
[
"stondikiyeye@gmail.com"
] |
stondikiyeye@gmail.com
|
20a269a8d43deee384fb79623e313a304afaead5
|
d67560f8e3bf6e1a9058a96d377dfaa37152d9a5
|
/smake.py
|
aff238474c2588bd3e9764efeeb6c1ab1977f5fc
|
[] |
no_license
|
balabit-deps/glib
|
5d1292acf9b0ae43c0c9c48f95570d98b634b488
|
713f1b5b1f29a5d5567644dd21a39197bd4fd3e0
|
refs/heads/master
| 2022-11-15T18:38:53.767607
| 2020-06-23T14:04:31
| 2020-06-23T14:04:31
| 106,435,794
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,431
|
py
|
#!/usr/bin/python
import sys
sys.path.append("../lib")
import os
import Utils
import argparse
from smake import builder
def get_main_options():
usage = 'usage: bootstrap.py [options] [command [command-args]]'
epilog = "Build a submodule"
parser = argparse.ArgumentParser(add_help=False, epilog=epilog, usage=usage)
parser.add_argument("-h", action="store_true", dest="help", help="Show this help message and exit")
parser.add_argument("--help", action='store_true', dest='help', help='Show manual page');
parser.add_argument('command', nargs=argparse.REMAINDER, help='Optional command and it\'s options', default='full')
return parser
def handle_command(obj, command):
parameter = None
if len(command) == 0:
cmd = 'full'
else:
cmd = command[0]
parameter = " ".join(command[1:])
try:
func = getattr(obj, cmd)
if parameter:
func(parameter)
else:
func()
except AttributeError:
print "Unknown command: %s"%cmd
def main(args):
options = get_main_options()
opts = options.parse_args(args[1:])
obj = builder.get_builder()
if (len(opts.command) and opts.command[0] == 'help' or
len(opts.command) <= 1 and opts.help):
options.print_help()
obj.print_commands()
return
handle_command(obj, opts.command)
if __name__ == "__main__":
main(sys.argv)
|
[
"viktor.juhasz@balabit.com"
] |
viktor.juhasz@balabit.com
|
4ff2c781cdf81d45aaf784a5232c12eb713ce75c
|
613cbdeb74e2a74761ff20cd4378382d2fc6b9db
|
/tornado/lesson/util/uimodules.py
|
483f02394e491032233aaffefec57de869232882
|
[] |
no_license
|
atiger808/doc-md
|
771b282750b5f79db43598fc064be65164690fbe
|
d5babd94d3244e9e6eba9b026717718e907b1f50
|
refs/heads/main
| 2022-12-28T23:51:16.772226
| 2020-10-14T08:32:43
| 2020-10-14T08:32:43
| 303,942,186
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 566
|
py
|
'''
this is ui_modules
'''
from tornado.web import UIModule
class UiModule(UIModule):
def render(self, *args, **kwargs):
return '我是 ui_module'
class Advertisement(UIModule):
def render(self, *args, **kwargs):
return self.render_string('07ad.html')
def css_files(self):
return "/static/css/King_Chance_Layer7.css"
def javascript_files(self):
return [
"/static/js/jquery_1_7.js",
"/static/js/King_Chance_Layer.js",
"/static/js/King_layer_test.js",
]
|
[
"atiger0614@163.com"
] |
atiger0614@163.com
|
131d9af5038c92568ff92dcec79fa162113e0497
|
66ba6906460f8bc90fb1823698025168528d4484
|
/back/database/db_init.py
|
92e0342f0ad2805a6655715cf2c891e98269aa15
|
[] |
no_license
|
tdbeirne/graffiti
|
96a1441eacd504c0ce5db02d3b4254650f1ea496
|
e37b544e9ba676808d0de327d1c8d37e7531907e
|
refs/heads/master
| 2021-02-06T10:51:16.022407
| 2020-03-01T21:37:07
| 2020-03-01T21:37:07
| 243,907,620
| 0
| 0
| null | 2020-03-01T21:37:09
| 2020-02-29T05:02:35
|
Python
|
UTF-8
|
Python
| false
| false
| 1,083
|
py
|
import sqlite3
from sqlite3 import Error as SQLiteError
DATABASE = '/opt/data/graffiti.db'
MAKE_TABLE = """CREATE TABLE IF NOT EXISTS messages (
id INTEGER PRIMARY KEY,
lat REAL NOT NULL,
lon REAL NOT NULL,
txt TEXT NOT NULL,
time INTEGER NOT NULL
);"""
def create_connection(db: str) -> sqlite3.Connection:
""" create a database connection to a SQLite database """
c = None
try:
c = sqlite3.connect(db)
except SQLiteError as e:
print(e)
return c
def sql_command(connection: sqlite3.Connection, sql: str):
try:
c: sqlite3.Cursor = connection.cursor()
c.execute(sql)
except SQLiteError as e:
print(e)
def init():
connection = create_connection(DATABASE)
if connection is not None:
sql_command(connection, MAKE_TABLE)
# cur = connection.cursor()
# # for i in range(1000):
# # query = "INSERT INTO {} (lat, lon, txt, time) VALUES{}".format(TABLE_NAME, str(data_tuple))
# # cur.execute(query)
# # conn.commit()
if __name__ == '__main__':
init()
|
[
"noreply@github.com"
] |
tdbeirne.noreply@github.com
|
b5b6b40039ceff6bb34e1cea882a7a58f9d912ef
|
22aaf50b5f6b89b30d58f0d1c48c3fdf08d5dcdf
|
/code/UI.py
|
718095187d5fa5a99feaf5022cddbc9f5ecd32c3
|
[] |
no_license
|
n1ck404/Topic-Cluster
|
054c6e637721b4dcbf9c1cda0e5dbfaa04612ed2
|
f43e837658c1c4c721179787db3236f6e62be4b5
|
refs/heads/master
| 2020-03-31T16:53:24.202873
| 2018-11-02T03:03:23
| 2018-11-02T03:03:23
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,618
|
py
|
# -*- coding: utf-8 -*-
"""
# @Time : 6/10/18 5:56 PM
# @Author : Heng Guo
# @File : UI.py
"""
import sys
from PyQt5.QtWidgets import *
from PyQt5.QtCore import Qt
from PyQt5.QtGui import QPixmap
import UIFunction as UF
class Root(QMainWindow):
def __init__(self):
super().__init__()
self.initUI()
def initUI(self):
self.resize(300, 400)
self.setFixedSize(300, 400)
UF.center(self)
self.setWindowTitle('Topic Cluster')
self.statusBar().showMessage('Current no model')
self.wig = QWidget(self)
self.setCentralWidget(self.wig)
self.create_logo()
self.create_button()
def create_logo(self):
logo = QLabel(self)
pixmap = QPixmap('icon.png')
pixmap = pixmap.scaledToHeight(120)
logo.setPixmap(pixmap)
logo.setGeometry(0, 0, 300, 120)
logo.setAlignment(Qt.AlignCenter)
def create_button(self):
button_wig = QWidget(self.wig)
button_wig.setGeometry(60, 120, 180, 240)
grid = QGridLayout()
grid.setSpacing(5)
button_wig.setLayout(grid)
button1 = QPushButton('Train Model')
button2 = QPushButton('Load Model')
button3 = QPushButton('Updata Model')
button4 = QPushButton('Save Model')
button5 = QPushButton('View Model')
grid.addWidget(button1, 0, 0)
grid.addWidget(button2, 1, 0)
grid.addWidget(button3, 2, 0)
grid.addWidget(button4, 3, 0)
grid.addWidget(button5, 4, 0)
button1.clicked.connect(lambda : UF.choose(self))
button2.clicked.connect(lambda : UF.choose(self))
button3.clicked.connect(lambda : UF.choose(self))
button4.clicked.connect(lambda : UF.choose(self))
button5.clicked.connect(lambda : UF.choose(self))
# esc key to quit
def keyPressEvent(self, e):
if e.key() == Qt.Key_Escape:
self.close()
def closeEvent(self, event):
reply = QMessageBox.question(self, 'Message', "Are you sure to quit?", QMessageBox.Yes | QMessageBox.No,
QMessageBox.Yes)
if reply == QMessageBox.Yes:
event.accept()
else:
event.ignore()
class FileWindow(QDialog):
def __init__(self):
super().__init__()
self.resize(400,60)
self.path =''
self.initUI()
def initUI(self):
button = QPushButton('...')
self.buttonbox = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
self.buttonbox.rejected.connect(self.reject)
self.buttonbox.accepted.connect(self.accept)
self.le = QLineEdit()
Hlayout = QHBoxLayout()
Vlayout = QVBoxLayout()
Vlayout.addLayout(Hlayout)
self.setLayout(Vlayout)
Hlayout.addWidget(self.le)
Hlayout.addWidget(button)
Vlayout.addWidget(self.buttonbox)
button.clicked.connect(self.choose_dir)
def choose_dir(self):
self.path = QFileDialog.getExistingDirectory(self, 'Select a folder:', '..', QFileDialog.ShowDirsOnly)
self.le.setText(self.path)
class TrainWindow(QDialog):
def __init__(self):
super().__init__()
self.resize(300,400)
self.setWindowTitle('Choose model and features')
self.initUI()
def initUI(self):
main_layout = QVBoxLayout()
f_layout = QFormLayout()
v_layout = QVBoxLayout()
self.m_bt = QComboBox()
self.m_bt.addItems(['LDA','LSI','HDP'])
# m_bt.addItem('LSI')
# m_bt.addItem('HDP')
l = QLabel('model:')
self.m_bt.currentIndexChanged.connect(self.choose_model)
self.le1 = QLineEdit()
self.le1.setText('1000')
self.le2 = QLineEdit()
self.le2.setText('20')
f_layout.addRow(l,self.m_bt)
f_layout.addRow('iteration times',self.le1)
f_layout.addRow('topic numbers:',self.le2)
groupbox = QGroupBox('Function:')
vbox = QVBoxLayout()
self.c1 = QCheckBox('ngram')
self.c2 = QCheckBox('lemmatization')
self.c3 = QCheckBox('stop_words')
self.c4 = QCheckBox('tfidf')
vbox.addWidget(self.c1)
vbox.addWidget(self.c2)
vbox.addWidget(self.c3)
vbox.addWidget(self.c4)
groupbox.setLayout(vbox)
self.button = QDialogButtonBox(QDialogButtonBox.Ok | QDialogButtonBox.Cancel)
self.button.rejected.connect(self.reject)
self.button.accepted.connect(self.accept)
main_layout.addLayout(f_layout)
v_layout.addWidget(groupbox)
v_layout.addWidget(self.button)
main_layout.addLayout(v_layout)
main_layout.setStretch(0,1)
main_layout.setStretch(1,2)
self.setLayout(main_layout)
def choose_model(self, i):
if i == 1:
self.le1.setText('can`t set')
if self.le2.text() == '' or self.le2.text() == 'can`t set':
self.le2.setText('20')
self.le1.setEnabled(False)
self.le2.setEnabled(True)
elif i == 2:
self.le2.setText('can`t set')
if self.le1.text() == '' or self.le1.text() == 'can`t set':
self.le1.setText('1000')
self.le2.setEnabled(False)
self.le1.setEnabled(True)
else:
self.le1.setEnabled(True)
self.le2.setEnabled(True)
if self.le1.text() == '' or self.le1.text() == 'can`t set':
self.le1.setText('1000')
if self.le2.text() == '' or self.le2.text() == 'can`t set':
self.le2.setText('20')
class ProgressWindow(QWidget):
def __init__(self):
super().__init__()
self.resize(300,50)
self.initUI()
def initUI(self):
self.pb = QProgressBar()
self.pb.setMinimum(0)
self.pb.setMaximum(0)
self.label = QLabel('Start training, it may take few minutes')
v_layout = QVBoxLayout()
v_layout.addWidget(self.label)
v_layout.addWidget(self.pb)
self.setLayout(v_layout)
class ViewWindow(QWidget):
def __init__(self):
super().__init__()
self.resize(600,800)
self.initUI()
def initUI(self):
self.topic_btn = QSpinBox()
self.topic_btn.setRange(1,UF.MODEL.num_topics)
self.topic_btn.setPrefix('topic ')
self.topic_btn.valueChanged.connect(lambda :self.choose_topic(self.topic_btn.value()))
self.topic_id = 0
self.page = 0
self.page_num = 0
lb1 = QLabel('key words:')
lb2 = QLabel('sentence:')
lb3 = QLabel('document:')
self.topic_lb = QTextBrowser()
self.sent_tb = QTextBrowser()
self.doc_tb = QTextBrowser()
self.uppage = QPushButton('<<')
self.downpage = QPushButton('>>')
self.page_sp = QSpinBox()
self.uppage.setEnabled(False)
self.choose_topic(1)
self.uppage.clicked.connect(lambda :self.choose_file(self.page-1))
self.downpage.clicked.connect(lambda :self.choose_file(self.page+1))
self.page_sp.valueChanged.connect(lambda :self.choose_file(self.page_sp.value()))
grid = QGridLayout()
grid.addWidget(self.topic_btn,0,0)
grid.addWidget(lb1,1,0)
grid.addWidget(self.topic_lb,1,1,2,4)
grid.addWidget(lb2,3,0)
grid.addWidget(self.sent_tb,3,1,3,4)
grid.addWidget(lb3,6,0)
grid.addWidget(self.doc_tb,6,1,5,4)
grid.addWidget(self.uppage,11,1)
grid.addWidget(self.page_sp,11,2)
grid.addWidget(self.downpage,11,3)
self.setLayout(grid)
def choose_topic(self, value):
self.topic_id = value - 1
self.topic_lb.setText(UF.MODEL.topic_key.iloc[self.topic_id, 1])
self.sent_tb.setText(UF.MODEL.topic_sent.iloc[self.topic_id, 1])
self.doc_tb.setText(UF.find_doc(UF.MODEL, self.topic_id, 0))
self.page_num = len(UF.MODEL.topic_doc.iloc[self.topic_id, 1].split(','))
self.page_sp.setRange(1, self.page_num)
self.page_sp.setSuffix('/{}'.format(self.page_num))
def choose_file(self, value):
self.page = value
self.page_sp.setValue(self.page)
self.doc_tb.setText(UF.find_doc(UF.MODEL, self.topic_id, self.page - 1))
if self.page > 1:
self.uppage.setEnabled(True)
else:
self.uppage.setEnabled(False)
if self.page == self.page_num:
self.downpage.setEnabled(False)
else:
self.downpage.setEnabled(True)
|
[
"noreply@github.com"
] |
n1ck404.noreply@github.com
|
f54758657758e32fe16dfe7bb10ac04a025dcd1e
|
1d8e4ef73563499dc1e6eb81f14f0c0b175a2b44
|
/sell_slide.py
|
feccab3a4ba9d644b5b9ab17fa65a99708532f62
|
[] |
no_license
|
tylerjw/stockfighter
|
4b2c86589479e21f893efb1cf7cfa9fd202705ac
|
fb534d41a34131935bf0295ebe1b74a4c6dec2d8
|
refs/heads/master
| 2021-01-10T02:25:14.861103
| 2015-12-24T19:21:54
| 2015-12-24T19:21:54
| 48,448,592
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,089
|
py
|
from stockfighter import Stockfighter as Fighter
from stockfighter import GM
import matplotlib.pyplot as plt
import math as m
import random
from position import Position
import time as t
from book import Book
import pprint as pp
exchange = 'CQNEX'
symbol = 'ELT'
account = 'JB76422300'
key = '52d0445bb4e4a5e4f7672d3701e55cef1bacc7e1'
def fill_math(fills):
items = 0
total = 0
for fill in fills:
items += fill['qty']
total += fill['qty'] * fill['price']
avg = total / items
return [items, total, avg]
def test():
sf = Fighter(exchange, account, key)
pos = Position()
quote = sf.symbol_quote(symbol)
last = int(quote['last'])
transaction_size = 10
order = sf.place_new_order(symbol, last, transaction_size, 'sell', 'limit')
if order['ok'] == False:
print order
order_id = order['id']
order_open = True
fullfilled = 0
while order_open:
status = sf.status_for_order(order_id, symbol)
order_open = status['open']
fill = status['totalFilled']
if fill != fullfilled:
fullfilled = fill
print "total filled = {}".format(fullfilled)
status = sf.status_for_order(order_id, symbol)
[items, total, avg] = fill_math(status['fills'])
pos.sell(items, total)
quote = sf.symbol_quote(symbol)
last = int(quote['last'])
pos.print_pos(last)
def monitor_book():
sf = Fighter(exchange, account, key)
spreads = []
for idx in range(200):
print idx
book = Book(sf.orderbook_for_stock(symbol))
print book.totals()
spread = book.spread()
if(spread > 0):
spreads.append(spread)
t.sleep(0.1)
plt.plot(spreads)
plt.show()
def sell_slide_book_smart():
    """Market-make around the order book's centre price, forever.

    Each pass reads the book, skips iterations where either side is
    empty, then simultaneously posts a limit buy just below centre and
    a limit sell just above it (together spanning 60% of the observed
    spread), and waits up to `timeout` seconds for each to fill.
    Breaks out of the loop if the exchange rejects either order.
    """
    sf = Fighter(exchange, account, key)
    timeout = 2
    bid_max = 50
    bid_min = 30  # NOTE(review): unused since the layered-order experiment below was disabled
    while True:
        book_json = sf.orderbook_for_stock(symbol)
        book = Book(book_json)
        center = book.center()
        if book.total_asks() == 0 or book.total_bids() == 0:
            continue # try again
        spread = int(book.spread() * 0.6)
        print book.totals()
        order_price = int(center - spread/2)
        sell_price = int(center + spread/2)
        bid_size = bid_max
        print bid_size
        print "Order Price: ${:.2f}".format(order_price/100.0)
        print "Sell Price: ${:.2f}".format(sell_price/100.0)
        order = sf.place_new_order(symbol, order_price, bid_size, 'buy', 'limit')
        sell = sf.place_new_order(symbol, sell_price, bid_size, 'sell', 'limit')
        # (A disabled experiment here layered four more buy/sell pairs
        # at successively halved spreads and randomized sizes; the
        # commented-out code was removed for readability.)
        if order['ok'] == False:
            print "ORDER ERROR!"
            print order
            break
        if sell['ok'] == False:
            print "SELL ERROR!"
            print sell
            break
        bought = wait_for_order(sf, symbol,order['id'],timeout)
        sold = wait_for_order(sf, symbol,sell['id'],timeout)
        # (Matching disabled wait_for_order calls for the layered
        # orders were removed along with the experiment above.)
def update_pos(sf, pos, symbol, order_id):
    """Fold one order's fills into a Position.

    Fetches the order's current status from the exchange, totals its
    fills via fill_math, and applies the result to *pos* as a buy or
    a sell depending on the order's direction.
    """
    order_status = sf.status_for_order(order_id, symbol)
    filled_qty, filled_cost, _avg_price = fill_math(order_status['fills'])
    apply_fill = pos.buy if order_status['direction'] == 'buy' else pos.sell
    apply_fill(filled_qty, filled_cost)
def wait_for_order(sf, symbol, order_id, timeout):
    """Poll an order until it closes or `timeout` (seconds) elapses.

    Returns the total quantity filled so far.  On timeout the order is
    NOT cancelled (the cancel call is commented out), so it may keep
    filling on the exchange after this returns.

    NOTE(review): uses t.clock(), which was removed in Python 3.8;
    t.perf_counter() is the modern replacement.
    """
    order_open = True
    start_time = t.clock()
    while order_open:
        status = sf.status_for_order(order_id, symbol)
        order_open = status['open']
        if order_open:
            time_now = t.clock()  # NOTE(review): unused; the condition below re-reads the clock
            if ((t.clock() - start_time) > timeout):
                # sf.cancel_order(order_id, symbol)
                # print 'caneled order: {}'.format(order_id)
                order_open = False
            else:
                t.sleep(0.1)
    status = sf.status_for_order(order_id, symbol)
    n_fullfilled = status['totalFilled']
    return n_fullfilled
def sell_slide():
    """Spread-capture loop: repeatedly buy at the last traded price and
    sell `spread` cents above it, using market-order "recovery" trades
    to pull the net position back inside +/- share_limit.

    NOTE(review): status_for_order / cancel_order are called as
    (symbol, id) for the sell/recovery orders but (id, symbol) for the
    buy order -- one of the two argument orders is presumably wrong;
    verify against the Stockfighter client API.
    """
    sf = Fighter(exchange, account, key)
    pos = Position()
    spread = 15
    transaction_size = 100
    share_limit = 150
    recovery_step = 30
    while True:
        # if lowest_last > target_price:
        #     lowest_last = target_price
        #     print "adjusted to target price of {}".format(target_price)
        quote = sf.symbol_quote(symbol)
        last = int(quote['last'])
        print "price = {}".format(last)
        order = sf.place_new_order(symbol, last, transaction_size, 'buy', 'limit')
        sell = sf.place_new_order(symbol, last+spread, transaction_size, 'sell', 'limit')
        if pos.get_position() > share_limit:
            # sell some
            print "recovery sell"
            recovery = sf.place_new_order(symbol, 0, recovery_step, 'sell', 'market')
            recovery_open = True
            while recovery_open:
                status = sf.status_for_order(symbol, recovery['id'])
                recovery_open = status['open']
            status = sf.status_for_order(symbol, recovery['id'])
            [items, total, avg] = fill_math(status['fills'])
            pos.sell(items, total)
        if pos.get_position() < -1*share_limit:
            # buy some
            print "recovery buy"
            recovery = sf.place_new_order(symbol, 0, recovery_step, 'buy', 'market')
            recovery_open = True
            while recovery_open:
                status = sf.status_for_order(symbol, recovery['id'])
                recovery_open = status['open']
            status = sf.status_for_order(symbol, recovery['id'])
            [items, total, avg] = fill_math(status['fills'])
            pos.buy(items, total)
        if order['ok'] == False:
            print "ORDER ERROR!"
            print order
            break
        order_id = order['id']
        order_open = True
        if sell['ok'] == False:
            print "SELL ERROR!"
            print sell
            break
        sell_id = sell['id']
        sell_open = True
        fullfilled = 0
        timeout = 15 # seconds
        start_time = t.clock()
        #wait for order to close
        while order_open:
            status = sf.status_for_order(order_id, symbol)
            order_open = status['open']
            fill = status['totalFilled']
            if fill != fullfilled:
                fullfilled = fill
                print "total ordered = {}".format(fullfilled)
            quote = sf.symbol_quote(symbol)
            new_last = int(quote['last'])
            # Bail out if the price slid more than one spread below our
            # bid, or the timeout elapsed: cancel and re-quote.
            if(new_last < (last - spread) or
               (t.clock() - start_time) > timeout):
                sf.cancel_order(order_id, symbol)
                order_open = False
                # stoping the order and sell for a better price
        #wait for sell to close
        while sell_open:
            status = sf.status_for_order(symbol, sell_id)
            sell_open = status['open']
            quote = sf.symbol_quote(symbol)
            new_last = int(quote['last'])
            if(new_last > (last + 2*spread) or
               (t.clock() - start_time) > timeout):
                sf.cancel_order(symbol, sell_id)
                sell_open = False
                # stoping the order and sell for a better price
        status = sf.status_for_order(order_id, symbol)
        [items, total, avg] = fill_math(status['fills'])
        pos.buy(items, total)
        status = sf.status_for_order(symbol, sell_id)
        [items, total, avg] = fill_math(status['fills'])
        pos.sell(items, total)
        quote = sf.symbol_quote(symbol)
        last = int(quote['last'])
        pos.print_pos(last)
def main():
    # Entry point: run the book-aware market-making loop until error.
    sell_slide_book_smart()
if __name__ == '__main__':
    main()
|
[
"tweaver@lgsinnovations.com"
] |
tweaver@lgsinnovations.com
|
e34f05678cd7c322ec61141e37f381b0421eb484
|
b9a7f854c82b411406dcc77008b2862ec9f3bf70
|
/LeetCode/209_minSubArrayLen.py
|
2129c3bf07bb93c1b268a0ba36cbd0c2359ff6a3
|
[] |
no_license
|
cabbageGG/play_with_algorithm
|
0f145db092944d3808a6e23e4b5b43d7ff9f9d84
|
6b11084e3537240d1c40b016b7343cb47aef9244
|
refs/heads/master
| 2021-05-14T06:50:57.572117
| 2018-04-18T01:39:32
| 2018-04-18T01:39:32
| 116,250,677
| 3
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,247
|
py
|
#-*- coding: utf-8 -*-
'''
Given an array of n positive integers and a positive integer s,
find the minimal length of a contiguous subarray of which the sum ≥ s. If there isn't one, return 0 instead.
For example, given the array [2,3,1,2,4,3] and s = 7,
the subarray [4,3] has the minimal length under the problem constraint.
'''
class Solution:
    def minSubArrayLen(self, s, nums):
        """Length of the shortest contiguous subarray with sum >= s.

        Classic two-pointer sliding window: grow the window on the
        right while its sum is below the target, shrink it from the
        left while the sum is large enough, tracking the best length
        seen.  Returns 0 when no subarray reaches the target.

        :type s: int
        :type nums: List[int]
        :rtype: int
        """
        n = len(nums)
        if n < 1:
            return 0
        best = n + 1  # sentinel: longer than any real window
        left = 0
        right = 0
        window_sum = nums[0]
        while left <= right and right < n:
            if window_sum >= s:
                best = min(right - left + 1, best)
                window_sum -= nums[left]
                left += 1
            else:
                right += 1
                if right < n:
                    window_sum += nums[right]
        return 0 if best == n + 1 else best
# NOTE(review): this bare list literal is a no-op expression statement,
# presumably a leftover scratch value -- safe to delete.
[2,3,1,2,4,3]
if __name__ == '__main__':
    # Quick manual check against the problem statement's example
    # (expected output: 2, for the subarray [4, 3]).
    s = Solution()
    ss = [2,3,1,2,4,3]
    tt = s.minSubArrayLen(7,ss)
    print (tt)
|
[
"13246856469@163.com"
] |
13246856469@163.com
|
ac4e8b4b45af025ac1339e21e6186ff76f637fa3
|
256416073b7017701e95beef71897720551abd1b
|
/application_pyowm/views.py
|
24efc5c2c612f989e0285f34bc5a3f60736fa00a
|
[] |
no_license
|
B-Ester/pyowm_app
|
4263e85abb3c5691571ea821abf55724d57eef00
|
1ae3ce8783ebf2ba17c7190605d5ea51054e5ffb
|
refs/heads/master
| 2020-04-05T13:08:39.528656
| 2017-08-03T11:26:43
| 2017-08-03T11:26:43
| 95,116,934
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,182
|
py
|
from django.shortcuts import render, HttpResponse, HttpResponseRedirect
from .weather_api import weather_at_any_city as ws, all_locations, forecast_snow as fs
from .weather_api import cords, tomorrow_forecast as tf
from .weather_api import forecast_sun as tfs, forecast_c as fc, forecast_fog as fg
from .weather_api import forecast_h as fh, forecast_t as ft, weather_at_coords as wac
from .forms import City, Cords
from .weather_api import timezone_detec as tzdt, time_in_tz_now as titn
def index(request):
    """Render the landing page with the configured location list."""
    return render(request, 'application_pyowm/index.html', {'loc': all_locations})
def city_forecast(request):
    """Current-weather view for a user-supplied city.

    POST with a valid City form renders the weather page for that
    city; an invalid form returns a plain error response; GET renders
    the empty search form.  (The original ended with an unreachable
    HttpResponseRedirect after the final return; removed.)
    """
    if request.method == 'POST':
        form = City(request.POST)
        if not form.is_valid():
            return HttpResponse('Введите корректное назване города')
        city_name = form.cleaned_data['name_of_city']
        # Hoist the repeated cords()/tzdt() lookups so each remote
        # query runs once per request instead of two or three times.
        location = cords(city_name)
        tz = tzdt(city_name)
        context = {
            'data': ws(city_name),
            'city': city_name,
            'cords_lon': location.get_lon(),
            'cords_lat': location.get_lat(),
            'location': all_locations,
            'tz': tz,
            # [10:19] trims the timestamp string to the time-of-day part
            'titin': titn(tz)[10:19],
        }
        return render(request, 'application_pyowm/any_city.html', context)
    # GET: show the empty search form.
    form = City()
    return render(request, 'application_pyowm/any_city.html', {'form': form})
def city_from_loc(request):
    """Weather view for the default configured location.

    ws()/cords() with no arguments fall back to the default location
    (presumably derived from all_locations -- TODO confirm in
    weather_api).
    """
    context = {
        'data': ws(),
        'city': all_locations,
        'cords_lon': cords().get_lon(),
        'cords_lat': cords().get_lat(),
        'location': all_locations
    }
    # BUG FIX: the original passed all_locations as render()'s third
    # positional argument (the context slot) and the real context dict
    # as the fourth (content_type), which is wrong.  Pass the template
    # name and context only.
    return render(request, 'application_pyowm/any_city.html', context)
def forecast(request, city):
    """Weather view for a city name captured from the URL."""
    context = {
        'loc': all_locations,
        'data': ws(str(city))
    }
    # BUG FIX: the original passed `city` as render()'s fourth
    # positional argument (content_type); the context already carries
    # everything the template needs.
    return render(request, 'application_pyowm/any_city.html', context)
def future_fc(request):
    """Multi-day forecast view for a user-supplied city.

    POST with a valid City form gathers tomorrow's forecast plus the
    sun/cloud/fog/snow/humidity/temperature outlooks; invalid input
    returns a plain error; GET renders the empty form.

    NOTE(review): the trailing HttpResponseRedirect is unreachable --
    every branch above it already returns.
    """
    if request.method == 'POST':
        form = City(request.POST)
        if form.is_valid():
            city_name = (str(form.cleaned_data['name_of_city']))
            context = {
                'cords_lon': cords(city_name).get_lon(),
                'cords_lat': cords(city_name).get_lat(),
                'tom': tf(city_name),
                'tfs': tfs(city_name),
                'fc': fc(city_name),
                'fg': fg(city_name),
                'fs': fs(city_name),
                'fh': fh(city_name),
                'ft': ft(city_name),
                'city': city_name
            }
            return render(request, 'application_pyowm/future_fc.html', context)
        else:
            return HttpResponse('Введите корректное назване города')
    else:
        form = City()
        context = {
            'form': form,
        }
        return render(request, 'application_pyowm/future_fc.html', context)
    return HttpResponseRedirect("/future_fc/")
def coords(request):
    """Current-weather view for a latitude/longitude pair.

    POST with a valid Cords form looks up the weather at the given
    point; invalid input returns a plain error; GET renders the empty
    form.  (The original's trailing HttpResponseRedirect("/coords/")
    was unreachable and has been removed.)
    """
    if request.method == 'POST':
        form_c = Cords(request.POST)
        if not form_c.is_valid():
            return HttpResponse('Введите корректные значения координат (0-90)')
        long = form_c.cleaned_data['long']
        lat = form_c.cleaned_data['lat']
        res = wac(long, lat).get_weather()
        context = {
            'status': res.get_status(),
            'temp': res.get_temperature('celsius'),
            'humidity': res.get_humidity(),
            'wind': res.get_wind(),
            'press': res.get_pressure(),
            # [11:19] keeps just the HH:MM:SS part of the ISO timestamp
            'sr': res.get_sunrise_time(timeformat='iso')[11:19],
            'sn': res.get_sunset_time(timeformat='iso')[11:19],
            'long': long,
            'lat': lat
        }
        return render(request, 'application_pyowm/coords.html', context)
    # GET: show an empty form.
    # BUG FIX: the original assigned the Cords *class* (form_c = Cords)
    # instead of an instance; instantiate it so the form renders.
    form_c = Cords()
    context = {
        'form': form_c,
    }
    return render(request, 'application_pyowm/coords.html', context)
def _city_context(city_name):
    """Build the shared template context for a fixed-city weather view.

    Replaces twelve copy-pasted context blocks; also hoists the
    cords()/tzdt() lookups so each remote query runs once per request
    instead of two-to-three times.
    """
    location = cords(city_name)
    tz = tzdt(city_name)
    return {
        'data': ws(city_name),
        'city': city_name,
        'cords_lon': location.get_lon(),
        'cords_lat': location.get_lat(),
        'location': all_locations,
        'tz': tz,
        # [10:19] trims the timestamp string to the time-of-day part
        'titin': titn(tz)[10:19],
    }

def sever(request):
    """Weather view for Severodonetsk."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Severodonetsk'))

def kiev(request):
    """Weather view for Kiev."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Kiev'))

def kharkov(request):
    """Weather view for Kharkov."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Kharkov'))

def lvov(request):
    """Weather view for Lvov."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Lvov'))

def dnepr(request):
    """Weather view for Dnipropetrovsk.

    NOTE(review): 'Dnerpopetrovsk' looks misspelled, but it is the
    runtime lookup string the original used -- kept byte-identical;
    verify against the weather provider before correcting.
    """
    return render(request, 'application_pyowm/any_city.html', _city_context('Dnerpopetrovsk'))

def odessa(request):
    """Weather view for Odessa."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Odessa'))

def barca(request):
    """Weather view for Barcelona."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Barcelona'))

def london(request):
    """Weather view for London."""
    return render(request, 'application_pyowm/any_city.html', _city_context('London'))

def paris(request):
    """Weather view for Paris."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Paris'))

def madrid(request):
    """Weather view for Madrid."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Madrid'))

def berlin(request):
    """Weather view for Berlin."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Berlin'))

def lissabon(request):
    """Weather view for Lisbon ('Lissabon' is the original lookup string)."""
    return render(request, 'application_pyowm/any_city.html', _city_context('Lissabon'))
|
[
"denis.pletenyov@gmail.com"
] |
denis.pletenyov@gmail.com
|
d84f464e75525c3e87722b344ef859fb50cf06d0
|
2705d156e594728ff47b3468be30ea37a8e69ea9
|
/AI2.py
|
ab2d5b498c0bb4bb31b9b3abe70007642fede356
|
[] |
no_license
|
blackpanther0/AI-proj
|
b6ca0b6e67b1000f340ddb75db75b42c238c060c
|
05d515b8d46994a8f070c2868d0a18fa92bab29d
|
refs/heads/master
| 2021-06-10T18:59:35.115639
| 2020-04-09T10:19:40
| 2020-04-09T10:19:40
| 254,338,596
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,899
|
py
|
import speech_recognition as sr
import pyttsx3
# Text-to-speech setup: 'sapi5' selects the Windows speech backend.
engine=pyttsx3.init('sapi5')
voices=engine.getProperty('voices')
engine.setProperty('voice',voices[0])
def speak(audio):
    """Print the bot's line and speak it aloud (blocks until spoken)."""
    print('bot :',audio)
    engine.say(audio)
    engine.runAndWait()
# Interactive preamble: greet, ask the user's name and the input mode.
speak("hello welcome to doctor's advicor")
speak("Please enter your name ->")
name=input()
speak(f'hello {name} nice to meet you')
speak("which mode you want to chat by text or by voice")
speak('press 1 for chat or press 2 for voice ->')
# x selects the input mode for docomm(): 1 = typed text, 2 = microphone.
x=int(input())
def docomm():
    """Read one user utterance and return it lower-cased.

    Uses the global mode flag ``x``: 2 = capture speech from the
    microphone via Google recognition, 1 = read typed input.  In voice
    mode a failed recognition retries recursively until something is
    understood.
    """
    if x==2:
        speak("please on your internet")
        r=sr.Recognizer()
        with sr.Microphone() as source:
            audio=r.listen(source)
        try:
            print("say HEY GHOST to activate")
            ret=r.recognize_google(audio,language='eng-us')
            print(ret)
        except Exception:
            # BUG FIX: the original set ret=None, called docomm()
            # recursively but DISCARDED its result, then crashed on
            # ret.lower().  Return the retry's result instead.
            # (Also narrowed the bare except so Ctrl-C still works.)
            return docomm()
        return ret.lower()
    elif x==1:
        txt=input(f'{name} :')
        return txt.lower()
speak("what made you to come hear")
# Symptoms reported so far; appended by the main loop, read by disease().
symptoms=[]
def disease():
    """Print a naive rule-based diagnosis from the collected symptoms.

    NOTE(review): 'fever' without 'cold' matches the first branch but
    prints nothing, and 'fever'+'headache' is never reached -- the
    decision tree looks incomplete; confirm the intended rules.
    """
    print("you symptoms are")
    print(symptoms)
    if 'fever' in symptoms:
        if 'cold' in symptoms:
            speak("you may have corona virus")
            speak("your self isolated and call nearby hospial")
    elif 'cold' in symptoms:
        if 'headache' in symptoms:
            if 'temprature' in symptoms:
                speak("you may have fever")
                speak("you have to take PARACETMOL")
            else:
                speak("you may have headache")
                speak("you have to take zero DAL P")
        else:
            speak("you may have common cold")
            speak("you can take PARACETMOL")
    elif 'stomachache' in symptoms:
        if 'gas' in symptoms:
            speak("It might be gas problem cause you have skipped your meal right?")
            speak("you have to drink ENO with glass of water")
        else:
            speak("you ate junk food right? i think you got motions ")
            speak("you have to take METROGEL")
# Main loop: collect symptom keywords from each utterance until the
# user says 'nothing' (diagnose, then exit) or 'good' (exit without
# diagnosing).
while(True):
    ret=docomm()
    if 'fever' in ret:
        symptoms.append('fever')
    elif 'stomachache' in ret:
        symptoms.append('stomachache')
    elif 'gas' in ret:
        symptoms.append('gas')
    elif 'headache' in ret:
        symptoms.append('headache')
    elif 'temprature' in ret:
        symptoms.append('temprature')
    elif 'cold' in ret:
        symptoms.append('cold')
    elif 'nothing' in ret:
        disease()
        speak("take care of yourself")
        input("press enter to exit")
        exit()
    elif 'good' in ret:
        speak("thats really good to hear")
        speak("we are kinda busy we will catch you up thankyou")
        exit()
    speak("what else symptoms do you have")
|
[
"noreply@github.com"
] |
blackpanther0.noreply@github.com
|
cbec028482767dcc7949e650a8dc96a6b523cf9e
|
a040b5affd65875c644138b3e317eb6b1e4ea20c
|
/blog/urls.py
|
64f0c235c78f0bc28067ffc18287f7e186bee179
|
[] |
no_license
|
sophia1215/cms
|
39f1f7bd0b408189650616633246ecad9f617e25
|
1eea3d638cb05feaedcd227731692a2c7bfdd85a
|
refs/heads/master
| 2020-03-23T12:07:41.922282
| 2018-07-21T06:29:03
| 2018-07-21T06:29:03
| 141,538,522
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
from django.urls import path, include
from . import views
app_name = 'blog'
# Route table for the blog app: post list, post detail by slug, posts
# filtered by category, and the comment form for a post.
urlpatterns = [
    # path('', include([
    path('', views.list_of_post, name='list_of_post'),
    path('<slug:slug>/', views.post_detail, name='post_detail'),
    # path('<int:id>/', views.post_detail, name='post_detail')
    path('category/<slug:category_slug>/', views.list_of_post_by_category, name='list_of_post_by_category'),
    path('<slug:slug>/comment/', views.add_comment, name='add_comment'),
    # ]))
]
|
[
"code.sophiawang@gmail.com"
] |
code.sophiawang@gmail.com
|
d7aa18e6479562e3c086fa5748bb617de0abcd34
|
7ca029a974bf52129708bf764e2648405e288b04
|
/envhw/lib/python3.6/_weakrefset.py
|
d5924e6f55766cb4e434fa1f07fe06dcb108cab8
|
[] |
no_license
|
Maxim-Poleshko/Djangohw
|
e6fe912f33589df33df0c432e141752d6bc23de9
|
5ea04f3fa74a8ebf1792d3b468074d4d35b7774a
|
refs/heads/master
| 2020-04-11T19:53:46.871378
| 2019-01-23T18:35:37
| 2019-01-23T18:35:37
| 162,051,741
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 33
|
py
|
/new/lib/python3.6/_weakrefset.py
|
[
"poleshkomaxim@gmail.com"
] |
poleshkomaxim@gmail.com
|
45eb482cc20a7a0b87a149ac02f2fa71bba69fe7
|
c143b17e65ecfa9ffc03d52cd6e2a4a240d26230
|
/fase06/ex01/ateZero.py
|
8c0070b4da597f7fed810527d01fcec52bef76bb
|
[] |
no_license
|
The-Totti/marvin
|
473966806ee90bef3792d355142a1ee7b5b8d2bd
|
d3382dcdfcfc6c6aff26401313625134bb03d434
|
refs/heads/master
| 2020-06-17T23:06:28.625288
| 2019-07-30T23:22:20
| 2019-07-30T23:22:20
| 196,093,544
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 125
|
py
|
def ateZero(x):
    """Return the list of integers between 0 and x, inclusive.

    ateZero(3)  -> [0, 1, 2, 3]
    ateZero(-3) -> [-3, -2, -1, 0]
    ateZero(0)  -> [0]

    BUG FIX: the original had no branch for x == 0 and silently
    returned None; x >= 0 now covers it (range(0 + 1) == [0]).
    """
    if x >= 0:
        return list(range(x + 1))
    return list(range(x, 1))
|
[
"52722890+The-Totti@users.noreply.github.com"
] |
52722890+The-Totti@users.noreply.github.com
|
dfbe392cb539bfb0b7dd47e615482179b0a33f02
|
9605b21cbf0b9e33bac1329ce2d4cd7ff3fca4b5
|
/cricket/classes/delivery.py
|
f160089a3aad224e668a69d8e64b1eaf8c2abad7
|
[] |
no_license
|
oficiallyAkshayEdu/crickit
|
651d72b814990eaf7ed1a0a2c0898bae76abba94
|
47f28157c44c28a18ac102ebccd13e2903cd13ab
|
refs/heads/master
| 2022-03-31T01:05:35.431140
| 2020-01-22T06:25:36
| 2020-01-22T06:25:36
| 109,299,417
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 87
|
py
|
class Delivery:
    """A single cricket delivery, described by its speed and type."""

    def __init__(self):
        # Both attributes start empty and are expected to be filled in
        # later by the caller.
        self.speed, self.type = "", ""
|
[
"akshay.space9@gmail.com"
] |
akshay.space9@gmail.com
|
282f6d79c6959d08d6fd5239d8e89c988928b494
|
ae48f96aedc36427718dc50276377df48829a64b
|
/platform/pygame/event.py
|
cea70b8c3802d189b7e48381a2d0f2fdcdedc70b
|
[
"Unlicense"
] |
permissive
|
gregr/old-and-miscellaneous
|
8fce2c8767302c450c8f01f3528b28b0f91fd995
|
c7a147037b806058d18d9a200ffa4a14f3402d04
|
refs/heads/master
| 2021-01-10T18:38:16.311759
| 2015-04-27T01:00:45
| 2015-04-27T01:00:45
| 9,311,629
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,054
|
py
|
# Copyright (C) 2007 Gregory L. Rosenblatt
# All rights reserved
# <greg.uriel@gmail.com>
# http://code.google.com/p/uriel/
# This file is part of Uriel.
# Uriel is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
# Uriel is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this library. If not, see <http://www.gnu.org/licenses/>
import pygame
from pygame.locals import *
import logging
class _pygameInstance(object):
    """Initializes/shuts down pygame when constructed/destroyed.

    Only one instance can be active at a time; the class-level
    _initialized flag enforces this.
    """
    _initialized = False  # class-wide flag: is pygame currently live?
    def __init__(self):
        """Initialize pygame (asserts no other instance is active)."""
        assert not _pygameInstance._initialized
        pygame.init()
        # BUG FIX: the original assigned through `self`, which created
        # an instance attribute and left the class flag False -- so a
        # second concurrent instance would still pass the assert above.
        # Writing the class attribute makes the invariant actually hold.
        _pygameInstance._initialized = True
        logging.info("Initialized pygame")
    def __del__(self):
        """Shut down pygame and clear the class-wide flag."""
        pygame.quit()
        _pygameInstance._initialized = False
        logging.info("Shut down pygame")
class Dispatcher(object):
    """An event dispatcher for use with pygame

    Dispatches pygame events to their appropriate handlers where specified.
    Only one instance should exist at any given time.
    """
    def __init__(self):
        """Prepare the event loop."""
        # Holding the _pygameInstance keeps pygame initialized for this
        # dispatcher's lifetime (its __del__ shuts pygame down).
        self._pygameInstance = _pygameInstance()
        self.resetHandlers()
    def resetHandlers(self):
        """Remove all event handlers and restore defaults."""
        def stop(e):
            # Default QUIT handler: StopIteration ends the event loop.
            raise StopIteration
        self._eventHandlers = {QUIT: stop}
        pygame.event.set_allowed(None) # this should block all event types
        self.addHandlers({}) # then add them back in selectively
    def addHandlers(self, handlers):
        """Set additional event handlers.

        Only event types with handlers will be processed by the loop.
        If a handler for QUIT is not provided, a default will be used.
        """
        self._eventHandlers.update(handlers)
        # NOTE(review): on Python 3 .keys() is a dict view, not a list;
        # confirm pygame.event.set_allowed accepts it (wrap in list(...)
        # if not).
        keys = self._eventHandlers.keys()
        pygame.event.set_allowed(keys)
    def start(self, update=lambda:None):
        """Start the event loop.

        The given update procedure will be called once per iteration.
        """
        for none in iter(self):
            update()
    def __iter__(self):
        """Create an event loop iterator.

        Iteration ends after receiving a QUIT event by default.
        """
        return _iterEvents(self._eventHandlers)
def _iterEvents(handlers):
    """Infinite generator driving the event loop.

    Each step drains the pygame event queue, dispatching every event
    to handlers[event.type], then yields control to the caller.
    """
    poll = pygame.event.get  # bind once: avoids attribute lookups per step
    while True:
        for event in poll():
            handlers[event.type](event)
        yield
|
[
"greg.weiqi@gmail.com"
] |
greg.weiqi@gmail.com
|
ce7932add6161c3d72b410ce1b91417be5428afe
|
d234e4a865a0ed94d28552781c6f29ae7b07e053
|
/Robot V2/motor_test.py
|
8f951c3b24d58b7d0ca4d4e675c8cd2bafe75800
|
[] |
no_license
|
andrewfell/CERC-PIWars2019
|
6a279407045977d787c3dfae7a6b26d30a1cfce9
|
cdfa3a88a1652d3513f92d2b2b7df75e2e243541
|
refs/heads/master
| 2020-04-06T15:05:03.973672
| 2020-02-15T21:30:36
| 2020-02-15T21:30:36
| 157,565,167
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,391
|
py
|
#This program tests that you have the motor GPIO mapping correct on your CERC Robot V2 chassis
#refer to GPIO pins allocations here: https://www.raspberrypi.org/documentation/usage/gpio/
#the 'robot' class of the gpiozero library is used below: https://gpiozero.readthedocs.io/en/stable/recipes.html#robot
#the robot should first move forward, then spin right, then spin left, then go backwards.
#if this works as expected, your GPIO mapping and motor driver board wiring is correct
#first of all, import the Robot class from the 'gpiozero' library
from gpiozero import Robot
#then import the 'sleep' class from the 'time' library (so we can add pauses in our program)
from time import sleep
#define a robot (it's called Burt! :-) ), with the GPIO pin mapping as per the GPIO in the RobotV2.md file
# Test sequence: forward, spin right, spin left, backward (2 s each),
# then stop.  Each motion command runs until the next one replaces it.
burt_the_robot = Robot(left=(8, 7), right=(21, 20)) # dont change this pin mapping, otherwise your robot will be different to the others!
#set the speed. 1 = 100%, 0.5 = 50% and so on...
speed = 0.7
#go forward indefinitely
burt_the_robot.forward(speed)
#sleep for 2seconds
sleep(2)
#spin right indefinitely
burt_the_robot.right(speed)
#sleep for 2seconds
sleep(2)
#spin left indefinitely
burt_the_robot.left(speed)
#sleep for 2seconds
sleep(2)
#go backwards indefinitely
burt_the_robot.backward(speed)
#sleep for 2seconds
sleep(2)
#stop, Burt!
burt_the_robot.stop()
|
[
"noreply@github.com"
] |
andrewfell.noreply@github.com
|
f600fe87604eba8b1bbc1105e5fc6ac5de5f245b
|
251a1a53fb1b45860f9a6656bc48696bcbaa8083
|
/guvi7_8.py
|
ca22da19a14c5f1717d3ad844afc4d419aca6205
|
[] |
no_license
|
EktaArora1/Guvii
|
3216e6ebb1b90afd6714e13707dd6a7fe4617733
|
4955d0ab3b2f574d1ebee60863719c6d42c76f15
|
refs/heads/master
| 2020-04-01T02:38:57.096305
| 2019-03-13T15:37:53
| 2019-03-13T15:37:53
| 152,788,499
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 59
|
py
|
# Read an integer and report whether it is divisible by 7.
num = int(input())
print('yes' if num % 7 == 0 else 'no')
|
[
"noreply@github.com"
] |
EktaArora1.noreply@github.com
|
bc7ec86468ce5d2d2710c61e68d7d267fb79440f
|
81f97e696d7a6c89b6477e9275877ff4f7462131
|
/misc/datasets.py
|
e6180e379d9202ae5be08df33f184c3d80f6e555
|
[] |
no_license
|
mahima12/ImageGeneration-using-Stack-GAN
|
affdfba1fdc70c90536d470eab3251eaad977dd0
|
83a9b8047b12b6d934cd06d46774505ccd6e7572
|
refs/heads/master
| 2020-03-18T06:30:14.153077
| 2018-06-21T08:26:51
| 2018-06-21T08:26:51
| 134,399,887
| 0
| 0
| null | 2018-06-21T07:32:51
| 2018-05-22T10:36:02
| null |
UTF-8
|
Python
| false
| false
| 10,090
|
py
|
from __future__ import division
from __future__ import print_function
import numpy as np
import pickle
import random
class Dataset(object):
def __init__(self, images, imsize, embeddings=None,
filenames=None, workdir=None,
labels=None, aug_flag=True,
class_id=None, class_range=None):
self._images = images
self._embeddings = embeddings
self._filenames = filenames
self.workdir = workdir
self._labels = labels
self._epochs_completed = -1
self._num_examples = len(images)
self._saveIDs = self.saveIDs()
# shuffle on first run
self._index_in_epoch = self._num_examples
self._aug_flag = aug_flag
self._class_id = np.array(class_id)
self._class_range = class_range
self._imsize = imsize
self._perm = None
@property
def images(self):
return self._images
@property
def embeddings(self):
return self._embeddings
@property
def filenames(self):
return self._filenames
@property
def num_examples(self):
return self._num_examples
@property
def epochs_completed(self):
return self._epochs_completed
def saveIDs(self):
self._saveIDs = np.arange(self._num_examples)
np.random.shuffle(self._saveIDs)
return self._saveIDs
def readCaptions(self, filenames, class_id):
name = filenames
if name.find('jpg/') != -1: # flowers dataset
class_name = 'class_%05d/' % class_id
name = name.replace('jpg/', class_name)
cap_path = "C:/Users/user/Desktop/st/8Sem/skip-thoughts-master/StackGAN-master/models/demo/stageI/stageII/misc/"+'%s/text_c10/%s.txt' %\
(self.workdir, name)
with open(cap_path, "r") as f:
captions = f.read().split('\n')
captions = [cap for cap in captions if len(cap) > 0]
return captions
def transform(self, images):
if self._aug_flag:
transformed_images =\
np.zeros([images.shape[0], self._imsize, self._imsize, 3])
ori_size = images.shape[1]
for i in range(images.shape[0]):
h1 = np.floor((ori_size - self._imsize) * np.random.random())
w1 = np.floor((ori_size - self._imsize) * np.random.random())
cropped_image =\
images[i][int(w1): int(w1 + self._imsize), int(h1): int(h1 + self._imsize), :]
if random.random() > 0.5:
transformed_images[i] = np.fliplr(cropped_image)
else:
transformed_images[i] = cropped_image
return transformed_images
else:
return images
def sample_embeddings(self, embeddings, filenames, class_id, sample_num):
if len(embeddings.shape) == 2 or embeddings.shape[1] == 1:
return np.squeeze(embeddings)
else:
batch_size, embedding_num, _ = embeddings.shape
# Take every sample_num captions to compute the mean vector
sampled_embeddings = []
sampled_captions = []
for i in range(batch_size):
randix = np.random.choice(embedding_num,
sample_num, replace=False)
if sample_num == 1:
randix = int(randix)
captions = self.readCaptions(filenames[i],
class_id[i])
sampled_captions.append(captions[randix])
sampled_embeddings.append(embeddings[i, randix, :])
else:
e_sample = embeddings[i, randix, :]
e_mean = np.mean(e_sample, axis=0)
sampled_embeddings.append(e_mean)
sampled_embeddings_array = np.array(sampled_embeddings)
return np.squeeze(sampled_embeddings_array), sampled_captions
def next_batch(self, batch_size, window):
"""Return the next `batch_size` examples from this data set."""
start = self._index_in_epoch
self._index_in_epoch += batch_size
if self._index_in_epoch > self._num_examples:
# Finished epoch
self._epochs_completed += 1
# Shuffle the data
self._perm = np.arange(self._num_examples)
np.random.shuffle(self._perm)
# Start next epoch
start = 0
self._index_in_epoch = batch_size
assert batch_size <= self._num_examples
end = self._index_in_epoch
current_ids = self._perm[start:end]
fake_ids = np.random.randint(self._num_examples, size=batch_size)
collision_flag =\
(self._class_id[current_ids] == self._class_id[fake_ids])
fake_ids[collision_flag] =\
(fake_ids[collision_flag] +
np.random.randint(100, 200)) % self._num_examples
sampled_images = self._images[current_ids]
sampled_wrong_images = self._images[fake_ids, :, :, :]
sampled_images = sampled_images.astype(np.float32)
sampled_wrong_images = sampled_wrong_images.astype(np.float32)
sampled_images = sampled_images * (2. / 255) - 1.
sampled_wrong_images = sampled_wrong_images * (2. / 255) - 1.
sampled_images = self.transform(sampled_images)
sampled_wrong_images = self.transform(sampled_wrong_images)
ret_list = [sampled_images, sampled_wrong_images]
if self._embeddings is not None:
filenames = [self._filenames[i] for i in current_ids]
class_id = [self._class_id[i] for i in current_ids]
sampled_embeddings, sampled_captions = \
self.sample_embeddings(self._embeddings[current_ids],
filenames, class_id, window)
ret_list.append(sampled_embeddings)
ret_list.append(sampled_captions)
else:
ret_list.append(None)
ret_list.append(None)
if self._labels is not None:
ret_list.append(self._labels[current_ids])
else:
ret_list.append(None)
return ret_list
    def next_batch_test(self, batch_size, start, max_captions):
        """Return the next `batch_size` examples from this data set.

        Deterministic (non-shuffled) variant used at test time: takes the
        slice [start, start+batch_size), clamped so the last batch is still
        full-sized. Returns [images, embedding_batches, save_ids, captions]
        where embedding_batches holds up to `max_captions` per-caption
        embedding slices, each of shape (batch_size, embedding_dim).
        """
        if (start + batch_size) > self._num_examples:
            # Clamp: back up so the final batch still has batch_size items.
            end = self._num_examples
            start = end - batch_size
        else:
            end = start + batch_size
        sampled_images = self._images[start:end]
        sampled_images = sampled_images.astype(np.float32)
        # from [0, 255] to [-1.0, 1.0]
        sampled_images = sampled_images * (2. / 255) - 1.
        sampled_images = self.transform(sampled_images)
        sampled_embeddings = self._embeddings[start:end]
        _, embedding_num, _ = sampled_embeddings.shape
        sampled_embeddings_batchs = []
        sampled_captions = []
        sampled_filenames = self._filenames[start:end]
        sampled_class_id = self._class_id[start:end]
        for i in range(len(sampled_filenames)):
            captions = self.readCaptions(sampled_filenames[i],
                                         sampled_class_id[i])
            # print(captions)
            sampled_captions.append(captions)
        # One batch per caption index, capped at max_captions.
        for i in range(np.minimum(max_captions, embedding_num)):
            batch = sampled_embeddings[:, i, :]
            sampled_embeddings_batchs.append(np.squeeze(batch))
        return [sampled_images, sampled_embeddings_batchs,
                self._saveIDs[start:end], sampled_captions]
class TextDataset(object):
    """Paired image / text-embedding dataset description.

    Resolves the pickle filenames from the requested embedding type
    ('cnn-rnn' or 'skip-thought') and the high-res/low-res ratio (1 or 4),
    and loads them into a `Dataset` via `get_data`.
    """

    def __init__(self, workdir, embedding_type, hr_lr_ratio):
        lr_imsize = 64  # base (low-res) image side length
        self.hr_lr_ratio = hr_lr_ratio
        # Pickle name encodes the stored image side length (76 / 304 px,
        # i.e. the target size plus a crop margin for augmentation).
        if self.hr_lr_ratio == 1:
            self.image_filename = '/76images.pickle'
        elif self.hr_lr_ratio == 4:
            self.image_filename = '/304images.pickle'
        self.image_shape = [lr_imsize * self.hr_lr_ratio,
                            lr_imsize * self.hr_lr_ratio, 3]
        self.image_dim = self.image_shape[0] * self.image_shape[1] * 3
        self.embedding_shape = None  # filled in by get_data()
        self.train = None
        self.test = None
        self.workdir = workdir
        if embedding_type == 'cnn-rnn':
            self.embedding_filename = '/char-CNN-RNN-embeddings.pickle'
        elif embedding_type == 'skip-thought':
            self.embedding_filename = '/skip-thought-embeddings.pickle'

    def get_data(self, pickle_path, aug_flag=True):
        """Load images, embeddings, filenames and class ids from
        `pickle_path` and wrap them in a `Dataset`.

        Fix: the previous version ignored `pickle_path` entirely and read
        from a hard-coded, machine-specific absolute directory, which broke
        every other caller/environment.
        """
        with open(pickle_path + self.image_filename, 'rb') as f:
            images = pickle.load(f, encoding='latin1')
            images = np.array(images)
            print('images: ', images.shape)
        with open(pickle_path + self.embedding_filename, 'rb') as f:
            embeddings = pickle.load(f, encoding='latin1')
            embeddings = np.array(embeddings)
            self.embedding_shape = [embeddings.shape[-1]]
            print('embeddings: ', embeddings.shape)
        with open(pickle_path + '/filenames.pickle', 'rb') as f:
            list_filenames = pickle.load(f, encoding='latin1')
            print('list_filenames: ', len(list_filenames), list_filenames[0])
        with open(pickle_path + '/class_info.pickle', 'rb') as f:
            class_id = pickle.load(f, encoding='latin1')
        return Dataset(images, self.image_shape[0], embeddings,
                       list_filenames, self.workdir, None,
                       aug_flag, class_id)
|
[
"mahimapandya4@gmail.com"
] |
mahimapandya4@gmail.com
|
f62cc712f231059743b5e22b2cc0ee1721cfbdd1
|
edd329fba9acb67fa0294c3d602ebcdf892a2b39
|
/venv/bin/easy_install
|
446ebd19666ea59a5927812f69ee50682ffc6447
|
[] |
no_license
|
Abdul-Afeez/zeno
|
ec70ce6005ac8ce196ee2facf817c077d46e8bad
|
99ddb5de0dc84c750cdf9eccc6c1f8117dd78d36
|
refs/heads/master
| 2022-07-21T07:43:21.474213
| 2020-05-13T16:31:20
| 2020-05-13T16:31:20
| 263,685,079
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 442
|
#!/home/abdulhafeez/PycharmProjects/zeno/venv/bin/python
# EASY-INSTALL-ENTRY-SCRIPT: 'setuptools==40.8.0','console_scripts','easy_install'
# Auto-generated console-script shim: resolves the `easy_install` entry point
# from the pinned setuptools distribution and runs it.
__requires__ = 'setuptools==40.8.0'
import re
import sys
from pkg_resources import load_entry_point
if __name__ == '__main__':
    # Strip '-script.pyw'/'.exe' suffixes so argv[0] matches the command name.
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('setuptools==40.8.0', 'console_scripts', 'easy_install')()
    )
|
[
"abodunrinafeezlekan@gmail.com"
] |
abodunrinafeezlekan@gmail.com
|
|
bbcb607cdf7a0feeae7cc8b38bc21b72bd79f59f
|
18aab7769d023bca4289fc09a2e65ad37b482e71
|
/src/image_utils.py
|
bde65e7b74b4a1fb68bff1611fac05497e8e6037
|
[] |
no_license
|
Freitacr/cassava-disease-analysis
|
74904a95982123adaae57da05ef3dc58be518652
|
a7371bed860055a257cbad773ce6109c4c4b90a7
|
refs/heads/master
| 2023-03-23T03:34:51.284212
| 2021-03-15T19:10:49
| 2021-03-15T19:10:49
| 345,775,093
| 0
| 0
| null | 2021-03-15T19:10:50
| 2021-03-08T19:44:12
|
Python
|
UTF-8
|
Python
| false
| false
| 3,672
|
py
|
from typing import Tuple, List, Dict
import os
from os.path import sep
import cv2
import numpy as np
# Maps a disease-code label to every image file path carrying that label;
# populated lazily by __load_image_file_mapping().
__image_files_by_label: Dict[int, List[str]] = {}
# Number of distinct disease-code categories in the dataset.
__num_label_categories: int = 5
def __open_jpg(file_path: str, image_size: Tuple[int, int, int]) -> np.ndarray:
    """Read an image from disk and resize it to (width, height) taken from
    the first two entries of `image_size`."""
    loaded = cv2.imread(file_path)
    return cv2.resize(loaded, image_size[:2])
def __load_image_file_mapping(directory_path: str):
    """Populate the module-level label -> file-paths map from a directory.

    Idempotent: returns immediately if the map was already built. The label
    is parsed from the trailing `_<code>.<ext>` portion of each filename.
    """
    if __image_files_by_label:
        return
    for entry in os.listdir(directory_path):
        label = int(entry.split('_')[-1].split('.')[0])
        __image_files_by_label.setdefault(label, []).append(
            directory_path + sep + entry)
def load_specified_batch(file_paths: List[str],
                         image_size: Tuple[int, int], normalize: bool) -> Tuple[np.ndarray, np.ndarray]:
    """
    Loads the specified images, and mapping them to their disease codes.
    :param file_paths: Paths of images to load
    :param image_size: Size the images should be after loading.
    :param normalize: Flag for image normalization. If true, all elements in an image are divided by 255
    to translate them into the range [0.0-1.0]
    :return: Two numpy arrays containing the loaded image data and disease codes respectively.
    These arrays are index locked; meaning the disease code vector for an image at index i in the image data array
    will be at index i in the disease code array.
    """
    count = len(file_paths)
    labels = np.zeros((count, __num_label_categories), dtype=np.float32)
    images = np.zeros((count, image_size[1], image_size[0], 3))
    for i, path in enumerate(file_paths):
        # Disease code is encoded as the trailing `_<code>.<ext>` suffix.
        code = int(path.split('_')[-1].split('.')[0])
        labels[i] = np.zeros(__num_label_categories)
        labels[i][code] = 1  # one-hot encode the disease code
        images[i] = __open_jpg(path, (*image_size, 3))
        if normalize:
            images[i] /= 255
    return images, labels
def load_batch(directory_path: str, num_to_load: int,
               image_size: Tuple[int, int], normalize: bool) -> Tuple[np.ndarray, np.ndarray]:
    """
    Loads the specified amount of images from the directory, and mapping them to their disease codes.
    :param directory_path: Path to the directory where images should be loaded from
    :param num_to_load: The number of images to load from the directory.
    :param image_size: Size the images should be after loading.
    :param normalize: Flag for image normalization. If true, all elements in an image are divided by 255
    to translate them into the range [0.0-1.0]
    :return: Two numpy arrays containing the loaded image data and disease codes respectively.
    These arrays are index locked; meaning the disease code vector for an image at index i in the image data array
    will be at index i in the disease code array.
    """
    # Build the label -> files index on first use (no-op afterwards).
    if not __image_files_by_label:
        __load_image_file_mapping(directory_path)
    num_remaining = num_to_load
    num_per_category = int(num_to_load / __num_label_categories)
    selected_files = []
    # Sample evenly from the first N-1 categories; np.random.choice samples
    # with replacement by default, so duplicates within a batch are possible.
    for i in range(__num_label_categories-1):
        selected_files.extend(np.random.choice(__image_files_by_label[i], num_per_category))
        num_remaining -= num_per_category
    # The last category absorbs the rounding remainder so exactly
    # num_to_load files are selected in total.
    selected_files.extend(np.random.choice(__image_files_by_label[__num_label_categories-1], num_remaining))
    return load_specified_batch(selected_files, image_size, normalize)
|
[
"freitacr@plu.edu"
] |
freitacr@plu.edu
|
56db01d27ea1ddc68ac5bad60ac931e39e4f142c
|
dd9ce5ad36d79f8d269b52b97f2e121ffc957188
|
/DailyPython/calender.py
|
b0a88e2f694ed3bb511003b8d4ac0e2e03ab2124
|
[] |
no_license
|
LevanceWam/PythonTutorial
|
9a7efb43829994ec6dd8b706f671eef54438155c
|
93c9fbc80618848e32296da99b3880331d2c811b
|
refs/heads/master
| 2021-05-18T06:09:37.928915
| 2020-04-16T21:13:29
| 2020-04-16T21:13:29
| 251,150,211
| 0
| 0
| null | 2020-04-15T17:39:02
| 2020-03-29T22:46:28
|
Python
|
UTF-8
|
Python
| false
| false
| 116
|
py
|
import calendar
# Prompt for a year and a month, then print that month's text calendar.
year = int(input("Input the year: "))
month = int(input("Input the month: "))
print(calendar.month(year, month))
|
[
"lkwamley@fullsail.edu"
] |
lkwamley@fullsail.edu
|
04aefb86aaf1abc218beff66a7c209bfe2bfe550
|
4de03eecadc4c69caf792f4773571c2f6dbe9d68
|
/tests/seahub/views/test_list_lib_dir.py
|
e8ba1aaa6e5d5392e382efd8d7688b080e0859a4
|
[
"Apache-2.0"
] |
permissive
|
Tr-1234/seahub
|
c1663dfd12f7584f24c160bcf2a83afdbe63a9e2
|
ed255e0566de054b5570218cb39cc320e99ffa44
|
refs/heads/master
| 2022-12-23T16:20:13.138757
| 2020-10-01T04:13:42
| 2020-10-01T04:13:42
| 300,138,290
| 0
| 0
|
Apache-2.0
| 2020-10-01T04:11:41
| 2020-10-01T04:11:40
| null |
UTF-8
|
Python
| false
| false
| 722
|
py
|
import json
import os
from django.core.urlresolvers import reverse
from seahub.test_utils import BaseTestCase
class ListLibDirTest(BaseTestCase):
    """View test: `list_lib_dir` returns a JSON listing of a repo's root."""
    def setUp(self):
        # The view requires an authenticated session.
        self.login_as(self.user)
        self.endpoint = reverse('list_lib_dir', args=[self.repo.id])
        self.folder_name = os.path.basename(self.folder)
    def tearDown(self):
        self.remove_repo()
    def test_can_list(self):
        # XHR header needed — presumably the view only serves AJAX requests;
        # confirm against the view implementation.
        resp = self.client.get(self.endpoint, HTTP_X_REQUESTED_WITH='XMLHttpRequest')
        self.assertEqual(200, resp.status_code)
        json_resp = json.loads(resp.content)
        assert self.folder_name == json_resp['dirent_list'][0]['obj_name']
        assert self.repo.name == json_resp['repo_name']
|
[
"colinsippl@gmx.de"
] |
colinsippl@gmx.de
|
8cf3c7acf8f78ad3e4507a5e4359dab11e77a8fa
|
8187c8ff0c9825197c36e5054ab762493e5bad83
|
/inve/models.py
|
bfa618915f579b57826d949c88e850b7def75ecb
|
[] |
no_license
|
derikkip96/inventory
|
9053ed78158ea8237d8e45458639589d6c8bea83
|
8df2fa530d3572110cba5d778239fa7b5340df3a
|
refs/heads/master
| 2020-06-06T06:29:33.408365
| 2019-09-17T23:27:10
| 2019-09-17T23:27:10
| 192,664,018
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,913
|
py
|
from django.conf import settings
from django.db import models
# from order.models import ReceiveOrder
# # Create your models here.
class ItemUnit(models.Model):
    """Unit of measure for stock items (e.g. piece, kilogram)."""
    name = models.CharField(max_length=20)
    abbr = models.CharField(max_length=20)  # short display abbreviation
    inactive = models.BooleanField(default=False)  # soft-disable flag
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('created',)  # oldest first (other models sort newest first)
    def __str__(self):
        return self.name
class Category(models.Model):
    """Stock category with a default unit of measure."""
    description= models.CharField(max_length=20)
    dflt_units = models.ForeignKey(ItemUnit, on_delete=models.CASCADE)  # default unit
    inactive = models.BooleanField(default=False)  # soft-disable flag
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('-created',)  # newest first
        verbose_name = 'category'
        verbose_name_plural = 'categories'
    def __str__(self):
        return self.description
class Tax(models.Model):
    """Tax rate definition applied to stock items.

    Note: the field name `excempt` is a misspelling of "exempt" but is kept
    as-is — renaming it would require a schema migration and break callers.
    """
    # Yes/No choices rendered for the boolean fields.
    CHOICES = (
        (0, 'No'),
        (1, 'Yes')
    )
    name = models.CharField(max_length=200)
    tax_rate = models.DecimalField(max_digits=10, decimal_places=2)
    excempt = models.BooleanField(choices=CHOICES, default=False)
    # Fix: removed the invalid `max_length=9` kwarg — BooleanField takes no
    # max_length, and recent Django versions reject it at system-check time.
    default = models.BooleanField(choices=CHOICES, default=False)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('-created',)  # newest first
    def __str__(self):
        return self.name
class StockMaster(models.Model):
    """Master record for a stock item, keyed by a caller-supplied stock id."""
    stock_id = models.CharField(max_length=50, primary_key=True)
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    tax_type = models.ForeignKey(Tax, on_delete=models.CASCADE)
    description = models.TextField(max_length=50)
    long_description = models.TextField()
    units = models.ForeignKey(ItemUnit, on_delete=models.CASCADE)
    inactive = models.BooleanField(default=False)  # soft-disable flag
    deleted_status = models.BooleanField(default=False)  # soft-delete flag
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('-created',)  # newest first
    def __str__(self):
        return self.description
class Item(models.Model):
    """Sellable item with an uploaded image, keyed by stock id."""
    description = models.CharField(max_length=200,db_index=True)
    stock_id = models.CharField(max_length=50, primary_key=True)
    category = models.ForeignKey(Category, on_delete=models.CASCADE)
    image = models.FileField(upload_to='uploads/item_image')
    inactive = models.BooleanField(default=False)  # soft-disable flag
    # NOTE(review): defaults to True here, unlike StockMaster.deleted_status
    # (False) — looks inconsistent; confirm which default is intended.
    deleted_status = models.BooleanField(default=True)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('-created',)  # newest first
    def __str__(self):
        return self.description
class Preference(models.Model):
    """Generic key/value application preference grouped by category."""
    category=models.CharField(max_length=50)
    field = models.CharField(max_length=50)  # preference key
    value = models.CharField(max_length=50)  # stored as a string
class StockTransfer(models.Model):
    """Record of moving a quantity of stock between two locations."""
    person = models.ForeignKey(settings.AUTH_USER_MODEL, on_delete=models.CASCADE)  # who initiated it
    source = models.CharField(max_length=60)
    destination = models.CharField(max_length=60)
    note = models.TextField()
    qty = models.DecimalField(max_digits=10, decimal_places=2)
    transfer_date = models.DateTimeField()  # when the transfer happened
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
class Location(models.Model):
    """Physical stock location with contact details."""
    code = models.CharField(max_length=10)  # short location code
    name = models.CharField(max_length=60)
    address = models.CharField(max_length=60)
    # Non-PEP8 capitalised field name; kept as-is to avoid a schema migration.
    Email = models.EmailField()
    phone = models.CharField(max_length=60)
    created = models.DateTimeField(auto_now_add=True)
    updated = models.DateTimeField(auto_now=True)
    class Meta:
        ordering = ('-created',)  # newest first
    def __str__(self):
        return self.name
|
[
"derikkip96@gmail.com"
] |
derikkip96@gmail.com
|
6f29fcc2634c3f7a624d00ddd0547d2343c55e1b
|
e808642f71488f6cd4a1683ad3bd882e58b3540a
|
/models/networks.py
|
0b50f429a4f422b9a4b248aa3fce4c4ab252f46e
|
[
"Apache-2.0"
] |
permissive
|
bishwasregmi/2dtodepth
|
9a54ff25e0f3e2f2ef284eea371c6f7272558064
|
f6bd76b17d4cbc245c3fd7a3857960bcd67a09fe
|
refs/heads/master
| 2022-11-12T00:24:21.374894
| 2020-07-01T15:24:22
| 2020-07-01T15:24:22
| 265,872,296
| 2
| 0
|
Apache-2.0
| 2020-05-21T14:38:27
| 2020-05-21T14:38:26
| null |
UTF-8
|
Python
| false
| false
| 24,148
|
py
|
# Copyright 2019 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# https://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import torch
import torch.nn as nn
import torch.optim as optim
import torch.autograd as autograd
import numpy as np
import functools
###############################################################################
# Functions
###############################################################################
EPSILON = 1e-6  # small constant guarding divisions/logs against zero
def gradient(input, do_normalize=False):
    """Forward-difference gradients along the last two (spatial) dims.

    Supports 2D (H, W), 3D (N, H, W) and 4D (N, C, H, W) tensors. Returns
    (Dx, Dy); each is one element shorter than the input along its axis.
    With do_normalize=True the differences are divided by the local sum
    (guarded by EPSILON), giving a scale-invariant gradient.
    """
    ndim = input.dim()
    if ndim == 2:
        down, up = input[1:, :], input[:-1, :]
        right, left = input[:, 1:], input[:, :-1]
    elif ndim == 3:
        down, up = input[:, 1:, :], input[:, :-1, :]
        right, left = input[:, :, 1:], input[:, :, :-1]
    elif ndim == 4:
        down, up = input[:, :, 1:, :], input[:, :, :-1, :]
        right, left = input[:, :, :, 1:], input[:, :, :, :-1]
    Dx = right - left
    Dy = down - up
    if do_normalize:
        Dx = Dx / (right + left + EPSILON)
        Dy = Dy / (down + up + EPSILON)
    return Dx, Dy
def init_weights(net, init_type='normal', gain=0.02):
    """Initialise the weights of every Conv/Linear/BatchNorm2d submodule.

    init_type selects the scheme for Conv/Linear weights ('normal',
    'xavier', 'kaiming' or 'orthogonal'); their biases are zeroed.
    BatchNorm2d weights are drawn from N(1, gain) with zero bias.

    Raises:
        NotImplementedError: for an unknown init_type (only when a
        Conv/Linear module is actually visited).
    """
    def _initialize(module):
        cls_name = module.__class__.__name__
        is_conv_or_linear = hasattr(module, 'weight') and (
            'Conv' in cls_name or 'Linear' in cls_name)
        if is_conv_or_linear:
            if init_type == 'normal':
                nn.init.normal_(module.weight.data, 0.0, gain)
            elif init_type == 'xavier':
                nn.init.xavier_normal_(module.weight.data, gain=gain)
            elif init_type == 'kaiming':
                nn.init.kaiming_normal_(module.weight.data, a=0, mode='fan_in')
            elif init_type == 'orthogonal':
                nn.init.orthogonal_(module.weight.data, gain=gain)
            else:
                raise NotImplementedError(
                    'initialization method [%s] is not implemented' % init_type)
            if getattr(module, 'bias', None) is not None:
                nn.init.constant_(module.bias.data, 0.0)
        elif 'BatchNorm2d' in cls_name:
            nn.init.normal_(module.weight.data, 1.0, gain)
            nn.init.constant_(module.bias.data, 0.0)

    print('initialize network with %s' % init_type)
    net.apply(_initialize)
def get_scheduler(optimizer, opt):
    """Build a learning-rate scheduler for `optimizer` from opt.lr_policy.

    Policies:
      'lambda'  — linear decay to 0 over the last opt.niter_decay epochs
                  (full rate for the first opt.niter epochs).
      'step'    — halve the rate every opt.lr_decay_epoch epochs.
      'plateau' — reduce on metric plateau (factor 0.2, patience 5).

    Raises:
        NotImplementedError: for an unknown opt.lr_policy.
        (Fix: the exception instance used to be *returned* instead of
        raised, and its message used a comma instead of %-formatting.)
    """
    if opt.lr_policy == 'lambda':
        def lambda_rule(epoch):
            # Multiplier: 1.0 through epoch opt.niter, then linear to 0.
            lr_l = 1.0 - max(0, epoch + 1 + opt.epoch_count -
                             opt.niter) / float(opt.niter_decay + 1)
            return lr_l
        scheduler = optim.lr_scheduler.LambdaLR(optimizer, lr_lambda=lambda_rule)
    elif opt.lr_policy == 'step':
        scheduler = optim.lr_scheduler.StepLR(
            optimizer, step_size=opt.lr_decay_epoch, gamma=0.5)
    elif opt.lr_policy == 'plateau':
        scheduler = optim.lr_scheduler.ReduceLROnPlateau(
            optimizer, mode='min', factor=0.2, threshold=0.01, patience=5)
    else:
        raise NotImplementedError('learning rate policy [%s] is not implemented'
                                  % opt.lr_policy)
    return scheduler
def get_norm_layer(norm_type='instance'):
    """Return a normalization-layer factory for the given type.

    'batch'    -> BatchNorm2d with learnable affine parameters,
    'instance' -> InstanceNorm2d (no affine, tracks running stats),
    'none'     -> None.

    Raises:
        NotImplementedError: for any other norm_type.
    """
    if norm_type == 'none':
        return None
    if norm_type == 'batch':
        return functools.partial(nn.BatchNorm2d, affine=True)
    if norm_type == 'instance':
        return functools.partial(
            nn.InstanceNorm2d, affine=False, track_running_stats=True)
    raise NotImplementedError('normalization layer [%s] is not found' %
                              norm_type)
def init_net(net, init_type='normal', init_gain=0.02, gpu_ids=[]):
    """Move `net` to GPU(s) if gpu_ids is non-empty (wrapping it in
    DataParallel), then initialise its weights with `init_type`.

    Note: the mutable default for gpu_ids is kept for interface
    compatibility; it is never mutated here.
    """
    if gpu_ids:
        assert torch.cuda.is_available()
        net.to(gpu_ids[0])
        net = torch.nn.DataParallel(net, gpu_ids)
    init_weights(net, init_type, gain=init_gain)
    return net
def print_network(net_):
    """Print the model's architecture followed by its total parameter count."""
    total = sum(param.numel() for param in net_.parameters())
    print(net_)
    print('Total number of parameters: %d' % total)
##############################################################################
# Classes
##############################################################################
class LaplacianLayer(nn.Module):
    """3x3 Laplacian filter applied over the last two dimensions.

    `w_nom` is the standard 4-neighbour Laplacian kernel; `w_den` is its
    all-positive counterpart, used as a local normaliser when
    do_normalize=True.

    Fix: the kernels are now registered as CPU buffers and moved to the
    input's device on the fly, instead of being created with a hard-coded
    `.cuda()` call in `__init__` (which made the module unusable on
    CPU-only machines). GPU callers are unaffected.
    """

    def __init__(self):
        super(LaplacianLayer, self).__init__()
        w_nom = torch.FloatTensor([[0, -1, 0], [-1, 4, -1],
                                   [0, -1, 0]]).view(1, 1, 3, 3)
        w_den = torch.FloatTensor([[0, 1, 0], [1, 4, 1],
                                   [0, 1, 0]]).view(1, 1, 3, 3)
        # register_buffer makes the kernels follow module.to()/.cuda().
        self.register_buffer('w_nom', w_nom)
        self.register_buffer('w_den', w_den)

    def forward(self, input, do_normalize=True):
        """Return |Laplacian(input)|, optionally locally normalised.

        Accepts 2D (H, W), 3D (N, H, W) or 4D (N, C, H, W) input; the
        spatial size shrinks by 2 in each dimension (no padding).
        """
        assert (input.dim() == 2 or input.dim() == 3 or input.dim() == 4)
        input_size = input.size()
        # Flatten to (M, 1, H, W) so a single conv kernel serves every layout.
        if input.dim() == 4:
            x = input.view(input_size[0] * input_size[1], 1, input_size[2],
                           input_size[3])
        elif input.dim() == 3:
            x = input.unsqueeze(1)
        else:
            x = input.unsqueeze(0).unsqueeze(0)
        x_nom = torch.nn.functional.conv2d(
            input=x, weight=self.w_nom.to(input.device), stride=1, padding=0)
        if do_normalize:
            x_den = torch.nn.functional.conv2d(
                input=x, weight=self.w_den.to(input.device), stride=1,
                padding=0)
            # x_den = x.std() + 1e-5
            x = (x_nom.abs() / x_den)
        else:
            x = x_nom.abs()
        # Restore the caller's layout (spatial dims shrunk by 2).
        if input.dim() == 4:
            return x.view(input_size[0], input_size[1], input_size[2] - 2,
                          input_size[3] - 2)
        elif input.dim() == 3:
            return x.squeeze(1)
        elif input.dim() == 2:
            return x.squeeze(0).squeeze(0)
class JointLoss(nn.Module):
def __init__(self, opt):
super(JointLoss, self).__init__()
self.opt = opt
self.w_si_mse = 1.0
self.w_l1_rel = 1.0
self.w_confidence = 1.0
self.w_grad = 0.75
self.w_sm = 0.1
self.w_sm1 = 0.075
self.w_sm2 = 0.1
self.w_normal = 0.5
self.num_scales = 5
self.total_loss = None
self.laplacian_func = LaplacianLayer()
def LaplacianSmoothnessLoss(self, depth, img):
img_lap = self.laplacian_func(img, do_normalize=False)
depth_lap = self.laplacian_func(depth, do_normalize=False)
x = (-img_lap.mean(1)).exp() * (depth_lap)
return x.mean()
def compute_image_aware_2nd_smoothness_cost(self, depth, img):
depth_grad_x, depth_grad_y = gradient(depth, do_normalize=False)
depth_grad_x2, depth_grad_xy = gradient(
depth_grad_x, do_normalize=False)
depth_grad_yx, depth_grad_y2 = gradient(
depth_grad_y, do_normalize=False)
return depth_grad_x2.abs().mean() \
+ depth_grad_xy.abs().mean() + depth_grad_yx.abs().mean() + \
depth_grad_y2.abs().mean()
def compute_image_aware_1st_smoothness_cost(self, depth, img):
depth_grad_x, depth_grad_y = gradient(depth, do_normalize=False)
img_grad_x, img_grad_y = gradient(img, do_normalize=False)
if img.dim() == 3:
weight_x = torch.exp(-img_grad_x.abs().mean(0))
weight_y = torch.exp(-img_grad_y.abs().mean(0))
cost = ((depth_grad_x.abs() * weight_x)[:-1, :] +
(depth_grad_y.abs() * weight_y)[:, :-1]).mean()
else:
weight_x = torch.exp(-img_grad_x.abs().mean(1))
weight_y = torch.exp(-img_grad_y.abs().mean(1))
cost = ((depth_grad_x.abs() * weight_x)[:, :-1, :] +
(depth_grad_y.abs() * weight_y)[:, :, :-1]).mean()
return cost
def SecondOrderLoss(self, log_pred_d, mask, log_gt):
N = torch.sum(mask) + 1e-6
v_pred_lap = log_pred_d[:-2, :] - 2 * \
log_pred_d[1:-1, :] + log_pred_d[2:, :]
v_gt_lap = log_gt[:-2, :] - 2 * log_gt[1:-1, :] + log_gt[2:, :]
v_diff = torch.abs(v_pred_lap - v_gt_lap)
v_mask = torch.mul(torch.mul(mask[:-2, :], mask[2:, :]), mask[1:-1, :])
v_lap_term = torch.mul(v_diff, v_mask)
h_pred_lap = log_pred_d[:, :-2] - 2 * \
log_pred_d[:, 1:-1] + log_pred_d[:, 2:]
h_gt_lap = log_gt[:, :-2] - 2 * log_gt[:, 1:-1] + log_gt[:, 2:]
h_diff = torch.abs(h_pred_lap - h_gt_lap)
h_mask = torch.mul(torch.mul(mask[:, :-2], mask[:, 2:]), mask[:, 1:-1])
h_lap_term = torch.mul(h_diff, h_mask)
second_order_term = torch.sum(v_lap_term) + torch.sum(h_lap_term)
second_order_term = second_order_term / N
return second_order_term
def GradientLoss(self, log_prediction_d, mask, log_gt):
log_d_diff = log_prediction_d - log_gt
v_gradient = torch.abs(log_d_diff[:, :-2, :] - log_d_diff[:, 2:, :])
v_mask = torch.mul(mask[:, :-2, :], mask[:, 2:, :])
v_gradient = torch.mul(v_gradient, v_mask)
h_gradient = torch.abs(log_d_diff[:, :, :-2] - log_d_diff[:, :, 2:])
h_mask = torch.mul(mask[:, :, :-2], mask[:, :, 2:])
h_gradient = torch.mul(h_gradient, h_mask)
N = torch.sum(h_mask) + torch.sum(v_mask) + EPSILON
gradient_loss = torch.sum(h_gradient) + torch.sum(v_gradient)
gradient_loss = gradient_loss / N
return gradient_loss
def DeMonGradientLoss(self, prediction_d, mask, gt_d):
v_mask = torch.mul(mask[:, :-2, :], mask[:, 2:, :])
v_grad_pred = (prediction_d[:, :-2, :] - prediction_d[:, 2:, :]) / (
prediction_d[:, :-2, :] + prediction_d[:, 2:, :] + EPSILON)
v_grad_gt = (gt_d[:, :-2, :] - gt_d[:, 2:, :]) / (
gt_d[:, :-2, :] + gt_d[:, 2:, :] + EPSILON)
v_grad_term = v_mask * torch.abs(v_grad_pred - v_grad_gt)
h_mask = torch.mul(mask[:, :, :-2], mask[:, :, 2:])
h_grad_pred = (prediction_d[:, :, :-2] - prediction_d[:, :, 2:]) / (
prediction_d[:, :, :-2] + prediction_d[:, :, 2:] + EPSILON)
h_grad_gt = (gt_d[:, :, :-2] - gt_d[:, :, 2:]) / (
gt_d[:, :, :-2] + gt_d[:, :, 2:] + EPSILON)
h_grad_term = h_mask * torch.abs(h_grad_pred - h_grad_gt)
N = torch.sum(h_mask) + torch.sum(v_mask) + EPSILON
gradient_loss = torch.sum(v_grad_term) + torch.sum(h_grad_term)
gradient_loss = gradient_loss / N
return gradient_loss
def Data_Loss(self, log_prediction_d, mask, log_gt):
N = torch.sum(mask) + EPSILON
log_d_diff = log_prediction_d - log_gt
log_d_diff = torch.mul(log_d_diff, mask)
s1 = torch.sum(torch.pow(log_d_diff, 2)) / N
s2 = (torch.sum(log_d_diff) * torch.sum(log_d_diff)) / (N * N)
data_loss = s1 - s2
return data_loss
def Confidence_Loss(self, pred_confidence, mask, pred_d, gt_d):
# using least square to find scaling factor
N = torch.sum(mask) + EPSILON
N = N.item()
if N > 0.5:
scale_factor = torch.median(
gt_d.data[mask.data > 0.1] /
(pred_d.data[mask.data > 0.1] + EPSILON)).item()
pred_d_aligned = pred_d * scale_factor
error = torch.abs(pred_d_aligned.data -
gt_d.data) / (gt_d.data + EPSILON)
error = torch.exp(-error * 2.0)
error_var = autograd.Variable(error, requires_grad=False)
u_loss = mask * torch.abs(pred_confidence - error_var)
confidence_term = torch.sum(u_loss) / N
else:
confidence_term = 0.0
return confidence_term
def Normal_Loss(self, pred_d, gt_d, mask, normalized_p_3d, p_3d_gt):
p_3d_pred = normalized_p_3d * pred_d.unsqueeze(1).repeat(1, 2, 1, 1)
x_mask = mask[:, 1:-1, :-2] * mask[:, 1:-1, 2:]
y_mask = mask[:, :-2, 1:-1] * mask[:, 2:, 1:-1]
final_mask = y_mask * x_mask
grad_x_pred = (pred_d[:, 1:-1, :-2] - pred_d[:, 1:-1, 2:]) / (
p_3d_pred[:, 0, 1:-1, :-2] - p_3d_pred[:, 0, 1:-1, 2:] + EPSILON)
grad_x_gt = (gt_d[:, 1:-1, :-2] - gt_d[:, 1:-1, 2:]) / (
p_3d_gt[:, 0, 1:-1, :-2] - p_3d_gt[:, 0, 1:-1, 2:] + EPSILON)
grad_y_pred = (pred_d[:, :-2, 1:-1] - pred_d[:, 2:, 1:-1]) / (
p_3d_pred[:, 1, :-2, 1:-1] - p_3d_pred[:, 1, 2:, 1:-1] + EPSILON)
grad_y_gt = (gt_d[:, :-2, 1:-1] - gt_d[:, 2:, 1:-1]) / (
p_3d_gt[:, 1, :-2, 1:-1] - p_3d_gt[:, 1, 2:, 1:-1] + EPSILON)
norm_pred = torch.sqrt(grad_x_pred * grad_x_pred +
grad_y_pred * grad_y_pred + 1.0) + EPSILON
norm_gt = torch.sqrt(grad_x_gt * grad_x_gt + grad_y_gt * grad_y_gt +
1.0) + EPSILON
dot_product = grad_x_gt * grad_x_pred + grad_y_gt * grad_y_pred + 1.0
dot_product = dot_product * final_mask
N = torch.sum(final_mask) + EPSILON
normal_term = dot_product / (norm_pred * norm_gt)
normal_term = 1.0 - torch.sum(normal_term) / N
return normal_term
def Weighted_Data_Loss(self, log_prediction_d, w_mask, log_gt):
log_d_diff = log_prediction_d - log_gt
wx_2 = torch.sum(w_mask * torch.pow(log_d_diff, 2))
wx = torch.sum(w_mask * log_d_diff)
w_sum = torch.sum(w_mask)
total_term = w_sum * wx_2 - torch.pow(wx, 2)
N = w_sum * w_sum + EPSILON
return total_term / N
def L1_inv_loss(self, pred_d_aligned, mask, depth_gt):
l1_inv_error = torch.abs(1. / (depth_gt + EPSILON) - 1. /
(EPSILON + pred_d_aligned))
l1_inv_error = l1_inv_error * mask
num_valid_pixels = torch.sum(mask) + EPSILON
return torch.sum(l1_inv_error) / num_valid_pixels
def L1RelLoss(self, pred_d_aligned, mask, depth_gt):
l1_rel_error = torch.abs(depth_gt - pred_d_aligned)
l1_rel_error = l1_rel_error * mask
l1_rel_error = l1_rel_error / \
torch.abs(depth_gt + pred_d_aligned + 1e-8)
num_valid_pixels = torch.sum(mask) + EPSILON
return torch.sum(l1_rel_error) / num_valid_pixels
def L1_rel_loss(self, pred_d_aligned, mask, depth_gt):
l1_rel_error = torch.abs(depth_gt - pred_d_aligned)
l1_rel_error = l1_rel_error * mask
l1_rel_error = l1_rel_error / torch.abs(depth_gt + 1e-8)
num_valid_pixels = torch.sum(mask) + EPSILON
return torch.sum(l1_rel_error) / num_valid_pixels
def compute_si_rmse(self, pred_log_d, targets):
gt_mask = targets['gt_mask'].cuda()
log_d_gt = torch.log(targets['depth_gt'].cuda())
env_mask = targets['env_mask'].cuda()
human_gt_mask = (1.0 - env_mask) * gt_mask
env_gt_mask = env_mask * gt_mask
si_rmse_full = 0.0
si_rmse_human = 0.0
si_rmse_env = 0.0
si_rmse_inter = 0.0
si_rmse_intra = 0.0
# compute full error
for i in range(0, gt_mask.size(0)):
mse_full = self.Data_Loss(pred_log_d[i, :, :], gt_mask[i, :, :],
log_d_gt[i, :, :])
mse_env = self.Data_Loss(pred_log_d[i, :, :], env_gt_mask[i, :, :],
log_d_gt[i, :, :])
mse_intra = self.Data_Loss(pred_log_d[i, :, :], human_gt_mask[i, :, :],
log_d_gt[i, :, :])
# compute human error
n_full = torch.sum(gt_mask[i, :, :])
n_human = torch.sum(human_gt_mask[i, :, :])
n_env = torch.sum(env_gt_mask[i, :, :])
log_diff = pred_log_d[i, :, :] - log_d_gt[i, :, :]
log_diff_mask = log_diff * gt_mask[i, :, :]
# full human error
sum_sq_log_diff = torch.sum(torch.pow(log_diff_mask, 2))
sum_log_diff = torch.sum(log_diff_mask)
per_pixel_error = n_full * torch.pow(
log_diff, 2) + sum_sq_log_diff - 2 * sum_log_diff * log_diff
per_pixel_error = per_pixel_error * human_gt_mask[i, :, :]
mse_human = torch.sum(per_pixel_error) / \
(n_human * n_full + EPSILON)
# inter class mse error
log_diff_env_mask = log_diff * env_gt_mask[i, :, :]
sum_sq_log_env_diff = torch.sum(torch.pow(log_diff_env_mask, 2))
sum_log_env_diff = torch.sum(log_diff_env_mask)
inter_error = n_env * torch.pow(
log_diff, 2) + sum_sq_log_env_diff - 2 * sum_log_env_diff * log_diff
inter_error = inter_error * human_gt_mask[i, :, :]
mse_inter = torch.sum(inter_error) / (n_human * n_env + EPSILON)
si_rmse_full += torch.sqrt(2.0 * mse_full)
si_rmse_human += torch.sqrt(mse_human)
si_rmse_env += torch.sqrt(2.0 * mse_env)
si_rmse_intra += torch.sqrt(2.0 * mse_intra)
si_rmse_inter += torch.sqrt(mse_inter)
return si_rmse_full, si_rmse_human, si_rmse_env, si_rmse_intra, si_rmse_inter
def compute_l1_rel_error(self, pred_d, targets):
gt_mask = targets['gt_mask']
d_gt = targets['depth_gt']
rel_full = 0.
for i in range(0, gt_mask.size(0)):
gt_d_np = d_gt[i, :, :].cpu().numpy()
pred_d_np = pred_d[i, :, :].cpu().numpy()
gt_mask_np = gt_mask[i, :, :].cpu().numpy()
scale_factor = np.linalg.lstsq(
np.expand_dims(pred_d_np[gt_mask_np > 1e-8], axis=-1),
gt_d_np[gt_mask_np > 1e-8])
scale_factor = scale_factor[0][0]
pred_d_aligned_np = pred_d_np * scale_factor
total_full_rel = np.sum(gt_mask_np * np.abs(gt_d_np - pred_d_aligned_np) /
(gt_d_np + EPSILON))
rel_full += total_full_rel / (np.sum(gt_mask_np) + EPSILON)
return rel_full
def compute_rmse_error(self, pred_d, targets):
gt_mask = targets['gt_mask']
d_gt = targets['depth_gt']
rmse_full = 0.
for i in range(0, gt_mask.size(0)):
gt_d_np = d_gt[i, :, :].cpu().numpy()
pred_d_np = pred_d[i, :, :].cpu().numpy()
gt_mask_np = gt_mask[i, :, :].cpu().numpy()
scale_factor = np.linalg.lstsq(
np.expand_dims(pred_d_np[gt_mask_np > 1e-8], axis=-1),
gt_d_np[gt_mask_np > 1e-8])
scale_factor = scale_factor[0][0]
pred_d_aligned_np = pred_d_np * scale_factor
total_full_rmse = np.sum(gt_mask_np *
np.square(gt_d_np - pred_d_aligned_np))
rmse_full += np.sqrt(total_full_rmse /
(np.sum(gt_mask_np) + EPSILON))
return rmse_full
def Data_Human_Loss(self, pred_log_d, gt_mask, human_gt_mask, log_d_gt):
n_full = torch.sum(gt_mask)
n_human = torch.sum(human_gt_mask)
log_diff = pred_log_d - log_d_gt
log_diff_mask = log_diff * gt_mask
sum_sq_log_diff = torch.sum(torch.pow(log_diff_mask, 2))
sum_log_diff = torch.sum(log_diff_mask)
inter_error = n_full * torch.pow(
log_diff, 2) + sum_sq_log_diff - 2 * sum_log_diff * log_diff
inter_error = inter_error * human_gt_mask
mse_human = torch.sum(inter_error) / (n_human * n_full + EPSILON)
mse_human = mse_human / 2.0
return mse_human
def __call__(self, input_images, log_pred_d_0, pred_confidence, targets):
log_pred_d_1 = log_pred_d_0[:, ::2, ::2]
log_pred_d_2 = log_pred_d_1[:, ::2, ::2]
log_pred_d_3 = log_pred_d_2[:, ::2, ::2]
log_pred_d_4 = log_pred_d_3[:, ::2, ::2]
input_0 = input_images
input_1 = input_0[:, :, ::2, ::2]
input_2 = input_1[:, :, ::2, ::2]
input_3 = input_2[:, :, ::2, ::2]
input_4 = input_3[:, :, ::2, ::2]
d_gt_0 = autograd.Variable(targets['depth_gt'].cuda(), requires_grad=False)
log_d_gt_0 = torch.log(d_gt_0)
log_d_gt_1 = log_d_gt_0[:, ::2, ::2]
log_d_gt_2 = log_d_gt_1[:, ::2, ::2]
log_d_gt_3 = log_d_gt_2[:, ::2, ::2]
log_d_gt_4 = log_d_gt_3[:, ::2, ::2]
gt_mask = autograd.Variable(targets['gt_mask'].cuda(), requires_grad=False)
human_mask = 1.0 - \
autograd.Variable(targets['env_mask'].cuda(), requires_grad=False)
human_gt_mask = human_mask * gt_mask
mask_0 = gt_mask
mask_1 = mask_0[:, ::2, ::2]
mask_2 = mask_1[:, ::2, ::2]
mask_3 = mask_2[:, ::2, ::2]
mask_4 = mask_3[:, ::2, ::2]
data_term = 0.0
grad_term = 0.0
sm_term = 0.0
confidence_term = 0.0
num_samples = mask_0.size(0)
for i in range(0, num_samples):
if self.opt.human_data_term > 0.1:
data_term += (self.w_si_mse / num_samples * self.Data_Loss(
log_pred_d_0[i, :, :], mask_0[i, :, :], log_d_gt_0[i, :, :]))
data_term += (self.w_si_mse / num_samples * 0.5 * self.Data_Human_Loss(
log_pred_d_0[i, :, :], mask_0[i,
:, :], human_gt_mask[i, :, :],
log_d_gt_0[i, :, :]))
else:
data_term += (self.w_si_mse / num_samples * 1.5 * self.Data_Loss(
log_pred_d_0[i, :, :], mask_0[i, :, :], log_d_gt_0[i, :, :]))
grad_term += self.w_grad * self.GradientLoss(log_pred_d_0, mask_0,
log_d_gt_0)
grad_term += self.w_grad * self.GradientLoss(log_pred_d_1, mask_1,
log_d_gt_1)
grad_term += self.w_grad * self.GradientLoss(log_pred_d_2, mask_2,
log_d_gt_2)
grad_term += self.w_grad * self.GradientLoss(log_pred_d_3, mask_3,
log_d_gt_3)
grad_term += self.w_grad * self.GradientLoss(log_pred_d_4, mask_4,
log_d_gt_4)
sm_term += self.w_sm1 * self.compute_image_aware_1st_smoothness_cost(
log_pred_d_0, input_0)
sm_term += (self.w_sm1 * 0.5 * self.compute_image_aware_1st_smoothness_cost(
log_pred_d_1, input_1))
sm_term += (self.w_sm1 * 0.25 * self.compute_image_aware_1st_smoothness_cost(
log_pred_d_2, input_2))
sm_term += (self.w_sm1 * 0.125 * self.compute_image_aware_1st_smoothness_cost(
log_pred_d_3, input_3))
sm_term += (self.w_sm1 * 0.0625 * self.compute_image_aware_1st_smoothness_cost(
log_pred_d_4, input_4))
sm_term += self.w_sm2 * \
self.LaplacianSmoothnessLoss(log_pred_d_0, input_0)
sm_term += self.w_sm2 * 0.5 * self.LaplacianSmoothnessLoss(
log_pred_d_1, input_1)
sm_term += self.w_sm2 * 0.25 * self.LaplacianSmoothnessLoss(
log_pred_d_2, input_2)
sm_term += self.w_sm2 * 0.125 * self.LaplacianSmoothnessLoss(
log_pred_d_3, input_3)
sm_term += self.w_sm2 * 0.0625 * self.LaplacianSmoothnessLoss(
log_pred_d_4, input_4)
print('data_term %f' % data_term.item())
print('grad_term %f' % grad_term.item())
print('sm_term %f' % sm_term.item())
total_loss = data_term + grad_term + sm_term + confidence_term
self.total_loss = total_loss
return total_loss.item()
def get_loss_var(self):
    # Accessor for the total loss tensor (data + gradient + smoothness
    # [+ confidence] terms) cached by the most recent loss computation.
    return self.total_loss
|
[
"noreply@github.com"
] |
bishwasregmi.noreply@github.com
|
98d46d87899a252a811104dab6f0dcbaa9fdc66f
|
771fc0f4bd8700217e153b222d772655e42baf96
|
/.github/scripts/testSync.py
|
76c71e24758dd08111384ef71d416d2fd78b87cd
|
[
"LicenseRef-scancode-unknown-license-reference",
"Apache-2.0"
] |
permissive
|
zyfjeff/juicefs
|
e09f4255613a7714e219ee571f9289e014c364c4
|
95ea32bed332245fe4b58425f80176d4da51212a
|
refs/heads/main
| 2023-05-14T14:25:38.843139
| 2023-04-10T08:37:54
| 2023-04-10T08:37:54
| 329,810,048
| 0
| 3
|
Apache-2.0
| 2023-03-20T06:38:14
| 2021-01-15T04:39:17
|
Go
|
UTF-8
|
Python
| false
| false
| 4,915
|
py
|
import subprocess
import random
import shutil
from hypothesis import given, strategies as st, settings, example
import os
# Tree used as sync test data.  NOTE(review): the first assignment (a
# developer's absolute path) is dead -- the second one wins.
JFS_SOURCE_DIR='/Users/chengzhou/Documents/juicefs/pkg/'
JFS_SOURCE_DIR='jfs_source/pkg/'
# Mount point of the juicefs volume created by setup().
MOUNT_POINT='/tmp/sync-test/'
# Binary under test; the beta paths are dead assignments kept as history.
JFS_BIN='./juicefs-1.0.0-beta1'
JFS_BIN='./juicefs-1.0.0-beta2'
JFS_BIN='./juicefs-1.0.0-beta3'
JFS_BIN='./juicefs'
# Number of hypothesis examples generated per property test.
MAX_EXAMPLE=100
def setup():
    """Create a fresh juicefs volume backed by a local sqlite meta DB,
    mount it, and seed it with the juicefs source tree used as test data."""
    meta_url = 'sqlite3://abc.db'
    volume_name='sync-test'
    # Drop any state left over from a previous run: meta DB, mount, cache.
    if os.path.isfile('abc.db'):
        os.remove('abc.db')
    if os.path.exists(MOUNT_POINT):
        os.system('umount %s'%MOUNT_POINT)
    cache_dir = os.path.expanduser('~/.juicefs/local/%s/'%volume_name)
    if os.path.exists(cache_dir):
        try:
            shutil.rmtree(cache_dir)
        except OSError as e:
            # Best effort: report but continue; format/mount may still work.
            print("Error: %s : %s" % (cache_dir, e.strerror))
    subprocess.check_call([JFS_BIN, 'format', meta_url, volume_name])
    subprocess.check_call([JFS_BIN, 'mount', '-d', meta_url, MOUNT_POINT])
    subprocess.check_call([JFS_BIN, 'sync', JFS_SOURCE_DIR, MOUNT_POINT+'jfs_source/'])
def generate_all_entries(root_dir):
    """Collect include/exclude pattern candidates under *root_dir*.

    Every directory contributes its bare name plus a trailing '/'; every
    file contributes both its bare name and its path relative to
    *root_dir* (which is expected to end with a path separator).  The set
    size is printed for visibility before returning.
    """
    collected = set()
    for dirpath, dirnames, filenames in os.walk(root_dir):
        collected.update(name + '/' for name in dirnames)
        for fname in filenames:
            collected.add(fname)
            collected.add(os.path.join(dirpath, fname)[len(root_dir):])
    print(len(collected))
    return collected
def generate_nested_dir(root_dir):
    """For every directory under *root_dir*, build the suffixes of its
    relative path (e.g. a/b/c -> 'a/b/c/', 'b/c/', 'c/') and append random
    samples of them.  Non-deterministic by design (fuzzing input)."""
    result = []
    for root, dirs, files in os.walk(root_dir):
        for d in dirs:
            dir = os.path.join(root, d)[len(root_dir):]
            li = dir.split('/')
            entries = []
            for i in range(0, len(li)):
                entries.append('/'.join(li[i:])+'/')
            for i in range(0, len(entries)):
                # Up to 5 randomly chosen suffixes per path component.
                result.append(random.sample(entries, random.randint(0, min(len(entries), 5)) ))
    print(result)
    return result
def change_entry(entries):
    """Turn entry names into randomised rsync-style filter options.

    Each entry becomes an ('--include'|'--exclude', "'pattern'") pair,
    where up to two occurrences of one randomly chosen character are
    replaced with '*' or '?'.  Non-deterministic by design.
    """
    # entries = random.sample( entries, random.randint(0, min(len(entries), 5)) )
    options = []
    for entry in entries:
        type = random.choice(['--include', '--exclude'])
        value = entry.replace(random.choice(entry), random.choice(['*', '?']), random.randint(0,2))
        # print(type+' '+value)
        options.append( (type, "'%s'"%value) )
    # print(options)
    return options
# Hypothesis strategies: each draws names (real paths, nested dir suffixes,
# or arbitrary unicode text) and maps them to --include/--exclude option
# pairs; empty option lists are filtered out as vacuous.
all_entry = generate_all_entries(JFS_SOURCE_DIR)
st_all_entry = st.lists(st.sampled_from(list(all_entry))).map(lambda x: change_entry(x)).filter(lambda x: len(x) != 0)
nested_dir = generate_nested_dir(JFS_SOURCE_DIR)
st_nested_dir = st.sampled_from(nested_dir).map(lambda x: change_entry(x)).filter(lambda x: len(x) != 0)
valid_name = st.text(st.characters(max_codepoint=1000, blacklist_categories=('Cc', 'Cs')), min_size=2).map(lambda s: s.strip()).filter(lambda s: len(s) > 0)
st_random_text = st.lists(valid_name).map(lambda x: change_entry(x)).filter(lambda x: len(x) != 0)
# Fuzz: filter patterns built from arbitrary unicode text (stresses the
# pattern parser; the @example pins a known-tricky unbalanced bracket).
@given(sync_options=st_random_text)
@example([['--include', '[*'] ])
@settings(max_examples=MAX_EXAMPLE, deadline=None)
def test_sync_with_random_text(sync_options):
    print(sync_options)
    compare_rsync_and_juicesync(sync_options)
# Fuzz: filter patterns derived from real file/dir names in the source tree.
@given(sync_options=st_all_entry)
@settings(max_examples=MAX_EXAMPLE, deadline=None)
def test_sync_with_path_entry(sync_options):
    compare_rsync_and_juicesync(sync_options)
# Fuzz: filter patterns derived from nested directory suffixes; the
# @example pins a known mixed include/exclude regression case.
@given(sync_options=st_nested_dir)
@example([ ['--include', 'chu*/'], ['--exclude', 'pk*/'], ['--exclude', '*.go'] ])
@settings(max_examples=MAX_EXAMPLE, deadline=None)
def test_sync_with_nested_dir(sync_options):
    compare_rsync_and_juicesync(sync_options)
def compare_rsync_and_juicesync(sync_options):
    """Run the same sync with rsync and with `juicefs sync`, then diff.

    *sync_options* is a list of (flag, pattern) pairs; it is flattened
    into a single argument list handed to both tools.  The test passes
    only if the two destination trees are byte-identical.
    """
    # Bug fix: the original `assert sync_options != 0` compared a list with
    # an int and therefore always passed; assert non-emptiness instead.
    assert len(sync_options) != 0
    sync_options = [item for sublist in sync_options for item in sublist]
    do_rsync(MOUNT_POINT+'jfs_source/', 'rsync_dir/', sync_options)
    do_juicesync(MOUNT_POINT+'jfs_source/', 'juicesync_dir/', sync_options)
    # diff returns non-zero on any difference between the two trees.
    diff_result = os.system('diff -ur juicesync_dir rsync_dir')
    assert diff_result==0
def do_juicesync(source_dir, dest_dir, sync_options):
    """Sync *source_dir* into a freshly created *dest_dir* with `juicefs sync`.

    The destination is wiped first so stale files cannot mask differences;
    any subprocess failure is converted into a test assertion failure.
    """
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.makedirs(dest_dir)
    cmd = [JFS_BIN, 'sync', '--dirs', source_dir, dest_dir] + sync_options
    print('juicesync_cmd: ' + ' '.join(cmd))
    try:
        subprocess.check_call(cmd)
    except Exception as e:
        assert False
def do_rsync(source_dir, dest_dir, sync_options):
    """Sync *source_dir* into a freshly created *dest_dir* with rsync.

    Mirrors do_juicesync: clean destination, echo the command line, and
    turn any subprocess failure into an assertion failure.
    """
    if os.path.exists(dest_dir):
        shutil.rmtree(dest_dir)
    os.makedirs(dest_dir)
    cmd = ['rsync', '-a', '-r', source_dir, dest_dir] + sync_options
    print('rsync_cmd: ' + ' '.join(cmd))
    try:
        subprocess.check_call(cmd)
    except Exception as e:
        assert False
if __name__ == "__main__":
setup()
test_sync_with_random_text()
test_sync_with_nested_dir()
test_sync_with_path_entry()
|
[
"noreply@github.com"
] |
zyfjeff.noreply@github.com
|
69bb7644389703eb52a97e789cabe71b7cc4d5ac
|
1a7e6b0f6281c7705e75e4ec57520388e9eac0bc
|
/multipool/multiappend_tofile_py2.py
|
9804c62512aeb716b72826f99dc78b246084344b
|
[] |
no_license
|
drafski89/useful-python
|
139954cf521c4eec1c0bab3420185c6612c6fbd6
|
ebc3ff2f3ab89b1b9e4fb1c051564baddbeec8e8
|
refs/heads/master
| 2021-09-06T21:27:14.766923
| 2018-02-11T17:50:59
| 2018-02-11T17:50:59
| 108,578,058
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,885
|
py
|
#!/usr/bin/env python
# Imports
import multiprocessing
from multiprocessing import Lock
import time
OUTPUT_FILE_NAME = "multiappend_results"
# Constant for the count function to count to
# Higher number will show greater differences in multithreading
COUNT_TO_NUMBER = 10000000
# count function
# Purpose: CPU-bound busy work -- count from 1 up to COUNT_TO_NUMBER
# Inputs: None
# Returns: void
def count():
    """Increment a counter until it reaches COUNT_TO_NUMBER (pure CPU load)."""
    value = 1
    while value < COUNT_TO_NUMBER:
        value += 1
# check_processes function
# Purpose: Check to see if any jobs in a list are alive
# Inputs: list_of_jobs - List of jobs created
# Returns: True if any job is alive, otherwise False
def check_processes(list_of_jobs):
    """Report whether at least one job in *list_of_jobs* is still running."""
    return any(job.is_alive() for job in list_of_jobs)
# multiple_cores function
# Purpose: Start many counters in different cores
# Inputs: NUMBER_ITERATIONS - Total number of iterations to perform
# Returns: void
def multiple_cores(NUMBER_ITERATIONS):
    jobs_local = []
    # For each job in the total number of iterations we will run
    for job_count in range (NUMBER_ITERATIONS):
        # Hand off the job id into the counting method
        next_process = multiprocessing.Process(target=count)
        # Start the job in the queue
        next_process.start()
        # Append it to the queue to be processed
        jobs_local.append(next_process)
    # While the last job is alive (so while we still have counts to run)
    # Infinite loop as long as the last job has not completed yet
    # NOTE(review): this polls every 10ms; joining each process would avoid
    # the busy-wait, but polling is kept to preserve timing behaviour.
    while check_processes(jobs_local):
        time.sleep(0.01)
        pass
# single_core function
# Purpose: Start a counter in a single core many times iteratively
# Inputs: NUMBER_ITERATIONS - Total number of iterations to perform
# Returns: void
def single_core(NUMBER_ITERATIONS):
    """Run the counting workload NUMBER_ITERATIONS times back to back."""
    for _ in range(NUMBER_ITERATIONS):
        count()
# Experiment 1: workload scales with the run index (run * 4 jobs); compares
# parallel vs. serial wall time and logs the relative improvement as CSV.
def constant_per_core():
    # Open the output file
    with open(OUTPUT_FILE_NAME + "_constant_per_core.csv", "w") as output_file:
        output_file.write("CoreCount,ParallelTime,SerialTime,Improvement\n")
        for run in range(1, multiprocessing.cpu_count()+1):
            print "Constant per run: " + str(run)
            NUMBER_CORES = run
            ITERATIONS_PER_CORE = 4
            NUMBER_ITERATIONS = NUMBER_CORES * ITERATIONS_PER_CORE
            # Multiple cores
            multi_time_start = time.time()
            multiple_cores(NUMBER_ITERATIONS)
            multi_time_stop = time.time()
            # Single core
            single_time_start = time.time()
            single_core(NUMBER_ITERATIONS)
            single_time_stop = time.time()
            # Calculate total time used in multi and single workloads
            multi_time_total = multi_time_stop - multi_time_start
            single_time_total = single_time_stop - single_time_start
            # Percentage Improvement = (Single - Parallel) / Single
            improvement = (single_time_total - multi_time_total) / single_time_total
            # Write the data to the file
            output_file.write("" + str(run) + "," + str(multi_time_total) + "," + str(single_time_total) + "," + str(improvement) + "\n")
# Experiment 2: fixed workload of 16 jobs per run.  NOTE(review): the
# workload does not actually vary with `run`, so the CoreCount column
# repeats the same measurement cpu_count times.
def constant_total():
    # Open the output file
    with open(OUTPUT_FILE_NAME + "_constant_total.csv", "w") as output_file:
        output_file.write("CoreCount,ParallelTime,SerialTime,Improvement\n")
        for run in range(1, multiprocessing.cpu_count()+1):
            print "Constant Total, Run: " + str(run)
            NUMBER_ITERATIONS = 16
            # Multiple cores
            multi_time_start = time.time()
            multiple_cores(NUMBER_ITERATIONS)
            multi_time_stop = time.time()
            # Single core
            single_time_start = time.time()
            single_core(NUMBER_ITERATIONS)
            single_time_stop = time.time()
            # Calculate total time used in multi and single workloads
            multi_time_total = multi_time_stop - multi_time_start
            single_time_total = single_time_stop - single_time_start
            # Percentage Improvement = (Single - Parallel) / Single
            improvement = (single_time_total - multi_time_total) / single_time_total
            # Write the data to the file
            output_file.write("" + str(run) + "," + str(multi_time_total) + "," + str(single_time_total) + "," + str(improvement) + "\n")
# Main function
# Runs both timing experiments; Python 2 only (print statements).
if __name__ == '__main__':
    constant_per_core()
    constant_total()
    print "Completed!"
|
[
"brandt.andrew89@gmail.com"
] |
brandt.andrew89@gmail.com
|
bf59571c5aeef678657434afb693f5136d84e5e5
|
3fed8f90dce1bff81ad9157512335416c4715d9a
|
/KaggleIowa3.py
|
942d5e40f01518bf55d443a4d4a6a974fe9bd816
|
[] |
no_license
|
dtl333/Kaggle-IowaHousing
|
9a0bed9c0e8f5f542089d2b234601c799dd4bbfa
|
0ed2d4732c78952c7dd8759f47ee34c72cd06af7
|
refs/heads/master
| 2020-03-20T10:14:55.045267
| 2018-06-15T16:24:37
| 2018-06-15T16:24:37
| 137,363,947
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,255
|
py
|
import pandas as pd
from pandas.plotting import scatter_matrix
import numpy as np
import matplotlib.pyplot as plt
import sklearn
from sklearn.preprocessing import StandardScaler, CategoricalEncoder
from sklearn.linear_model import Ridge
from sklearn.ensemble import GradientBoostingRegressor
from sklearn.metrics import mean_squared_error
from sklearn.model_selection import cross_val_score, GridSearchCV
#Import our Iowas housing training dataset and testing dataset
train = pd.read_csv('/Users/dylanloughlin/Desktop/PythonTUT/IowaHousing/train.csv')
housing_test = pd.read_csv('/Users/dylanloughlin/Desktop/PythonTUT/IowaHousing/test.csv')
# Stack train (minus the target) and test so all cleaning applies to both.
housing_train_ = train.drop('SalePrice', axis=1)
housing_train = pd.concat([housing_train_, housing_test])
#housing_labels = train['SalePrice']
#print(housing_train.columns) #-- View our attributes(SalePrice is our target)
#STEP 1: LETS CLEANUP OUR DATA
#First lets deal with our NaN values in the datasets. We will use df_all so that our transformations will occur to both
cols_with_na = housing_train.isnull().sum()
cols_with_na = cols_with_na[cols_with_na>0]
#print(cols_with_na.sort_values(ascending=False))
#Lets define a variable with the features(columns) we want to fill
cols_fillna = ['PoolQC','MiscFeature','Alley','Fence','MasVnrType','FireplaceQu',
               'GarageQual','GarageCond','GarageFinish','GarageType',
               'BsmtExposure','BsmtCond','BsmtQual','BsmtFinType1','BsmtFinType2']
#Since the NaN in these columns simply means the house does not have that feature lets fill with 'None'
for col in cols_fillna:
    housing_train[col].fillna('None', inplace=True)
#For GarageYrBlt, NaN means the garage was built with the original house, so replace with YearBuilt
housing_train.loc[housing_train.GarageYrBlt.isnull(), 'GarageYrBlt'] = housing_train.loc[housing_train.GarageYrBlt.isnull(), 'YearBuilt']
#For MasVnrArea(Masonry Veneer) -- fill with 0
housing_train.MasVnrArea.fillna(0,inplace=True)
#For houses with no basements/garage, fill all basement/garage features with 0
# NOTE(review): attribute-chained fillna(..., inplace=True) relies on the
# intermediate Series being a view; newer pandas may not propagate -- verify.
housing_train.BsmtFullBath.fillna(0,inplace=True)
housing_train.BsmtHalfBath.fillna(0,inplace=True)
housing_train.BsmtFinSF1.fillna(0,inplace=True)
housing_train.BsmtFinSF2.fillna(0,inplace=True)
housing_train.BsmtUnfSF.fillna(0,inplace=True)
housing_train.TotalBsmtSF.fillna(0,inplace=True)
housing_train.GarageArea.fillna(0,inplace=True)
housing_train.GarageCars.fillna(0,inplace=True)
#Now to deal with LotFrontage, to fill these NaN values we use a LinearRegression Ridge model for the best estimates
#First convert categorical values to dummy values, and drop SalePrice. Then normalize columns to (0,1)
def scale_minmax(col):
    """Linearly rescale a numeric Series/array onto the [0, 1] interval."""
    lo, hi = col.min(), col.max()
    return (col - lo) / (hi - lo)
# Impute LotFrontage with a Ridge regression trained on all other columns.
housing_frontage = pd.get_dummies(housing_train)
for col in housing_frontage.drop('LotFrontage', axis=1).columns:
    housing_frontage[col] = scale_minmax(housing_frontage[col])
#Create our X and y values for LotFrontage to use in our Ridge Model
lf_train = housing_frontage.dropna()
lf_train_y = lf_train.LotFrontage
lf_train_X = lf_train.drop('LotFrontage', axis=1)
#Fit and predict our model
lr = Ridge()
lr.fit(lf_train_X, lf_train_y)
lf_pred = lr.predict(lf_train_X)
#Fill our NaN values with our model predictions
nan_frontage = housing_train.LotFrontage.isnull()
X = housing_frontage[nan_frontage].drop('LotFrontage',axis=1)
y = lr.predict(X)
housing_train.loc[nan_frontage,'LotFrontage'] = y
#Remaining NaN values:
cols_with_na = housing_train.isnull().sum()
cols_with_na = cols_with_na[cols_with_na>0]
#print(cols_with_na.sort_values(ascending=False))
rows_with_na = housing_train.isnull().sum(axis=1)
rows_with_na = rows_with_na[rows_with_na>0]
#print(rows_with_na.sort_values(ascending=False))
#Fill remaining nans with mode in that column
for col in cols_with_na.index:
    housing_train[col].fillna(housing_train[col].mode()[0], inplace=True)
#Now no more NaN values apart from SalePrice in test data (Missing SalePrice data is the data to be predicted)
#print(housing_train.info())
#Now lets deal with our different datatypes
cat_cols = [x for x in housing_train.columns if housing_train[x].dtype == 'object']
int_cols = [x for x in housing_train.columns if housing_train[x].dtype == 'int64']
float_cols = [x for x in housing_train.columns if housing_train[x].dtype == 'float64']
cat_cols.append('MSSubClass') #This appears categorical but was put in int_cols for some reason
remove_from_int = ['MSSubClass', 'Id']
int_cols = [x for x in int_cols if x not in remove_from_int]
num_cols = int_cols + float_cols
#Now to encode our categorical data with CategoricalEncoder
def encode_cat(dat):
    """One-hot encode a single categorical Series, returning a DataFrame
    with columns named '<series-name>_<category>'.

    NOTE(review): sklearn's CategoricalEncoder only existed briefly in the
    0.20 development tree and was replaced by OneHotEncoder before release;
    this code requires a pinned pre-release sklearn build.
    """
    cat_encoder = CategoricalEncoder(encoding='onehot-dense')
    dat = dat.astype('str')
    dat_reshaped = dat.values.reshape(-1,1)
    dat_1hot = cat_encoder.fit_transform(dat_reshaped)
    col_names = [dat.name + '_' + str(x) for x in list(cat_encoder.categories_[0])]
    return pd.DataFrame(dat_1hot, columns=col_names)
# Encode all categoricals, rejoin with the numeric columns, scale, and fit.
cat_df = pd.DataFrame()
for x in cat_cols:
    cat_df = pd.concat([cat_df, encode_cat(housing_train[x])], axis=1)
cat_df.index = housing_train.index
full_df = pd.concat([housing_train[num_cols], cat_df], axis=1)
#Now lets scale and preprocess our data for ML
ss = StandardScaler()
scaled_data = ss.fit_transform(full_df.values)
# NOTE(review): len(train) and the hard-coded 1460 are assumed equal
# (Kaggle train set size) -- confirm before reusing on other data.
train_processed = scaled_data[:len(train),:]
test_processed = scaled_data[1460:,:]
# Fit on log1p(SalePrice); predictions are mapped back with expm1 later.
log_sp = np.log1p(train['SalePrice'].values).ravel()
#Now to fit our model(Lets use GradientBoostingRegressor)
gbr_reg = GradientBoostingRegressor()
gbr_reg.fit(y=log_sp, X=train_processed)
gbr_pred = np.expm1(gbr_reg.predict(test_processed))
#Lets K-Fold cross validate
scores = cross_val_score(gbr_reg, train_processed, log_sp, scoring='neg_mean_squared_error', cv=10)
gbr_scores = np.sqrt(-scores)
def display_scores(scores):
    """Print a cross-validation score array with its mean and std-dev."""
    for label, value in (('Scores:', scores),
                         ('Mean', scores.mean()),
                         ('Standard deviation', scores.std())):
        print(label, value)
# Report CV scores.  NOTE: display_scores prints and returns None, so the
# outer print also emits "None"; kept for output compatibility.
print(display_scores(gbr_scores))
#Now lets use GridSearchCV to tune our models hyperparameters
param_grid = [
    {'n_estimators': [3,10,20,30,40,50], 'max_depth': [1,2,3,4,5,6,7,8]}
]
grid_search = GridSearchCV(gbr_reg, param_grid, cv=5, scoring='neg_mean_squared_error')
grid_search.fit(train_processed, log_sp)
# print(grid_search.best_params_)
# print(grid_search.best_estimator_)
#Use the tuned estimator (grid_search.best_estimator_) on our test set
final_model = GradientBoostingRegressor(alpha=0.9, criterion='friedman_mse', init=None,
             learning_rate=0.1, loss='ls', max_depth=5, max_features=None,
             max_leaf_nodes=None, min_impurity_decrease=0.0,
             min_impurity_split=None, min_samples_leaf=1,
             min_samples_split=2, min_weight_fraction_leaf=0.0,
             n_estimators=50, n_iter_no_change=None, presort='auto',
             random_state=None, subsample=1.0, tol=0.0001,
             validation_fraction=0.1, verbose=0, warm_start=False)
final_model.fit(y=log_sp, X=train_processed)
# Bug fix: the original predicted with the untuned gbr_reg, leaving the
# freshly fitted final_model unused; predict with final_model instead.
final_predictions = np.expm1(final_model.predict(test_processed))
print(final_predictions)
#my_submission4 = pd.DataFrame({'Id': housing_test.Id, 'SalePrice': final_predictions})
# print(my_submission4.head())
#my_submission4.to_csv('submission4.csv', index=False)
|
[
"noreply@github.com"
] |
dtl333.noreply@github.com
|
f50e456642266ed04cd8875d888da4b032735015
|
09e57dd1374713f06b70d7b37a580130d9bbab0d
|
/data/p4VQE/R1/benchmark/startQiskit_QC17.py
|
d9005b49ff121b30c7999f298fed729dee83a7b9
|
[
"BSD-3-Clause"
] |
permissive
|
UCLA-SEAL/QDiff
|
ad53650034897abb5941e74539e3aee8edb600ab
|
d968cbc47fe926b7f88b4adf10490f1edd6f8819
|
refs/heads/main
| 2023-08-05T04:52:24.961998
| 2021-09-19T02:56:16
| 2021-09-19T02:56:16
| 405,159,939
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,471
|
py
|
# qubit number=3
# total number=8
import numpy as np
from qiskit import QuantumCircuit, execute, Aer, QuantumRegister, ClassicalRegister, transpile, BasicAer, IBMQ
import networkx as nx
from qiskit.visualization import plot_histogram
from typing import *
from pprint import pprint
from math import log2
from collections import Counter
from qiskit.test.mock import FakeVigo, FakeYorktown
kernel = 'circuit/bernstein'
def make_circuit(n:int) -> QuantumCircuit:
    """Build the QAOA-style benchmark circuit on *n* qubits.

    NOTE(review): relies on module-level globals E, V, gamma and beta that
    are assigned only inside the __main__ block, so this must not be
    called before they exist.  Also note the mixed indexing: cp() uses
    k-1/l-1 while p() uses k/l directly -- for node 0 this addresses
    input_qubit[-1]; confirm this is intended.
    """
    # circuit begin
    input_qubit = QuantumRegister(n,"qc")
    prog = QuantumCircuit(input_qubit)
    prog.h(input_qubit[0]) # number=1
    prog.h(input_qubit[1]) # number=2
    prog.z(input_qubit[3]) # number=5
    prog.h(input_qubit[2]) # number=3
    prog.h(input_qubit[3]) # number=4
    # Cost layer: one controlled-phase per weighted edge (weight unused).
    for edge in E:
        k = edge[0]
        l = edge[1]
        prog.cp(-2 * gamma, input_qubit[k-1], input_qubit[l-1])
        prog.p(gamma, k)
        prog.p(gamma, l)
    # Mixer layer: uniform X-rotation on every qubit.
    prog.rx(2 * beta, range(len(V)))
    prog.swap(input_qubit[2],input_qubit[0]) # number=6
    prog.swap(input_qubit[2],input_qubit[0]) # number=7
    # circuit end
    return prog
if __name__ == '__main__':
    # Build a 4-node weighted graph and grid-search (gamma, beta) that
    # maximise the analytic QAOA objective F1 before building the circuit.
    n = 4
    V = np.arange(0, n, 1)
    E = [(0, 1, 1.0), (0, 2, 1.0), (1, 2, 1.0), (3, 2, 1.0), (3, 1, 1.0)]
    G = nx.Graph()
    G.add_nodes_from(V)
    G.add_weighted_edges_from(E)
    step_size = 0.1
    a_gamma = np.arange(0, np.pi, step_size)
    a_beta = np.arange(0, np.pi, step_size)
    a_gamma, a_beta = np.meshgrid(a_gamma, a_beta)
    F1 = 3 - (np.sin(2 * a_beta) ** 2 * np.sin(2 * a_gamma) ** 2 - 0.5 * np.sin(4 * a_beta) * np.sin(4 * a_gamma)) * (
        1 + np.cos(4 * a_gamma) ** 2)
    # Pick the first grid point achieving the maximum of F1.
    result = np.where(F1 == np.amax(F1))
    a = list(zip(result[0], result[1]))[0]
    gamma = a[0] * step_size
    beta = a[1] * step_size
    prog = make_circuit(4)
    sample_shot =5200
    writefile = open("../data/startQiskit_QC17.csv", "w")
    # prog.draw('mpl', filename=(kernel + '.png'))
    # Requires stored IBM Quantum credentials and network access.
    IBMQ.load_account()
    provider = IBMQ.get_provider(hub='ibm-q')
    provider.backends()
    backend = provider.get_backend("ibmq_5_yorktown")
    # Transpile against the fake Yorktown topology, then run on hardware.
    circuit1 = transpile(prog, FakeYorktown())
    circuit1.measure_all()
    prog = circuit1
    info = execute(prog,backend=backend, shots=sample_shot).result().get_counts()
    print(info, file=writefile)
    print("results end", file=writefile)
    print(circuit1.depth(), file=writefile)
    print(circuit1, file=writefile)
    writefile.close()
|
[
"wangjiyuan123@yeah.net"
] |
wangjiyuan123@yeah.net
|
2da67d1fbc54e2a109b9ff091f701097f7d6f93f
|
07e53b9e84adb5b747b11d8f73c1fe841419cb01
|
/REs/2: Simple data/remainder.py
|
f02dc51b7c637c1a8e9bd1ec520b668c32babdc3
|
[] |
no_license
|
Ralfe15/FPRO-2020
|
da9702235a29cf564826d9174bdc15f741cde44b
|
bb1e33a46eb927820665e00eb38f70a9d0fdf343
|
refs/heads/main
| 2023-01-13T08:47:20.674096
| 2020-11-17T13:44:14
| 2020-11-17T13:44:14
| 305,211,905
| 0
| 0
| null | 2020-11-17T13:44:15
| 2020-10-18T22:56:23
|
Python
|
UTF-8
|
Python
| false
| false
| 246
|
py
|
import datetime
hour = datetime.datetime.now().hour + 8
minute = datetime.datetime.now().minute + 30
if minute >= 60:
hour += 1
minute -= 60
if hour > 24:
hour -= 24
print("{}:{}".format(str(hour).zfill(2), str(minute).zfill(2)))
|
[
"noreply@github.com"
] |
Ralfe15.noreply@github.com
|
ed4c00181cc65c62ac8f54e882e4ed8303074f32
|
2edfc09fcc6951554f50a42ee2de1ae9e738d0b0
|
/test_loader_pil_crop.py
|
ee29597f689b1cf893652193fd06aaa0a073bae5
|
[
"MIT"
] |
permissive
|
jorisgu/Mask_RCNN
|
27c2bc9804fbde2329d3cff403ecf23f638c2b2f
|
37ee6af055ea373c0db56c9d647bf92480c3e6b4
|
refs/heads/master
| 2021-09-10T06:23:00.181055
| 2018-03-21T13:42:02
| 2018-03-21T13:42:02
| 116,161,195
| 0
| 0
| null | 2018-01-03T16:58:43
| 2018-01-03T16:58:43
| null |
UTF-8
|
Python
| false
| false
| 2,304
|
py
|
from memory_profiler import profile
from PIL import Image, ImageFile
Image.MAX_IMAGE_PIXELS = 1e10
ImageFile.LOAD_TRUNCATED_IMAGES = True
import os
def change_tile(tile, new_width, new_height, memory_offset):
    """Rewrite a single-entry PIL tile descriptor list.

    The returned descriptor keeps the decoder name and decoder args but
    replaces the extent with (0, 0, new_width, new_height) and shifts the
    data offset forward by *memory_offset* bytes.
    """
    decoder, _box, offset, args = tile[0]
    return [(decoder, (0, 0, new_width, new_height), offset + memory_offset, args)]
def read_line_portion(img_path,x,y,w,h,i):
    """Open *img_path* lazily and patch its tile descriptor so loading it
    decodes exactly one scanline: row x+i of the source, starting at
    column y, *w* pixels wide.

    NOTE(review): the byte offset assumes an uncompressed 3-byte-per-pixel
    raw layout (e.g. PPM); any header offset is carried over from the
    original tile entry by change_tile.
    """
    img_pil = Image.open(img_path)
    W = img_pil.size[0]
    # Pretend the image is a single w x 1 strip...
    img_pil.size=(w,1)
    # ...whose pixel data starts at row x+i, column y of the real image.
    memory_offset = (x+i)*3*W+3*y
    img_pil.tile = change_tile(img_pil.tile,w,1,memory_offset)
    #print(img_pil.tile)
    #print(img_pil.size)
    return img_pil
def read_from_memory(img_path,x,y,w,h):
    """Assemble a w x h RGB crop of *img_path* at (x, y) by decoding one
    scanline at a time, keeping peak decode memory at a single row."""
    result = Image.new('RGB',(w,h))
    for i in range(h):
        a = read_line_portion(img_path, x,y,w,h,i)
        result.paste(a,(0,i))
    return result
def show_thumbnail(img_pil, max_size_thumbnail = 200):
    """Return a copy of *img_pil* scaled so its longest side is at most
    *max_size_thumbnail* pixels, preserving aspect ratio.

    Bug fixes vs. the original: it referenced an undefined global `result`
    when computing the new size (instead of this function's own image) and
    used `np` without numpy being imported in this file; both are replaced
    with plain arithmetic on the image's own size.
    """
    img_pil_thumbnail = img_pil.copy()
    size = img_pil.size
    max_size_img = float(max(size))
    scale = max_size_thumbnail / max_size_img
    new_size = tuple(int(dim * scale) for dim in size)
    img_pil_thumbnail.thumbnail(new_size, Image.ANTIALIAS)
    return img_pil_thumbnail
@profile
def my_func():
    """memory_profiler benchmark: crop a 15000x30000 region from a large
    PPM by row-wise tile patching (see read_line_portion)."""
    #(960, 230)
    x = 100
    y = 800
    h = 30000
    w = 15000
    result = Image.new('RGB',(w,h))
    img_path = '/dds/workspace/data_ja/cat.ppm'
    i = Image.open(img_path)
    print(i.size)
    print(i.tile)
    # NOTE(review): the loop variable shadows the Image object `i` above.
    for i in range(h):
        a = read_line_portion(img_path, x,y,w,h,i)
        result.paste(a,(0,i))
def old():
    # NOTE(review): dead experimental code -- `image_path` is undefined in
    # this scope, so this function raises NameError if ever called.
    i = Image.open(image_path)
    print(i.size)
    print(i.tile)
    #os.system("convert "+image_path+" "+image_path[-3:]+"ppm")
    #return
    ##save
    #rgb_im = i.convert('RGB')
    #i.save(image_path+'.3.png',optimize=False,compress_level=0)
    ## crop
    #left = 100
    #top = 100
    #width = 200
    #height = 100
    #box = (left, top, left+width, top+height)
    #i.crop(box)
    #return
    #i = Image.open(image_path)
    w=1000
    h=1000
    # Force PIL to decode a 1000x1000 raw window starting 2000 bytes past
    # the original tile offset (experiment with manual tile patching).
    i.size = (w, h)
    #i.tile = [('jpeg', (0, 0, w, h), 0, ('RGB', ''))]
    i.tile = [('raw', (0, 0, w, h), 19+2000, ('RGB', 0, 1))]
    print("Changing tile")
    print(i.size)
    print(i.tile)
    i.load()
    print(i.getextrema())
if __name__ == '__main__':
    # Run the memory-profiled crop benchmark.
    my_func()
|
[
"joguerry+git@gmail.com"
] |
joguerry+git@gmail.com
|
f2ba8f8a4440e3126dc06430b6e2c178a54ae706
|
a758ca49d88e5e9cb11a788584f44ab591a17a7b
|
/tests/tests.py
|
dcabce1ecf0155d2071d7986e9125983c3af4208
|
[] |
no_license
|
mudolucas/cs162-continuous-integration
|
8878780f05546389d2a7219c5410627164214b00
|
10c45ecb4d5b96bd5783027e774f4ff0dc9cb42b
|
refs/heads/master
| 2021-06-12T16:49:11.827139
| 2020-04-09T14:12:05
| 2020-04-09T14:12:05
| 254,361,660
| 0
| 0
| null | 2020-04-09T12:11:46
| 2020-04-09T12:11:46
| null |
UTF-8
|
Python
| false
| false
| 1,642
|
py
|
import os
import requests
import unittest
from sqlalchemy import create_engine
from sqlalchemy.orm import sessionmaker, mapper
import psycopg2
# Connection string for the CI Postgres instance backing the app under test.
DB_URI = 'postgresql://cs162_user:cs162_password@127.0.0.1/cs162?port=5432'
engine = create_engine(DB_URI)
class TestCases(unittest.TestCase):
    """Integration tests for the /add endpoint.

    Require the Flask app on 127.0.0.1:5000 and the Postgres DB above to
    be running; each test starts and ends with an empty Expression table.
    """
    def setUp(self):
        # Start from a clean Expression table.
        with engine.connect() as connection:
            connection.execute("DELETE FROM Expression")
    def tearDown(self):
        # Leave no rows behind for the next test.
        with engine.connect() as connection:
            connection.execute("DELETE FROM Expression")
    def test_correct_expression(self):
        # A valid expression is evaluated and echoed in the response body.
        r = requests.post('http://127.0.0.1:5000/add', data={'expression': '7+22'})
        self.assertEqual(r.status_code, 200)
        self.assertIn('29',r.text)
    def test_expression_db(self):
        # A valid expression is persisted exactly once.
        r = requests.post('http://127.0.0.1:5000/add', data={'expression': '10+30'})
        with engine.connect() as connection:
            query = connection.execute("SELECT COUNT('*') FROM Expression WHERE text='10+30'")
            rows = query.fetchall()
        self.assertEqual(len(rows),1)
        self.assertEqual(rows[0][0],1)
    def test_invalid_expression(self):
        # A malformed expression errors out and stores nothing.
        r = requests.post('http://127.0.0.1:5000/add', data={'expression': '20+'})
        # Check for internal server error
        self.assertEqual(r.status_code, 500)
        with engine.connect() as connection:
            query = connection.execute("SELECT COUNT('*') FROM Expression LIMIT 1")
            rows = query.fetchall()
        self.assertEqual(rows[0][0],0)
if __name__ == '__main__':
    unittest.main()
|
[
"noreply@github.com"
] |
mudolucas.noreply@github.com
|
3f38cdf7533aaceb69440ab441ead317499117f3
|
4f0ecde0978d1b65ae229c855c9488fb067f4ea9
|
/baekjoon/baekjoon_1110/tests/test_baekjoon_1110.py
|
13c1f1be2fcfcf5b79bc9eb99f1fa3ca91c8ca91
|
[] |
no_license
|
LazyRichard/coding-test
|
6d84a8a5287d987c23537162b4276a71d06de216
|
c2fa6c6b307db6e0b3049a12e585c3cb7d1b8e24
|
refs/heads/master
| 2021-05-19T23:41:02.648174
| 2021-04-29T07:51:35
| 2021-04-29T07:51:35
| 252,088,390
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 445
|
py
|
from typing import Iterable, Tuple
from unittest import mock
import pytest
from .. import solution
@pytest.mark.parametrize("test_input,expected", [
["26", "4"],
["55", "3"],
["1", "60"]
])
def test_solution(capsys, test_input: str, expected: str) -> None:
with mock.patch("builtins.input", lambda: test_input + "\n"):
solution()
captured = capsys.readouterr()
assert captured.out == expected + "\n"
|
[
"sof.midnight@live.co.kr"
] |
sof.midnight@live.co.kr
|
2f4b12d6826edd41078d1e1b742111a0bb469310
|
250cd105123453319f4d1be350cd243696203a09
|
/.Rproj.user/20A420EE/sources/s-55D67619/B0B54A9E-contents
|
b6d0f15b1974ceebeb8a75ad4efb1dc56ea51dec
|
[] |
no_license
|
blee2020/Website
|
d131bd2eb5cf22f6d515cd7aad151eae9b52dacb
|
026a884946de3884745f15b83f313780d379577e
|
refs/heads/master
| 2022-06-29T23:05:47.395267
| 2020-05-09T23:52:40
| 2020-05-09T23:52:40
| 262,578,053
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,035
|
#!/usr/bin/env python
# coding: utf-8
# In[177]:
import pandas as pd
import seaborn as sns
pokemon=pd.read_csv("pokemon.csv",index_col=0)
poke=pokemon
poke.head()
# In[178]:
poke.iloc[:,[0,4,5,6]].head()
# In[179]:
poke1=poke[["Name","HP","Attack","Defense"]]
poke1.head()
# In[180]:
poke1["HP"].mean()
# In[181]:
poke1[poke1["HP"] > 69.25875].count()
# In[182]:
poke2 = poke[(poke["HP"] > 69.25875)]
poke2['Total']=poke2['Attack']+poke2['Defense']
poke2.head()
# In[183]:
import matplotlib.pyplot as plt
fig, ax = plt.subplots()
ax.scatter(poke['Attack'], poke['Defense'])
ax.set_title('Pokemon Dataset')
ax.set_xlabel('Attack')
ax.set_ylabel('Defense')
# In[184]:
poke2.plot.hist(subplots=True, layout=(3,3), figsize=(10, 10), bins=30)
# In[185]:
import numpy as np
corr = poke.corr()
im = ax.imshow(corr.values)
corr
# In[186]:
import seaborn as sns
sns.heatmap(poke.corr(), annot=True)
# In[187]:
g = sns.FacetGrid(poke, col='Legendary')
g = g.map(sns.kdeplot, 'Speed')
# In[ ]:
|
[
"2bleeboba@gmail.com"
] |
2bleeboba@gmail.com
|
|
99c2dfbaae3afa4f3edbdaa92c3fbf1766abf06a
|
9aba704758c170deacecf847e52ad20a44aa430f
|
/hotelwebsite/forms.py
|
8e0f22fb03efab1dbb4fff3b1042eb0f03d57ab6
|
[] |
no_license
|
shubhamrangate/hotelwebsite
|
15e44b72dbf3cdaa882974baf74d025ac3a8f312
|
7cf3d4d86b6b98724801f6c78051e3e7fc7a1b9a
|
refs/heads/main
| 2023-06-03T10:12:26.379642
| 2021-06-25T14:54:21
| 2021-06-25T14:54:21
| 352,679,836
| 0
| 0
| null | 2021-06-25T14:54:22
| 2021-03-29T14:50:11
| null |
UTF-8
|
Python
| false
| false
| 3,736
|
py
|
from flask_wtf import FlaskForm
from flask_login import current_user
from wtforms import StringField,PasswordField,SubmitField,BooleanField,RadioField,TextAreaField
from wtforms.fields.html5 import DateField
from wtforms_components import TimeField
from wtforms.validators import DataRequired,Length,Email,EqualTo,ValidationError,Optional
from hotelwebsite.models import User
import phonenumbers
class RegistrationForm(FlaskForm):
    """Sign-up form; rejects usernames/emails already present in the DB."""
    username=StringField('Username',validators=[DataRequired(),Length(min=2,max=20)])
    email=StringField('Email',validators=[DataRequired(),Email()])
    password=PasswordField('Password',validators=[DataRequired()])
    Confirm_password=PasswordField('Confirm Password',validators=[DataRequired(),EqualTo('password')])
    submit=SubmitField('Sign Up')
    def validate_username(self,username):
        # Inline WTForms validator: enforce username uniqueness.
        user=User.query.filter_by(username=username.data).first()
        if user:
            raise ValidationError('That username is taken. please choose a different.')
    def validate_email(self,email):
        # Inline WTForms validator: enforce email uniqueness.
        user=User.query.filter_by(email=email.data).first()
        if user:
            raise ValidationError('That email is taken. please choose a different.')
class LoginForm(FlaskForm):
    """Login form with an optional remember-me flag."""
    email=StringField('Email',validators=[DataRequired(),Email()])
    password=PasswordField('Password',validators=[DataRequired()])
    remember=BooleanField('Remember Me')
    submit=SubmitField('Log In')
class ReservetableForm(FlaskForm):
    """Table reservation form: guest count, section, date and time."""
    email=StringField('Email',validators=[DataRequired(),Email()])
    guest=RadioField('Number Of Guest', choices = [('one','1'),('two','2'),('three','3'),('four','4'),('five','5'),('six','6')])
    section=RadioField('Section',choices=[('nonsmoking','Non-smoking'),('smoking','Smoking')])
    date = DateField('Date', format='%Y-%m-%d')
    time = TimeField('Time')
    submit=SubmitField('Reserve')
class UpdateAccountForm(FlaskForm):
    """Profile update form; uniqueness is only checked when the value
    actually changes from the logged-in user's current one."""
    username = StringField('Username',
                           validators=[DataRequired(), Length(min=2, max=20)])
    email = StringField('Email',
                        validators=[DataRequired(), Email()])
    submit = SubmitField('Update')
    def validate_username(self, username):
        if username.data != current_user.username:
            user = User.query.filter_by(username=username.data).first()
            if user:
                raise ValidationError('That username is taken. Please choose a different one.')
    def validate_email(self, email):
        if email.data != current_user.email:
            user = User.query.filter_by(email=email.data).first()
            if user:
                raise ValidationError('That email is taken. Please choose a different one.')
class FeedBackForm(FlaskForm):
    """Visitor feedback form with international phone-number validation."""
    username=StringField('User',validators=[DataRequired(),Length(min=2, max=20)])
    # Expects the number in international format, e.g. "+1 650 555 0100".
    mobno=StringField("(+country code)Mob-No",validators=[DataRequired()])
    email=StringField('Email',validators=[DataRequired(),Email()])
    comment=TextAreaField('Your Feedback',validators=[Optional(),Length(min=0,max=100)])
    submit=SubmitField('Send Feedback')
    def validate_mobno(self, mobno):
        """Inline WTForms validator for the `mobno` field.

        Bug fixes: the method was named `validate_phone`, so WTForms never
        invoked it (inline validators must be named `validate_<fieldname>`),
        and it referenced an undefined global `mobno` instead of the field
        argument it receives.
        """
        p = phonenumbers.parse(mobno.data)
        if not phonenumbers.is_valid_number(p):
            raise ValidationError('Invalid Phone number')
class MenuForm(FlaskForm):
    """Yes/no availability toggles for each menu item; submitted as radio pairs."""
    utthapizza=RadioField('Do you Serve utthapizza',choices=[('menu1','yes'),('nomenu1','no')])
    dosa=RadioField('Do you Serve Dosa',choices=[('menu2','yes'),('nomenu2','no')])
    idali=RadioField('Do you Serve idali',choices=[('menu3','yes'),('nomenu3','no')])
    cofee=RadioField('Do you Serve cofee',choices=[('menu4','yes'),('nomenu4','no')])
    uditvada=RadioField('Do you Serve uditvada',choices=[('menu5','yes'),('nomenu5','no')])
    submit=SubmitField('Place the Orders')
|
[
"noreply@github.com"
] |
shubhamrangate.noreply@github.com
|
9430b4e173934bd6c375d40453af5db922edbb54
|
5b85a969758b89489f63d7f1d08857dfd2d58e9c
|
/example/test/L3_Typing_af.py
|
78e0ed98b0f87a57cad1009eab572c6ccecf5c03
|
[
"MIT",
"Python-2.0"
] |
permissive
|
Michael8968/skulpt
|
0e8507418579aea554bddca3686316adf867058a
|
15956a60398fac92ee1dab25bf661ffc003b2eaf
|
refs/heads/master
| 2023-03-17T18:01:41.624659
| 2021-03-01T06:48:02
| 2021-03-01T06:48:02
| 346,204,039
| 2
| 0
|
NOASSERTION
| 2021-03-10T02:34:08
| 2021-03-10T02:13:53
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 4,146
|
py
|
import sys, random, time, pygame
from pygame.locals import *
import L3_Setting_af
import time
def print_text(font, x, y, text, color=(0,0,0)):
    """Render *text* with *font* in *color* and blit it onto the module-level
    screen surface at position (x, y)."""
    rendered = font.render(text, True, color)
    screen.blit(rendered, (x, y))
pygame.init()
screen = pygame.display.set_mode((1000,600))
pygame.display.set_caption("typing game")
font1 = pygame.font.Font(None, 100) # countdown timer
font2 = pygame.font.Font(None, 180) # the big target letter
font3 = pygame.font.Font(None, 90) # score display
font4 = pygame.font.Font('simhei.ttf', 22) # game rules (CJK-capable font)
white = 255,255,255
key_flag = False  # appears unused in this script
correct_answer = 97 #"a"
seconds = 31
score = 0
clock_start = 0
game_over = True
# Background and the three "doctor" reaction sprites.
back = pygame.image.load('lesson3/back.png').convert_alpha()
back = pygame.transform.smoothscale(back, (1000,600))
docNor = pygame.image.load('lesson3/normal.png').convert_alpha()
docNor = pygame.transform.smoothscale(docNor, (180,240))
docSmill = pygame.image.load('lesson3/smill.png').convert_alpha()
docSmill = pygame.transform.smoothscale(docSmill, (180,240))
docMad = pygame.image.load('lesson3/wrong.png').convert_alpha()
docMad = pygame.transform.smoothscale(docMad, (180,240))
setx, sety = L3_Setting_af.setpos()
docFlag = 0 # 0 = normal, 1 = smile, 2 = mad (wrong key)
count = 0
# Main game loop: poll input, update countdown/score state, redraw the frame.
# Fix: time.clock() was removed in Python 3.8 and, on Unix, measured CPU time
# (so the countdown barely advanced during time.sleep); time.perf_counter()
# gives monotonic wall-clock elapsed time.
while True:
    time.sleep(0.01)
    keys = pygame.key.get_pressed()
    for event in pygame.event.get():
        if event.type == QUIT:
            pygame.quit()
            sys.exit()
        elif event.type == KEYDOWN and event.key != correct_answer and not game_over:
            # Wrong key pressed: show the "mad" sprite.
            print(correct_answer)
            docFlag = 2
    if keys[K_ESCAPE]:
        pygame.quit()
        sys.exit()
    if keys[K_RETURN]:
        if game_over:
            # Enter starts a new round: reset score and 30-second timer.
            game_over = False
            score = 0
            seconds = 31
            r,g,b = L3_Setting_af.setcolor()
            clock_start = time.perf_counter()
    current = time.perf_counter() - clock_start
    speed = score * 2
    if seconds-current < 0:
        game_over = True
    elif current <= 30:
        if keys[correct_answer] and not game_over:
            # Correct key: pick a new random lowercase letter and position.
            correct_answer = random.randint(97,122)
            score += 1
            setx,sety = L3_Setting_af.setpos()
            docFlag = 1
            r,g,b = L3_Setting_af.setcolor()
    screen.blit(back, (0,0))
    # Switch among the three reaction images.
    if docFlag == 0:
        screen.blit(docNor, (670,180))
        r,g,b = L3_Setting_af.setcolor()
        color = r,g,b
    elif docFlag == 1:
        screen.blit(docSmill, (670,180))
        r,g,b = L3_Setting_af.setcolor()
        color = r,g,b
    elif docFlag == 2: # the typed letter was wrong
        screen.blit(docMad, (670,180))
        # Class exercise 3: draw the letter in red on a mistake.
        r = 200
        g = 0
        b = 0
        color = r,g,b
    if docFlag != 0: # keep the smile/mad image up for ~90 frames
        count += 1
        if count == 90:
            count = 0
            docFlag = 0
    # Draw the countdown.
    if game_over:
        # print_text(font1, 0, 160, "Press Enter to start...")
        print_text(font1, 65, 53, '30')
    if not game_over:
        # Slowly drift the letter colour, wrapping when it gets too light.
        if r <= 225 and g <= 225 and b <= 225:
            r += random.randint(15,20)/100
            g += random.randint(15,20)/100
            b += random.randint(15,20)/100
        else:
            r = 205
            g = 205
            b = 155
        color = r,g,b
        if int(seconds-current) >= 10:
            print_text(font1, 65, 53, str(int(seconds-current)))
        else:
            # Last 10 seconds: pad to keep alignment and turn red.
            print_text(font1, 65, 53, ' ' + str(int(seconds-current)),(255,0,0))
        # print_text(font1, 0, 80, "Time: " + str(int(seconds-current)))
    # Draw the score (two points per correct letter).
    if speed < 10:
        speedStr = ' ' + str(speed)
    else:
        speedStr = str(speed)
    print_text(font3, 760, 110, speedStr)
    # Draw the big target letter (uppercase of the expected key).
    print_text(font2, setx, sety, chr(correct_answer-32), color)
    # Draw the game instructions.
    print_text(font4, 220, 450, ' 游戏规则:按下Enter键开始游戏')
    print_text(font4, 220, 480, ' 在键盘上敲下你看到的字母')
    print_text(font4, 220, 510, ' 看你能够得到多少分')
    pygame.display.update()
|
[
"wincax@gmail.com"
] |
wincax@gmail.com
|
5cadbda137257f84b462fd6cadff8eb5282befcc
|
9f25d25e2e381ed3f71fd19abd0c12f7cc554a2c
|
/comp_prot_pred_and_des/hw5/mln_hw5_q3_folding.py
|
ae0f617f74b080a276444a7f1632c924a3753172
|
[] |
no_license
|
mlnance/JHU_Class_Material
|
e48e90581b568872dbea5fcaa5f5854f35501aa8
|
7614ec6ab6a838a62be6b0f83c539a9672c90917
|
refs/heads/master
| 2020-05-30T15:36:59.806908
| 2019-02-13T17:09:33
| 2019-02-13T17:09:33
| 68,547,024
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,257
|
py
|
#!/usr/bin/python
__author__="morganlnance"
"""
Morgan Nance Homework 5 Question 3 (Workshop 5 Exercise 2)
*requires 9mer and 3mer fragment files*
*file paths were hard-coded into script*
Program an ab initio folding algorithm
This program starts folding using 9mer fragments
then moves into using 3mer fragments
then alternates between Small and Shear moves
After each move, packing and minimization occurs
and the Metropolis criterion is applied
This program can send visuals to PyMOL
Usage: python <script>
Example: python mln_hw5_q3_folding.py
"""
###########
# IMPORTS #
###########
from pyrosetta import init, pose_from_sequence, get_fa_scorefxn, \
PyMOLMover, MonteCarlo, MoveMap, standard_packer_task, \
SequenceMover, TrialMover, RepeatMover
from pyrosetta.teaching import MinMover, PackRotamersMover
from pyrosetta import rosetta
from rosetta.core.fragment import ConstantLengthFragSet
from rosetta.protocols.simple_moves import ClassicFragmentMover, \
SmallMover, ShearMover
from rosetta.numeric.random import random_range
from time import time
##################
# INITIALIZATION #
##################
init("-mute core.pack")
# start timing
start = time()
# RecA sequence from last 60 AA of 2reb
pose = pose_from_sequence("YKGEKIGQGKANATAWLKDNPETAKEIEKKVRELLLSNPNSTPDFSVDDSEGVAETNEDF")
# full-atom score function
sf = get_fa_scorefxn()
# pymol mover (keep_history keeps each frame so folding can be replayed)
pmm = PyMOLMover()
pmm.keep_history(True)
pmm.apply(pose)
# MonteCarlo object for Metropolis criterion checking
kT = 1.0
mc = MonteCarlo(pose, sf, kT)
# backbone MoveMap for Small and Shear moves and fragments
mm = MoveMap()
mm.set_bb(True)
# PackRotamersMover for packing (repack existing rotamers only, no design)
pack_task = standard_packer_task(pose)
pack_task.restrict_to_repacking()
pack_task.or_include_current(True)
pack_mover = PackRotamersMover(sf, pack_task)
# MinMover for minimization (backbone and side-chain torsions free)
min_mm = MoveMap()
min_mm.set_bb(True)
min_mm.set_chi(True)
# not setting all the other fancy options
# just focused on the MoveMap and ScoreFunction
min_mover = MinMover()
min_mover.movemap(min_mm)
min_mover.score_function(sf)
###########################
# 9MER FRAGMENT INSERTION #
###########################
# Coarse search: large backbone changes via 9-residue fragment swaps.
# NOTE(review): print statements are Python 2 syntax (legacy PyRosetta).
print "\nFolding with 9mers...\n"
# for counting accepted moves
frag_9mer_n_accepts = 0
# set up the fragment object with the 9mers
fragset_9mer = ConstantLengthFragSet(9)
fragset_9mer.read_fragment_file("RecA_9mer_frags.frag9")
mover_9mer = ClassicFragmentMover(fragset_9mer, mm)
for ii in range(3 * pose.size()):
    # apply 9mer fragment
    mover_9mer.apply(pose)
    # pack
    pack_mover.apply(pose)
    # minimize
    min_mover.apply(pose)
    # accept or reject with Metropolis criterion
    if mc.boltzmann(pose):
        frag_9mer_n_accepts += 1
        pmm.apply(pose)
###########################
# 3MER FRAGMENT INSERTION #
###########################
# Refinement: smaller backbone perturbations via 3-residue fragments.
print "\nFolding with 3mers...\n"
# for counting accepted moves
frag_3mer_n_accepts = 0
# set up the fragment object with the 3mers
fragset_3mer = ConstantLengthFragSet(3)
fragset_3mer.read_fragment_file("RecA_3mer_frags.frag3")
mover_3mer = ClassicFragmentMover(fragset_3mer, mm)
for ii in range(2 * pose.size()):
    # apply 3mer fragment
    mover_3mer.apply(pose)
    # pack
    pack_mover.apply(pose)
    # minimize
    min_mover.apply(pose)
    # accept or reject with Metropolis criterion
    if mc.boltzmann(pose):
        frag_3mer_n_accepts += 1
        pmm.apply(pose)
#########################
# SMALL AND SHEAR MOVES #
#########################
# Final polish: alternate random Small/Shear backbone torsion moves.
print "\nFolding with Small and Shear moves...\n"
n_small_and_shear_accepts = 0
# setup Small and Shear movers
small_mover = SmallMover(mm, kT, nmoves_in=5)
shear_mover = ShearMover(mm, kT, nmoves_in=5)
# either apply a small or shear move
for ii in range(2 * pose.size()):
    # True == 1 == SmallMover
    # False == 0 == ShearMover
    if random_range(0,1):
        small_mover.apply(pose)
    else:
        shear_mover.apply(pose)
    # pack
    pack_mover.apply(pose)
    # min
    min_mover.apply(pose)
    if mc.boltzmann(pose):
        n_small_and_shear_accepts += 1
        pmm.apply(pose)
# apply the lowest score pose seen during the whole trajectory
pmm.apply(mc.lowest_score_pose())
# end timing
end = time()
print "\n\nFolding simulation took %s seconds" %round((end - start),3)
|
[
"morganlnance@gmail.com"
] |
morganlnance@gmail.com
|
6bd44c5dd6b2fac78721a72bd92b81320b4c5e55
|
efa5458e5ed86ef9f543eae234dd0bbc985cc8a2
|
/conftest.py
|
e8cff587c7e43f20d72b7503937e65d464cd4e2d
|
[] |
no_license
|
uktrade/trade-tariff-reference
|
03b0a1086914c7c5a9d95ff3ae8646a16db19023
|
0f07825625822ba2282ea5efacd0e98d9fa395b0
|
refs/heads/develop
| 2021-07-09T02:03:49.203434
| 2020-09-21T16:59:19
| 2020-09-21T16:59:19
| 194,101,749
| 1
| 0
| null | 2020-09-21T16:59:21
| 2019-06-27T13:35:21
|
Python
|
UTF-8
|
Python
| false
| false
| 854
|
py
|
import pytest
from django.test import TestCase, TransactionTestCase
from django.contrib.auth import get_user_model
def pytest_sessionstart(session):
    """Pytest hook run once at session start: allow Django test cases to use
    both the ``default`` and ``tariff`` databases."""
    enabled = {'default', 'tariff'}
    TestCase.databases = enabled
    TransactionTestCase.databases = enabled
@pytest.fixture
def authenticated_client(client, user):
    """Django test client already logged in as the regular ``user`` fixture."""
    client.force_login(user)
    yield client
@pytest.fixture
def user():
    """A plain (non-staff, non-superuser) user for permission tests."""
    yield get_user_model().objects.create(
        email='test@test.com',
        is_staff=False,
        is_superuser=False
    )
@pytest.fixture
def admin_user():
    """A staff superuser for admin-only views."""
    yield get_user_model().objects.create(
        email='admin@test.com',
        is_staff=True,
        is_superuser=True
    )
@pytest.fixture
def authenticated_admin_client(client, admin_user):
    """Django test client already logged in as the ``admin_user`` fixture."""
    client.force_login(admin_user)
    yield client
|
[
"marcus.patino-pan@digital.trade.gov.uk"
] |
marcus.patino-pan@digital.trade.gov.uk
|
e493dfd8f3c6ba3792fc388fc36f8791036a3d77
|
d4a09fe1225c1a798abad38e1174201f45aaff89
|
/client/test.py
|
7790ea141455307238bfc5fbee5bcf033dcf1a7a
|
[] |
no_license
|
PkBadger/ApiTelepresenceRobot
|
bb7a94bb7a03cc7c8879eff1ff84808ac4393dcd
|
3298149f8d8237b540fbc53be2cb8486a2341c51
|
refs/heads/master
| 2021-01-12T17:18:22.759253
| 2016-11-24T20:14:01
| 2016-11-24T20:14:01
| 69,481,711
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 368
|
py
|
import os
import time
# Start the servoblaster daemon in PCM mode (requires root); it must be
# running before /dev/servoblaster accepts the echo commands below.
os.system('sudo ./servod --pcm')
def writeServo(servo, percent):
    """Build and print the servoblaster command for *servo* at *percent*.

    Fix: the original used the Python-2-only ``print string`` statement,
    which is a SyntaxError under Python 3; ``print(string)`` behaves
    identically on both. NOTE(review): this only prints the command and
    never executes it — the main loop calls os.system directly; confirm
    whether execution was intended here.
    """
    string = "echo " + str(servo) + "=" + str(percent) + "% > /dev/servoblaster"
    print(string)
# Exercise loop: swing servo 3 between its extremes forever
# (0% for 1.5 s, then 100% for 2 s).
while True:
    os.system("echo 3=0% > /dev/servoblaster")
    time.sleep(1.5)
    os.system("echo 3=100% > /dev/servoblaster")
    #writeServo(2,100)
    time.sleep(2)
    #writeServo(1,50)
|
[
"aleexpeerez@gmail.com"
] |
aleexpeerez@gmail.com
|
e6f7c3b8632a505f9f826933edf28a65706af06d
|
7351a2f38a346306d8e2a14b044183369823e26f
|
/project_euler/problem81.py
|
2125cf522316c818f319b4ff72e94f3af6250cd6
|
[] |
no_license
|
jeffanberg/Coding_Problems
|
a4ac693c8bb9901ff087fec1d5d6f0c49abd5f03
|
c061cd71c9de5bf3bca2749a98a425a14e3b1e4e
|
refs/heads/master
| 2023-09-05T15:25:04.358948
| 2021-10-08T02:12:49
| 2021-10-08T02:12:49
| 279,954,749
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,941
|
py
|
''' Project Euler Problem 81
Find the minimal path sum from the top left to the bottom right by only
moving right and down in matrix.txt (right click and "Save Link/Target As..."),
a 31K text file containing an 80 by 80 matrix.
'''
import os
import heapq
__location__ = os.path.realpath(
os.path.join(os.getcwd(), os.path.dirname(__file__)))
with open(os.path.join(__location__, 'p081_matrix.txt')) as matrix_file:
matrix = [list(map(int, n.split(','))) for n in
matrix_file.read().splitlines()]
''' Using A Star Pathfinding, with guide from-
https://www.redblobgames.com/pathfinding/a-star/implementation.html '''
class PriorityQueue:
    """Minimal min-priority queue backed by ``heapq``."""

    def __init__(self):
        self.elements = []

    def empty(self) -> bool:
        """Return True when no items remain."""
        return len(self.elements) == 0

    def put(self, item, priority: float):
        """Insert *item* with the given *priority* (lower pops first)."""
        heapq.heappush(self.elements, (priority, item))

    def get(self):
        """Remove and return the lowest-priority item."""
        _, item = heapq.heappop(self.elements)
        return item
class Location:
    """An (x, y) grid coordinate; hashable so it can key dicts and sets."""

    def __init__(self, x, y):
        self.x = x
        self.y = y

    def __repr__(self) -> str:
        return f'Location: x={self.x} y={self.y}'

    def __hash__(self) -> int:
        return hash((self.x, self.y))

    def __eq__(self, other) -> bool:
        return self.x == other.x and self.y == other.y

    def __lt__(self, other) -> bool:
        # Dummy tie-breaker so (priority, Location) tuples stay comparable in
        # heapq when priorities collide. Fix: the original returned the int 0
        # from a comparison dunder; a bool is the correct type.
        return False
class Matrix:
    """Wraps the module-level ``matrix`` grid and finds a minimal-sum path."""

    def __init__(self):
        # Uses the 80x80 grid parsed from p081_matrix.txt at import time.
        self.data = matrix

    @property
    def width(self):
        """Number of columns (0 for an empty grid)."""
        return len(self.data[0]) if self.data else 0

    @property
    def height(self):
        """Number of rows."""
        return len(self.data)

    def get(self, location):
        """Cell value at *location*."""
        return self.data[location.y][location.x]

    def neighbors(self, location):
        """Cells reachable in one move: one step right and one step down."""
        neighbors = []
        if location.x < self.width - 1:
            neighbors.append(Location(location.x + 1, location.y))
        if location.y < self.height - 1:
            neighbors.append(Location(location.x, location.y + 1))
        return neighbors

    def find_path(self, start, end):
        """Dijkstra search from *start* to *end*.

        Returns ``(path, cost)``: the list of Locations from start to end and
        the minimal path sum, which includes the value of the start cell.
        Fix: the original loop variable ``next`` shadowed the builtin.
        """
        frontier = PriorityQueue()
        frontier.put(start, self.get(start))
        came_from = dict()
        cost_so_far = dict()
        came_from[start] = None
        cost_so_far[start] = self.get(start)
        while not frontier.empty():
            current = frontier.get()
            if current == end:
                break
            for neighbor in self.neighbors(current):
                new_cost = cost_so_far[current] + self.get(neighbor)
                if neighbor not in cost_so_far or new_cost < cost_so_far[neighbor]:
                    cost_so_far[neighbor] = new_cost
                    frontier.put(neighbor, new_cost)
                    came_from[neighbor] = current
        # Walk the predecessor links back from the goal, then reverse.
        path = [end]
        while path[-1] != start:
            path.append(came_from[path[-1]])
        path.reverse()
        return path, cost_so_far[end]
# Problem 81 answer: minimal path sum from top-left (0,0) to bottom-right (79,79).
path, answer = Matrix.find_path(Matrix(), Location(0, 0), Location(79, 79))
print(answer)
|
[
"jeffanberg@gmail.com"
] |
jeffanberg@gmail.com
|
6f2de9811e01d521e3470c4e5753e7220e6f9636
|
4875c219eea4ad512043c5a1900d12ad47a933a3
|
/semana 5/dia 3/directorio/directorio/asgi.py
|
de1e8f0d35c23d102eb236ac3cc4cc47ca8b2944
|
[] |
no_license
|
AlexRodriguezVillavicencio/Backend
|
d72ade196d68a9c9916661e04a84962bc869bbf5
|
30441e5ce1a00455a3dc51a53cee6b34106b663f
|
refs/heads/master
| 2023-08-02T22:58:31.078506
| 2021-09-22T19:48:51
| 2021-09-22T19:48:51
| 387,956,937
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 397
|
py
|
"""
ASGI config for directorio project.
It exposes the ASGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/howto/deployment/asgi/
"""
import os

from django.core.asgi import get_asgi_application

# Point Django at this project's settings before building the application.
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'directorio.settings')

# Module-level ASGI callable that servers (uvicorn/daphne) import.
application = get_asgi_application()
|
[
"arodriguezv@uni.pe"
] |
arodriguezv@uni.pe
|
2e4ef35449d1d781331ea2cb7d33fe4a0b23b5f8
|
05e19865de763bc478e3d2cc1817210e5db50cca
|
/migrations/versions/4a8efb0d15bd_添加博客文章模型.py
|
3293e42ae1dcd527392809b40139d8bfa290da47
|
[] |
no_license
|
jqcc/flask-blog
|
2784b931559d209edacb44946a114715844f2f8f
|
284c16951d271ce25a9352196be1c12ff60eb45b
|
refs/heads/master
| 2021-04-12T10:30:48.091192
| 2018-08-22T14:14:45
| 2018-08-22T14:14:45
| 126,832,334
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,069
|
py
|
"""添加博客文章模型
Revision ID: 4a8efb0d15bd
Revises: 98ca98ff4741
Create Date: 2018-02-13 22:01:25.366726
"""
from alembic import op
import sqlalchemy as sa
# revision identifiers, used by Alembic.
revision = '4a8efb0d15bd'
down_revision = '98ca98ff4741'
branch_labels = None
depends_on = None
def upgrade():
    """Create the ``posts`` table (FK to users) and index its timestamp."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.create_table('posts',
    sa.Column('id', sa.Integer(), nullable=False),
    sa.Column('body', sa.Text(), nullable=True),
    sa.Column('timestamp', sa.DateTime(), nullable=True),
    sa.Column('author_id', sa.Integer(), nullable=True),
    sa.ForeignKeyConstraint(['author_id'], ['users.id'], ),
    sa.PrimaryKeyConstraint('id')
    )
    op.create_index(op.f('ix_posts_timestamp'), 'posts', ['timestamp'], unique=False)
    # ### end Alembic commands ###
def downgrade():
    """Reverse ``upgrade``: drop the timestamp index, then the table."""
    # ### commands auto generated by Alembic - please adjust! ###
    op.drop_index(op.f('ix_posts_timestamp'), table_name='posts')
    op.drop_table('posts')
    # ### end Alembic commands ###
|
[
"sunjq@localhost.localdomain"
] |
sunjq@localhost.localdomain
|
47e3ec143d74d297fc210829ba70483413fb7743
|
f6e919a455b80e93e21b62a77e8302f1f2bd4ba8
|
/resume/app.py
|
364f44ca91152e18cb3b0d91175d27edee820434
|
[] |
no_license
|
nanaobeng/Resume-Parser
|
7cf461d2820604e1b35d59e55437a6cc92c3daa2
|
0e6e58e2db9d537a45a9cdf0361302d3aa49461d
|
refs/heads/master
| 2022-12-19T21:56:22.358607
| 2020-10-03T12:17:42
| 2020-10-03T12:17:42
| 279,133,364
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,939
|
py
|
import os
from flask import Flask , flash, render_template, url_for, request, redirect
from werkzeug.utils import secure_filename
import PyPDF2
import io
from pdfminer.converter import TextConverter
from pdfminer.pdfinterp import PDFPageInterpreter
from pdfminer.pdfinterp import PDFResourceManager
from pdfminer.layout import LAParams
from pdfminer.pdfpage import PDFPage
import re
from flask_bootstrap import Bootstrap
from werkzeug.utils import secure_filename
UPLOAD_FOLDER = 'static/pdf'
ALLOWED_EXTENSIONS = {'txt', 'pdf', 'png', 'jpg', 'jpeg', 'gif'}
app = Flask(__name__)
Bootstrap(app)
app.config['UPLOAD_FOLDER'] = UPLOAD_FOLDER
@app.route('/', methods=['GET', 'POST'])
def upload_file():
if request.method == 'POST':
def retrieve_name(text):
name = text.partition('\n')[0]
phone = re.findall(re.compile(r'(?:(?:\+?([1-9]|[0-9][0-9]|[0-9][0-9][0-9])\s*(?:[.-]\s*)?)?(?:\(\s*([2-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9])\s*\)|([0-9][1-9]|[0-9]1[02-9]|[2-9][02-8]1|[2-9][02-8][02-9]))\s*(?:[.-]\s*)?)?([2-9]1[02-9]|[2-9][02-9]1|[2-9][02-9]{2})\s*(?:[.-]\s*)?([0-9]{4})(?:\s*(?:#|x\.?|ext\.?|extension)\s*(\d+))?'), text)
if phone:
number = ''.join(phone[0])
if len(number) > 10:
number = '+' + number
else:
number = number
email = re.findall("([^@|\s]+@[^@]+\.[^@|\s]+)", text)
if email:
try:
email = email[0].split()[0].strip(';')
except IndexError:
return None
edu = []
EDUCATION = [
'BE','B.E.', 'B.E', 'BS', 'B.S',
'ME', 'M.E', 'M.E.', 'MS', 'M.S',
'BTECH', 'B.TECH', 'M.TECH', 'MTECH',
'SSC', 'HSC', 'CBSE', 'ICSE','Bachelor','Master','PhD','BSc','BSc.'
,'investments','A.A.','A.S.','AAS','B.A','B.S.','BFA','BAS','MBA','MFA','Ph.D','Ph.D.','J.D','J.D.','M.D.','M.D','DDS','Diploma','diploma','Certificate','certficate','Certification','certification']
eq = ''
testt = text.split('\n')
i = 0
l = 0
k = len(EDUCATION)
temp = 0
for j in testt:
for k in EDUCATION:
if(k in j):
if(j not in edu):
edu.append(j)
edu_q = (''.join(edu))
languages =[]
language_list = ['Pidgin','Creole','Mandarin','Spanish','English','Hindi','Arabic','Portuguese','Russian','German','Korean','French','Italian','Polish','Hausa','Amharic','Romanian','Igbo','Dutch','Kurdish','Greek','Sylheti','Zulu','Czech','Haitian Creole','Swedish','Xhosa','Belarusian','Balochi','Konkani']
temp = 0
for l in testt:
for k in language_list:
if(k in l):
languages.append(k)
languages = set(languages)
skills_listt =[]
skills = ['JIRA','Zendesk','Salesforce','MS Office','Google Drive','Agile','Managing Cross-Functional Teams','Scrum','Performance Tracking','Financial Modelling','Ideation Leadership','Feature Definition','Forecasting','Profit and Loss','Scope Management','Project Lifecycle Management','A/B Testing','Social Media Marketing','Sales Funnel Management','Graphic Design Skills','Email Marketing','Email Automation','Photography','CAD','Design','Prototyping','Testing','Troubleshooting','Project Launch','Lean Manufacturing','Workflow Development','SolidWorks','Budgeting','Technical Report Writing','Time management','Data analysis','Web analytics','HTML & CSS','Wordpress','Email marketing','Web scraping','A/B Testing','Data visualization','pattern-finding','Search Engine','Keyword Optimization','Project/campaign management','B2B Marketing','Brand management','Creativity','Copywriting','Six Sigma techniques','The McKinsey 7s Framework','Porter’s Five Forces','PESTEL','Emotional Intelligence','People management','Business Development','Strategic Management','Negotiation' ,'Planning','Proposal writing','Customer Relationship Management','Cold-calling','Negotiation','Public speaking','Closing','Lead generation','Buyer engagement','Teamwork','Time management','Adobe Creative Suite', 'Illustrator', 'InDesign', 'Photoshop','Dreamweaver','Infographics','Photo editing','Typography','Storyboarding','Logo creation','Interactive media design','Ad design','Enterprise Resource Planning' ,'Big Data','Cognos Analytics','VBA','Visual Basic','Numerical competence','HTML','CSS','Javascript','Wordpress','Graphic User Interfaces','Git','Version control','Github', 'gitlab','Search Engine Optimization','SEO','Application Programming Interface','API','Adobe Photoshop', 'InDesign','Content Management Systems','CMS','Testing','Debugging','Responsive design','SQL','R', 'Python', 'Scala', 'Matlab','STATA', 'SPSS', 'SAS','Data Mapping','Entity Relationship Diagrams','Wireframes','Big Data' ,'Microsoft 
Visio','Agile Business Analysis','Machine learning','System Context Diagrams','Business Process Modeling','Technical and non-technical communication','Active listening','Communication','Computer skills','Customer service','Interpersonal skills','Leadership','Management skills','Problem-solving','Time management','Programming','Data Structures', 'Systems Programming', 'Algorithms','Object Oriented Systems Design', 'Switching and Finite Automata', 'Theory of Compiler Design','Linear Algebra', 'Modern Algebra', 'Operations','Python', 'C', 'C++', 'Java' ,'OpenCV', 'Scikit-learn', 'Matplotlib', 'Numpy', 'Scipy','Database',' MySQL' ,'Microsoft Oce','Adobe Suite', 'MATLAB', 'GIT','Data Analytics ','Power BI', 'Web Development ', 'Robotics' , 'Systems Administration' ,'IT Infrastructure ',' Motion Graphics' ,'Typography', 'Java','Tableau',' PHP', 'Adobe','MySQL', 'SQL', 'C#', 'JavaScript', 'C++', 'Python', 'iOS/Swift', 'Ruby on Rails','System administration', 'network configuration',' software installation',' security',' Cisco',' tech support',' updates',' project management',' research',' vendor management',' TCI/IP', 'DNS',' DHCP', ' WAN/LAN', 'Windows', 'Linux/Unix', 'Ubuntu', 'virtualized networks', 'network automation', 'cloud management', 'AI/machine learning','Web development','Microsoft Office','Project Scheduling','Strategic Planning','Subject Matter Expertise','Project Lifecycle Management','Agile Software','Scrum Management','Meeting Facilitation','Financial Modelling','Kanban','Forecasting','Lean Thinking','Performance Tracking','Budgeting', 'open source', 'data structures', 'coding', 'security', 'machine learning', 'debugging','Photoshop', 'Illustrator', 'InDesign', 'Acrobat', 'Corel Draw', 'HTML/CSS','Ruby','Agile','Scrum','MS Office','Microsoft Office','Excel','Powerpoint','Access','Photoshop', 'Salesforce (CRM)',' Oracle Netsuite (ERP)','InDesign','Profit & loss analysis', 'Technical writing',' research', 'leadership','employeee training','Supplier 
management', 'account management','MySQL', 'WordPress','Oracle','Data processing', 'Teradata', 'IBM DB2','Microsoft Access',' Cloud Computing','Artificial Intelligence','Analytical Reasoning','People Management','UX/UI','UX Design','Mobile Application Development','Video Production','Sales Leadership','Translation','Audio Production','Natural Language Processing','Scientific Computing','Game Development','Social Media Marketing','Animation','Business Analysis','Journalism','Digital Marketing','Industrial Design','Competitive Strategies','Customer Service Systems','Software Testing','Data Science','Computer Graphics','Corporate Communications']
temp = 0
for j in testt:
for k in skills:
if(k in j):
skills_listt.append(k)
skills_listt = set(skills_listt)
joblist =[]
job_titles=['Consultant','Advisor','Intern','Web Designer','President','Project Manager','Librarian','Project Manager','Marketing Specialist','Marketing Manager','Marketing Director','Graphic Designer','Marketing Research Analyst','Marketing Communications Manager','Marketing Consultant','Product Manager','Public Relations','Social Media Assistant','Brand Manager','SEO Manager','Content Marketing Manager','Copywriter','Digital Marketing Manager','eCommerce Marketing Specialist','Brand Strategist','Vice President of Marketing','Media Relations Coordinator','Administrative Assistant','Receptionist','Office Manager','Auditing Clerk','Branch Manager','Business Manager','Quality Control Coordinator', 'Administrative Manager','Chief Executive Officer','Business Analyst',' Risk Manager','Human Resources','Office Assistant','Secretary','Office Clerk','File Clerk','Account Collector','Administrative Specialist','Executive Assistant','Program Administrator','Program Manager','Administrative Analyst','Data Entry','CEO','Chief Executive Officer','COO','Chief Operating Officer','CFO','Chief Financial Officer','CIO','Chief Information Officer','CTO','Chief Technology Officer','CMO','Chief Marketing Officer','CHRO','Chief Human Resources Officer','CDO','Chief Data Officer','CPO','Chief Product Officer','CCO',' Chief Customer Officer','Manager','Assistant Manager','Executive','Director','Coordinator','Administrator','Controller','Officer','Organizer','Supervisor','Superintendent','Head','Overseer','Chief','Foreman','Controller','Principal','President','Lead','Computer Scientist','IT Professional','UX Designer & UI Developer','UX Designer','SQL Developer','Web Designer','Web Developer','Desktop Support','Software Engineer','Data Entry','DevOps Engineer','Computer Programmer','Network Administrator','Information Security Analyst','Artificial Intelligence Engineer','Cloud Architect','IT Manager','Technical Specialist','Application Developer','Chief Technology Officer','CTO','Chief 
Information Officer', 'CIO','Sales Associate','Sales Representative','Sales Manager','Retail Worker','Store Manager','Sales Representative','Sales Manager','Real Estate Broker','Sales Associate','Cashier','Store Manager','Account Executive','Sales Analyst','Market Development Manager','B2B Sales Specialist','Sales Engineer','Proprietor','Principal','Owner','President','Founder','Administrator','Director','Managing Partner','Managing Member','Associate','Analyst','Board of Directors','Quality Control','Human Resources','Shipping and Receiving Staff','Office Manager','Receptionist','Operations Manager','Operations Assistant','Operations Coordinator','Operations Analyst','Operations Director','Vice President of Operations','Operations Professional','Scrum Master','Accountant','Accounting Analyst','Accounting Director','Accounts Payable/Receivable Clerk','Auditor','Budget Analyst','Controller','Financial Analyst','Finance Manager','Economist','Payroll Manager','Payroll Clerk','Financial Planner','Financial Services Representative','Finance Director','Commercial Loan Officer','Engineer','Mechanical Engineer','Civil Engineer','Electrical Engineer','Assistant Engineer','Chemical Engineer','Biological Engineer','Maintenance Engineer','Mining Engineer','Nuclear Engineer','Petroleum Engineer','Plant Engineer','Production Engineer','Quality Engineer','Safety Engineer','Sales Engineer','Researcher','Research Assistant','Data Analyst','Business Analyst','Financial Analyst','Biostatistician','Market Researcher','Title Analyst','Medical Researcher']
temp = 0
for jt in testt:
for k in job_titles:
if(k in jt):
joblist.append(jt)
joblist = set(joblist)
country_listt =[]
country = ['Afghanistan','Albania','Algeria','United Sates of America','Andorra','Angola','Anguilla','Antigua and Barbuda','Argentina','Armenia','Australia','Austria','Azerbaijan','Bahamas','Bahrain','Bangladesh','Barbados','Belarus','Belgium','Belize','Benin','Bermuda','Bhutan','Bolivia','Bosnia and Herzegovina','Botswana','Brazil','Britain','British Virgin Islands','Brunei','Bulgaria','Burkina Faso','Burma','Burundi','Cambodia','Cameroon','Canada','Cape Verde','Cayman Islands','Central African Republic','Chad','Chile','China','Colombia' ,'Comoros','Congo','Cook Islands','Costa Rica','Croatia','Cuba','Cyprus','Czech Republic','Denmark','Djibouti','Dominican Republic','Dominican Republic','Netherlands','East Timor','Ecuador','Egypt','United Arab Emirates','England','Equatorial Guinea','Eritrea','Estonia','Ethiopia','Faroe Island','Fiji','Philipines','Finland','France','Gabon','Gambia','Georgia','Germany','Ghana','Gibraltar','Greece','Greenland','Grenada','Guaman','Guatemala','Guinea-Bissau','Guinea','Guyana','Haiti','Honduras','Hong Kong','Hungary','Iceland','India','Indonesia','Iran','Iraq','Ireland','Israel','Italy','Ivory Coast','Jamaica','Japan','Jordan','Kazakhstan','Kenya','Kiribati','Kosovo','Kuwait','Kyrgyzstan','Lao','Latvia','Lebanon','Liberia','Libya','Liechtenstein','Lithuania','Luxembourg','Macedonia','Malawi','Malaysia','Maldives','Mali','Malta','Mauritania','Mauritius','Mexico','Moldova','Mongolia','Montenegro','Montserra','Morocco','Mosotho','Mozambique','Namibia','Nepal','New Zealand','Nicaragua','Niger','Nigeria','North Korean','Northern Ireland','Norway','Oman','Pakistan','Palau','Palestine','Panama','Papua New Guinea','Paraguay','Peru','Pitcairn Island','Poland','Portugal','Puerto Rico','Qatar','Romania','Russia','Rwanda','Salvador','Sammarine','Samoa','Sao Tome','Saudi Arabia','Scotland','Senegal','Serbia','Seychelles','Sierra Leone','Singapore','Slovakia','Slovenia','Solomon Islands','Somalia','South Africa','South Korea','South 
Sudan','Spain','Sri Lanka','St Helenia','St Lucia','Sudan','Surinam','Swaziland','Sweden','Switzerland','Syria','Taiwan','Tajikstan','Tanzania','Thailand','Togo','Tonga','Trinidad and Tobago','Tristania','Tunisia','Turkey','Turkmen Turks and Caicos Islands','Tuvalu','Uganda','Ukraine','Uruguay','Uzbekistan','Vatican City','Vanuatu','Venezuela','Vietnam','St. Vincent', 'Wales','Yemen','Zambia','Zimbabwe']
temp = 0
for j in testt:
for k in country:
if(k in j):
country_listt.append(k)
country_listt = set(country_listt)
nationality_listt =[]
nationality = ['Afghan','Albanian','Algerian','American','Andorran','Angolan','Anguillan','Citizen of Antigua and Barbuda','Argentine','Armenian','Australian','Austrian','Azerbaijani','Bahamian','Bahraini','Bangladeshi','Barbadian','Belarusian','Belgian','Belizean','Beninese','Bermudian','Bhutanese','Bolivian','Citizen of Bosnia and Herzegovina','Botswanan','Brazilian','British','British Virgin Islander','Bruneian','Bulgarian','Burkinan','Burmese','Burundian','Cambodian','Cameroonian','Canadian','Cape Verdean','Cayman Islander','Central African','Chadian','Chilean','Chinese','Colombian' ,'Comoran Congolese','Congolese','Cook Islander','Costa Rican','Croatian','Cuban','Cymraes','Cymro','Cypriot','Czech','Danish','Djiboutian','Dominican','Citizen of the Dominican Republic','Dutch','East Timorese','Ecuadorean','Egyptian','Emirati','English','Equatorial Guinean','Eritrean','Estonian','Ethiopian','Faroese','Fijian','Filipino','Finnish','French','Gabonese','Gambian','Georgian','German','Ghanaian','Gibraltarian','Greek','Greenlandic','Grenadian','Guamanian','Guatemalan','Citizen of Guinea-Bissau','Guinean','Guyanese','Haitian','Honduran','Hong Konger','Hungarian','Icelandic','Indian','Indonesian','Iranian','Iraqi','Irish','Israeli','Italian','Ivorian','Jamaican','Japanese','Jordanian','Kazakh','Kenyan','Kittitian','Citizen of Kiribati','Kosovan','Kuwaiti','Kyrgyz','Lao','Latvian','Lebanese','Liberian','Libyan','Liechtenstein citizen','Lithuanian','Luxembourger','Macanese','Macedonian','Malagasy','Malawian','Malaysian','Maldivian','Malian','Maltese','Marshallese','Martiniquais','Mauritanian','Mauritian','Mexican','Micronesian','Moldovan','Monegasque','Mongolian','Montenegrin','Montserratian','Moroccan','Mosotho','Mozambican','Namibian','Nauruan','Nepalese','New Zealander','Nicaraguan','Nigerian','Nigerien','Niuean','North Korean','Northern Irish','Norwegian','Omani','Pakistani','Palauan','Palestinian','Panamanian','Papua New Guinean','Paraguayan','Peruvian','Pitcairn 
Islander','Polish','Portuguese','Prydeinig','Puerto Rican','Qatari','Romanian','Russian','Rwandan','Salvadorean','Sammarinese','Samoan','Sao Tomean','Saudi Arabian','Scottish','Senegalese','Serbian','Citizen of Seychelles','Sierra Leonean','Singaporean','Slovak','Slovenian','Solomon Islander','Somali','South African','South Korean','South Sudanese','Spanish','Sri Lankan','St Helenian','St Lucian','Sudanese','Surinamese','Swazi','Swedish','Swiss','Syrian','Taiwanese','Tajik','Tanzanian','Thai','Togolese','Tongan','Trinidadian','Tristanian','Tunisian','Turkish','Turkmen','Turks and Caicos Islander','Tuvaluan','Ugandan','Ukrainian','Uruguayan','Uzbek','Vatican citizen','Citizen of Vanuatu','Venezuelan','Vietnamese','Vincentian','Wallisian' 'Welsh','Yemeni','Zambian','Zimbabwean']
temp = 0
for j in testt:
for k in nationality:
if(k in j):
nationality_listt.append(k)
nationality_listt = set(nationality_listt)
associations =[]
associate = ['National','Academy','ACCA','Certified','Certified Public Accountants','Accountants','Association','Examiners','Professionals','Society','Civil Engineers','ASCE','CFA Institute','CFA','Board of Standards','Chartered Global','Management Accountants','CIMA','Chartered Management','Chartered Institute','Commission','Institute','Internal Auditors','International','Council','Institution','Union','Federation','Associations','International','Project Management Institute','PMI','Society','Institute']
temp = 0
for j in testt:
for k in associate:
if(k in j):
associations.append(j)
associations = set(associations)
return render_template('processing.html', elist = skills_listt,edu=edu,pdf=pdf_location,name=name,languages=languages,country=country_listt,nationality=nationality_listt,email=email,number=number,joblist=joblist,associations=associations)
def extract_text_from_pdf(pdf_path):
    """Yield the extracted text of each page of a PDF, one page at a time.

    :param pdf_path: despite the name, this appears to be an open binary
        file object (it is forwarded to ``PDFPage.get_pages`` and the caller
        below passes a file) -- TODO confirm.
    :yields: str -- accumulated text up to and including the current page
        (``fake_file_handle`` is fresh per page, so each yield is that
        page's text only).
    """
    for page in PDFPage.get_pages(pdf_path, caching=True, check_extractable=True):
        # creating a resource manager (one per page)
        resource_manager = PDFResourceManager()
        # create an in-memory file handle to receive the converted text
        fake_file_handle = io.StringIO()
        # creating a text converter object
        converter = TextConverter(
            resource_manager,
            fake_file_handle,
            codec='utf-8',
            laparams=LAParams()
        )
        # creating a page interpreter
        page_interpreter = PDFPageInterpreter(
            resource_manager,
            converter
        )
        # process current page
        page_interpreter.process_page(page)
        # extract text
        text = fake_file_handle.getvalue()
        yield(text)
        # close open handles; note this runs only when the generator is
        # resumed for the next page (code after yield), so the handles of
        # the final page are closed when the loop advances past it.
        converter.close()
        fake_file_handle.close()
file = request.files['file']
filename = secure_filename(file.filename)
file.save(os.path.join(app.config['UPLOAD_FOLDER'], filename))
pdf_location = filename
text = ""
for page in extract_text_from_pdf(file):
text += ' ' + page
return(retrieve_name(text))
# creating a pdf reader object
else:
return render_template('home.html')
# Entry point: run the Flask development server on all interfaces.
# The port is taken from the PORT environment variable (default 8080).
if __name__ == "__main__":
    app.run(debug = True,host='0.0.0.0',port=int(os.environ.get('PORT',8080)))
|
[
"nanaobengmarnu@gmail.com"
] |
nanaobengmarnu@gmail.com
|
d912dcd311fbd90a3808f9202b11c0458407bd3a
|
8cca481c8dd508012aa794e2f9a07e11c3706a87
|
/presidio-image-redactor/tests/test_tesseract_ocr.py
|
7c72e09e77bbfbf5ac4c9a3f209a7573dd1ddae7
|
[
"Apache-2.0",
"LicenseRef-scancode-public-domain",
"BSD-2-Clause",
"BSD-3-Clause",
"Unlicense",
"LicenseRef-scancode-proprietary-license",
"LicenseRef-scancode-warranty-disclaimer",
"CNRI-Python",
"MIT",
"LicenseRef-scancode-secret-labs-2011",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
microsoft/presidio
|
174472891e241e292982eee26a666d71ca263d42
|
3effc1467b8714714d5112ef7b627889507ea83d
|
refs/heads/main
| 2023-08-15T20:14:00.962803
| 2023-08-14T19:13:49
| 2023-08-14T19:13:49
| 132,129,752
| 2,092
| 412
|
MIT
| 2023-09-13T18:17:58
| 2018-05-04T11:08:58
|
Python
|
UTF-8
|
Python
| false
| false
| 989
|
py
|
import pytest
from presidio_image_redactor.tesseract_ocr import TesseractOCR
def test_given_empty_dict_then_get_text_from_ocr_dict_returns_empty_str():
    """An empty OCR result dict should produce an empty string."""
    assert TesseractOCR.get_text_from_ocr_dict({}) == ""
@pytest.mark.parametrize(
    "sep, expected_text",
    [
        (" ", " Homey Interiors was created by Katie Cromley."),
        ("+", "+Homey+Interiors+was+created+by+Katie++Cromley."),
    ],
)
def test_given_valid_dict_then_get_text_from_ocr_dict_returns_correct_str(
    get_ocr_analyzer_results, sep, expected_text
):
    """Joining the OCR tokens with *sep* should reproduce the expected text."""
    ocr_result, _text, _analyzer_results = get_ocr_analyzer_results
    assert TesseractOCR.get_text_from_ocr_dict(ocr_result, sep) == expected_text
def test_given_wrong_keys_in_dict_then_get_text_from_ocr_dict_returns_exception():
    """A dict lacking the expected OCR keys should raise KeyError."""
    malformed_result = {"words": ["John"], "level": [0]}
    with pytest.raises(KeyError):
        TesseractOCR.get_text_from_ocr_dict(malformed_result)
|
[
"noreply@github.com"
] |
microsoft.noreply@github.com
|
e7cd99dc8975fdd04a6e72d31237472d9eb2bb7c
|
aa59f7dbbf42af3b2d1da0b33f62989a1d329a7b
|
/static/python_scripts/data_processing.py
|
374053bbdd2e96e4d975e68d33286602f83d83d6
|
[] |
no_license
|
endrehp/earthquake-digitalO
|
6012f0c898e88635bfb5de929d3709c8521f915d
|
4707b288804d67837fd34b95cc636ab091f949a0
|
refs/heads/master
| 2020-03-13T16:20:12.520983
| 2018-10-22T20:01:46
| 2018-10-22T20:01:46
| 131,195,469
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,508
|
py
|
import numpy as np
import pandas as pd
import geojson
from django.templatetags.static import static
#from django.core.files import File
def data_processing(excel_file, date, epi_lon, epi_lat):
    """Convert a sensor-readings Excel export into GeoJSON files for mapping.

    Writes three files under ``media/``:
    ``private_<date>.geojson`` (every resampled reading, with an HTML popup),
    ``edit_public_<date>.geojson`` (only the per-cluster maximum readings) and
    ``epicenter_<date>.geojson`` (epicenter animation frames with growing radius).

    :param excel_file: path or file object accepted by ``pandas.read_excel``.
    :param date: date string embedded in the output file names.
    :param epi_lon: epicenter longitude.
    :param epi_lat: epicenter latitude.
    """
    #Import
    #date='02-17-2018'
    df=pd.read_excel(excel_file)
    #Clean Data
    # Normalize column names to lower case; 'serial' is renamed to 'sn'.
    lowercols = []
    columns = df.columns
    for col_name in columns:
        name = col_name.lower()
        if name == 'serial':
            name = 'sn'
        lowercols.append(name)
    df.columns = lowercols
    # Build a monotonically non-decreasing per-row time axis (seconds) from
    # the seconds component of the send time; each apparent decrease is
    # treated as a minute rollover and compensated with +60.
    origin=0
    count=0
    time=[]
    tp=0
    for i in range(len(df)):
        if 'hssendtime' in lowercols:
            t = df['hssendtime'][i].second
        else:
            # Fall back to the date column; converted in-place each pass.
            df['hssenddate']=pd.to_datetime(df['hssenddate'])
            t = df['hssenddate'][i].second
        while t<tp:
            t+=60
        time.append(t-origin)
        tp=t
    df['time']=time
    df=df[['sn','lat','lon','s_gal','mdact','time']]
    # Turn the 'ON'/'OFF' activation flag into a boolean.
    b=df['mdact']=='ON'
    df['mdact']=b
    #Get cluster
    #http://localhost:8000/static/python_scripts/all_sensors.csv
    #all_sensors=pd.read_csv("{% static 'python_scripts/all_sensors.csv' %}")
    all_sensors=pd.read_csv("static/python_scripts/all_sensors.csv")
    #all_sensors=pd.read_csv('all_sensors.csv')
    # Map each serial number to its cluster from the lookup table; serials
    # without exactly one match get a synthetic 'no_clusterN' cluster name.
    clusters = list(np.zeros(len(df)))
    count=0
    sensors_without_cluster=[]
    for i in range(len(df)):
        cluster=all_sensors['Cluster'][df['sn'][i] == all_sensors['Serie']]
        if len(cluster) == 1:
            clusters[i]=cluster.item()
        else:
            sensors_without_cluster.append(df['sn'][i])
            count+=1
    sensors_without_cluster = pd.Series(sensors_without_cluster).unique()
    for i in range(len(df)):
        for j, sn in enumerate(sensors_without_cluster):
            if df['sn'][i] == sn:
                clusters[i] = 'no_cluster' + str(j)
    df['cluster'] = clusters
    clusterlist = df['cluster'].unique()
    # Split into one frame per sensor, re-indexed from zero.
    sensors=df['sn'].unique()
    sensorframes=[]
    for i in range(len(sensors)):
        sensorframes.append(df[df['sn']==sensors[i]].reset_index().drop('index',axis=1))
    t_max=df['time'].max()
    # Resample every sensor onto a common 0..t_max-1 second grid, keeping the
    # maximum intensity/activation per second.
    timeSensorFrames = []
    newTime=list(range(t_max))
    for sensorframe in sensorframes:
        timeSeries = np.asarray(sensorframe['time'])
        S_GalSeries = np.asarray(sensorframe['s_gal'])
        MdActSeries = np.asarray(sensorframe['mdact'])
        counter = 0
        S_Gal_new = np.zeros(t_max)
        Md_Act_new = np.zeros(t_max)
        # Constant per-sensor columns broadcast over the whole grid.
        Sn_new = np.ones(t_max)*sensorframe['sn'][0]
        Lat_new = np.ones(t_max)*sensorframe['lat'][0]
        Lon_new = np.ones(t_max)*sensorframe['lon'][0]
        i = timeSeries[0]
        local_max = timeSeries.max()
        while i < local_max+1 and i < t_max:
            if i == timeSeries[counter]:
                # Gather all readings sharing this second.
                v=[]
                w=[]
                while i == timeSeries[counter] and counter < len(timeSeries)-1:
                    v.append(S_GalSeries[counter])
                    w.append(int(MdActSeries[counter]))
                    counter+=1
                if len(v) > 0:
                    max_v = max(v)
                    max_w = max(w)
                    S_Gal_new[i] = max_v
                    Md_Act_new[i] = max_w
                    i += 1
                else:
                    # Final reading already consumed: reuse the previous maxima.
                    S_Gal_new[i] = max_v
                    Md_Act_new[i] = max_w
                    i += 1
            # NOTE(review): if timeSeries skips a second (i never matches
            # timeSeries[counter]) this loop would not advance i -- assumes
            # per-sensor timestamps are contiguous; confirm with real data.
        # Pad the remaining tail with the last observed intensity.
        S_Gal_new[i:]=S_GalSeries[-1]*np.ones(len(S_Gal_new[i:]))
        frame=pd.DataFrame()
        frame['sn'] = Sn_new
        frame['lat'] = Lat_new
        frame['lon'] = Lon_new
        frame['mdact'] = Md_Act_new
        frame['s_gal'] = S_Gal_new
        frame['time'] = newTime
        frame['cluster'] = sensorframe['cluster'][0]
        timeSensorFrames.append([sensorframe['cluster'][0], frame])
    # Group the resampled frames by cluster.
    cluster_of_frames = []
    for clustername in clusterlist:
        local_cluster=[]
        for element in timeSensorFrames:
            if element[0] == clustername:
                local_cluster.append(element[1])
        cluster_of_frames.append(local_cluster)
    # For every second, flag the sensor with the highest intensity in its cluster.
    for cluster in cluster_of_frames:
        is_max = np.zeros((len(cluster[0]),len(cluster)))
        for i in range(len(cluster[0])):
            intensities=np.zeros(len(cluster))
            for j, frame in enumerate(cluster):
                intensities[j] = frame['s_gal'][i]
            #print(intensities)
            is_max[i, np.argmax(intensities)] = 1
        for k, frame in enumerate(cluster):
            frame['max'] = is_max[:, k]
    cluster_concat = []
    for cluster in cluster_of_frames:
        cluster_concat.append(pd.concat(cluster,ignore_index = True))
    all_concat = pd.concat(cluster_concat, ignore_index = True)
    only_max = all_concat[all_concat['max'] == 1]
    #To geojson
    def data2geojson_private(df):
        # Full data set, including an HTML popup description (internal map).
        features = []
        insert_features = lambda X: features.append(
            geojson.Feature(geometry=geojson.Point((X["lon"],
                                                    X["lat"])),
                            properties=dict(Sn=X["sn"],
                                            S_Gal=X["s_gal"],
                                            MdAct=int(X['mdact']),
                                            Time=X['time'],
                                            #Max=X['max']
                                            Description= '<strong> Sensor </strong> <p>Serial number: '
                                            + str(int(X['sn']))+'</p> <p>Cluster: '+ X['cluster'] + '<p/> <p> Activated: ' + str(bool(X['mdact'])) +
                                            '</p> <p> Intensity: '+str(X['s_gal'])+'</p>'
                                            )))
        df.apply(insert_features, axis=1)
        with open('media/private_' + date + '.geojson', 'w', encoding='utf8') as fp:
            geojson.dump(geojson.FeatureCollection(features), fp, sort_keys=True, ensure_ascii=False)
    def data2geojson_public(df):
        # Per-cluster maxima only; no popup description.
        features = []
        insert_features = lambda X: features.append(
            geojson.Feature(geometry=geojson.Point((X["lon"],
                                                    X["lat"])),
                            properties=dict(Sn=X["sn"],
                                            S_Gal=X["s_gal"],
                                            #MdAct=int(X['mdact']),
                                            Time=X['time'],
                                            Max=X['max']
                                            #Description= '<strong> Sensor </strong> <p>Serial number: '
                                            #+ str(int(X['sn']))+'</p> <p>Cluster: '+ X['cluster'] + '<p/> <p> Activated: ' + str(bool(X['mdact'])) +
                                            #'</p> <p> Intensity: '+str(X['s_gal'])+'</p>'
                                            )))
        df.apply(insert_features, axis=1)
        with open('media/edit_public_' + date + '.geojson', 'w', encoding='utf8') as fp:
            geojson.dump(geojson.FeatureCollection(features), fp, sort_keys=True, ensure_ascii=False)
        #with open('media/raw_public_' + date + '.geojson', 'w', encoding = 'utf8') as fp:
        #    geojson.dump(geojson.FeatureCollection(features), fp, sort_keys = True, ensure_ascii = False)
    data2geojson_public(only_max)
    data2geojson_private(all_concat)
    print('geojson laget')  # Norwegian: "geojson created"
    #f = open('public_' + date + '.geojson', 'r')
    #return File(f)
    # Epicenter animation frames: one point per second with a growing radius.
    epi_df = pd.DataFrame()
    epi_df['time'] = list(range(t_max))
    epi_df['radius'] = list(range(t_max))
    epi_df['lon'] = epi_lon
    epi_df['lat'] = epi_lat
    def data2geojson_epicenter(df):
        features = []
        insert_features = lambda X: features.append(
            geojson.Feature(geometry=geojson.Point((X["lon"],
                                                    X["lat"])),
                            properties=dict( Rad=X["radius"],
                                             Time=X['time'],
                                             )))
        df.apply(insert_features, axis=1)
        with open('media/epicenter_' + date + '.geojson', 'w', encoding='utf8') as fp:
            geojson.dump(geojson.FeatureCollection(features), fp, sort_keys=True, ensure_ascii=False)
    data2geojson_epicenter(epi_df)
|
[
"endrehp@stud.ntnu.no"
] |
endrehp@stud.ntnu.no
|
e6460d4f77241f5a29063cb8c718399d5f581bfe
|
e413ca79958757c9b2121a5f6b4478a636692caa
|
/apps/utils.py
|
1df9ce498cce86a1d59b378c70a36c7f9689ff14
|
[] |
no_license
|
gitter-badger/pythonz
|
7674ff42841c7ce633bd00cf39d5862bd4042b69
|
be1ad66f1958b7a899f890122db615bcdc439d11
|
refs/heads/master
| 2020-12-27T23:40:20.208962
| 2015-07-01T13:33:12
| 2015-07-01T13:33:12
| 38,696,317
| 0
| 0
| null | 2015-07-07T15:21:00
| 2015-07-07T15:21:00
| null |
UTF-8
|
Python
| false
| false
| 10,545
|
py
|
import os
import re
from collections import OrderedDict
from urllib.parse import urlsplit, urlunsplit
from textwrap import wrap
import requests
from PIL import Image # Для работы с jpg требуется собрать с libjpeg-dev
from bs4 import BeautifulSoup
from django.conf import settings
from django.core.cache import cache
from django.core.files.base import ContentFile
from django.utils import timezone
from django.utils.dateparse import parse_datetime
from django.utils.text import Truncator
def format_currency(val):
    """Format a currency amount with thousands groups separated by spaces,
    to make large values easier to read.

    :param val: numeric amount; truncated to an integer before formatting.
    :return: str, e.g. 1234567 -> '1 234 567'.
    """
    reversed_digits = str(int(val))[::-1]
    grouped = wrap(reversed_digits, 3)
    return ' '.join(grouped)[::-1]
def get_from_url(url):
    """Fetch the given URL and return the ``requests`` response object.

    Uses a browser-like User-Agent, follows redirects and applies a short
    timeout.

    :param str url:
    :return:
    """
    request_kwargs = {
        'allow_redirects': True,
        'headers': {'User-agent': 'Mozilla/5.0 (Ubuntu; X11; Linux i686; rv:8.0) Gecko/20100'},
        'timeout': 1.5,
    }
    return requests.get(url, **request_kwargs)
def get_json(url):
    """Return a dict parsed from the JSON document at the given URL.

    Network failures and invalid JSON both yield an empty dict.

    :param str url:
    :return:
    """
    try:
        response = get_from_url(url)
    except requests.exceptions.RequestException:
        return {}
    try:
        return response.json()
    except ValueError:
        # Body was not valid JSON.
        return {}
class HhVacancyManager:
    """Groups tools for working with job vacancies from hh.ru."""

    @classmethod
    def get_status(cls, url):
        """Return the state of the vacancy at the given API URL.

        :param url: hh.ru API URL of a single vacancy.
        :return: the value of the response's 'archived' field, or None when
            the request failed / returned an empty document.
        """
        response = get_json(url)
        if not response:
            return
        # NOTE(review): a non-empty response lacking 'archived' would raise
        # KeyError here -- assumes the API always includes the field; confirm.
        return response['archived']

    @classmethod
    def fetch_list(cls):
        """Return a list of vacancy data dicts fetched from the external
        source (hh.ru search API), or None when the response has no items.

        :return:
        """
        base_url = 'https://api.hh.ru/vacancies/'
        # Search for 'python' in vacancy names, published within one day.
        query = (
            'search_field=%(field)s&per_page=%(per_page)s'
            '&order_by=publication_time&period=1&text=%(term)s' % {
                'term': 'python',
                'per_page': 500,
                'field': 'name', # description
            })
        response = get_json('%s?%s' % (base_url, query))
        if 'items' not in response:
            return None
        results = []
        for item in response['items']:
            # Salary block may be absent; default the parts to empty strings.
            salary_from = salary_till = salary_currency = ''
            if item['salary']:
                salary = item['salary']
                salary_from = salary['from']
                salary_till = salary['to']
                salary_currency = salary['currency']
            employer = item['employer']
            url_logo = employer['logo_urls']
            if url_logo:
                # Use the 90px logo variant.
                url_logo = url_logo['90']
            results.append({
                '__archived': item['archived'],
                'src_id': item['id'],
                'src_place_name': item['area']['name'],
                'src_place_id': item['area']['id'],
                'title': item['name'],
                'url_site': item['alternate_url'],
                'url_api': item['url'],
                'url_logo': url_logo,
                'employer_name': employer['name'],
                'salary_from': salary_from or None,
                'salary_till': salary_till or None,
                'salary_currency': salary_currency,
                'time_published': parse_datetime(item['published_at']),
            })
        return results
class BasicTypograph(object):
    """Contains basic typography rules.

    Allows applying these rules to a string via :meth:`apply_to`.
    """
    # Ordered (regex, replacement) rules; order matters -- e.g. exotic quote
    # characters are normalized to '"' before the guillemet rules below
    # convert paired '"' into « ».
    rules = OrderedDict((
        ('QUOTES_REPLACE', (re.compile('(„|“|”|(\'\'))'), '"')),
        # NOTE(review): this alternation appears to contain an empty branch
        # ('-||–'), which matches the empty string at every position and
        # would insert '-' between all characters on Python 3.7+ -- a
        # character was likely lost in transit; verify against upstream.
        ('DASH_REPLACE', (re.compile('(-||–|—|―|−|--)'), '-')),
        ('SEQUENTIAL_SPACES', (re.compile('([ \t]+)'), ' ')),
        ('DASH_EM', (re.compile('([ ,])-[ ]'), '\g<1>— ')),
        ('DASH_EN', (re.compile('(\d+)[ ]*-[ ]*(\d+)'), '\g<1>–\g<2>')),
        ('HELLIP', (re.compile('\.{2,3}'), '…')),
        ('COPYRIGHT', (re.compile('\((c|с)\)'), '©')),
        ('TRADEMARK', (re.compile('\(tm\)'), '™')),
        ('TRADEMARK_R', (re.compile('\(r\)'), '®')),
        ('QUOTES_CYR_CLOSE', (re.compile('(\S+)"', re.U), '\g<1>»')),
        ('QUOTES_CYR_OPEN', (re.compile('"(\S+)', re.U), '«\g<1>')),
    ))

    @classmethod
    def apply_to(cls, input_str):
        """Apply all typography rules, in order, to *input_str* and return
        the result stripped of surrounding whitespace."""
        # Pad with spaces so edge-anchored rules (e.g. DASH_EM) can match at
        # the string boundaries.
        input_str = ' %s ' % input_str.strip()
        for name, (regexp, replacement) in cls.rules.items():
            input_str = re.sub(regexp, replacement, input_str)
        return input_str.strip()
def url_mangle(url):
    """Shorten a long URL almost beyond recognition, rendering it
    non-functional but compact -- purely to reduce string length.

    URLs of 45 characters or fewer are returned unchanged; longer ones lose
    their query string and fragment, and the path is collapsed to
    '<...>last-segment'.

    :param url:
    :return:
    """
    if len(url) <= 45:
        return url
    PATH, QUERY, FRAGMENT = 2, 3, 4
    parts = list(urlsplit(url))
    parts[QUERY] = ''
    parts[FRAGMENT] = ''
    if parts[PATH].strip('/'):
        # Keep only the last path segment.
        parts[PATH] = '<...>%s' % parts[PATH].split('/')[-1]
    return urlunsplit(parts)
def get_thumb_url(realm, image, width, height, absolute_url=False):
    """Create (on the fly) a downsized copy of the given image and return
    its URL.

    The thumbnail file is generated only once on disk and the resulting URL
    is cached for a day.

    :param realm: object exposing ``name_plural``, used to build the path.
    :param image: Django image file; must expose ``.path``.
    :param width: bounding-box width of the thumbnail.
    :param height: bounding-box height of the thumbnail.
    :param absolute_url: when True, prefix the URL with ``settings.SITE_URL``.
    :return: thumbnail URL, or '' when the image has no usable file path.
    """
    base_path = os.path.join('img', realm.name_plural, 'thumbs', '%sx%s' % (width, height))
    try:
        thumb_file_base = os.path.join(base_path, os.path.basename(image.path))
    except (ValueError, AttributeError):
        # The image is missing or not backed by a real file.
        return ''
    cache_key = 'thumbs|%s|%s' % (thumb_file_base, absolute_url)
    url = cache.get(cache_key)
    if url is None:
        thumb_file = os.path.join(settings.MEDIA_ROOT, thumb_file_base)
        if not os.path.exists(thumb_file):
            try:
                os.makedirs(os.path.join(settings.MEDIA_ROOT, base_path), mode=0o755)
            except FileExistsError:
                pass
            img = Image.open(image)
            # thumbnail() keeps the aspect ratio within the bounding box.
            img.thumbnail((width, height), Image.ANTIALIAS)
            img.save(thumb_file)
        url = os.path.join(settings.MEDIA_URL, thumb_file_base)
        if absolute_url:
            url = '%s%s' % (settings.SITE_URL, url)
        # Cache the computed URL for 24 hours.
        cache.set(cache_key, url, 86400)
    return url
def get_image_from_url(url):
    """Download the image at the given URL into a Django ``ContentFile``.

    The file name is taken from the last path segment of the URL.

    :param url:
    :return:
    """
    filename = url.rsplit('/', 1)[-1]
    return ContentFile(requests.get(url).content, filename)
def get_timezone_name(lat, lng):
    """Return the time zone name for the given geocoordinates, or None.

    Uses the Google Time Zone API service.

    :param lat: latitude
    :param lng: longitude
    :return: time zone identifier (e.g. 'Europe/Moscow') or None on failure.
    """
    url = (
        'https://maps.googleapis.com/maps/api/timezone/json?'
        # BUG FIX: the source contained 'location=...s×tamp=...' -- the
        # '&times' of '&timestamp' had been mangled into the '×' character
        # (HTML-entity corruption), producing an invalid query parameter.
        # The Time Zone API requires 'location', 'timestamp' and 'key'.
        'location=%(lat)s,%(lng)s&timestamp=%(ts)s&key=%(api_key)s' % {
            'lat': lat,
            'lng': lng,
            'ts': timezone.now().timestamp(),
            'api_key': settings.GOOGLE_API_KEY,
        }
    )
    try:
        result = requests.get(url)
        doc = result.json()
        tz_name = doc['timeZoneId']
    except Exception:
        # Best-effort lookup: any network/parse/schema failure yields None.
        return None
    return tz_name
def get_location_data(location_name):
    """Return geodata about an object identified by its name, or None.

    Uses the Yandex.Maps geocoding API.

    :param location_name:
    :return: dict with keys 'requested_name', 'type', 'name', 'country',
        'pos' ('lat,lng'), 'bounds', or None when nothing was found or the
        request failed.
    """
    url = 'http://geocode-maps.yandex.ru/1.x/?results=1&format=json&geocode=%s' % location_name
    try:
        result = requests.get(url)
        doc = result.json()
    except Exception:
        return None
    # NOTE(review): the nested key lookups below are outside the try block,
    # so an unexpected response shape raises KeyError to the caller --
    # confirm this is intended rather than returning None.
    found = doc['response']['GeoObjectCollection']['metaDataProperty']['GeocoderResponseMetaData']['found']
    if not int(found):
        return None
    object_dict = doc['response']['GeoObjectCollection']['featureMember'][0]['GeoObject']
    object_bounds_dict = object_dict['boundedBy']['Envelope']
    object_metadata_dict = object_dict['metaDataProperty']['GeocoderMetaData']
    location_data = {
        'requested_name': location_name,
        'type': object_metadata_dict['kind'],
        'name': object_metadata_dict['text'],
        'country': object_metadata_dict['AddressDetails']['Country']['CountryName'],
        # The API returns 'lng lat'; reversed here to 'lat,lng'.
        'pos': ','.join(reversed(object_dict['Point']['pos'].split(' '))),
        'bounds': '%s|%s' % (object_bounds_dict['lowerCorner'], object_bounds_dict['upperCorner']),
    }
    return location_data
def scrape_page(url):
    """Return a dict with page data (obtained through Yandex's Rich Content
    API), or None on failure.

    Shape of the dict:
        {'title': '...', 'content_more': '...', 'content_less': '...', ...}

    :param url:
    :return:
    """
    api_url = 'http://rca.yandex.com/?key=%(api_key)s&url=%(url)s&content=full' % {
        'api_key': settings.YANDEX_RCA_KEY, 'url': url
    }
    result = get_json(api_url)
    if 'content' not in result:
        return None
    content = result['content']
    # Short preview (30 words) and a longer excerpt (900 chars) with
    # paragraph spacing doubled.
    result['content_less'] = Truncator(content).words(30)
    result['content_more'] = Truncator(content).chars(900).replace('\n', '\n\n')
    return result
def make_soup(url):
    """Return a BeautifulSoup object for the given URL, or None when the
    request fails.

    :param str url:
    :return: object
    :rtype: BeautifulSoup|None
    """
    try:
        response = get_from_url(url)
    except requests.exceptions.RequestException:
        return None
    return BeautifulSoup(response.text)
|
[
"idlesign@yandex.ru"
] |
idlesign@yandex.ru
|
b31244208bc7571b7a6b44c135f24c38548008c9
|
643c9bddd91c038bc1ef321db9d3de3c06ef64ce
|
/main.py
|
4d4fb2907b2c5b280a7d5bfc0aa2158d47ad9772
|
[] |
no_license
|
Hemanthhari2000/image_to_speech_using_OCR
|
ea88cd308d88a443255ee958577f84db6f70d93d
|
998114d63a5b7fd805b0a0201c69b2cded6e4c2d
|
refs/heads/master
| 2022-12-17T19:41:14.024536
| 2020-09-19T07:34:00
| 2020-09-19T07:34:00
| 296,811,122
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 628
|
py
|
# importing Libraries
import cv2
import pytesseract
from speak import speak
"""
DOWNLOAD LINK FOR TESSERACT: https://digi.bib.uni-mannheim.de/tesseract/tesseract-ocr-setup-4.00.00dev.exe
"""
# Location of the tesseract.exe file.
# NOTE: 'XXXXX' is a placeholder -- replace with the actual install path
# before running.
pytesseract.pytesseract.tesseract_cmd = 'XXXXX\\tesseract.exe'
# Read the input image and convert it to grayscale for better OCR accuracy.
img = cv2.imread('./imgs/1.jpg')
img = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)
# Run Tesseract OCR on the image to get its text content.
text = pytesseract.image_to_string(img)
# Show the (grayscale) image in a window.
cv2.imshow('Image', img)
# print the detected text
print(text)
# Speak the detected text aloud.
speak(text)
# Keep the image window open until a key is pressed.
cv2.waitKey(0)
|
[
"hemanthindhu@gmail.com"
] |
hemanthindhu@gmail.com
|
e37641418e419a5d245b8d18de6bd0dceb9e6817
|
48e124e97cc776feb0ad6d17b9ef1dfa24e2e474
|
/sdk/python/pulumi_azure_native/deploymentmanager/v20180901preview/rollout.py
|
5bdfe3d0b89b797a992d24b083a69c5531647654
|
[
"BSD-3-Clause",
"Apache-2.0"
] |
permissive
|
bpkgoud/pulumi-azure-native
|
0817502630062efbc35134410c4a784b61a4736d
|
a3215fe1b87fba69294f248017b1591767c2b96c
|
refs/heads/master
| 2023-08-29T22:39:49.984212
| 2021-11-15T12:43:41
| 2021-11-15T12:43:41
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,929
|
py
|
# coding=utf-8
# *** WARNING: this file was generated by the Pulumi SDK Generator. ***
# *** Do not edit by hand unless you're certain you know what you are doing! ***
import warnings
import pulumi
import pulumi.runtime
from typing import Any, Mapping, Optional, Sequence, Union, overload
from ... import _utilities
from . import outputs
from ._inputs import *
__all__ = ['RolloutArgs', 'Rollout']
# NOTE: generated by the Pulumi SDK Generator (see the file header) --
# edits here will be overwritten on regeneration.
@pulumi.input_type
class RolloutArgs:
    def __init__(__self__, *,
                 build_version: pulumi.Input[str],
                 identity: pulumi.Input['IdentityArgs'],
                 resource_group_name: pulumi.Input[str],
                 step_groups: pulumi.Input[Sequence[pulumi.Input['StepArgs']]],
                 target_service_topology_id: pulumi.Input[str],
                 artifact_source_id: Optional[pulumi.Input[str]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 rollout_name: Optional[pulumi.Input[str]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None):
        """
        The set of arguments for constructing a Rollout resource.
        :param pulumi.Input[str] build_version: The version of the build being deployed.
        :param pulumi.Input['IdentityArgs'] identity: Identity for the resource.
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[Sequence[pulumi.Input['StepArgs']]] step_groups: The list of step groups that define the orchestration.
        :param pulumi.Input[str] target_service_topology_id: The resource Id of the service topology from which service units are being referenced in step groups to be deployed.
        :param pulumi.Input[str] artifact_source_id: The reference to the artifact source resource Id where the payload is located.
        :param pulumi.Input[str] location: The geo-location where the resource lives
        :param pulumi.Input[str] rollout_name: The rollout name.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        """
        # Required arguments are set unconditionally; optional ones only
        # when supplied, so unset values stay absent from the input bag.
        pulumi.set(__self__, "build_version", build_version)
        pulumi.set(__self__, "identity", identity)
        pulumi.set(__self__, "resource_group_name", resource_group_name)
        pulumi.set(__self__, "step_groups", step_groups)
        pulumi.set(__self__, "target_service_topology_id", target_service_topology_id)
        if artifact_source_id is not None:
            pulumi.set(__self__, "artifact_source_id", artifact_source_id)
        if location is not None:
            pulumi.set(__self__, "location", location)
        if rollout_name is not None:
            pulumi.set(__self__, "rollout_name", rollout_name)
        if tags is not None:
            pulumi.set(__self__, "tags", tags)

    @property
    @pulumi.getter(name="buildVersion")
    def build_version(self) -> pulumi.Input[str]:
        """
        The version of the build being deployed.
        """
        return pulumi.get(self, "build_version")

    @build_version.setter
    def build_version(self, value: pulumi.Input[str]):
        pulumi.set(self, "build_version", value)

    @property
    @pulumi.getter
    def identity(self) -> pulumi.Input['IdentityArgs']:
        """
        Identity for the resource.
        """
        return pulumi.get(self, "identity")

    @identity.setter
    def identity(self, value: pulumi.Input['IdentityArgs']):
        pulumi.set(self, "identity", value)

    @property
    @pulumi.getter(name="resourceGroupName")
    def resource_group_name(self) -> pulumi.Input[str]:
        """
        The name of the resource group. The name is case insensitive.
        """
        return pulumi.get(self, "resource_group_name")

    @resource_group_name.setter
    def resource_group_name(self, value: pulumi.Input[str]):
        pulumi.set(self, "resource_group_name", value)

    @property
    @pulumi.getter(name="stepGroups")
    def step_groups(self) -> pulumi.Input[Sequence[pulumi.Input['StepArgs']]]:
        """
        The list of step groups that define the orchestration.
        """
        return pulumi.get(self, "step_groups")

    @step_groups.setter
    def step_groups(self, value: pulumi.Input[Sequence[pulumi.Input['StepArgs']]]):
        pulumi.set(self, "step_groups", value)

    @property
    @pulumi.getter(name="targetServiceTopologyId")
    def target_service_topology_id(self) -> pulumi.Input[str]:
        """
        The resource Id of the service topology from which service units are being referenced in step groups to be deployed.
        """
        return pulumi.get(self, "target_service_topology_id")

    @target_service_topology_id.setter
    def target_service_topology_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "target_service_topology_id", value)

    @property
    @pulumi.getter(name="artifactSourceId")
    def artifact_source_id(self) -> Optional[pulumi.Input[str]]:
        """
        The reference to the artifact source resource Id where the payload is located.
        """
        return pulumi.get(self, "artifact_source_id")

    @artifact_source_id.setter
    def artifact_source_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "artifact_source_id", value)

    @property
    @pulumi.getter
    def location(self) -> Optional[pulumi.Input[str]]:
        """
        The geo-location where the resource lives
        """
        return pulumi.get(self, "location")

    @location.setter
    def location(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "location", value)

    @property
    @pulumi.getter(name="rolloutName")
    def rollout_name(self) -> Optional[pulumi.Input[str]]:
        """
        The rollout name.
        """
        return pulumi.get(self, "rollout_name")

    @rollout_name.setter
    def rollout_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "rollout_name", value)

    @property
    @pulumi.getter
    def tags(self) -> Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @tags.setter
    def tags(self, value: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]]):
        pulumi.set(self, "tags", value)
# NOTE: generated by the Pulumi SDK Generator (see the file header) --
# edits here will be overwritten on regeneration.
class Rollout(pulumi.CustomResource):
    @overload
    def __init__(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 artifact_source_id: Optional[pulumi.Input[str]] = None,
                 build_version: Optional[pulumi.Input[str]] = None,
                 identity: Optional[pulumi.Input[pulumi.InputType['IdentityArgs']]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 rollout_name: Optional[pulumi.Input[str]] = None,
                 step_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['StepArgs']]]]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 target_service_topology_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        """
        Defines the PUT rollout request body.

        :param str resource_name: The name of the resource.
        :param pulumi.ResourceOptions opts: Options for the resource.
        :param pulumi.Input[str] artifact_source_id: The reference to the artifact source resource Id where the payload is located.
        :param pulumi.Input[str] build_version: The version of the build being deployed.
        :param pulumi.Input[pulumi.InputType['IdentityArgs']] identity: Identity for the resource.
        :param pulumi.Input[str] location: The geo-location where the resource lives
        :param pulumi.Input[str] resource_group_name: The name of the resource group. The name is case insensitive.
        :param pulumi.Input[str] rollout_name: The rollout name.
        :param pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['StepArgs']]]] step_groups: The list of step groups that define the orchestration.
        :param pulumi.Input[Mapping[str, pulumi.Input[str]]] tags: Resource tags.
        :param pulumi.Input[str] target_service_topology_id: The resource Id of the service topology from which service units are being referenced in step groups to be deployed.
        """
        ...
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: RolloutArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Defines the PUT rollout request body.

        :param str resource_name: The name of the resource.
        :param RolloutArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
    def __init__(__self__, resource_name: str, *args, **kwargs):
        # Dispatch between the two overloads above: either a single
        # RolloutArgs bundle or individual keyword arguments.
        resource_args, opts = _utilities.get_resource_args_opts(RolloutArgs, pulumi.ResourceOptions, *args, **kwargs)
        if resource_args is not None:
            __self__._internal_init(resource_name, opts, **resource_args.__dict__)
        else:
            __self__._internal_init(resource_name, *args, **kwargs)

    def _internal_init(__self__,
                 resource_name: str,
                 opts: Optional[pulumi.ResourceOptions] = None,
                 artifact_source_id: Optional[pulumi.Input[str]] = None,
                 build_version: Optional[pulumi.Input[str]] = None,
                 identity: Optional[pulumi.Input[pulumi.InputType['IdentityArgs']]] = None,
                 location: Optional[pulumi.Input[str]] = None,
                 resource_group_name: Optional[pulumi.Input[str]] = None,
                 rollout_name: Optional[pulumi.Input[str]] = None,
                 step_groups: Optional[pulumi.Input[Sequence[pulumi.Input[pulumi.InputType['StepArgs']]]]] = None,
                 tags: Optional[pulumi.Input[Mapping[str, pulumi.Input[str]]]] = None,
                 target_service_topology_id: Optional[pulumi.Input[str]] = None,
                 __props__=None):
        if opts is None:
            opts = pulumi.ResourceOptions()
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        if opts.version is None:
            opts.version = _utilities.get_version()
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = RolloutArgs.__new__(RolloutArgs)

            __props__.__dict__["artifact_source_id"] = artifact_source_id
            # Required properties are validated here (unless an existing
            # resource is being looked up via opts.urn).
            if build_version is None and not opts.urn:
                raise TypeError("Missing required property 'build_version'")
            __props__.__dict__["build_version"] = build_version
            if identity is None and not opts.urn:
                raise TypeError("Missing required property 'identity'")
            __props__.__dict__["identity"] = identity
            __props__.__dict__["location"] = location
            if resource_group_name is None and not opts.urn:
                raise TypeError("Missing required property 'resource_group_name'")
            __props__.__dict__["resource_group_name"] = resource_group_name
            __props__.__dict__["rollout_name"] = rollout_name
            if step_groups is None and not opts.urn:
                raise TypeError("Missing required property 'step_groups'")
            __props__.__dict__["step_groups"] = step_groups
            __props__.__dict__["tags"] = tags
            if target_service_topology_id is None and not opts.urn:
                raise TypeError("Missing required property 'target_service_topology_id'")
            __props__.__dict__["target_service_topology_id"] = target_service_topology_id
            # Output-only properties start as None and are filled in by the
            # provider.
            __props__.__dict__["name"] = None
            __props__.__dict__["type"] = None
        # Register aliases so states created under the other type tokens
        # still resolve to this resource.
        alias_opts = pulumi.ResourceOptions(aliases=[pulumi.Alias(type_="azure-native:deploymentmanager:Rollout"), pulumi.Alias(type_="azure-native:deploymentmanager/v20191101preview:Rollout")])
        opts = pulumi.ResourceOptions.merge(opts, alias_opts)
        super(Rollout, __self__).__init__(
            'azure-native:deploymentmanager/v20180901preview:Rollout',
            resource_name,
            __props__,
            opts)

    @staticmethod
    def get(resource_name: str,
            id: pulumi.Input[str],
            opts: Optional[pulumi.ResourceOptions] = None) -> 'Rollout':
        """
        Get an existing Rollout resource's state with the given name, id, and optional extra
        properties used to qualify the lookup.

        :param str resource_name: The unique name of the resulting resource.
        :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

        # All properties are None here; the engine populates them from the
        # provider's recorded state for the given id.
        __props__ = RolloutArgs.__new__(RolloutArgs)

        __props__.__dict__["artifact_source_id"] = None
        __props__.__dict__["build_version"] = None
        __props__.__dict__["identity"] = None
        __props__.__dict__["location"] = None
        __props__.__dict__["name"] = None
        __props__.__dict__["step_groups"] = None
        __props__.__dict__["tags"] = None
        __props__.__dict__["target_service_topology_id"] = None
        __props__.__dict__["type"] = None
        return Rollout(resource_name, opts=opts, __props__=__props__)

    @property
    @pulumi.getter(name="artifactSourceId")
    def artifact_source_id(self) -> pulumi.Output[Optional[str]]:
        """
        The reference to the artifact source resource Id where the payload is located.
        """
        return pulumi.get(self, "artifact_source_id")

    @property
    @pulumi.getter(name="buildVersion")
    def build_version(self) -> pulumi.Output[str]:
        """
        The version of the build being deployed.
        """
        return pulumi.get(self, "build_version")

    @property
    @pulumi.getter
    def identity(self) -> pulumi.Output['outputs.IdentityResponse']:
        """
        Identity for the resource.
        """
        return pulumi.get(self, "identity")

    @property
    @pulumi.getter
    def location(self) -> pulumi.Output[str]:
        """
        The geo-location where the resource lives
        """
        return pulumi.get(self, "location")

    @property
    @pulumi.getter
    def name(self) -> pulumi.Output[str]:
        """
        The name of the resource
        """
        return pulumi.get(self, "name")

    @property
    @pulumi.getter(name="stepGroups")
    def step_groups(self) -> pulumi.Output[Sequence['outputs.StepResponse']]:
        """
        The list of step groups that define the orchestration.
        """
        return pulumi.get(self, "step_groups")

    @property
    @pulumi.getter
    def tags(self) -> pulumi.Output[Optional[Mapping[str, str]]]:
        """
        Resource tags.
        """
        return pulumi.get(self, "tags")

    @property
    @pulumi.getter(name="targetServiceTopologyId")
    def target_service_topology_id(self) -> pulumi.Output[str]:
        """
        The resource Id of the service topology from which service units are being referenced in step groups to be deployed.
        """
        return pulumi.get(self, "target_service_topology_id")

    @property
    @pulumi.getter
    def type(self) -> pulumi.Output[str]:
        """
        The type of the resource. E.g. "Microsoft.Compute/virtualMachines" or "Microsoft.Storage/storageAccounts"
        """
        return pulumi.get(self, "type")
|
[
"noreply@github.com"
] |
bpkgoud.noreply@github.com
|
2451d64d6dc19e3f51147ce0502646c3a2daa3e5
|
2969458ff683329013ee4f6524436856713503dd
|
/keras_resnet/layers/_batch_normalization.py
|
9d36d3433d2b3980e948ae00a47ba68d697f4403
|
[
"BSD-3-Clause",
"BSD-2-Clause"
] |
permissive
|
holgerhennig/keras-resnet
|
ecbe739f7e807145913478f8e094d966be0afb34
|
25cbb9f983dbb07b4381b9d6f2507a2a9381df3c
|
refs/heads/master
| 2021-08-19T01:25:33.446566
| 2017-11-24T10:13:29
| 2017-11-24T10:13:29
| 111,904,208
| 0
| 0
| null | 2017-11-24T10:08:17
| 2017-11-24T10:08:17
| null |
UTF-8
|
Python
| false
| false
| 664
|
py
|
import keras
class BatchNormalization(keras.layers.BatchNormalization):
"""
Identical to keras.layers.BatchNormalization, but adds the option to freeze parameters.
"""
def __init__(self, freeze, *args, **kwargs):
self.freeze = freeze
super(BatchNormalization, self).__init__(*args, **kwargs)
# set to non-trainable if freeze is true
self.trainable = not self.freeze
def call(self, *args, **kwargs):
# return super.call, but set training
return super(BatchNormalization, self).call(training=(not self.freeze), *args, **kwargs)
def get_config(self):
return {'freeze': self.freeze}
|
[
"allen.goodman@icloud.com"
] |
allen.goodman@icloud.com
|
e8c19187b9f77e43f36410dc52bb8041533e390e
|
14d7198eae97d9cdfdce4c5a204cf1136e09b2e6
|
/state.py
|
3662a55c2095442fd723abae7c395aa7969e0bc6
|
[
"MIT"
] |
permissive
|
dark0ghost/async_py_bot
|
73c450682717f636766a2a9745a7ad949043e938
|
34d77b01f34645cabe097a02eb4e41ddd40c0b25
|
refs/heads/master
| 2022-12-12T09:50:21.447442
| 2022-08-06T13:07:55
| 2022-08-06T13:07:55
| 172,966,864
| 27
| 4
|
MIT
| 2022-12-08T07:43:23
| 2019-02-27T18:20:50
|
Python
|
UTF-8
|
Python
| false
| false
| 460
|
py
|
from aiogram.dispatcher.filters.state import StatesGroup, State
class States(StatesGroup):
"""
class for set up state user
"""
start: State = State()
end: State = State()
contact: State = State()
geo: State = State()
get_mail: State = State()
mail_ver: State = State()
save_json: State = State()
search_json: State = State()
send_paste: State = State()
qr: State = State()
wait_wallet: State = State()
|
[
"diamondlego018@gmail.com"
] |
diamondlego018@gmail.com
|
7f4e088f5baaec6b01256bc946a16f10870c7edb
|
2ace06cdd5f4c8fd100903ae734472ae1cd86503
|
/nfl/ppe.py
|
da169d2459d2ab7b09c1664f5a4456d43ff25819
|
[
"MIT"
] |
permissive
|
sansbacon/nfl
|
d51bf53c7f6ef91ef1d2c1e172e9f1033eea2227
|
11605b1a7725cce062ce0d7f15ebcf0a2f91a86e
|
refs/heads/master
| 2021-01-12T13:58:48.154355
| 2020-09-24T19:16:20
| 2020-09-24T19:16:20
| 69,254,316
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,768
|
py
|
"""
# nfl/ppe.py
# player profiler explorer
"""
from cmd import Cmd
import logging
import os
from pathlib import Path
import json
from pprint import pprint
try:
import readline
except ImportError:
readline = None
from fcache.cache import FileCache
from .pp import Scraper, Parser
from namematcher import match_fuzzy, match_interactive
def read_json(file_name):
"""
Args:
file_name:
Returns:
"""
with open(file_name, "r") as f:
return json.load(f)
class PlayerProfilerExplorer(Cmd):
"""
Interactive command line app
"""
histfile = str(Path.home() / f".{__name__}_history")
histfile_size = 1000
prompt = "player_profiler explorer> "
intro = "Welcome to Player Profiler Explorer! Type ? to list commands"
def __init__(self, file_name, **kwargs):
"""
Creates interactive app
# pylint: disable=too-many-instance-attributes
"""
super().__init__()
logging.getLogger(__name__).addHandler(logging.NullHandler())
if kwargs.get("cache_name"):
self.cache = FileCache(kwargs["cache_name"], flag="cs")
self._s = Scraper(cache_name=kwargs["cache_name"])
else:
self.cache = FileCache("ppe", flag="cs")
self._s = Scraper(cache_name="ppe")
self._p = Parser()
self.player_lookup = read_json(file_name)
def _dump_msg(self, msg):
"""
Standard message format
Args:
msg:
Returns:
"""
print("\n", "\n", msg, "\n")
def do_exit(self, inp):
"""
Quit app
Args:
inp:
Returns:
"""
print("Bye %s" % inp)
return True
def do_search_match(self, name):
"""
Args:
name:
Returns:
"""
logging.info("trying fuzzy match")
match_from = list(self.player_lookup.keys())
match_name, conf = match_fuzzy(name, match_from)
if conf >= 90:
return match_name
logging.info("trying interactive match")
match_name, conf = match_interactive(name, match_from)
if match_name:
return match_name
return None
def do_search(self, inp):
"""
Specify opponent
Args:
inp:
Returns:
"""
player_code = self.player_lookup.get(inp.strip())
if not player_code:
player_name = self.do_search_match(inp.strip())
player_code = self.player_lookup.get(player_name)
if player_code:
print(f"Getting {inp.strip()} {player_code}")
content = self._s.player_page(player_code)
player = self._p.player_core(content)
pprint(player)
else:
print(f"Invalid player name: {inp}")
def help_exit(self):
"""
Help for quitting application
Returns:
"""
msg = "exit the application. Shorthand: x q Ctrl-D."
self._dump_msg(msg)
return msg
def help_search(self):
"""
Help for search interface
Returns:
"""
msg = "Searches playerprofiler for comps"
self._dump_msg("\n".join(msg))
return msg
def preloop(self):
"""
Returns:
"""
if readline and Path(self.histfile).is_file():
readline.read_history_file(self.histfile)
def postloop(self):
"""
Returns:
"""
if readline:
readline.set_history_length(self.histfile_size)
readline.write_history_file(self.histfile)
do_EOF = do_exit
help_EOF = help_exit
if __name__ == "__main__":
pass
|
[
"eric@erictruett.com"
] |
eric@erictruett.com
|
95a58e3a0f9ba59f90301c2a4755ccee34735e0d
|
950fd3f7e2c098a17f777733ef3e47f2730a8f60
|
/shsems_proj/users/admin.py
|
c786548c62b47317eab46b18fd47374fe457fe1e
|
[] |
no_license
|
shishig/shsems
|
a28867bfad36bc8d3a5fcd2197a4dbdca961d75a
|
6b0fbffd126ba90a00dcfd40da22480c9c8bfa42
|
refs/heads/master
| 2021-09-09T22:08:20.209401
| 2019-11-22T09:28:01
| 2019-11-22T09:28:01
| 222,399,534
| 0
| 0
| null | 2021-09-08T01:27:13
| 2019-11-18T08:31:44
|
Python
|
UTF-8
|
Python
| false
| false
| 146
|
py
|
from django.contrib import admin
from .models import Participant, Designation
admin.site.register(Participant)
admin.site.register(Designation)
|
[
"catolicok1@gmail.com"
] |
catolicok1@gmail.com
|
e0b0a5f55136195693b6d27b4e48ea426c82cc89
|
5fcfeb76a2e6c0b06612ffc9764bd0dd3db207cd
|
/main/views.py
|
881f5f6bad479f79e77dd5ff4b95dd6502e8bf29
|
[] |
no_license
|
syxxjgh/Kmusic
|
c85ea116d800a4b52458ed32a444338d93e8d3fe
|
a23ebaa74d15b517eef536e542dea8edd3421501
|
refs/heads/master
| 2020-06-04T05:08:42.994027
| 2019-06-13T07:38:39
| 2019-06-13T07:38:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,942
|
py
|
import json,re
import urllib
from django.http import HttpResponse
from django.shortcuts import render, redirect
from queue import Queue
from threading import Thread
from .apis.musicsearcher import MusicSearcher
from .models import *
import requests
q = Queue() # 用于接收Thread线程的返回值
# Create your views here.
def index_views(request):
if request.session.get('user_id') and request.session.get('user_name'):
return render(request,'index.html')
else:
if request.COOKIES.get('user_id') and request.COOKIES.get('user_name'):
request.session['user_id'] = request.COOKIES['user_id']
request.session['user_name'] = request.COOKIES['user_name']
return render(request,'index.html')
def search_views(request):
target = request.GET.get('s')
target = urllib.parse.unquote(target)
searcher = MusicSearcher(target,q)
threadQQ =Thread(target=searcher.qqSearch)
threadnetE = Thread(target=searcher.netEaseSearch)
threadQQ.start()
threadnetE.start()
threadQQ.join()
threadnetE.join()
first = q.get()
if first[0] == 'qq':
params = {
'target':target,
'qqRes': first,
'netEaseRes':q.get()
}
else:
params = {
'target': target,
'netEaseRes':first,
'qqRes':q.get()
}
return render(request, 'srchresult.html', params)
def songList_views(request):
user_id = request.session.get('user_id')
songlists = Songlist.objects.filter(user_id=user_id).all()
if songlists:
list = songlists[0]
songs = Song.objects.filter(songlist_id=list.id).all()
return render(request,'songlist.html',locals())
def chgSongList_views(request):
user_id = request.session.get('user_id')
list_name = request.GET.get('listname')
songlists = Songlist.objects.filter(user_id=user_id).all()
list = Songlist.objects.filter(user_id=user_id,listname=list_name).first()
songs = Song.objects.filter(songlist_id=list.id).all()
return render(request,'songlist.html',locals())
def addSong_views(request):
if request.method == 'GET':
if request.session.get('user_id'):
user_id = request.session['user_id']
songlists = Songlist.objects.filter(user_id=user_id).all()
songlists_l = []
for songlist in songlists:
songlists_l.append(songlist.listname)
songlists_l = json.dumps(songlists_l)
return HttpResponse(songlists_l)
else:
return HttpResponse('请您先登录账号 才可使用歌单功能')
else:
user_id = request.session['user_id']
songlist_name = request.POST.get('songlist_name')
songurl = request.POST.get('songurl')
songurl = re.sub('&','&',songurl)
songname = request.POST.get('songname')
singer = request.POST.get('singer')
singer = re.sub(' ', ' ', singer)
duration = request.POST.get('duration')
songlist = Songlist.objects.filter(user_id=user_id,listname=songlist_name).first()
isExists = Song.objects.filter(url=songurl,name=songname,songlist_id=songlist.id).first()
if isExists:
return HttpResponse('exists')
else:
Song.objects.create(url=songurl,name=songname,singer=singer,duration=duration,songlist_id=songlist.id)
return HttpResponse('ok')
def listRmSong_views(request):
user_id = request.session['user_id']
songlist_name = request.GET.get('listname')
songname = request.GET.get('songname')
duration = request.GET.get('duration')
songlist = Songlist.objects.filter(user_id=user_id,listname=songlist_name).first()
song = Song.objects.filter(songlist_id=songlist.id,name=songname,duration=duration).first()
song.delete()
return HttpResponse('remove song ok')
def removeList_views(request):
user_id = request.session['user_id']
songlist_name = request.GET.get('listname')
tsonglist = Songlist.objects.filter(user_id=user_id,listname=songlist_name).all()
tsonglist.delete()
return redirect('/songlist/')
def createList_views(request):
if request.method == 'GET':
user_id = request.session['user_id']
list_name = request.GET.get('songlist_name')
list = Songlist.objects.filter(user_id=user_id,listname=list_name).first()
if list:
return HttpResponse('该歌单名已经存在')
else:
return HttpResponse(' ')
else:
user_id = request.session['user_id']
list_name = request.POST.get('songlist_name')
Songlist.objects.create(listname=list_name,user_id=user_id)
return redirect('/songlist/')
def qPlaySong_views(request):
if request.method == 'POST':
mid = request.POST.get('mid')
vkey_url = 'https://c.y.qq.com/base/fcgi-bin/fcg_music_express_mobile3.fcg'
data = {
'g_tk': '195219765',
'jsonpCallback': 'MusicJsonCallback004680169373158849',
'loginUin': '125045209',
'hostUin': '0',
'format': 'json',
'inCharset': 'utf8',
'outCharset': 'utf-8',
'notice': '0',
'platform': 'yqq',
'needNewCode': '0',
'cid': '205361747',
'callback': 'MusicJsonCallback004680169373158849',
'uin': '125045209',
'songmid': mid,
'filename': 'C400{}.m4a'.format(mid),
'guid': 'B1E901DA7379A44022C5AF79FDD9CD96'
}
res = requests.get(vkey_url, data, verify=False)
res = json.loads(res.text[36:-1])
vkey = res['data']['items'][0]['vkey']
url = 'http://111.202.85.147/amobile.music.tc.qq.com/C400{}.m4a?guid=B1E901DA7379A44022C5AF79FDD9CD96&vkey={}&uin=2521&fromtag=77'.format(mid,vkey)
return HttpResponse(url)
|
[
"44372130+Sevenforty740@users.noreply.github.com"
] |
44372130+Sevenforty740@users.noreply.github.com
|
3828f2439588e202a417ca6e5527b96a5994b7f9
|
ddadc97398c26801c33e628e7dcfc6eb87336e5c
|
/CustomLogin/Login/forms.py
|
1a4b04798454da503864a64b0371ecf2ba967480
|
[
"MIT"
] |
permissive
|
bharathjinka09/AdvancedDjango
|
278fcde564e019fce238cc8681413f7b93fec4a9
|
f06e1a0621e182ea6015b06e79eae99ddb04affb
|
refs/heads/main
| 2022-12-30T12:49:40.698658
| 2020-10-19T16:04:07
| 2020-10-19T16:04:07
| 305,616,622
| 1
| 0
|
MIT
| 2020-10-20T06:52:26
| 2020-10-20T06:52:25
| null |
UTF-8
|
Python
| false
| false
| 335
|
py
|
from django import forms
from django.contrib.auth.forms import AuthenticationForm
class CustomAuthenticationForm(AuthenticationForm):
def confirm_login_allowed(self, user):
if not user.is_active or not user.is_validated:
raise forms.ValidationError('There was a problem with your login.', code='invalid_login')
|
[
"u.garg.10@gmail.com"
] |
u.garg.10@gmail.com
|
6f4ea0a9b3e1de8c1ff074fd553a0af7e3cc0e1e
|
94adc2bfea1ef72c50eefa031fd8ccf36df947bc
|
/tests/acceptance/version_1/testcases/response/body/unexpected_key_with_null_value.py
|
3b936de3b48bcd3f9912f49185d15f22860c11e1
|
[
"MIT"
] |
permissive
|
Kalimaha/pact-test
|
a593636a54cc7e3811aadde9db857f45d851245e
|
9021202503ed452514f7ded83152f3957520d9bb
|
refs/heads/master
| 2021-01-20T00:42:56.055562
| 2020-06-04T05:37:13
| 2020-06-04T05:37:13
| 89,177,436
| 5
| 1
|
MIT
| 2020-06-04T05:37:14
| 2017-04-23T22:46:45
|
Python
|
UTF-8
|
Python
| false
| false
| 573
|
py
|
from pact_test.either import Right
from pact_test.models.response import PactResponse
from pact_test.matchers.response_matcher import match
from tests.acceptance.acceptance_test_loader import load_acceptance_test
def test_different_case():
data = load_acceptance_test(__file__)
response = PactResponse(body={
'alligator': {
'name': 'Mary',
'phoneNumber': None
}
})
interaction = {'response': {'body': data['expected']['body']}}
test_result = match(interaction, response)
assert type(test_result) is Right
|
[
"guido.barbaglia@gmail.com"
] |
guido.barbaglia@gmail.com
|
e13e594efa9f265f916b25697d9a96eabfbaca83
|
c64e6292dfb403999d2c497d7850b43034a82099
|
/final files/df.py
|
6f12dbe3eef78e7113ef548d281d8ba9d27707b0
|
[] |
no_license
|
parsabahrami16/APProject
|
14942bec466c1b06c81989a16feefea02d231435
|
1b8c2890230209fae9786091593a88670f66a2e7
|
refs/heads/main
| 2023-02-20T01:43:18.388677
| 2021-01-22T22:49:12
| 2021-01-22T22:49:12
| 304,700,693
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,208
|
py
|
import pandas as pd
import plotly.express as px
import numpy
from finalapp import*
#Define value & sort by it
DF['value'] = DF['total_points'] / DF['now_cost']
DF.sort_values(by='value',ascending=False,inplace=True)
best_df=DF.head(20)
#Piechart
#Define parameters & values of them
para_df = ['clean_sheets','goals_scored','assists','games_starts']
val_df = [best_df['clean_sheets'].mean()*4,best_df['goals_scored'].mean()*6,
best_df['assists'].mean()*3,best_df['games_starts'].mean()*2]#Parameter.mean()
#Draw piechart with plotly express
fig_df = px.pie(values=val_df,names=para_df,title='Defenders parameters',labels=para_df
,color=para_df
,color_discrete_map={"clean_sheets":'0d2a63','goals_scored':'00a08b',
'assists':'2e91e5','games_starts':'620042'
})
fig_df.update_traces(textposition='inside', textinfo='percent+label')
#Sunburst chart
DF["strength_defence"] = DF['strength_defence_home']+DF['strength_defence_away']
#Define parameters
para_adv_ldf=["DF",'clean_sheets','goals_scored','assists','games_starts',"strenght_def","xg","xa","gca"]
#Define parents (the most influential parameters)
para_adv_pdf=["","DF","DF","DF","DF",'clean_sheets','goals_scored','assists','assists']
#Define values based on the relation between parent and child
val_adv_df=[0,best_df['clean_sheets'].mean()*4,best_df['goals_scored'].mean()*6,
best_df['assists'].mean()*3,best_df['games_starts'].mean()*2
,best_df['clean_sheets'].mean()*4*0.2#ParentInfluence.mean()*regression
,best_df['goals_scored'].mean()*4
,best_df['assists'].mean()*3/2
,best_df['assists'].mean()*3*0.62]
#Draw sunburst with px
fig_adv_df = px.sunburst(names=para_adv_ldf,parents=para_adv_pdf, values=val_adv_df
,color=val_adv_df
,color_continuous_scale= px.colors.sequential.dense
,title="Defenders Sunburst chart"
,height=600
,width=600 )
#Show piechart & sunburst chart in app
def app():
st.title('Defenders')
st.plotly_chart(fig_df)#st.plotly:show plotlychart in app
st.plotly_chart(fig_adv_df)
|
[
"noreply@github.com"
] |
parsabahrami16.noreply@github.com
|
cdcc3a8635acdad21cb1216acde56ddc02f853ba
|
e09d6e18bfe3c7b124c8db1c81f1e8d6c9809a98
|
/constants.py
|
ea38f1637b7a21cccccfc1005bfc68eeecc192e0
|
[] |
no_license
|
Ben-Lapuhapo/ICS3U-Circuit-Python-7-Python
|
c2c20a37cf9b6974afe1f8693e80db9f138313cb
|
d11ecc73e9034b7abf8f1baf01bf6609744c074d
|
refs/heads/master
| 2020-08-30T10:07:15.993338
| 2019-10-30T01:02:36
| 2019-10-30T01:02:36
| 218,344,291
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 525
|
py
|
#!/usr/bin/env python3
# Created by: Ben Lapuhapo
# Created on: OCT 2019
# This module contains constants for the alien game
# CircuitPython screen size is 160x120 and sprites are 16x16
SCREEN_X = 160
SCREEN_Y = 120
SCREEN_GRID_X = 16
SCREEN_GRID_Y = 8
SPRITE_SIZE = 16
TOTAL_NUMBER_OF_ALIENS = 5
FPS = 60
SPRITE_MOVEMENT_SPEED = 1
# Using for Button State
button_state = {
"button_up": "up",
"button_just_pressed": "just pressed",
"button_still_pressed": "still pressed",
"button_released": "released"
}
|
[
"ben.lapuhapo@mths.ca"
] |
ben.lapuhapo@mths.ca
|
3d247eb4991bfe8997a200171466335fd2df1f4d
|
a40e92410b00930143b101a3b1ff30de21ffd339
|
/MicronNet/Scripts/evaluate.py
|
1c177e76cdc0848922edd6d7d9f2105da9c3b00f
|
[] |
no_license
|
ashwin2802/bosch-tsr
|
277c56075df6ecfbbe6aa859722e245f626c17db
|
86ea44b51a9fa8758e830a1d49f09eb37ac65aee
|
refs/heads/main
| 2023-04-14T15:57:49.166853
| 2021-04-28T08:14:57
| 2021-04-28T08:14:57
| 353,956,193
| 0
| 0
| null | 2021-04-02T08:29:27
| 2021-04-02T08:29:26
| null |
UTF-8
|
Python
| false
| false
| 1,925
|
py
|
from __future__ import print_function
import argparse
from tqdm import tqdm
import os
import PIL.Image as Image
import numpy as np
import torch
from torch.autograd import Variable
import torch.nn.functional as F
import torchvision.datasets as datasets
from data import initialize_data # data.py in the same folder
from model import Net
parser = argparse.ArgumentParser(description='PyTorch GTSRB evaluation script')
parser.add_argument('--data', type=str, default='../Dataset', metavar='D',
help="folder where data is located. train_data.zip and test_data.zip need to be found in the folder")
parser.add_argument('--model', type=str, metavar='M',
help="the model file to be evaluated. Usually it is of the form model_X.pth")
parser.add_argument('--outfile', type=str, default='output.csv', metavar='D',
help="name of the output csv file")
args = parser.parse_args()
state_dict = torch.load(args.model)
model = Net()
model.load_state_dict(state_dict)
model.eval()
from data import test_data_transforms
test_dir = args.data + "/test_images"
def pil_loader(path):
with open(path, 'rb') as f:
with Image.open(f) as img:
return img.convert('RGB')
output_file = open(args.outfile, "w")
output_file.write("Filename,ClassId\n")
for f in tqdm(os.listdir(test_dir)):
if 'ppm' in f:
data = test_data_transforms(image=np.array(pil_loader(test_dir + '/' + f)))["image"]
data = data.float()
# print(data)
# data = torch.from_numpy(data)
data = data.view(1, data.size(0), data.size(1), data.size(2))
data = Variable(data)
output = model(data)
pred = output.data.max(1, keepdim=True)[1]
# print(np.exp(output.detach().numpy()))
file_id = f[0:5]
output_file.write("%s,%d\n" % (file_id, pred))
output_file.close()
print("Succesfully wrote " + args.outfile)
|
[
"utkarshg99@gmail.com"
] |
utkarshg99@gmail.com
|
c922a941eabc87e9010acec836de8c6be1d9e25b
|
7e016f45db65840087dc320be11415881bffc43e
|
/UpdateKML.py
|
5bc2629bf29fae5b46e05006213c50dac172c082
|
[
"MIT"
] |
permissive
|
petrarch1603/SurveyApplications
|
67cc73912bdd7ab721a88546d3afaf1d1998d6dd
|
129a4e24123bf81687c0a60cccbe3d0a83f63e40
|
refs/heads/master
| 2021-09-26T12:17:09.559973
| 2018-10-30T03:28:07
| 2018-10-30T03:28:07
| 125,309,901
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,755
|
py
|
# import xml.sax, xml.sax.handler
# from xml.dom import minidom
import openpyxl
import xml.etree.ElementTree as et
from zipfile import ZipFile
# This is a script for taking the status of rows on a tracking spreadsheet and updating a KML based on
# the fields in that tracking spreadsheet.
# Initialize Variables
mykmz = 'source_data/map.kmz'
myxlsx = 'source_data/tracking.xlsx'
# TODO Receive emails on Raspberry PI with attachments and process them with this program
# Read the Spreadsheet
wb = openpyxl.load_workbook(myxlsx)
# Create a dictionary with all the relevant spreadsheet data
sheet = wb.sheetnames[0]
sheet = wb[sheet]
pointdata = {}
for row in range(2, sheet.max_row + 1):
pointno = sheet['A' + str(row)].value
dict = {
'typeset': sheet['H' + str(row)].value,
'setby': sheet['I' + str(row)].value,
'dateset': sheet['J' + str(row)].value,
'color': sheet['H' + str(row)].fill.start_color.index
}
pointdata.setdefault(pointno, {})
pointdata[pointno] = dict
# Unzip the KMZ and extract doc.kml
kmz = ZipFile(mykmz, 'r')
kml = kmz.open('doc.kml', 'r')
urltag = "{http://www.opengis.net/kml/2.2}"
tree = et.parse(kml)
root = tree.getroot()
d = tree.findall(".//" + urltag + "Placemark")
for i in d:
for subelem in i:
if subelem.tag == (urltag + "name"):
for k, v in pointdata.items():
if subelem.text == str(k):
i[2].text = ('Set on ' + str(v['dateset']))
tree = et.ElementTree(root)
tree.write("newkml2.kml", xml_declaration=True)
# TODO Use Filenames after UpdateKML.py as arguments
# TODO Import and parse excel spreadsheets
# TODO Import and process KMLs
# TODO Return the new KML with date as part of filename
|
[
"petrarch1603@gmail.com"
] |
petrarch1603@gmail.com
|
7abe9a194a2bade6c47b954aec532d2fffab11c7
|
b9f8e0ab9a5f73b405f865fe38358b656be28f17
|
/project/svm_ssk_approx.py
|
64a7b81c880c83dd2d3e8d170978dea911ad1b61
|
[] |
no_license
|
chris4540/DD2434_Advanced_ML
|
a9e6d5257459eb669d61a7bd6760161baac2181d
|
c3aca331a08ec6902e1b254ece08495693cf0087
|
refs/heads/master
| 2023-02-10T13:53:37.089703
| 2021-01-09T23:54:33
| 2021-01-09T23:54:33
| 328,268,994
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,684
|
py
|
import numpy as np
from sklearn.svm import SVC
from ssk_kernel.apprx_kernel import SSKKernelApprox
from sklearn.metrics import precision_recall_fscore_support
import functools
print = functools.partial(print, flush=True)
class StringSVM:
max_iter = 50
def __init__(self, name, k, decay):
self.name = name
self.kernel_obj = SSKKernelApprox(k, decay, save_entry=True)
self.documents = dict()
self.n_data = 0
self.text_clf = SVC(kernel=self.kernel_fun)
self.n_chunk = 3
def train(self, train_data, train_label):
"""
Use Iterative Chunking to train the svm
"""
# add train data into documents
for i, doc in enumerate(train_data):
self.documents[i] = doc
self.n_data = len(train_data)
# create a index matrix
X_idx = np.arange(len(train_data))
# iterative chunking training
support_vec = []
iter_ = 0
train_label = np.array(train_label)
for _ in range(self.max_iter):
if len(X_idx) < self.n_chunk:
break
# sample chuck to have at least 2 classes
for _ in range(1000):
samples = np.random.choice(X_idx, self.n_chunk)
if len(set(train_label[samples])) == 1:
# only one class was picked. resample
continue
else:
break
x = np.array(samples).reshape(-1, 1)
if len(support_vec) > 0:
x = np.vstack((x, support_vec))
y = train_label[x.flatten()]
print("Training iter:", iter_)
self.text_clf.fit(x, y)
support_vec = x[self.text_clf.support_]
# remove trained samples from x_idx
X_idx = np.setdiff1d(X_idx, support_vec.flatten())
iter_ += 1
print("# of support vectors for each class", self.text_clf.n_support_)
print("# of support vectors", np.sum(self.text_clf.n_support_))
print("# of remaining samples", len(X_idx))
# finalize the svm with only those support vectors
print("=========================")
print("Finalizing the SVM....")
print("=========================")
y = train_label[support_vec.flatten()]
self.text_clf.fit(support_vec, y)
print("# of support vectors for each class", self.text_clf.n_support_)
print("# of support vectors", np.sum(self.text_clf.n_support_))
def predict(self, test_data):
for i, doc in enumerate(test_data):
self.documents[i+self.n_data] = doc
size = len(test_data)
X_new_idx = np.arange(size) + self.n_data
self.n_data += len(test_data)
# predict by chunk
ret = list()
cnt = 0
for x in np.array_split(X_new_idx,(size // self.n_chunk)):
sub_pred = self.text_clf.predict(x.reshape(-1, 1))
ret.extend(sub_pred.tolist())
cnt += 1
print("# of iteration for prediction: ", cnt)
self.save_kernel()
return ret
def kernel_fun(self, X1, X2):
if np.array_equal(X1, X2):
return self._get_train_gram_mat(X1)
ret = np.zeros((len(X1), len(X2)))
for i in range(X1.shape[0]):
for j in range(X2.shape[0]):
k = int(X1[i][0])
l = int(X2[j][0])
doc1 = self.documents[k]
doc2 = self.documents[l]
ret[i, j] = self.kernel_obj(doc1, doc2)
return ret
def _get_train_gram_mat(self, train_set_idx):
"""
Use the sysmetric propertey to speed up
"""
size = len(train_set_idx)
ret = np.diag(np.ones(size))
# calculate the upper-triangle matrix first.
# The diagonal should be 1 be default
tu_idx = np.triu_indices(size, k=1)
for i, j in zip(*tu_idx):
k = int(train_set_idx[i][0])
l = int(train_set_idx[j][0])
doc1 = self.documents[k]
doc2 = self.documents[l]
val = self.kernel_obj(doc1, doc2)
ret[i, j] = val
ret[j, i] = val
return ret
def save_kernel(self):
self.kernel_obj.save_kernel_to_json()
if __name__ == '__main__':
from dataset import DataSet
import time
data_set = DataSet()
# make a small subset for testing
train_set = data_set.train_set
train_labels = data_set.train_labels
test_set = data_set.test_set
test_labels = data_set.test_labels
test_model = StringSVM("test_k5_lambda0.9", 5, 0.9)
try:
st = time.time()
test_model.train(train_set, train_labels)
et = time.time()
print("Training time {} sec".format(et - st))
except KeyboardInterrupt:
pass
except Exception as e:
# re-raise exception
raise e
finally:
test_model.save_kernel()
# do prediction
try:
st = time.time()
class_pred = test_model.predict(test_set)
et = time.time()
except KeyboardInterrupt:
pass
except Exception as e:
# re-raise exception
raise e
finally:
test_model.save_kernel()
print("Prediction time {} sec".format(et - st))
print(class_pred)
print(test_labels)
scores = precision_recall_fscore_support(test_labels, class_pred, average=None)
precision = scores[0]
recall = scores[1]
f1_score = scores[2]
support = scores[3]
print("precision:", precision)
print("recall:", recall)
print("f1_score:", f1_score)
|
[
"chris4540@gmail.com"
] |
chris4540@gmail.com
|
dff6bbc7b98e6577843215d5f7dc848d24d38807
|
5baf3cb8b08dcea2d53d2ef022e5c6d4b2468494
|
/swagger_client/models/io_k8s_api_core_v1_ceph_fs_persistent_volume_source.py
|
d6a48e08c7c69f9f306fa119feb119e41bac70dd
|
[] |
no_license
|
atengler/swagger-kqueen-python
|
a4fc0de38378a08c6c2e0c339032ed4ad63f09f5
|
01225c74a743636483211f0274f772193517ffaf
|
refs/heads/master
| 2021-08-07T18:16:28.453730
| 2017-11-08T17:24:53
| 2017-11-08T17:29:03
| 110,007,477
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,950
|
py
|
# coding: utf-8
"""
Kubernetes Queen API
A simple API to interact with Kubernetes clusters
OpenAPI spec version: 0.8
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class IoK8sApiCoreV1CephFSPersistentVolumeSource(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'monitors': 'list[str]',
'path': 'str',
'read_only': 'bool',
'secret_file': 'str',
'secret_ref': 'IoK8sApiCoreV1SecretReference',
'user': 'str'
}
attribute_map = {
'monitors': 'monitors',
'path': 'path',
'read_only': 'readOnly',
'secret_file': 'secretFile',
'secret_ref': 'secretRef',
'user': 'user'
}
def __init__(self, monitors=None, path=None, read_only=None, secret_file=None, secret_ref=None, user=None):
"""
IoK8sApiCoreV1CephFSPersistentVolumeSource - a model defined in Swagger
"""
self._monitors = None
self._path = None
self._read_only = None
self._secret_file = None
self._secret_ref = None
self._user = None
self.monitors = monitors
if path is not None:
self.path = path
if read_only is not None:
self.read_only = read_only
if secret_file is not None:
self.secret_file = secret_file
if secret_ref is not None:
self.secret_ref = secret_ref
if user is not None:
self.user = user
@property
def monitors(self):
"""
Gets the monitors of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
Required: Monitors is a collection of Ceph monitors More info: https://releases.k8s.io/HEAD/examples/volumes/cephfs/README.md#how-to-use-it
:return: The monitors of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
:rtype: list[str]
"""
return self._monitors
@monitors.setter
def monitors(self, monitors):
"""
Sets the monitors of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
Required: Monitors is a collection of Ceph monitors More info: https://releases.k8s.io/HEAD/examples/volumes/cephfs/README.md#how-to-use-it
:param monitors: The monitors of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
:type: list[str]
"""
if monitors is None:
raise ValueError("Invalid value for `monitors`, must not be `None`")
self._monitors = monitors
@property
def path(self):
"""
Gets the path of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
Optional: Used as the mounted root, rather than the full Ceph tree, default is /
:return: The path of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
:rtype: str
"""
return self._path
@path.setter
def path(self, path):
"""
Sets the path of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
Optional: Used as the mounted root, rather than the full Ceph tree, default is /
:param path: The path of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
:type: str
"""
self._path = path
@property
def read_only(self):
"""
Gets the read_only of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://releases.k8s.io/HEAD/examples/volumes/cephfs/README.md#how-to-use-it
:return: The read_only of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
:rtype: bool
"""
return self._read_only
@read_only.setter
def read_only(self, read_only):
"""
Sets the read_only of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
Optional: Defaults to false (read/write). ReadOnly here will force the ReadOnly setting in VolumeMounts. More info: https://releases.k8s.io/HEAD/examples/volumes/cephfs/README.md#how-to-use-it
:param read_only: The read_only of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
:type: bool
"""
self._read_only = read_only
    @property
    def secret_file(self):
        """Get the path to the key ring for the Ceph user.
        Optional: default is ``/etc/ceph/user.secret``.
        :return: The secret_file of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
        :rtype: str
        """
        return self._secret_file
    @secret_file.setter
    def secret_file(self, secret_file):
        """Set the path to the key ring for the Ceph user.
        Optional: default is ``/etc/ceph/user.secret``.
        :param secret_file: The secret_file of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
        :type: str
        """
        self._secret_file = secret_file
    @property
    def secret_ref(self):
        """Get the reference to the authentication secret for the Ceph user.
        Optional: default is empty.
        :return: The secret_ref of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
        :rtype: IoK8sApiCoreV1SecretReference
        """
        return self._secret_ref
    @secret_ref.setter
    def secret_ref(self, secret_ref):
        """Set the reference to the authentication secret for the Ceph user.
        Optional: default is empty.
        :param secret_ref: The secret_ref of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
        :type: IoK8sApiCoreV1SecretReference
        """
        self._secret_ref = secret_ref
    @property
    def user(self):
        """Get the rados user name.
        Optional: default is ``admin``.
        :return: The user of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
        :rtype: str
        """
        return self._user
    @user.setter
    def user(self, user):
        """Set the rados user name.
        Optional: default is ``admin``.
        :param user: The user of this IoK8sApiCoreV1CephFSPersistentVolumeSource.
        :type: str
        """
        self._user = user
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
    def to_str(self):
        """Return a pretty-printed string representation of the model."""
        return pformat(self.to_dict())
    def __repr__(self):
        """Delegate to :meth:`to_str` so ``print`` and ``pprint`` show all fields."""
        return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, IoK8sApiCoreV1CephFSPersistentVolumeSource):
return False
return self.__dict__ == other.__dict__
    def __ne__(self, other):
        """Inverse of :meth:`__eq__` (needed explicitly on Python 2)."""
        return not self == other
|
[
"atengler@mirantis.com"
] |
atengler@mirantis.com
|
d2cfe9544bf9ad5960547d0516f3cb4560808d82
|
68da5db22ee19a9d36b15addd9add4fafe7d3cb8
|
/blog/migrations/0001_initial.py
|
d4e32093dd45519e4530f28916b0015627dca3ac
|
[] |
no_license
|
AbdullahElian1/drf-auth
|
68bd35c2e7ebc2a43cc505485ec8639b69359bda
|
2ca79e8f84e537b3f2e34bdbb4b675089544de92
|
refs/heads/master
| 2023-07-25T20:46:48.147985
| 2021-09-07T13:08:37
| 2021-09-07T13:08:37
| 403,948,059
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 842
|
py
|
# Generated by Django 3.2.7 on 2021-09-05 11:56
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for the blog app: creates the Blog
    # model with an author FK to the project's (swappable) user model.
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Blog',
            fields=[
                ('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('Name', models.CharField(max_length=32)),
                ('Message', models.TextField()),
                ('description', models.TextField()),
                # Deleting a user cascades to their blog posts.
                ('author', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
    ]
|
[
"abodelian28@gmail.com"
] |
abodelian28@gmail.com
|
7c57bd10d57aaa6df2951509b83f5ce23e97dd2b
|
70dc25055ced5c7f59e80eb1c47cdf25bb4dd4d1
|
/cycleshare/migrations/0015_auto_20181212_1855.py
|
eed820a52b7de45d89468d564bd052ff17b9b555
|
[
"Apache-2.0"
] |
permissive
|
HemanthJella/ewallet
|
3f93d710e06b723b059fbd1a7a4791317d1333af
|
cc7f9b5abb7aa552b8769b9324c3d79630e5ea6a
|
refs/heads/master
| 2020-07-06T05:55:00.934032
| 2019-08-17T18:25:11
| 2019-08-17T18:25:11
| 202,913,168
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
# Generated by Django 2.0.9 on 2018-12-12 10:55
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated migration: retargets Cycle.toprofile to dashboard.Profile
    # (previously created in 0014) with CASCADE delete behavior.
    dependencies = [
        ('cycleshare', '0014_cycle_toprofile'),
    ]
    operations = [
        migrations.AlterField(
            model_name='cycle',
            name='toprofile',
            # NOTE(review): default='' is an odd default for a ForeignKey --
            # likely a leftover from makemigrations prompting; confirm intent.
            field=models.ForeignKey(default='', on_delete=django.db.models.deletion.CASCADE, to='dashboard.Profile'),
        ),
    ]
|
[
"noreply@github.com"
] |
HemanthJella.noreply@github.com
|
9e0b75b4b3e4fe4df7ab296d5b7d012658b0938c
|
c8fb49b217e463ac55163ae12fe078e5fa286628
|
/Assignment_5/third.py
|
ca7345090f44f6db52e1d851347f298ef6351c75
|
[] |
no_license
|
tushars265/Dlithe
|
cf426c12efc88109c30e7818c48052398f7a692d
|
08185687354474b18a6200db49d30865095f46bb
|
refs/heads/main
| 2023-06-25T12:28:12.839340
| 2021-07-27T15:44:26
| 2021-07-27T15:44:26
| 372,475,688
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 484
|
py
|
# Reads n interval descriptions from stdin and greedily counts how many of
# them can be kept such that every kept interval overlaps interval a[0]'s
# group; prints the size of the kept set.
n = int(input())
a = []
b = []
for i in range(n):
    # NOTE(review): each line appears to encode "start length" -- the end
    # point is reconstructed below as a[i][0] + a[i][1]; confirm against
    # the original problem statement.
    ab = list(map(int, input().split()))
    a.append(ab)
b.append(a[0])
for i in range(1,n):
    flag = 0
    for j in range(len(b)):
        # Keep a[i] only if every already-kept interval's start or end
        # falls inside [a[i] start, a[i] start + length].
        if (b[j][0] >= a[i][0] and b[j][0] <= (a[i][1]+ a[i][0])) or ((b[j][1]+b[j][0]) >= a[i][0] and (b[j][1]+b[j][0]) <= (a[i][1]+ a[i][0])):
            continue
        else:
            flag = 1
            break
    if flag == 0:
        b.append(a[i])
print(len(b))
|
[
"noreply@github.com"
] |
tushars265.noreply@github.com
|
6e1f1cadcaf9ca3939faeba100d65e749486a4f2
|
47045b7b7ef3c6f67bef89cbbc82a597773eb366
|
/commerce/views/cart.py
|
738193581c02f6c99871727bb34cb553ad31b954
|
[
"Apache-2.0"
] |
permissive
|
felipediel/django-commerce
|
06fecdbd302b33c3cce4284ffc9fe9219a57672e
|
b992bf4c81ca6dfaad9ccd423d25fba9d255f159
|
refs/heads/master
| 2023-06-16T14:51:49.301650
| 2021-07-12T07:04:39
| 2021-07-12T07:04:39
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 9,968
|
py
|
from django.contrib import messages
from django.contrib.auth.mixins import LoginRequiredMixin
from django.contrib.contenttypes.models import ContentType
from django.shortcuts import get_object_or_404, redirect
from django.utils.translation import ugettext_lazy as _
from django.views import View
from django.views.generic import DetailView, UpdateView
from commerce import settings as commerce_settings
from commerce.forms import AddressesForm, ShippingAndPaymentForm, DiscountCodeForm
from commerce.models import Cart, Order, PaymentMethod, Item, Option, ShippingOption
from commerce.templatetags.commerce import discount_for_product
class AddToCartView(LoginRequiredMixin, View):
    """Add a product (optionally with a chosen Option) to the user's cart.

    The product is resolved generically via a ContentType id + object id from
    the URL kwargs. After adding, stale discounts are dropped and a
    product-specific discount may be auto-applied. Always redirects to
    ``back_url`` (query param) or the cart page.
    """
    def get(self, request, *args, **kwargs):
        content_type = get_object_or_404(ContentType, id=kwargs['content_type_id'])
        product = get_object_or_404(content_type.model_class(), id=kwargs['object_id'])
        option = get_object_or_404(Option, slug_i18n=request.GET['option']) if 'option' in request.GET else None
        cart = Cart.get_for_user(request.user)
        # TODO: settings:
        # TODO: check if product can be added multiple times into cart
        # TODO: max items in cart
        ALLOW_MULTIPLE_SAME_ITEMS = False
        MAX_ITEMS = 3
        if cart.items_quantity >= MAX_ITEMS:
            # Fix: the message was an f-string (f'...') passed to gettext --
            # f-strings cannot be extracted into translation catalogs. The
            # runtime string is unchanged; %-interpolation still applies.
            messages.warning(request, _('You can order at most %d items at once') % MAX_ITEMS)
        else:
            if ALLOW_MULTIPLE_SAME_ITEMS or not cart.has_item(product, option):
                # add item into cart
                cart.add_item(product, option)
                # discount
                if cart.discount:
                    # remove discount if it is not valid anymore
                    if not cart.discount.is_valid:
                        cart.discount = None
                        cart.save(update_fields=['discount'])
                if not cart.discount:
                    # if no discount is applied yet, check if there is a valid discount available for product
                    self.apply_discount_by_product(cart, product)
                messages.info(request, _('%s was added into cart') % product)
            else:
                messages.warning(request, _('%s is already in cart') % product)
        back_url = request.GET.get('back_url', cart.get_absolute_url())
        return redirect(back_url)

    def apply_discount_by_product(self, cart, product):
        """Auto-apply a product-specific discount flagged with ``add_to_cart``."""
        discount = discount_for_product({'request': self.request}, product)
        if discount and discount.add_to_cart:
            cart.discount = discount
            cart.save(update_fields=['discount'])
class UnapplyDiscountCartView(LoginRequiredMixin, View):
    """Remove any discount currently applied to the user's cart."""
    def get(self, request, *args, **kwargs):
        cart = Cart.get_for_user(request.user)
        cart.discount = None
        cart.save(update_fields=['discount'])
        # Honor an explicit return target, otherwise go back to the cart page.
        return redirect(request.GET.get('back_url', cart.get_absolute_url()))
class RemoveFromCartView(LoginRequiredMixin, View):
    """Decrement (and delete at zero) a cart item, then clean up dependent
    state: stale discounts, loyalty points, and the cart itself when empty.
    The cleanup steps below are order-sensitive.
    """
    def get(self, request, *args, **kwargs):
        item = get_object_or_404(Item, id=kwargs['item_id'])
        cart = Cart.get_for_user(request.user)
        # Only touch items that actually belong to this user's cart.
        if item in cart.item_set.all():
            item.quantity -= 1
            item.save(update_fields=['quantity'])
            if item.quantity <= 0:
                item.delete()
            messages.info(request, _('%s removed from cart') % item)
            # discount
            if cart.discount:
                # remove discount if it is not valid anymore
                if not cart.discount.is_valid:
                    cart.discount = None
                    cart.save(update_fields=['discount'])
            # unset loyalty points
            if cart.subtotal < 0 < cart.loyalty_points:
                cart.update_loyalty_points()
            # delete empty cart
            if not cart.item_set.exists():
                cart.delete()
        back_url = request.GET.get('back_url', cart.get_absolute_url())
        return redirect(back_url)
class CartMixin(LoginRequiredMixin):
    """Resolve the current user's cart as the view's object."""
    model = Cart

    def get_object(self, queryset=None):
        # *queryset* is ignored; the cart is always looked up by user.
        current_user = self.request.user
        return self.model.get_for_user(current_user)
class CartDetailView(CartMixin, UpdateView):
    """Cart page with an inline discount-code form."""
    form_class = DiscountCodeForm
    template_name = 'commerce/cart_detail.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        # The template toggles the loyalty-points UI on this flag.
        context['loyalty_program_enabled'] = commerce_settings.LOYALTY_PROGRAM_ENABLED
        return context

    def get_form_kwargs(self):
        kwargs = super().get_form_kwargs()
        # DiscountCodeForm validates codes against the current user.
        kwargs['user'] = self.request.user
        return kwargs
class EmptyCartRedirectMixin(object):
    """Short-circuit checkout views back to the cart page when it is empty."""
    def dispatch(self, request, *args, **kwargs):
        cart = self.get_object()
        if not cart.is_empty():
            return super().dispatch(request, *args, **kwargs)
        return redirect(cart.get_absolute_url())
class CheckoutAddressesView(CartMixin, EmptyCartRedirectMixin, UpdateView):
    """Checkout step 1: collect delivery/billing addresses and contact info.
    Initial values are prefilled from the cart, falling back to the user's
    most recent order, or -- for first-time buyers -- the user profile.
    """
    template_name = 'commerce/checkout_form.html'
    form_class = AddressesForm

    def get_initial(self):
        initial = super().get_initial()
        user = self.object.user
        last_user_order = user.order_set.last()
        # TODO: refactor
        if last_user_order:
            # Returning customer: reuse address/contact data from the last order.
            initial.update({
                'delivery_name': self.object.delivery_name or last_user_order.delivery_name,
                'delivery_street': self.object.delivery_street or last_user_order.delivery_street,
                'delivery_postcode': self.object.delivery_postcode or last_user_order.delivery_postcode,
                'delivery_city': self.object.delivery_city or last_user_order.delivery_city,
                'delivery_country': self.object.delivery_country or last_user_order.delivery_country,
                'billing_name': self.object.billing_name or last_user_order.billing_name,
                'billing_street': self.object.billing_street or last_user_order.billing_street,
                'billing_postcode': self.object.billing_postcode or last_user_order.billing_postcode,
                'billing_city': self.object.billing_city or last_user_order.billing_city,
                'billing_country': self.object.billing_country or last_user_order.billing_country,
                'reg_id': self.object.reg_id or last_user_order.reg_id,
                'tax_id': self.object.tax_id or last_user_order.tax_id,
                'vat_id': self.object.vat_id or last_user_order.vat_id,
                'email': self.object.email or last_user_order.email,
                'phone': self.object.phone or last_user_order.phone,
            })
        else:
            # First order: seed from the user's profile (no company ids available).
            initial.update({
                'delivery_name': self.object.delivery_name or user.get_full_name(),
                'delivery_street': self.object.delivery_street or user.street,
                'delivery_postcode': self.object.delivery_postcode or user.postcode,
                'delivery_city': self.object.delivery_city or user.city,
                'delivery_country': self.object.delivery_country or user.country,
                'billing_name': self.object.billing_name or user.get_full_name(),
                'billing_street': self.object.billing_street or user.street,
                'billing_postcode': self.object.billing_postcode or user.postcode,
                'billing_city': self.object.billing_city or user.city,
                'billing_country': self.object.billing_country or user.country,
                'email': self.object.email or user.email,
                'phone': self.object.phone or user.phone,
            })
        return initial

    def form_valid(self, form):
        # Persist addresses on the cart, then advance to shipping/payment.
        form.save()
        return redirect('commerce:checkout_shipping_and_payment')
class CheckoutShippingAndPaymentView(CartMixin, EmptyCartRedirectMixin, UpdateView):
    """Checkout step 2: pick shipping option and payment method."""
    template_name = 'commerce/checkout_form.html'
    form_class = ShippingAndPaymentForm

    def form_valid(self, form):
        form.save()
        return redirect('commerce:checkout_summary')

    def get_initial(self):
        initial = super().get_initial()
        # Preselect each choice when it is the only one available.
        shipping = ShippingOption.objects.for_country(self.object.delivery_country)
        if shipping.count() == 1:
            initial['shipping_option'] = shipping.first()
        payments = PaymentMethod.objects.all()
        if payments.count() == 1:
            initial['payment_method'] = payments.first()
        return initial
class CheckoutSummaryView(CartMixin, EmptyCartRedirectMixin, DetailView):
    """Checkout step 3: read-only order summary before finishing."""
    template_name = 'commerce/checkout_summary.html'

    def get_context_data(self, **kwargs):
        context = super().get_context_data(**kwargs)
        context['loyalty_program_enabled'] = commerce_settings.LOYALTY_PROGRAM_ENABLED
        return context
class CheckoutFinishView(CartMixin, DetailView):
    """Final checkout step: convert the cart into an Order and route the
    user to payment (online gateway) or straight to the order page.
    """
    def get(self, request, *args, **kwargs):
        cart = self.get_object()
        if cart.can_be_finished():
            # Free orders skip the awaiting-payment state entirely.
            order_status = Order.STATUS_AWAITING_PAYMENT if cart.total > 0 else Order.STATUS_PENDING
            order = cart.to_order(status=order_status)
            if order.status != Order.STATUS_AWAITING_PAYMENT:
                return redirect(order.get_absolute_url())
            if not order.payment_method:
                messages.error(request, _('Missing payment method'))
                return redirect(order.get_absolute_url())
            # Online payments hand off to the gateway URL; everything else
            # (e.g. bank transfer) lands on the order detail page.
            if order.payment_method.method == PaymentMethod.METHOD_ONLINE_PAYMENT:
                return redirect(order.get_payment_url())
            return redirect(order.get_absolute_url())
        else:
            messages.warning(request, _('Checkout process can not be finished yet'))
            return redirect(cart.get_absolute_url())
|
[
"erik.telepovsky@gmail.com"
] |
erik.telepovsky@gmail.com
|
579a3f4f1d19eae72e146c34d5512c0bc55f17c4
|
dd37e3ebc750d4cab469b00a1be0c71f3150730a
|
/geeksforgeeks/Merge Sort Tree for Range Order Statistics.py
|
bd6f12623fa4953c97c540d21be2f184a6b75acf
|
[] |
no_license
|
kwoshvick/Algorithm-practice
|
258d88a55113915706c3263b4ae6e0beec0a7f79
|
cbe2b8c22617a443d9b5ec28b60cc74f97be8fae
|
refs/heads/master
| 2023-03-10T22:38:38.920832
| 2021-03-02T13:53:32
| 2021-03-02T13:53:32
| 321,275,373
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 721
|
py
|
# https://www.geeksforgeeks.org/merge-sort-tree-for-range-order-statistics/
# Given an array of n numbers, the task is to answer the following queries:
#
# kthSmallest(start, end, k) : Find the Kth smallest
# number in the range from array
# index 'start' to 'end'.
# Input : arr[] = {3, 2, 5, 1, 8, 9|
# Query 1: start = 2, end = 5, k = 2 [2, 5, 1, 8]
# Query 2: start = 1, end = 6, k = 4 [3, 2, 5, 1, 8, 9]
# Output : 2
# 5
def getKSmallest(list, start, end, k):
    """Return the k-th smallest element of list[start..end] (1-indexed, inclusive).

    NOTE: the parameter name ``list`` shadows the builtin; it is kept
    unchanged for backward compatibility with existing callers.
    """
    # start/end are 1-indexed and inclusive, so the correct Python slice is
    # [start-1:end]. The original used [start-1:end+1], which included one
    # extra trailing element -- masked by the sample data, where the extra
    # element never changed the k-th smallest.
    elements = sorted(list[start - 1:end])
    return elements[k - 1]
print(getKSmallest([3, 2, 5, 1, 8, 9],2,5,2))
print(getKSmallest([3, 2, 5, 1, 8, 9],1,6,4))
|
[
"kwoshvick@gmail.com"
] |
kwoshvick@gmail.com
|
3deb8fd5e6f39b10767ebe5aeb3d4bfbb254661f
|
4dec1fb54f1f24d30047af325b63a6117a4c4495
|
/permit/migrations/0010_auto_20160109_1950.py
|
f3668e19ae8d5b44875bdd1f10c5846ecb83997f
|
[] |
no_license
|
wanjohikibui/biz
|
37c67de6361eaa421f557dbf5491915f3a9df655
|
dabc6ffcd1d81a5daca021f7c204126213dd3e2a
|
refs/heads/master
| 2021-01-10T01:37:07.289635
| 2016-03-07T09:34:31
| 2016-03-07T09:34:31
| 53,312,990
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,405
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.9 on 2016-01-09 16:50
from __future__ import unicode_literals
from django.db import migrations, models
import permit.models
class Migration(migrations.Migration):
    # Auto-generated migration: extends Profile with contact/identity fields
    # and an optional picture (all nullable/blank, so no data backfill needed).
    dependencies = [
        ('permit', '0009_auto_20160109_1706'),
    ]
    operations = [
        migrations.AddField(
            model_name='profile',
            name='address',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Postal Code'),
        ),
        migrations.AddField(
            model_name='profile',
            name='customer_type',
            field=models.CharField(blank=True, choices=[(b'Owner', b'Owner'), (b'Employee', b'Employee'), (b'Agent', b'Agent')], max_length=54, null=True, verbose_name='Customer Type'),
        ),
        migrations.AddField(
            model_name='profile',
            name='id_no',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='ID No'),
        ),
        migrations.AddField(
            model_name='profile',
            name='phone_no',
            field=models.CharField(blank=True, max_length=255, null=True, verbose_name='Phone no:'),
        ),
        migrations.AddField(
            model_name='profile',
            name='picture',
            # Upload path is computed by permit.models.upload_folder.
            field=models.ImageField(blank=True, upload_to=permit.models.upload_folder),
        ),
    ]
|
[
"swanjohi9@gmail.com"
] |
swanjohi9@gmail.com
|
0d279a8f9bc7a0152339ef91e56b447ffc7273b8
|
f98418686ebdc68d12b6c8bc657e93b8df3a6755
|
/sunny/6221.py
|
c1b758b8c077f1eb5e882f522fd6397aed510ef1
|
[] |
no_license
|
sunnyyong2/algorithm
|
a666924a37ce20b74488040d401bb0745f3da8bf
|
75ab06d0d2b1ae595742fe14d9d81e240bcc28d0
|
refs/heads/master
| 2020-07-04T16:01:10.526471
| 2019-10-02T00:12:30
| 2019-10-02T00:12:30
| 202,331,883
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 462
|
py
|
# Rock-paper-scissors judge for two players; tokens are Korean:
# 가위 = scissors, 바위 = rock, 보 = paper, 주먹 = fist.
man1 = input()
man2 = input()
# NOTE(review): the rock token is inconsistent -- most branches use '바위'
# but the "Man1 plays 보 (paper)" win case compares man2 against '주먹'.
# Also only Man1-win and draw outcomes are covered, so a Man2 win (or any
# unmatched token pair) prints nothing at all. Confirm intended behavior.
if man1 == '바위' and man2 == '가위':
    print('Result : Man1 Win!')
elif man1 == '가위' and man2 == '보':
    print('Result : Man1 Win!')
elif man1 == '보' and man2 == '주먹':
    print('Result : Man1 Win!')
elif man1 == '바위' and man2 == '바위':
    print('Result : Draw')
elif man1 == '가위' and man2 == '가위':
    print('Result : Draw')
elif man1 == '보' and man2 == '보':
    print('Result : Draw')
|
[
"sunnyyong2@gmail.com"
] |
sunnyyong2@gmail.com
|
5bf2109740e2ae9c23c2f9a0d762912f1a24325e
|
b29884f90142460d4d8dec711fff09300df4e0b7
|
/autorest/python/emsapi/models/adi_ems_web_api_v2_dto_weather_metar_runway_visual_range.py
|
72eb46766fed6671f644c906e02a9ea341f8b32c
|
[
"MIT"
] |
permissive
|
ge-flight-analytics/ems-api-sdk
|
f8ec42a1f0200cdf87955b38a06b40350b5d7f68
|
eb88e42b3befabb3d4efe3f71e78c90ea0c7663e
|
refs/heads/master
| 2023-01-24T14:08:00.140788
| 2023-01-17T20:35:13
| 2023-01-17T20:35:13
| 79,595,424
| 3
| 2
|
MIT
| 2023-01-17T20:35:15
| 2017-01-20T20:29:05
|
Python
|
UTF-8
|
Python
| false
| false
| 2,859
|
py
|
# coding=utf-8
# --------------------------------------------------------------------------
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from msrest.serialization import Model
class AdiEmsWebApiV2DtoWeatherMetarRunwayVisualRange(Model):
    """Describes the current 10-minute average visual range for a runway, which is
    the distance over which a pilot
    can see the runway surface markings.
    AutoRest-generated model -- edits here will be lost on regeneration;
    ``_validation`` and ``_attribute_map`` drive msrest (de)serialization.
    :param runway: The name/number/approach direction identifying the runway
    :type runway: str
    :param visibility_primary: The primary runway visibility distance in feet.
     If a variable range is set, this value describes the
     minimum visibility distance in a visual range
    :type visibility_primary: float
    :param visibility_primary_qualifier: A qualifier describing the primary
     visibility distance. Possible values include: 'equal', 'greaterThan',
     'lessThan'
    :type visibility_primary_qualifier: str or ~emsapi.models.enum
    :param visibility_variable: The variable runway visibility distance in
     feet, describing the maximum visibility distance in the
     visual range
    :type visibility_variable: float
    :param visibility_variable_qualifier: A qualifier describing the variable
     visibility distance. Possible values include: 'equal', 'greaterThan',
     'lessThan'
    :type visibility_variable_qualifier: str or ~emsapi.models.enum
    :param trend: The trend of the runway visibility distance. Possible values
     include: 'unknown', 'noChange', 'increasing', 'decreasing'
    :type trend: str or ~emsapi.models.enum
    """
    # Only `runway` is mandatory; every other field may be None.
    _validation = {
        'runway': {'required': True},
    }
    # Maps Python attribute names to their JSON keys and msrest types.
    _attribute_map = {
        'runway': {'key': 'runway', 'type': 'str'},
        'visibility_primary': {'key': 'visibilityPrimary', 'type': 'float'},
        'visibility_primary_qualifier': {'key': 'visibilityPrimaryQualifier', 'type': 'str'},
        'visibility_variable': {'key': 'visibilityVariable', 'type': 'float'},
        'visibility_variable_qualifier': {'key': 'visibilityVariableQualifier', 'type': 'str'},
        'trend': {'key': 'trend', 'type': 'str'},
    }
    def __init__(self, runway, visibility_primary=None, visibility_primary_qualifier=None, visibility_variable=None, visibility_variable_qualifier=None, trend=None):
        """Initialize the model; only ``runway`` is required."""
        super(AdiEmsWebApiV2DtoWeatherMetarRunwayVisualRange, self).__init__()
        self.runway = runway
        self.visibility_primary = visibility_primary
        self.visibility_primary_qualifier = visibility_primary_qualifier
        self.visibility_variable = visibility_variable
        self.visibility_variable_qualifier = visibility_variable_qualifier
        self.trend = trend
|
[
"42385593+danverkamp@users.noreply.github.com"
] |
42385593+danverkamp@users.noreply.github.com
|
3d320b1e85ec7240d8d01b37ad7b0496b8d73008
|
de244fab3f8674d620d5be49812279fc7264e070
|
/bin/wheel
|
bdddbb62728b8d673e03de518f5bbe4caaf183da
|
[] |
no_license
|
cometa99/sportx1.0
|
5e19f6568a61458c862a5337cf0ac5667784a245
|
03685bee81d0f76b4ba1f27513403f7ef73b84ee
|
refs/heads/master
| 2021-07-08T16:10:45.733742
| 2017-10-01T14:06:23
| 2017-10-01T14:06:23
| 105,445,651
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 254
|
#!/Users/jh/Documents/django/mywebsite/newapp/bin/python2.7
# -*- coding: utf-8 -*-
# pip-generated console-script shim for the `wheel` command-line tool.
import re
import sys
from wheel.tool import main
if __name__ == '__main__':
    # Strip a trailing "-script.py(w)"/".exe" suffix so the tool reports
    # its plain command name in argv[0] (Windows launcher convention).
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"jh94798@gmail.com"
] |
jh94798@gmail.com
|
|
7163b2059a5a550d06748e9e97bcd377694cdabd
|
85d399dc9c32939a6642a92435f1c77778e9c823
|
/ch10/vector_v1.py
|
e09551288251495ce6d9612e972bfca65cff6782
|
[] |
no_license
|
roxanneduoduo/fluent_python_notes
|
adce35ccfa164a9330d2f43407a3c8b7ca52f09b
|
d3aab51d9e943c53172f8d6aa30111086c436649
|
refs/heads/master
| 2020-04-17T21:08:21.512093
| 2019-02-25T07:39:50
| 2019-02-25T07:39:50
| 166,933,493
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,097
|
py
|
from array import array
import reprlib
import math
class Vector:
    """N-dimensional vector backed by a compact array of C doubles."""

    typecode = 'd'  # array/struct type code: double-precision float

    def __init__(self, components):
        # Store components in a typed array rather than a list to save memory.
        self._components = array(self.typecode, components)

    def __iter__(self):
        return iter(self._components)

    def __repr__(self):
        # reprlib caps long arrays with '...'; keep only the bracketed part.
        body = reprlib.repr(self._components)
        body = body[body.find('['):-1]
        return f'Vector({body})'

    def __str__(self):
        return str(tuple(self))

    def __bytes__(self):
        # One byte for the typecode, then the raw machine representation.
        header = bytes([ord(self.typecode)])
        return header + bytes(self._components)

    def __eq__(self, other):
        return tuple(self) == tuple(other)

    def __abs__(self):
        # Euclidean norm.
        return math.sqrt(sum(c * c for c in self))

    def __bool__(self):
        return abs(self) != 0.0

    @classmethod
    def frombytes(cls, octets):
        """Alternate constructor: rebuild a Vector from its __bytes__ form."""
        code = chr(octets[0])
        values = memoryview(octets[1:]).cast(code)
        return cls(values)

    def __len__(self):
        return len(self._components)

    def __getitem__(self, index):
        return self._components[index]
|
[
"roxanne_duoduo@hotmail.com"
] |
roxanne_duoduo@hotmail.com
|
59df5ee58cb327cc012000841a5bb571f34ce784
|
12c30d7d0fb5ddac9392dd2122830df26d6eee64
|
/award_app/migrations/0001_initial.py
|
ab2cf4a13391d8aa9b63753a6d68b7e867094aae
|
[] |
no_license
|
mukamisha/Award
|
1543d40f7fc75334b5d4674803c79a5cfbc98271
|
72f294920e618802d0f6511bf15dfcddd12acbb4
|
refs/heads/master
| 2021-09-09T13:24:27.607663
| 2019-11-01T10:30:03
| 2019-11-01T10:30:03
| 218,336,248
| 0
| 0
| null | 2021-09-08T01:24:25
| 2019-10-29T16:44:06
|
Python
|
UTF-8
|
Python
| false
| false
| 2,891
|
py
|
# -*- coding: utf-8 -*-
# Generated by Django 1.11.23 on 2019-10-31 10:16
from __future__ import unicode_literals
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
    # Auto-generated initial migration for award_app: creates Image (a rated
    # submission), Profile (1:1 with the user) and Comment (FK to both).
    initial = True
    dependencies = [
        migrations.swappable_dependency(settings.AUTH_USER_MODEL),
    ]
    operations = [
        migrations.CreateModel(
            name='Comment',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('comment', models.CharField(max_length=20, null=True)),
            ],
        ),
        migrations.CreateModel(
            name='Image',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('image', models.ImageField(upload_to='image/')),
                ('title', models.CharField(max_length=30)),
                ('img_caption', models.TextField()),
                ('comments', models.TextField()),
                ('link', models.URLField(max_length=700)),
                # design/usability/content are 0-10 rating fields.
                ('design', models.IntegerField(choices=[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)], default=0)),
                ('usability', models.IntegerField(choices=[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)], default=0)),
                ('content', models.IntegerField(choices=[(0, 0), (1, 1), (2, 2), (3, 3), (4, 4), (5, 5), (6, 6), (7, 7), (8, 8), (9, 9), (10, 10)], default=0)),
                ('vote_submissions', models.IntegerField(default=0)),
                ('user', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
            ],
        ),
        migrations.CreateModel(
            name='Profile',
            fields=[
                ('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
                ('profile_picture', models.ImageField(upload_to='images/')),
                ('bio', models.TextField(max_length=700)),
                ('name', models.CharField(max_length=200)),
                ('user', models.OneToOneField(on_delete=django.db.models.deletion.CASCADE, related_name='profile', to=settings.AUTH_USER_MODEL)),
            ],
        ),
        # Comment FKs are added after both target models exist.
        migrations.AddField(
            model_name='comment',
            name='comment_pic',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to='award_app.Image'),
        ),
        migrations.AddField(
            model_name='comment',
            name='posted_by',
            field=models.ForeignKey(null=True, on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL),
        ),
    ]
|
[
"mukamishajacky97@gmail.com"
] |
mukamishajacky97@gmail.com
|
8afdd0f7b5db9c675c341b8e82342041fd87f46b
|
0170d7b0b61d51a00f63af041054777a809e41dc
|
/Ejercicios_Clase_Tony/Ejercicios Rápidos/Sales.py
|
e01561543089ce57f4480330f309543e11e80966
|
[] |
no_license
|
baropsx2/python_clase
|
917022036aca7daaa382705da2735e1f21f61760
|
c1b8b24cfedd9e8b6bdfd1c0b867075eed85e4b5
|
refs/heads/master
| 2022-01-11T08:48:36.142738
| 2019-07-26T01:30:30
| 2019-07-26T01:30:30
| 197,830,012
| 0
| 0
| null | 2019-07-26T01:30:31
| 2019-07-19T19:20:16
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 1,523
|
py
|
"""
Crear función que reciba 3 diccionarios, que devuelva todos los diccionarios y tiene que devolver otro diccionario con
el id del empleado y lo que se le debe pagar en total:
- El primero tiene el id del empleado y apunta a una tupla con los siguientes datos (el nombre del empleado y la
comisión por venta realizada).
- El segundo tiene (el id de la venta y el monto de la venta).
- Hay un tercero que viene el id del empleado y una lista con los id’s de las ventas que hizo.
"""
def calc_comisiones(empleados, ventas, vendedor_ventas):
    """Compute the total commission owed to each employee.

    empleados: {employee_id: (name, commission_rate)}
    ventas: {sale_id: sale_amount}
    vendedor_ventas: {employee_id: [sale_id, ...]}

    Returns (and prints) {employee_id: total_commission}; employees with
    no recorded sales are omitted from the result.
    """
    pagos = {}
    for emp_id, (nombre, comision) in empleados.items():
        ids_ventas = vendedor_ventas.get(emp_id)
        if ids_ventas:
            # Sum the commission over every sale attributed to this employee.
            pagos[emp_id] = sum(ventas.get(id_venta) * comision
                                for id_venta in ids_ventas)
    print(pagos)
    return pagos
# Sample fixtures: employee id -> (name, commission rate).
empleados = {
    1: ("Lucero", 0.05),
    2: ("David", 0.07),
    3: ("Tony", 0.03)
}
# Sale id -> sale amount.
ventas = {
    10: 227.00,
    4: 333.11,
    22: 101.00
}
# Employee id -> list of sale ids made by that employee
# (employee 3 has no sales and will be absent from the result).
vendedor_ventas = {
    1: [10, 4],
    2: [22]
}
calc_comisiones(empleados, ventas, vendedor_ventas)
|
[
"baropsx2@gmail.com"
] |
baropsx2@gmail.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.