| column | type | stats |
|---|---|---|
| blob_id | string | length 40–40 |
| directory_id | string | length 40–40 |
| path | string | length 3–281 |
| content_id | string | length 40–40 |
| detected_licenses | list | length 0–57 |
| license_type | string | 2 classes |
| repo_name | string | length 6–116 |
| snapshot_id | string | length 40–40 |
| revision_id | string | length 40–40 |
| branch_name | string | 313 classes |
| visit_date | timestamp[us] | |
| revision_date | timestamp[us] | |
| committer_date | timestamp[us] | |
| github_id | int64 | 18.2k–668M (nullable) |
| star_events_count | int64 | 0–102k |
| fork_events_count | int64 | 0–38.2k |
| gha_license_id | string | 17 classes |
| gha_event_created_at | timestamp[us] | |
| gha_created_at | timestamp[us] | |
| gha_language | string | 107 classes |
| src_encoding | string | 20 classes |
| language | string | 1 class |
| is_vendor | bool | 2 classes |
| is_generated | bool | 2 classes |
| length_bytes | int64 | 4–6.02M |
| extension | string | 78 classes |
| content | string | length 2–6.02M |
| authors | list | length 1–1 |
| author | string | length 0–175 |
| 46d54d3715c9aa7412a782c36b610842b28640e2 | c955d0f241e1b10ebb1a9ca851580b09f47fbd26 | /venv/Scripts/pip-script.py | 0fc9d0f7eea1cb49fa428a1c78bef712cc34d54e | [] | no_license | Nightichen/Myproject | dbb1d3231503a0179e0c84bae9f76af254a278c2 | 5fdfd6be283a3ea9b5bfce0f73f99737c7f485bc | refs/heads/master | 2020-06-14T04:23:51.102361 | 2019-07-02T16:57:32 | 2019-07-02T16:57:32 | 194,897,789 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 388 | py |
#!D:\project\work\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==19.0.3','console_scripts','pip'
__requires__ = 'pip==19.0.3'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==19.0.3', 'console_scripts', 'pip')()
    )
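For comparison, a minimal sketch of the same entry-point dispatch using the stdlib importlib.metadata (Python 3.10+) in place of the deprecated pkg_resources; this is an illustrative addition, not part of the generated script:

import sys
from importlib.metadata import entry_points

if __name__ == '__main__':
    # Select pip's console_scripts entry point and invoke it.
    (ep,) = entry_points(group='console_scripts', name='pip')
    sys.exit(ep.load()())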
| ["1290337913@qq.com"] | 1290337913@qq.com |
| c901d0898001ebbe91cb1e3a51cc08279de8ec9b | f210a0034af487b5b86a833a3477751e00a19ba6 | /OpenCV/video.py/Fream_Cut.py | d42f58252ee3afb9fcc99ce190972abb21737704 | [] | no_license | HojiCha3171/pycharmproject | 34b2212a311e093ca697c3ec7c9ee4def8a2f399 | b15eb698f1c15c8073b574702c62df276234393d | refs/heads/main | 2023-05-03T19:09:29.459458 | 2021-05-23T12:49:50 | 2021-05-23T12:49:50 | 368,645,307 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 561 | py |
import numpy as np
import cv2

cap = cv2.VideoCapture("C:\\Users\\tuzuk\\Desktop\\CIMG5572.MOV")
width = int(cap.get(cv2.CAP_PROP_FRAME_WIDTH))
height = int(cap.get(cv2.CAP_PROP_FRAME_HEIGHT))
fps = cap.get(cv2.CAP_PROP_FPS)
fourcc = cv2.VideoWriter_fourcc(*'DIVX')
writer = cv2.VideoWriter("C:\\Users\\tuzuk\\Desktop\\out.MOV", fourcc, fps, (width, height))
i = 0
while True:
    # Grab one frame at a time.
    ret, frame = cap.read()
    print(i)
    i = i + 1
    if not ret:
        break  # frame grab failed (end of video)
    writer.write(frame)  # write the frame out; the writer was otherwise unused
cap.release()
writer.release()
| ["noreply@github.com"] | noreply@github.com |
| 21aad8e9ae07cd132867853df8f27e5ab9e4c0c9 | 8a707aacc9ecd999cb2a996f91a718ce02f6205c | /leetcode/LowestCommonAncestorBinaryTree.py | 243cd8f4bd64ddf865bf3865d960f6f69388241d | [] | no_license | seeyarh/interview-prep | e074f68f1c1c05b9ab0911f30b13dad69c7bbfb8 | 1af5f79ed9dcf334d2758e14a9c08e7880246a4f | refs/heads/master | 2022-03-13T12:26:59.441211 | 2019-11-14T19:35:31 | 2019-11-14T19:35:31 | null | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 2,766 | py |
'''
Given a binary tree, find the lowest common ancestor (LCA) of two given nodes in the tree.

According to the definition of LCA on Wikipedia: "The lowest common ancestor is defined
between two nodes p and q as the lowest node in T that has both p and q as descendants
(where we allow a node to be a descendant of itself)."

Given the following binary search tree: root = [3,5,1,6,2,0,8,null,null,7,4]

        _______3______
       /              \
    ___5__          ___1__
   /      \        /      \
   6      _2       0       8
         /  \
         7   4

Example 1:
Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 1
Output: 3
Explanation: The LCA of nodes 5 and 1 is 3.

Example 2:
Input: root = [3,5,1,6,2,0,8,null,null,7,4], p = 5, q = 4
Output: 5
Explanation: The LCA of nodes 5 and 4 is 5, since a node can be a descendant of itself
according to the LCA definition.
'''


class TreeNode:
    def __init__(self, x):
        self.val = x
        self.left = None
        self.right = None


class Solution:
    def lowestCommonAncestor(self, root, p, q):
        # Hitting the end of a branch, or finding p or q, propagates it up.
        if root in (None, p, q):
            return root
        left = self.lowestCommonAncestor(root.left, p, q)
        right = self.lowestCommonAncestor(root.right, p, q)
        if left and right:
            # p and q are in different subtrees, so root is the LCA.
            return root
        else:
            return left or right


'''
Alternative path-based approach (sketch):

def search(root, node, path):
    if root.val == node.val:
        return path
    if not root.left and not root.right:
        return []
    path_l, path_r = [], []
    if root.left:
        path_l = search(root.left, node, path + [(root.val, 'l')])
    if root.right:
        path_r = search(root.right, node, path + [(root.val, 'r')])
    if path_l:
        return path_l
    if path_r:
        return path_r
    return []

path_p = search(root, p, [])
path_q = search(root, q, [])
print(path_p)
print(path_q)
for i in range(min(len(path_p), len(path_q))):
    if path_p[i][0] == q.val:
        return q.val
    if path_q[i][0] == p.val:
        return p.val
    if path_p[i][1] != path_q[i][1]:
        return path_p[i][0]
if len(path_p) < len(path_q):
    return p.val
else:
    return q.val
'''

sol = Solution()
root = TreeNode(3)
root.left = TreeNode(5)
root.left.left = TreeNode(6)
root.left.right = TreeNode(2)
root.left.right.left = TreeNode(7)
root.left.right.right = TreeNode(4)
root.right = TreeNode(1)
root.right.right = TreeNode(8)
root.right.left = TreeNode(0)
print(sol.lowestCommonAncestor(root, root.left, root.left.right.right).val)
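A check of Example 1 from the docstring, added here for illustration (not in the original file):

print(sol.lowestCommonAncestor(root, root.left, root.right).val)  # Example 1: expects 3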
| ["collinsrhuffiii@gmail.com"] | collinsrhuffiii@gmail.com |
| 620624227555341d918d2c14af35a2be32a22968 | 4bd9953e0ec435796712b58fb1ec1ed167544994 | /longest-consecutive-sequence.py | d7e0ce6492b9904aee825e8e422f5d3ee257b0a9 | [] | no_license | kyungwoh/leetcode | 1a2d82276650f7d31c90ac6b9dccf166606cc219 | 88d48f177274dc388572b6fc3703cf12f76a59ec | refs/heads/master | 2021-06-02T18:08:49.587617 | 2020-10-04T22:03:42 | 2020-10-04T22:03:42 | 140,018,560 | 0 | 0 | null | null | null | null | UTF-8 | Python | false | false | 820 | py |
# https://leetcode.com/problems/longest-consecutive-sequence/description/
# Save nums into a set and record each sequence start (n such that n-1 is absent).
# Then walk forward from each recorded start.
# Time: O(n), Space: O(n)
class Solution:
    def longestConsecutive(self, nums):
        """
        :type nums: List[int]
        :rtype: int
        """
        if not nums:
            return 0
        used = set()
        start = set()
        for n in nums:
            used.add(n)
            if n - 1 not in used:
                start.add(n)
            if n + 1 in start:
                start.remove(n + 1)
        longest = 0
        for s in start:
            i = s
            length = 0
            while i in used:
                i += 1
                length += 1
            longest = max(longest, length)
        return longest
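A quick sanity check, using the canonical example from the problem statement (the run 1, 2, 3, 4 has length 4); this snippet is an illustrative addition:

if __name__ == "__main__":
    print(Solution().longestConsecutive([100, 4, 200, 1, 3, 2]))  # 4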
| ["noreply@github.com"] | noreply@github.com |
| 22c9b2072eee710b0af8c948145defea4346aa03 | 4aa7a4d0525095725eb99843c83827ba4806ceb1 | /keras/keras110_5_LeakyReLU.py | 213ecbe46b4073d61f4b984af0b9f92698fdaafd | [] | no_license | seonukim/Study | 65a70f5bdfad68f643abc3086d5c7484bb2439d4 | a5f2538f9ae8b5fc93b5149dd51704e8881f0a80 | refs/heads/master | 2022-12-04T17:04:31.489771 | 2020-08-21T00:35:15 | 2020-08-21T00:35:15 | 260,144,755 | 2 | 0 | null | null | null | null | UTF-8 | Python | false | false | 283 | py |
# activation - LeakyReLU
import numpy as np
import matplotlib.pyplot as plt

x = np.arange(-6, 6, 0.01)

def leakyrelu(x):
    # Leaky ReLU (Rectified Linear Unit): slope 0.1 for x < 0, identity otherwise.
    return np.maximum(0.1 * x, x)

plt.plot(x, leakyrelu(x), linestyle='--', label='Leaky ReLU')
plt.legend()  # needed for the label above to actually show
plt.show()
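For contrast, plain ReLU differs only in the negative-side slope; this overlay is an illustrative addition, reusing the x and leakyrelu defined above:

def relu(x):
    return np.maximum(0, x)  # zero for x < 0, identity otherwise

plt.plot(x, relu(x), label='ReLU')
plt.plot(x, leakyrelu(x), linestyle='--', label='Leaky ReLU')
plt.legend()
plt.show()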
| ["92.seoonooo@gmail.com"] | 92.seoonooo@gmail.com |
| 3b3394be7b0f7c6c13b2006438556a5f0c7303ff | 7848e1b778ca0f3921aeeb0aeee44b398711b1f0 | /funtesting/mock/__init__.py | 495f052105769c8dfec9019cc49217d5fe565c55 | [] | no_license | fatelei/funtesting | a3a292ddfa30d9fbad47ee293768558b9e45fe8d | 748f4b5767cc16929408b19a5b62a812b48a0dd5 | refs/heads/master | 2021-01-10T12:09:38.809451 | 2016-02-21T03:59:15 | 2016-02-21T03:59:15 | 51,986,949 | 1 | 0 | null | null | null | null | UTF-8 | Python | false | false | 158 | py |
# -*- coding: utf8 -*-
"""
funtesting.mock
~~~~~~~~~~~~~~~

Mock modules.
"""

from .mock_redis import mock_redis

__all__ = [
    "mock_redis"
]
| ["fatelei@gmail.com"] | fatelei@gmail.com |
| 7c70d30f3f40830485de9f09a9c2e9f194bac780 | 3c72e5da8c681b08dc0309f2b43f39b338df5d20 | /OLD/dynamic2flat.py | 3d8fc03991ab2f61cb9ec3babd39a56aa25b6250 | [] | no_license | ksingh7/data-show | 929daf95cf4bbe1786aae9b2227485ab65ac4814 | f4c0aa80f34999eed51227a954d3b86937a2ff6a | refs/heads/master | 2020-03-09T17:11:13.505601 | 2019-03-17T01:34:30 | 2019-03-17T01:34:30 | 128,903,452 | 1 | 3 | null | 2018-06-22T17:32:10 | 2018-04-10T08:55:07 | HTML | UTF-8 | Python | false | false | 2,564 | py |
#!/usr/bin/env python
'''
Grabbed from: https://gist.github.com/zkendall/5c58ac81a9f152de2b851360df6539cb

This script converts the output of Ansible's dynamic ec2.py to a flatly
formatted static inventory file.

Before running this script run `python ./ec2.py --refresh-cache > ec2-dynamic.json`
See: http://docs.ansible.com/ansible/ec2_module.html
'''

import json

# Variables to collect
HOST_VAR_NAMES = ['ec2_private_ip_address', 'ec2_private_dns_name']
HOST_VAR_SORTER = 'ec2_private_ip_address'

# Read in output from ec2.py
with open('ec2-dynamic.json') as data_file:
    dynamic_inv = json.load(data_file)

# Collect hostvars we care about
hosts_and_vars = {}
for host, values in dynamic_inv['_meta']['hostvars'].items():
    vars_set = {}
    for key, host_vars in values.items():
        if key in HOST_VAR_NAMES:
            vars_set[key] = host_vars
    hosts_and_vars[host] = vars_set

# Sort host definitions by `HOST_VAR_SORTER`
hosts_and_vars = sorted(hosts_and_vars.items(), key=lambda item: item[1][HOST_VAR_SORTER])

# Collect groups we care about -- currently only `tag_...`. TODO: Add support for matching multiple.
groups_by_tag = dict((group_name, hosts) for group_name, hosts in dynamic_inv.items()
                     if group_name.startswith('tag_'))

# Write out static hosts file
with open('ec2-static.ini', 'w') as file_out:
    # Define hosts and vars first
    host_props = []
    for host, var_set in hosts_and_vars:
        properties = ['%s=%s' % (key, value) for (key, value) in var_set.items()]
        host_props.append((host, properties))
    # Get max column lengths (max() also avoids a KeyError on the first pass)
    template_widths = {}
    for host, properties in host_props:
        for idx, string in enumerate(properties):
            template_widths[idx] = max(len(string), template_widths.get(idx, 0))
    # Build string template for padded formatting. Pad columns with an extra
    # 2 spaces beyond the widest column value.
    template = ' '.join(['{' + str(column) + ':<' + str(int(size) - 1 + 2) + '}'
                         for column, size in template_widths.items()])
    # Write host and hostvars
    for host, props in host_props:
        vars_string = template.format(*props)
        file_out.writelines('{0:<16} {1}'.format(host, vars_string) + '\n')
    # Write groups and hosts
    for group, hosts in groups_by_tag.items():
        group_name_result = '\n[' + group + ']\n'
        file_out.writelines(group_name_result)
        # Populate group
        file_out.writelines([host + '\n' for host in hosts])
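To make the transformation concrete, a toy sketch of the expected input and output shapes; the host name, IP, DNS name, and tag group below are made up for illustration:

# Hypothetical miniature of ec2-dynamic.json:
sample_inv = {
    "_meta": {"hostvars": {
        "web1": {"ec2_private_ip_address": "10.0.0.1",
                 "ec2_private_dns_name": "ip-10-0-0-1.ec2.internal"},
    }},
    "tag_Role_web": ["web1"],
}
# The script would then emit roughly:
#   web1             ec2_private_ip_address=10.0.0.1  ec2_private_dns_name=ip-10-0-0-1.ec2.internal
#
#   [tag_Role_web]
#   web1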
| ["karan.singh731987@gmail.com"] | karan.singh731987@gmail.com |
| 45de4cd95439627837ca10655abc903d328c3ca9 | e6ebedc843f643c492ed64871f5285255818749b | /jni-build/jni/include/tensorflow/python/ops/control_flow_ops.py | 1367cc822afbfc27b6b6c803dd5d3fa31c1322b3 | [] | no_license | thetonrifles/android-tensorflow | b7a3deb389b04892e3670ed9b4a3d0774a7552c8 | aef805df8139b46c81f440d38be78990a3b07c1c | refs/heads/master | 2022-12-21T08:53:16.655344 | 2016-03-01T21:55:28 | 2016-03-01T21:55:28 | 51,915,596 | 2 | 1 | null | 2022-12-11T20:44:02 | 2016-02-17T10:48:48 | C++ | UTF-8 | Python | false | false | 75,149 | py |
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""## Control Flow Operations
TensorFlow provides several operations and classes that you can use to control
the execution of operations and add conditional dependencies to your graph.
@@identity
@@tuple
@@group
@@no_op
@@count_up_to
@@cond
## Logical Operators
TensorFlow provides several operations that you can use to add logical operators
to your graph.
@@logical_and
@@logical_not
@@logical_or
@@logical_xor
## Comparison Operators
TensorFlow provides several operations that you can use to add comparison
operators to your graph.
@@equal
@@not_equal
@@less
@@less_equal
@@greater
@@greater_equal
@@select
@@where
## Debugging Operations
TensorFlow provides several operations that you can use to validate values and
debug your graph.
@@is_finite
@@is_inf
@@is_nan
@@verify_tensor_all_finite
@@check_numerics
@@add_check_numerics_ops
@@Assert
@@Print
"""
# pylint: disable=g-bad-name
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
import six
from six.moves import xrange # pylint: disable=redefined-builtin
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import common_shapes
from tensorflow.python.ops import constant_op
from tensorflow.python.ops import gen_array_ops
from tensorflow.python.ops import gen_control_flow_ops
from tensorflow.python.ops import gen_data_flow_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import tensor_array_ops
# pylint: disable=wildcard-import,undefined-variable
from tensorflow.python.ops.gen_control_flow_ops import *
# pylint: enable=wildcard-import
from tensorflow.python.platform import logging
# We override the 'tuple' for a control flow op, so we keep python's
# existing 'tuple' for later use in this module.
_basetuple = tuple
# pylint: disable=protected-access
def _Identity(data, name=None):
  """Return a tensor with the same shape and contents as the input tensor.

  Args:
    data: A Tensor.
    name: A name for this operation (optional).

  Returns:
    A Tensor with the same type and value as the input Tensor.
  """
  if not data.dtype.is_ref_dtype:
    return array_ops.identity(data, name=name)
  else:
    return gen_array_ops._ref_identity(data, name=name)


def _NextIteration(data, name=None):
  if not data.dtype.is_ref_dtype:
    return next_iteration(data, name=name)
  else:
    return ref_next_iteration(data, name=name)


def _Merge(values, name=None):
  if all([v.dtype.is_ref_dtype for v in values]):
    return gen_control_flow_ops._ref_merge(values, name)
  else:
    return gen_control_flow_ops._merge(values, name)
def _Enter(data, frame_name, is_constant=False, parallel_iterations=10,
           use_ref=True, name=None):
  """Creates or finds a child frame, and makes `data` available to it.

  The unique `frame_name` is used by the `Executor` to identify frames. If
  `is_constant` is true, `data` is a constant in the child frame; otherwise
  it may be changed in the child frame. At most `parallel_iterations`
  iterations are run in parallel in the child frame.

  Args:
    data: The tensor to be made available to the child frame.
    frame_name: The name of the child frame.
    is_constant: If true, the output is constant within the child frame.
    parallel_iterations: The number of iterations allowed to run in parallel.
    use_ref: If true, use ref_enter if data is of ref type.
    name: A name for this operation (optional).

  Returns:
    The same tensor as `data`.
  """
  if data.dtype.is_ref_dtype and use_ref:
    return ref_enter(data, frame_name, is_constant, parallel_iterations,
                     name=name)
  else:
    return enter(data, frame_name, is_constant, parallel_iterations,
                 name=name)


def exit(data, name=None):
  """Exits the current frame to its parent frame.

  Exit makes its input `data` available to the parent frame.

  Args:
    data: The tensor to be made available to the parent frame.
    name: A name for this operation (optional).

  Returns:
    The same tensor as `data`.
  """
  if data.dtype.is_ref_dtype:
    return gen_control_flow_ops._ref_exit(data, name)
  else:
    return gen_control_flow_ops._exit(data, name)
def switch(data, pred, dtype=None, name=None):
  """Forwards `data` to an output determined by `pred`.

  If `pred` is true, the `data` input is forwarded to the first output.
  Otherwise, the data goes to the second output.

  This op handles `Tensor`s and `IndexedSlices`.

  Args:
    data: The tensor to be forwarded to the appropriate output.
    pred: A scalar that specifies which output port will receive data.
    dtype: Optional element type for the returned tensor. If missing,
      the type is inferred from the type of `value`.
    name: A name for this operation (optional).

  Returns:
    `(output_false, output_true)`: If `pred` is true, data will be forwarded to
    `output_true`, otherwise it goes to `output_false`.
  """
  with ops.op_scope([data, pred], name, "Switch") as name:
    data = ops.convert_to_tensor_or_indexed_slices(data, dtype=dtype,
                                                   name="data")
    pred = ops.convert_to_tensor(pred, name="pred")
    if isinstance(data, ops.Tensor):
      return gen_control_flow_ops._switch(data, pred, name=name)
    else:
      val, ind, dense_shape = data.values, data.indices, data.dense_shape
      val_f, val_t = gen_control_flow_ops._switch(val, pred, name=name)
      ind_f, ind_t = gen_control_flow_ops._switch(ind, pred, name="indices")
      if dense_shape:
        dense_shape_f, dense_shape_t = gen_control_flow_ops._switch(
            dense_shape, pred, name="dense_shape")
      else:
        dense_shape_f, dense_shape_t = None, None
      return (ops.IndexedSlices(val_f, ind_f, dense_shape_f),
              ops.IndexedSlices(val_t, ind_t, dense_shape_t))
def merge(inputs, name=None):
  """Returns the value of an available element of `inputs`.

  This op tests each of the tensors in `inputs` in turn to determine if any of
  them is available. If it finds an available tensor, it returns it and its
  index in `inputs`.

  It is an error if more than one tensor in `inputs` is available. If no tensor
  in `inputs` is available, the returned tensor and index are not set.

  This op handles both `Tensor`s and `IndexedSlices`. If inputs has a mix of
  `Tensor`s and `IndexedSlices`, all inputs are converted to IndexedSlices
  before merging.

  Args:
    inputs: The input tensors, at most one of which is available.
    name: A name for this operation (optional).

  Returns:
    A tuple containing the chosen input tensor and its index in `inputs`.

  Raises:
    ValueError: If inputs are IndexedSlices and some but not all have a
      dense_shape property.
  """
  with ops.op_scope(inputs, name, "Merge") as name:
    inputs = [ops.convert_to_tensor_or_indexed_slices(inp)
              for inp in inputs]
    if all([isinstance(inp, ops.Tensor) for inp in inputs]):
      return _Merge(inputs, name=name)
    else:
      inputs = math_ops._as_indexed_slices_list(inputs)
      values, _ = _Merge([inp.values for inp in inputs], name=name)
      indices, chosen_index = _Merge(
          [inp.indices for inp in inputs], name="indices")
      if any(inp.dense_shape for inp in inputs):
        if not all(inp.dense_shape for inp in inputs):
          raise ValueError("Either all merged IndexedSlices must have a "
                           "dense_shape, or none must have a dense_shape.")
        dense_shape, _ = _Merge(
            [inp.dense_shape for inp in inputs], name="dense_shape")
      else:
        dense_shape = None
      return ops.IndexedSlices(values, indices, dense_shape), chosen_index
def _SwitchRefOrTensor(data, pred, name="Switch"):
  """Forwards `data` to an output determined by `pred`.

  If `pred` is true, the `data` input is forwarded to the first output.
  Otherwise, the data goes to the second output.

  This op handles `Tensor`s and `IndexedSlices`.

  Args:
    data: The tensor to be forwarded to the appropriate output.
    pred: A scalar that specifies which output port will receive data.
    name: A name for this operation (optional).

  Returns:
    `(output_false, output_true)`: If `pred` is true, data will be forwarded to
    `output_true`, otherwise it goes to `output_false`.

  Raises:
    TypeError: if data is not a Tensor or IndexedSlices
  """
  data = ops.convert_to_tensor_or_indexed_slices(data, name="data")
  with ops.device(data.device):
    if isinstance(data, ops.Tensor):
      if not data.dtype.is_ref_dtype:
        return switch(data, pred, name=name)
      else:
        return ref_switch(data, pred, name=name)
    else:
      return switch(data, pred, name=name)
def _convert_tensorarrays_to_flows(tensors_or_tensor_arrays):
  return [ta.flow if isinstance(ta, tensor_array_ops.TensorArray)
          else ta
          for ta in tensors_or_tensor_arrays]


def _convert_flows_to_tensorarrays(tensors_or_tensorarrays, tensors_or_flows):
  if len(tensors_or_tensorarrays) != len(tensors_or_flows):
    raise ValueError(
        "Lengths of original Tensor list and new list do not match: %d vs. %d"
        % (len(tensors_or_tensorarrays), len(tensors_or_flows)))
  return [
      tensor_array_ops.TensorArray(
          dtype=ta.dtype, handle=ta.handle, flow=t_or_flow)
      if isinstance(ta, tensor_array_ops.TensorArray)
      else t_or_flow
      for (ta, t_or_flow) in zip(tensors_or_tensorarrays, tensors_or_flows)]
class ControlFlowOpWrapper(object):
"""A wrapper class for Operation.
A wrapped op allows us to capture the uses of its inputs and outputs. In
gradients(), right before calling the gradient function of an op, we wrap
the op by calling MakeWrapper. So during the execution of the gradient
function of an op, any time one of its inputs/outputs is used, we
generate code to remember its values for all iterations.
"""
class _ControlFlowOpInputs(object):
"""An indirection to capture the input tensors needed in backprop."""
def __init__(self, op, grad_state):
self._op = op
self._grad_state = grad_state
self._inputs = None
def __len__(self):
return len(self._op._inputs)
def __getitem__(self, index):
if self._inputs is None:
self._inputs = [None for _ in self._op.inputs]
if isinstance(index, int):
val = self._inputs[index]
if val is None:
f_val = self._op.inputs[index]
val = self._grad_state.GetRealValue(f_val)
self._inputs[index] = val
return val
elif isinstance(index, slice):
start, stop, step = index.indices(len(self))
vals = [self[i] for i in xrange(start, stop, step)]
return vals
else:
raise TypeError("index must be an integer or slice")
class _ControlFlowOpOutputs(object):
"""An indirection to capture the output tensors needed in backprop."""
def __init__(self, op, grad_state):
self._op = op
self._grad_state = grad_state
self._outputs = None
def __len__(self):
return len(self._op._outputs)
def __getitem__(self, index):
if self._outputs is None:
self._outputs = [None for _ in self._op.outputs]
if isinstance(index, int):
val = self._outputs[index]
if val is None:
f_val = self._op.outputs[index]
val = self._grad_state.GetRealValue(f_val)
self._outputs[index] = val
return val
elif isinstance(index, slice):
start, stop, step = index.indices(len(self))
vals = [self[i] for i in xrange(start, stop, step)]
return vals
else:
raise TypeError("index must be an integer or slice")
def __init__(self, op, grad_state):
self._grad_state = grad_state # The GradLoopState this op belongs to.
self._op = op
self._inputs = None
self._outputs = None
@property
def grad_state(self):
return self._grad_state
@property
def inputs(self):
if self._inputs is None:
self._inputs = self._ControlFlowOpInputs(self._op, self._grad_state)
return self._inputs
@property
def outputs(self):
if self._outputs is None:
self._outputs = self._ControlFlowOpOutputs(self._op, self._grad_state)
return self._outputs
@property
def op(self):
return self._op
@property
def name(self):
"""Returns the name of this instance of op."""
return self._op.name
@property
def _id(self):
"""Returns the unique id of this operation."""
return self._op._id
@property
def device(self):
"""Returns the device of this operation.
Returns:
a string or None if the device was not set.
"""
return self._op.device
@property
def type(self):
"""Returns the type of the op."""
return self._op.type
@property
def graph(self):
"""The `Graph` that contains this operation."""
return self._op.graph
def get_attr(self, name):
"""Returns the value of the attr of this op with the given `name`."""
return self._op.get_attr(name)
def _get_control_flow_context(self):
"""Returns the control flow context of this op."""
return self._op._get_control_flow_context()
def _IsLoopConstantEnter(op):
  """Returns true iff op is a loop invariant."""
  is_enter = (op.type == "Enter" or op.type == "RefEnter")
  return is_enter and op.get_attr("is_constant")


def _IsLoopExit(op):
  return op.type == "Exit" or op.type == "RefExit"
class GradLoopState(object):
"""The state used for constructing the gradient graph for a while loop.
We create a GradLoopState for each while loop in forward and its
corresponding while loop in backprop. This gives us access to both
the forward and the backprop WhileContexts.
During the construction of gradient graph, any time when we detect
a forward value that is needed for backprop, we create a history
accumulator and add it to `history_map`. Any time when we backprop
a loop switch op (in _SwitchGrad), we add the grad merge op in
`switch_map`.
"""
def __init__(self, forward_ctxt, outer_grad_state):
# The grad loop state for the outer while loop.
self._outer_grad_state = None
# The while loop context for forward.
self._forward_context = None
# The loop counter added by AddForwardCounter. It is the value
# of the loop counter for the next iteration.
self._forward_index = None
# A sync op for forward.
self._forward_sync = None
# The while loop context for backprop.
self._grad_context = None
# The loop counter added by AddBackPropCounter. It is the value
# of the loop counter for the current iteration.
self._grad_index = None
# A sync op for backprop.
self._grad_sync = None
# Information needed by backprop.
self._history_map = {}
self._switch_map = {}
self._outer_grad_state = outer_grad_state
if outer_grad_state:
outer_forward_ctxt = outer_grad_state.forward_context
else:
outer_forward_ctxt = forward_ctxt.outer_context
# Add the forward loop counter.
if outer_forward_ctxt: outer_forward_ctxt.Enter()
cnt, forward_index = forward_ctxt.AddForwardCounter()
if outer_forward_ctxt: outer_forward_ctxt.Exit()
self._forward_context = forward_ctxt
self._forward_index = forward_index
# Add the backprop WhileContext, and the backprop loop counter.
if outer_grad_state:
# This is a nested loop. Remember the iteration counts for each
# execution of this inner loop.
outer_forward_ctxt.AddName(cnt.name)
history_cnt = outer_grad_state.AddForwardAccumulator(cnt)
outer_grad_ctxt = outer_grad_state.grad_context
outer_grad_ctxt.Enter()
self._grad_context = WhileContext(forward_ctxt.parallel_iterations,
forward_ctxt.back_prop,
forward_ctxt.name)
real_cnt = outer_grad_state.AddBackPropAccumulatedValue(history_cnt, cnt)
self._grad_index = self._grad_context.AddBackPropCounter(real_cnt)
outer_grad_ctxt.Exit()
else:
if outer_forward_ctxt: outer_forward_ctxt.Enter()
self._grad_context = WhileContext(forward_ctxt.parallel_iterations,
forward_ctxt.back_prop,
forward_ctxt.name)
self._grad_index = self._grad_context.AddBackPropCounter(cnt)
if outer_forward_ctxt: outer_forward_ctxt.Exit()
@property
def outer_grad_state(self):
"""The grad loop state for outer loop."""
return self._outer_grad_state
@property
def forward_context(self):
"""The while loop context for forward."""
return self._forward_context
@property
def forward_index(self):
"""The loop index of forward loop."""
return self._forward_index
@property
def forward_sync(self):
"""A control trigger node for synchronization in the forward loop.
One main use is to keep the push ops of a stack executed in the
iteration order.
"""
if self._forward_sync is None:
with ops.control_dependencies(None):
self._forward_sync = control_trigger(name="f_sync")
self._forward_sync._set_control_flow_context(self._forward_context)
self._forward_index.op._add_control_input(self._forward_sync)
return self._forward_sync
@property
def grad_context(self):
"""The corresponding WhileContext for gradient."""
return self._grad_context
@property
def grad_index(self):
"""The loop index of backprop loop."""
return self._grad_index
@property
def grad_sync(self):
"""A control trigger node for synchronization in the grad loop.
One main use is to keep the pop ops of a stack executed in the
iteration order.
"""
if self._grad_sync is None:
with ops.control_dependencies(None):
self._grad_sync = control_trigger(name="b_sync")
self._grad_sync._set_control_flow_context(self._grad_context)
self._grad_index.op._add_control_input(self._grad_sync)
return self._grad_sync
@property
def history_map(self):
"""The map that records all the tensors needed for backprop."""
return self._history_map
@property
def switch_map(self):
"""The map that records all the Switch ops for the While loop."""
return self._switch_map
def AddForwardAccumulator(self, value, dead_branch=False):
"""Add an accumulator for each forward tensor that is needed in backprop.
This is added to the forward loop at the first time when a tensor
in the forward loop is used by backprop gradient computation loop.
We create an accumulator that accumulates the value of tensor at each
iteration. Called in the control flow context where gradients() is called.
The pseudocode is:
```
acc = stack();
while (_pivot) {
acc = stack_push(acc, value);
}
```
We make sure that the stack push op in one iteration is executed before
next iteration. This is achieved by adding a control edge from
`forward_index.op.inputs[0].op` to the push op, and another control
edge from the push op to either `forward_index.op` or `forward_sync`.
Args:
value: The tensor that is to be accumulated.
dead_branch: True iff the tensor is on a dead branch of a cond.
Returns:
The stack that contains the accumulated history of the tensor.
"""
# TODO(yuanbyu): Make sure the colocation of stack ops and value.
# pylint: disable=protected-access
acc = gen_data_flow_ops._stack(value.dtype.base_dtype, name="f_acc")
# pylint: enable=protected-access
# Make acc available in the forward context.
enter_acc = self.forward_context.AddValue(acc)
# Add the stack_push op in the context of value.op.
value_ctxt = value.op._get_control_flow_context()
if _IsLoopExit(value.op):
value_ctxt = value_ctxt.outer_context
if value_ctxt == self.forward_context:
# value is not nested in the forward context.
self.forward_context.Enter()
push = gen_data_flow_ops._stack_push(enter_acc, value)
self.forward_context.Exit()
# Protect stack push and order it before forward_index.
self.forward_index.op._add_control_input(push.op)
else:
# value is in a cond context within the forward context.
assert isinstance(value_ctxt, CondContext)
if dead_branch:
# The special case for creating a zero tensor for a dead
# branch of a switch. See ControlFlowState.ZerosLike().
value_ctxt.outer_context.Enter()
push = gen_data_flow_ops._stack_push(enter_acc, value)
value_ctxt.outer_context.Exit()
push.op._set_control_flow_context(value_ctxt)
else:
value_ctxt.Enter()
push = gen_data_flow_ops._stack_push(enter_acc, value)
value_ctxt.Exit()
# Protect stack push and order it before forward_sync.
self.forward_sync._add_control_input(push.op)
# Order stack push after the successor of forward_index
add_op = self.forward_index.op.inputs[0].op
push.op._add_control_input(add_op)
return acc
def AddBackPropAccumulatedValue(self, history_value, value,
dead_branch=False):
"""Add the getter for an accumulated value in the grad context.
This is added to the backprop loop. Called in the grad context to
get the value of an accumulated value. The stack pop op must be guarded
by the pred of the controlling cond.
Args:
history_value: The history (a stack) of a value.
value: The value that is pushed onto the stack.
dead_branch: True iff the tensor is on a dead branch of a cond.
Returns:
The current value (the top of the stack).
"""
history_ctxt = history_value.op._get_control_flow_context()
# Find the cond context that controls history_value.
cond_ctxt = None
value_ctxt = value.op._get_control_flow_context()
while value_ctxt and value_ctxt != history_ctxt:
if isinstance(value_ctxt, CondContext):
cond_ctxt = value_ctxt
break
value_ctxt = value_ctxt.outer_context
if cond_ctxt:
# Guard stack pop with a switch if it is controlled by a cond
grad_state = self
pred = None
while not pred and grad_state:
pred = grad_state.history_map.get(cond_ctxt.pred.name)
grad_state = grad_state.outer_grad_state
branch = (1 - cond_ctxt.branch) if dead_branch else cond_ctxt.branch
history_value = _SwitchRefOrTensor(history_value, pred)[branch]
pop = gen_data_flow_ops._stack_pop(history_value, value.dtype.base_dtype)
if self.grad_context.parallel_iterations > 1:
# All pops are ordered after pivot_for_body and before grad_sync.
self.grad_sync._add_control_input(pop.op)
return pop
def GetRealValue(self, value):
"""Get the real value.
If backprop "uses" a value produced by forward inference, an
accumulator is added in the forward loop to accumulate its values.
We use the accumulated value.
Args:
value: A tensor to be captured.
Returns:
The same tensor value from the saved history.
"""
assert value.op.type != "Variable"
real_value = self._history_map.get(value.name)
if real_value is None:
if _IsLoopConstantEnter(value.op):
# Special case for loop invariant.
if self._outer_grad_state:
# This is a nested loop so we record the history of this
# value in outer_forward_ctxt.
self._grad_context.Exit()
outer_value = value.op.inputs[0]
history_value = self._outer_grad_state.AddForwardAccumulator(
outer_value)
self._grad_context.Enter()
else:
# Just use the input value of this Enter node.
real_value = GetRealOp(value.op).inputs[0]
else:
# Record the history of this value in forward_ctxt.
# NOTE(yuanbyu): Don't record for constants.
self._grad_context.Exit()
history_value = self.AddForwardAccumulator(value)
self._grad_context.Enter()
if real_value is None:
# Add the stack pop op in the grad context.
real_value = self.AddBackPropAccumulatedValue(history_value, value)
self._history_map[value.name] = real_value
return real_value
def _GetWhileContext(op):
  """Get the WhileContext to which this op belongs."""
  ctxt = op._get_control_flow_context()
  if ctxt:
    ctxt = ctxt.GetWhileContext()
  return ctxt
class ControlFlowState(object):
"""Maintain the mapping from the loops to their grad states."""
def __init__(self):
self._map = {} # maps forward loop context to GradLoopState
def _GetGradState(self, op):
"""Get the gradient loop state for this op if any."""
if _IsLoopExit(op):
forward_ctxt = op._get_control_flow_context()
forward_ctxt = forward_ctxt.outer_context
if forward_ctxt:
forward_ctxt = forward_ctxt.GetWhileContext()
else:
forward_ctxt = _GetWhileContext(op)
if forward_ctxt:
return self._map.get(forward_ctxt)
return None
def MakeWrapper(self, op):
"""Make a wrapper for op if it is in a WhileContext."""
forward_ctxt = _GetWhileContext(op)
if forward_ctxt:
grad_state = self._map.get(forward_ctxt)
if grad_state:
return ControlFlowOpWrapper(op, grad_state)
return op
def GetAllLoopExits(self):
"""Return a list containing the exits of all the loops."""
loop_exits = []
for forward_ctxt in self._map:
for loop_exit in forward_ctxt.loop_exits:
loop_exits.append(loop_exit)
return loop_exits
def EnterGradWhileContext(self, op):
"""Enter the WhileContext for gradient computation."""
grad_state = self._GetGradState(op)
if grad_state:
grad_state.grad_context.Enter()
def ExitGradWhileContext(self, op):
"""Exit the WhileContext for gradient computation."""
grad_state = self._GetGradState(op)
if grad_state:
grad_state.grad_context.Exit()
def AddWhileContext(self, op, between_op_list, between_ops):
"""Add the grad state for the while loop that op belongs to.
Note that op is an Exit, and this method must be called in
the control flow context where gradients() is called.
Note that this method modifies `between_op_list` and `between_ops`.
"""
forward_ctxt = _GetWhileContext(op)
grad_state = self._map.get(forward_ctxt)
if grad_state is None:
# This is a new while loop so create a grad state for it.
outer_forward_ctxt = forward_ctxt.outer_context
if outer_forward_ctxt:
outer_forward_ctxt = outer_forward_ctxt.GetWhileContext()
outer_grad_state = None
if outer_forward_ctxt:
outer_grad_state = self._map.get(outer_forward_ctxt)
grad_state = GradLoopState(forward_ctxt, outer_grad_state)
self._map[forward_ctxt] = grad_state
# We need to include all exits of a loop for backprop.
for loop_exit in forward_ctxt.loop_exits:
if not between_ops[loop_exit.op._id]:
between_ops[loop_exit.op._id] = True
between_op_list.append(loop_exit.op)
def ZerosLikeForExit(self, val):
"""Create zeros_like gradient for a loop exit.
If the result of a loop variable is not used but is involved in
computing the result of some needed loop variable, we create a
zero-valued tensor that is fed as gradient for the Exit node of that
loop variable. Note that val.op is an Exit, and this method must be
called in the control flow context where gradients() is called.
Args:
val: The output tensor of an Exit op.
Returns:
A zero tensor with the same shape as val.
"""
val_shape = val.get_shape()
forward_ctxt = val.op._get_control_flow_context()
outer_forward_ctxt = forward_ctxt.outer_context
if outer_forward_ctxt:
outer_forward_ctxt = outer_forward_ctxt.GetWhileContext()
outer_grad_state = None
if outer_forward_ctxt:
outer_grad_state = self._map.get(outer_forward_ctxt)
if outer_grad_state:
# This is a nested loop.
if val_shape.is_fully_defined():
# If the shape is known statically, just create a zero tensor
# with the right shape in the right context.
outer_grad_state.grad_context.Enter()
result = array_ops.zeros(val_shape.dims, val.dtype)
outer_grad_state.grad_context.Exit()
else:
history_val = outer_grad_state.AddForwardAccumulator(val)
outer_grad_ctxt = outer_grad_state.grad_context
outer_grad_ctxt.Enter()
real_val = outer_grad_state.AddBackPropAccumulatedValue(
history_val, val)
result = array_ops.zeros_like(real_val)
outer_grad_ctxt.Exit()
else:
# This is not a nested loop.
if val_shape.is_fully_defined():
# If the shape is known statically, just create a zero tensor
# with the right shape.
result = array_ops.zeros(val_shape.dims, val.dtype)
else:
result = array_ops.zeros_like(val)
return result
def ZerosLike(self, op, index):
"""Create zeros_like for the specified output of an op.
This method must be called in the grad loop context.
Args:
op: A tensorflow operation.
index: the index for a specific output of the op.
Returns:
A zero tensor with the same shape as op.outputs[index].
"""
if IsLoopSwitch(op): return None
dead_branch = op.type in {"Switch", "RefSwitch"}
forward_ctxt = _GetWhileContext(op)
if forward_ctxt is None:
return array_ops.zeros_like(op.outputs[index])
op_ctxt = op._get_control_flow_context()
grad_state = self._map.get(forward_ctxt)
val = ops.convert_to_tensor(op.outputs[index], name="tensor")
shape = val.get_shape()
if shape.is_fully_defined():
# If the shape is known statically, just create a zero tensor with
# the right shape in the grad loop context.
result = constant_op.constant(0, shape=shape.dims, dtype=val.dtype)
if dead_branch:
# op is a cond switch. Guard the zero tensor with a switch.
pred = grad_state.history_map.get(op_ctxt.pred.name)
branch = op_ctxt.branch
result = _SwitchRefOrTensor(result, pred)[1 - branch]
else:
# Unknown shape so keep a history of the shape at runtime.
if dead_branch:
# Need to add a special switch to guard the value.
pred = op_ctxt.pred
branch = op_ctxt.branch
op_ctxt.outer_context.Enter()
val = _SwitchRefOrTensor(op.inputs[0], pred)[1 - branch]
zeros_shape = array_ops.shape(val)
op_ctxt.outer_context.Exit()
val.op._set_control_flow_context(op_ctxt)
zeros_shape.op._set_control_flow_context(op_ctxt)
else:
op_ctxt.Enter()
zeros_shape = array_ops.shape(val)
op_ctxt.Exit()
# Add forward accumulator for shape.
grad_state.grad_context.Exit()
history_shape = grad_state.AddForwardAccumulator(zeros_shape, dead_branch)
grad_state.grad_context.Enter()
# Create a zero tensor with the right shape.
shape = grad_state.AddBackPropAccumulatedValue(
history_shape, zeros_shape, dead_branch)
result = array_ops.zeros(shape, val.dtype)
return result
def GetRealOp(op):
  """Get the real op by removing the wrapper."""
  while isinstance(op, ControlFlowOpWrapper):
    op = op.op
  return op


def MaybeCreateControlFlowState(between_op_list, between_ops):
  """Create the state for all the while loops involved in one gradients().

  We create a ControlFlowState when there are while loops involved in
  gradients(). In gradients(), control flow logic is only invoked when
  the ControlFlowState is not None.

  Note that this method modifies `between_op_list` and `between_ops`.
  """
  loop_state = None
  for op in between_op_list:
    if _IsLoopExit(op):
      if loop_state is None:
        loop_state = ControlFlowState()
      loop_state.AddWhileContext(op, between_op_list, between_ops)
  return loop_state


def IsLoopSwitch(op):
  """Return true if `op` is the Switch for a While loop."""
  if op.type == "Switch" or op.type == "RefSwitch":
    ctxt = op._get_control_flow_context()
    return ctxt and isinstance(ctxt, WhileContext)
  return False
class ControlFlowContext(object):
  """The base class for control flow context.

  The usage pattern is a sequence of (Enter, Exit) followed by a final
  ExitResult.

  We maintain the following state for control flow contexts during graph
  construction:
    1. graph has _control_flow_context: the current context used to
       construct new nodes. Changed by ctxt.Enter() and ctxt.Exit()
    2. op has _control_flow_context: the context to which the op belongs.
       Set at the time the op is created. Immutable.
    3. A ControlFlowContext has _outer_context: the context in which this
       context is created. Set at the time a context is created. Immutable.
    4. A ControlFlowContext has _context_stack.
       Pushed and popped by ctxt.Enter() and ctxt.Exit()
  """

  def __init__(self):
    self._outer_context = ops.get_default_graph()._get_control_flow_context()
    self._context_stack = []
    # Values that have been already seen in this context.
    self._values = set()
    # Values referenced by but external to this context.
    self._external_values = {}

  @property
  def outer_context(self):
    """Return the context containing this context."""
    return self._outer_context

  def AddName(self, name):
    self._values.add(name)

  # pylint: disable=protected-access
  def Enter(self):
    """Enter this control flow context."""
    graph = ops.get_default_graph()
    self._context_stack.append(graph._get_control_flow_context())
    graph._set_control_flow_context(self)

  def Exit(self):
    """Exit this control flow context."""
    graph = ops.get_default_graph()
    last_context = self._context_stack.pop()
    graph._set_control_flow_context(last_context)

  def ExitResult(self, result):
    """Make a list of tensors available in the outer context."""
    if self._outer_context:
      for x in result:
        self._outer_context.AddName(x.name)

  def GetWhileContext(self):
    """Return the while context containing this context."""
    if self._outer_context:
      return self._outer_context.GetWhileContext()
    return None

  def MaybeAddToWhileContext(self, op):
    """Add a control dependency to the containing WhileContext.

    The added control dependency ensures that the outputs of this op
    belong to the WhileContext. Do nothing if the op is not contained
    in a WhileContext.

    Args:
      op: An operation.
    """
    while_ctxt = self.GetWhileContext()
    if while_ctxt is not None:
      # pylint: disable=protected-access
      op._add_control_input(while_ctxt.GetControlPivot().op)
      # pylint: enable=protected-access
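As a plain-Python sketch (not TF code) of the Enter/Exit stack discipline described in the class docstring above; the single class-level "current" slot stands in for the graph's _control_flow_context and everything here is illustrative:

class _CtxSketch(object):
  current = None  # stands in for graph._control_flow_context

  def __init__(self):
    self._stack = []

  def Enter(self):
    # Save whatever context was active, then make this one current.
    self._stack.append(_CtxSketch.current)
    _CtxSketch.current = self

  def Exit(self):
    # Restore the context that was active before the matching Enter().
    _CtxSketch.current = self._stack.pop()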
class CondContext(ControlFlowContext):
  """The context for the conditional construct."""

  def __init__(self, pred, pivot, branch):
    ControlFlowContext.__init__(self)
    self._pred = pred      # The boolean tensor for the cond predicate
    self._pivot = pivot    # The predicate tensor in this branch
    self._branch = branch  # 0 or 1 representing this branch

    # Values considered to have been already seen in this context.
    self._values.add(pred.name)
    self._values.add(pivot.name)

  @property
  def pred(self):
    return self._pred

  @property
  def pivot(self):
    return self._pivot

  @property
  def branch(self):
    return self._branch

  def AddValue(self, val):
    """Add `val` to the current context and its outer context recursively."""
    result = val
    if val.name not in self._values:
      self._values.add(val.name)
      if self._outer_context:
        result = self._outer_context.AddValue(val)
        self._values.add(result.name)
      with ops.control_dependencies(None):
        result = _SwitchRefOrTensor(result, self._pred)[self._branch]
      # pylint: disable=protected-access
      result.op._set_control_flow_context(self)
      # pylint: enable=protected-access
      self._values.add(result.name)
      self._external_values[val.name] = result
    return result

  def AddOp(self, op):
    """Add `op` to the current context."""
    if not op.inputs:
      # Add this op to the enclosing while context
      self.MaybeAddToWhileContext(op)
      # pylint: disable=protected-access
      op._add_control_input(self._pivot.op)
      # pylint: enable=protected-access
      for x in op.outputs:
        self._values.add(x.name)
    else:
      for index in range(len(op.inputs)):
        x = op.inputs[index]
        if x.name not in self._values:
          self._values.add(x.name)
          # Add this value to the parent contexts up to the context that
          # creates this value.
          real_x = x
          if self._outer_context:
            real_x = self._outer_context.AddValue(x)
            self._values.add(real_x.name)
          real_x = _SwitchRefOrTensor(real_x, self._pred)[self._branch]
          self._external_values[x.name] = real_x
        x = self._external_values.get(x.name)
        if x is not None:
          op._update_input(index, x)
      for x in op.outputs:
        self._values.add(x.name)

  def BuildCondBranch(self, fn):
    """Add the subgraph defined by fn() to the graph."""
    r = fn()
    result = []
    if r is not None:
      if not isinstance(r, list) and not isinstance(r, _basetuple):
        r = [r]
      for v in r:
        real_v = v
        if isinstance(v, ops.Operation):
          # Use pivot as the proxy for this op.
          real_v = with_dependencies([v], self._pivot)
        elif v.name not in self._values:
          # Handle the special case of lambda: x
          self._values.add(v.name)
          if self._outer_context:
            real_v = self._outer_context.AddValue(v)
            self._values.add(real_v.name)
          real_v = _SwitchRefOrTensor(real_v, self._pred)[self._branch]
          self._external_values[v.name] = real_v
        else:
          external_v = self._external_values.get(v.name)
          if external_v is not None:
            real_v = external_v
        result.append(real_v)
    return result
def cond(pred, fn1, fn2, name=None):
  """Return either fn1() or fn2() based on the boolean predicate `pred`.

  `fn1` and `fn2` both return lists of output tensors. `fn1` and `fn2` must
  have the same non-zero number and type of outputs.

  Args:
    pred: A scalar determining whether to return the result of `fn1` or `fn2`.
    fn1: The function to be performed if pred is true.
    fn2: The function to be performed if pred is false.
    name: Optional name prefix for the returned tensors.

  Returns:
    Tensors returned by the call to either `fn1` or `fn2`. If the functions
    return a singleton list, the element is extracted from the list.

  Raises:
    TypeError: if `fn1` or `fn2` is not callable.
    ValueError: if `fn1` and `fn2` do not return the same number of tensors, or
      return tensors of different types.

  Example:

  ```python
    x = tf.constant(2)
    y = tf.constant(5)
    def f1(): return tf.mul(x, 17)
    def f2(): return tf.add(y, 23)
    r = cond(math_ops.less(x, y), f1, f2)
    # r is set to f1().
    # Operations in f2 (e.g., tf.add) are not executed.
  ```
  """
  with ops.op_scope([pred], name, "cond") as name:
    if not callable(fn1):
      raise TypeError("fn1 must be callable.")
    if not callable(fn2):
      raise TypeError("fn2 must be callable.")

    # Add the Switch to the graph.
    if isinstance(pred, bool):
      raise TypeError("pred must not be a Python bool")
    p_2, p_1 = switch(pred, pred)
    pivot_1 = array_ops.identity(p_1, name="switch_t")
    pivot_2 = array_ops.identity(p_2, name="switch_f")
    pred = array_ops.identity(pred, name="pred_id")

    # Build the graph for the true branch in a new context.
    context_t = CondContext(pred, pivot_1, 1)
    context_t.Enter()
    res_t = context_t.BuildCondBranch(fn1)
    context_t.ExitResult(res_t)
    context_t.Exit()

    # Build the graph for the false branch in a new context.
    context_f = CondContext(pred, pivot_2, 0)
    context_f.Enter()
    res_f = context_f.BuildCondBranch(fn2)
    context_f.ExitResult(res_f)
    context_f.Exit()

    # Add the final merge to the graph.
    if len(res_t) != len(res_f):
      raise ValueError("fn1 and fn2 must return the same number of results.")
    if not res_t:
      raise ValueError("fn1 and fn2 must return at least one result.")
    for x, y in zip(res_f, res_t):
      assert ((isinstance(x, ops.IndexedSlices) and
               isinstance(y, ops.IndexedSlices)) or
              (isinstance(x, ops.Tensor) and isinstance(y, ops.Tensor)))
      val_x = x if isinstance(x, ops.Tensor) else x.values
      val_y = y if isinstance(y, ops.Tensor) else y.values
      if val_x.dtype.base_dtype != val_y.dtype.base_dtype:
        raise ValueError("Outputs of fn1 and fn2 must have the same type: "
                         "%s, %s" % (val_x.dtype.name, val_y.dtype.name))
    merges = [merge([x[0], x[1]])[0] for x in zip(res_f, res_t)]
    return merges[0] if len(merges) == 1 else merges
# TODO(yuanbyu): Consider having a unified notion of context for
# not only conditionals and loops but also control dependency and
# subgraphs.
class WhileContext(ControlFlowContext):
  """The context for the loop construct."""

  def __init__(self, parallel_iterations, back_prop, name):
    ControlFlowContext.__init__(self)
    self._name = ops.get_default_graph().unique_name(name)
    self._parallel_iterations = parallel_iterations
    self._back_prop = back_prop
    # We use this node to control constants created by the pred lambda.
    self._pivot_for_pred = None
    # We use this node to control constants created by the body lambda.
    self._pivot_for_body = None
    # The boolean tensor for loop termination condition. Used in code
    # generation for gradient computation.
    self._pivot = None
    # The list of exit tensors for loop variables.
    self._loop_exits = None

  @property
  def name(self):
    return self._name

  @property
  def parallel_iterations(self):
    """The number of iterations allowed to run in parallel."""
    return self._parallel_iterations

  @property
  def back_prop(self):
    """True iff backprop is enabled for this While loop."""
    return self._back_prop

  @property
  def pivot(self):
    """The boolean tensor representing the loop termination condition."""
    return self._pivot

  @property
  def loop_exits(self):
    """The list of exit tensors for loop variables."""
    return self._loop_exits

  def GetWhileContext(self):
    return self

  def GetControlPivot(self):
    if self._pivot_for_body:
      return self._pivot_for_body
    return self._pivot_for_pred

  def AddValue(self, val):
    """Add `val` to the current context and its outer context recursively."""
    result = val
    if val.name not in self._values:
      self._values.add(val.name)
      if self._outer_context is not None:
        result = self._outer_context.AddValue(val)
      # Create an Enter to make `result` known to this loop context.
      with ops.control_dependencies(None):
        enter = _Enter(result, self._name, is_constant=True,
                       parallel_iterations=self._parallel_iterations)
      # pylint: disable=protected-access
      enter.op._set_control_flow_context(self)
      # pylint: enable=protected-access
      # Add `enter` in this context.
      self._values.add(enter.name)
      self._external_values[val.name] = enter
      result = enter
    else:
      actual_val = self._external_values.get(val.name)
      if actual_val is not None:
        result = actual_val
    return result

  def AddOp(self, op):
    """Adds `op` to the current context."""
    if not op.inputs:
      if not op.control_inputs:
        # Add a control edge from the control pivot to this op.
        # pylint: disable=protected-access
        op._add_control_input(self.GetControlPivot().op)
        # pylint: enable=protected-access
      else:
        # Control edges must be in the same context.
        for x in op.control_inputs:
          assert x._get_control_flow_context() == self, (
              "Control inputs must come from Operations in the same while "
              "loop context (not an outer context).")
      for x in op.outputs:
        self._values.add(x.name)
    else:
      for index in range(len(op.inputs)):
        x = op.inputs[index]
        self.AddValue(x)
        real_x = self._external_values.get(x.name)
        if real_x is not None:
          op._update_input(index, real_x)
          # Add a control dependency to prevent loop invariants from
          # enabling ops that should not be executed.
          if real_x.op.type == "RefEnter" and real_x.op.get_attr("is_constant"):
            # pylint: disable=protected-access
            op._add_control_input(self.GetControlPivot().op)
            # pylint: enable=protected-access
      for x in op.outputs:
        self._values.add(x.name)

  def AddForwardCounter(self):
    """Adds a loop that counts the number of iterations.

    This is added to the forward loop at the time when we start to
    create the loop for backprop gradient computation. Called in
    the outer context of this forward context.

    The pseudocode is:
      `n = 0; while (_pivot) { n++; }`

    Returns:
      The number of iterations taken by the forward loop and the loop index.
    """
    n = constant_op.constant(0, name="f_count")
    assert n.op._get_control_flow_context() == self.outer_context

    self.Enter()
    self.AddName(n.name)
    enter_n = _Enter(n, self._name, is_constant=False,
                     parallel_iterations=self._parallel_iterations,
                     name="f_count")
    merge_n = merge([enter_n, enter_n])[0]
    switch_n = switch(merge_n, self._pivot)

    index = math_ops.add(switch_n[1], 1)
    next_n = _NextIteration(index)
    merge_n.op._update_input(1, next_n)

    total_iterations = exit(switch_n[0], name="f_count")
    self.ExitResult([total_iterations])
    self.Exit()
    return total_iterations, next_n

  def AddBackPropCounter(self, count):
    """Add the backprop loop that controls the iterations.

    This is added to the backprop loop. It is used to control the loop
    termination of the backprop loop. Called in the outer context of
    this grad context.

    The pseudocode is:
      `n = count; while (n >= 1) { n--; }`

    Args:
      count: The number of iterations for backprop.

    Returns:
      The loop index.
    """
    one = constant_op.constant(1, name="b_count")
    self.Enter()
    self.AddName(count.name)
    enter_count = _Enter(count, self._name, is_constant=False,
                         parallel_iterations=self._parallel_iterations,
                         name="b_count")
    merge_count = merge([enter_count, enter_count])[0]
    self._pivot_for_pred = merge_count

    cond = math_ops.greater_equal(merge_count, one)
    self._pivot = loop_cond(cond, name="b_count")
    switch_count = switch(merge_count, self._pivot)

    index = math_ops.sub(switch_count[1], one)
    self._pivot_for_body = index
    next_count = _NextIteration(index)
    merge_count.op._update_input(1, next_count)

    self.Exit()
    return next_count

  def AddBackPropAccumulator(self, value):
    """Add an accumulation loop for every loop invariant.

    This is added to the backprop loop. It is used to accumulate
    partial gradients within each loop iteration. Called when in the
    gradient while context.

    The pseudocode is:
      ```
      acc = 0.0;
      while (_pivot) {
        acc += value;
      }
      ```

    Args:
      value: The partial gradient of an iteration for a loop invariant.

    Returns:
      The gradient for a loop invariant.
    """
    self.Exit()
    if self.outer_context: self.outer_context.Enter()
    acc = constant_op.constant(0, value.dtype, name="b_acc")
    if self.outer_context: self.outer_context.Exit()
    self.Enter()
    self.AddName(acc.name)
    enter_acc = _Enter(acc, self._name, is_constant=False,
                       parallel_iterations=self._parallel_iterations,
                       name="b_acc")
    merge_acc = merge([enter_acc, enter_acc], name="b_acc")[0]
    switch_acc = switch(merge_acc, self._pivot)

    add_acc = math_ops.add(switch_acc[1], value)
    next_acc = _NextIteration(add_acc)
    merge_acc.op._update_input(1, next_acc)

    acc_result = exit(switch_acc[0], name="b_acc")
    self.ExitResult([acc_result])
    return acc_result
  def BuildLoop(self, pred, body, loop_vars):
    """Add the loop termination condition and body to the graph."""
    # Keep original_loop_vars to identify which are TensorArrays
    original_loop_vars = loop_vars
    # Convert TensorArrays to their flow variables
    loop_vars = _convert_tensorarrays_to_flows(loop_vars)
    loop_vars = ops.convert_n_to_tensor_or_indexed_slices(loop_vars)
    # Let the context know the loop variables so they can be added to
    # the outer contexts properly.
    self._values = set([x.name for x in loop_vars])
    real_vars = loop_vars
    if self._outer_context:
      real_vars = [self._outer_context.AddValue(x) for x in loop_vars]
    with ops.control_dependencies(None):
      enter_vars = [_Enter(x, self._name, is_constant=False,
                           parallel_iterations=self._parallel_iterations)
                    for x in real_vars]
    for x in enter_vars:
      x.op._set_control_flow_context(self)  # pylint: disable=protected-access
    self._values = set([x.name for x in enter_vars])

    merge_vars = [merge([x, x])[0] for x in enter_vars]
    self._pivot_for_pred = merge_vars[0]

    # Build the graph for pred.
    merge_vars_with_tensor_arrays = (
        _convert_flows_to_tensorarrays(original_loop_vars, merge_vars))
    c = ops.convert_to_tensor(pred(*merge_vars_with_tensor_arrays))
    self._pivot = loop_cond(c, name="LoopCond")
    switch_vars = [_SwitchRefOrTensor(x, self._pivot) for x in merge_vars]

    # Build the graph for body.
    vars_for_body = [_Identity(x[1]) for x in switch_vars]
    self._pivot_for_body = vars_for_body[0]
    # Convert TensorArray flow variables inside the context back into
    # their associated TensorArrays for calling the body.
    vars_for_body_with_tensor_arrays = (
        _convert_flows_to_tensorarrays(original_loop_vars, vars_for_body))
    body_result = body(*vars_for_body_with_tensor_arrays)
    if not isinstance(body_result, collections.Sequence):
      body_result = [body_result]
    # Store body_result to keep track of TensorArrays returned by body
    original_body_result = body_result
    # Convert TensorArrays returned by body into their flow variables
    result = _convert_tensorarrays_to_flows(body_result)
    result = ops.convert_n_to_tensor_or_indexed_slices(result)
    next_vars = [_NextIteration(x) for x in result]

    # Add the back edges to complete the loop.
    assert len(merge_vars) == len(next_vars)
    for x in zip(merge_vars, next_vars):
      x[0].op._update_input(1, x[1])

    # Add the exit ops.
    exit_vars = [exit(x[0]) for x in switch_vars]
    self._loop_exits = exit_vars
    for m_var, n_var, e_var in zip(merge_vars, next_vars, exit_vars):
      if m_var.get_shape().is_compatible_with(n_var.get_shape()):
        e_var.set_shape(m_var.get_shape().merge_with(n_var.get_shape()))

    # Exit the loop.
    self.ExitResult(exit_vars)
    # Convert TensorArray flow variables outside the context back into
    # their associated TensorArrays for returning to caller.
    exit_vars_with_tensor_arrays = (
        _convert_flows_to_tensorarrays(original_body_result, exit_vars))
    return (exit_vars_with_tensor_arrays[0]
            if len(exit_vars) == 1
            else exit_vars_with_tensor_arrays)
def While(cond, body, loop_vars, parallel_iterations=10, back_prop=True,
name=None):
"""Repeat `body` while the condition `cond` is true.
`cond` is a function taking a list of tensors and returning a boolean scalar
tensor. `body` is a function taking a list of tensors and returning a list of
tensors of the same length and with the same types as the input. `loop_vars`
is a list of tensors that is passed to both `cond` and `body`.
In addition to regular Tensors or IndexedSlices, the body may accept and
return TensorArray objects. The flows of the TensorArray objects will
be appropriately forwarded between loops and during gradient calculations.
While `cond` evaluates to true, `body` is executed.
Args:
cond: The termination condition of the loop.
body: A function that represents the loop body.
loop_vars: The list of variable input tensors.
parallel_iterations: The number of iterations allowed to run in parallel.
back_prop: Whether backprop is enabled for this while loop.
name: Optional name prefix for the returned tensors.
Returns:
The output tensors for the loop variables after the loop.
Raises:
TypeError: if `cond` or `body` is not callable.
ValueError: if `loop_var` is empty.
Example:
```python
i = constant(0)
c = lambda i: math_ops.less(i, 10)
b = lambda i: math_ops.add(i, 1)
r = While(c, b, [i])
```
"""
with ops.op_scope(loop_vars, name, "While") as name:
if not loop_vars:
raise ValueError("No loop variables provided")
if not callable(cond):
raise TypeError("cond must be callable.")
if not callable(body):
raise TypeError("body must be callable.")
context = WhileContext(parallel_iterations, back_prop, name)
context.Enter()
result = context.BuildLoop(cond, body, loop_vars)
context.Exit()
return result
def _AsTensorList(x, p):
"""Return x as a list of Tensors or IndexedSlices.
For entries of `x` that are Operations, this returns an Identity of `p`
with a dependency on the operation.
Args:
x: A Tensor/IndexedSlices/Operation or a list or tuple of them.
p: A Tensor to return for entries in `x` that are Operations.
Returns:
A list of Tensors or IndexedSlices.
"""
if not isinstance(x, (list, _basetuple)):
x = [x]
l = []
for v in x:
if isinstance(v, ops.Operation):
v = with_dependencies([v], p)
v = ops.convert_to_tensor_or_indexed_slices(v)
if isinstance(v, ops.Tensor):
l.append(array_ops.identity(v))
else:
l.append(ops.IndexedSlices(array_ops.identity(v.values),
array_ops.identity(v.indices)))
return l
def _CheckResults(a, b):
assert len(a) == len(b), (
"Values returned by a() and b() must have the same length.")
for x, y in zip(a, b):
assert x.dtype == y.dtype, (
"Values returned by a() [%s] and b() [%s] must have "
"the same type: %s, %s." %
(x.name, y.name, x.dtype.name, y.dtype.name))
def with_dependencies(dependencies, output_tensor, name=None):
"""Produces the content of `output_tensor` only after `dependencies`.
In some cases, a user may want the output of an operation to be
consumed externally only after some other dependencies have run
  first. This function returns `output_tensor`, but only after all
  operations in `dependencies` have run. Note that this does not
  guarantee that the original `output_tensor` itself will be evaluated
  after any `dependencies` have run; the ordering guarantee applies only
  to the tensor returned by this function.
See also `tuple` and `group`.
Args:
dependencies: A list of operations to run before this op finishes.
output_tensor: A `Tensor` or `IndexedSlices` that will be returned.
name: (Optional) A name for this operation.
Returns:
Same as `output_tensor`.
Raises:
TypeError: if `output_tensor` is not a `Tensor` or `IndexedSlices`.
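  Example (illustrative sketch; `counter` and `x` are hypothetical tensors):
  ```python
  inc = state_ops.assign_add(counter, 1)
  # `out` carries the value of `x`, produced only after `inc` has run.
  out = with_dependencies([inc], x)
  ```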
"""
with ops.op_scope(dependencies + [output_tensor], name,
"control_dependency") as name:
with ops.device(output_tensor.device):
with ops.control_dependencies(dependencies):
output_tensor = ops.convert_to_tensor_or_indexed_slices(output_tensor)
if isinstance(output_tensor, ops.Tensor):
return _Identity(output_tensor, name=name)
else:
return ops.IndexedSlices(_Identity(output_tensor.values, name=name),
output_tensor.indices,
output_tensor.dense_shape)
def _GroupControlDeps(dev, deps, name=None):
with ops.control_dependencies(deps):
if dev is None:
return no_op(name=name)
else:
with ops.device(dev):
return no_op(name=name)
# TODO(touts): Accept "inputs" as a list.
def group(*inputs, **kwargs):
"""Create an op that groups multiple operations.
  When this op finishes, all ops in `inputs` have finished. This op has no
output.
See also `tuple` and `with_dependencies`.
Args:
*inputs: One or more tensors to group.
**kwargs: Optional parameters to pass when constructing the NodeDef.
name: A name for this operation (optional).
Returns:
An Operation that executes all its inputs.
Raises:
ValueError: If an unknown keyword argument is provided, or if there are
no inputs.
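  Example (illustrative sketch; the assign ops are hypothetical):
  ```python
  # When `updates` has run, all three assign ops have finished.
  updates = group(assign_a, assign_b, assign_c)
  ```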
"""
name = kwargs.pop("name", None)
if kwargs:
raise ValueError("Unknown keyword arguments: " + ", ".join(kwargs.keys()))
if not inputs:
# TODO(touts): Would make sense to return a NoOp.
raise ValueError("No inputs provided")
with ops.op_scope(inputs, name, "group_deps") as name:
# Sorts *inputs according to their devices.
ops_on_device = {} # device -> operations specified on the device.
for inp in inputs:
dev = inp.device
if dev in ops_on_device:
ops_on_device[dev].append(inp)
else:
ops_on_device[dev] = [inp]
if len(ops_on_device) == 1:
# 1-level tree. The root node is the returned NoOp node.
(dev, deps), = ops_on_device.items()
return _GroupControlDeps(dev, deps, name=name)
# 2-level tree. The root node is the returned NoOp node.
# deps contains 1 NoOp node for each device.
deps = []
def device_key(dev):
"""A sort key that allows None to be compared to strings."""
return "" if dev is None else dev
for dev in sorted(six.iterkeys(ops_on_device), key=device_key):
deps.append(_GroupControlDeps(dev, ops_on_device[dev]))
return _GroupControlDeps(None, deps, name=name)
def tuple(tensors, name=None, control_inputs=None):
"""Group tensors together.
This creates a tuple of tensors with the same values as the `tensors`
argument, except that the value of each tensor is only returned after the
values of all tensors have been computed.
`control_inputs` contains additional ops that have to finish before this op
finishes, but whose outputs are not returned.
This can be used as a "join" mechanism for parallel computations: all the
argument tensors can be computed in parallel, but the values of any tensor
returned by `tuple` are only available after all the parallel computations
are done.
See also `group` and `with_dependencies`.
Args:
tensors: A list of `Tensor`s or `IndexedSlices`, some entries can be `None`.
name: (optional) A name to use as a `name_scope` for the operation.
control_inputs: List of additional ops to finish before returning.
Returns:
Same as `tensors`.
Raises:
ValueError: If `tensors` does not contain any `Tensor` or `IndexedSlices`.
TypeError: If `control_inputs` is not a list of `Operation` or `Tensor`
objects.
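  Example (illustrative sketch; `t1` and `t2` are hypothetical tensors):
  ```python
  # Both outputs become available only once t1 and t2 have both been
  # computed, so `tuple` acts as a join point for parallel computations.
  t1_joined, t2_joined = tuple([t1, t2])
  ```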
"""
with ops.op_scope(tensors, name, "tuple") as name:
gating_ops = [t.op for t in tensors if t]
if control_inputs:
for c in control_inputs:
if isinstance(c, ops.Tensor):
c = c.op
elif not isinstance(c, ops.Operation):
raise TypeError("Control input must be Operation or Tensor: %s" % c)
gating_ops.append(c)
# Note that in order to ensure ordering in the pbtxt, we must take care to
# ensure the order here.
gating_ops = sorted(set(gating_ops), key=lambda op: op._id) # Uniquify ops.
if not gating_ops:
raise ValueError("Must have at least one Tensor: %s" % tensors)
gate = group(*gating_ops)
tpl = []
for t in tensors:
if t:
tpl.append(with_dependencies([gate], t))
else:
tpl.append(None)
return tpl
# TODO(yuanbyu, mrry): Handle stride to support sliding windows.
def foldl(fn, elems, initializer=None, name=None):
"""The foldl operator on the unpacked tensors of a tensor.
This foldl operator applies the function `fn` to a sequence of elements
from left to right. The elements are made of the tensors unpacked from
`elems`. If `initializer` is None, `elems` must contain at least one
element.
Args:
fn: The function to be performed.
elems: A tensor to be unpacked.
initializer: (optional) The initial value for the accumulator.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor resulting from applying `fn` consecutively on each
element/slice of `elems`, from left to right.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
sum = foldl(lambda a, x: a + x, elems)
```
"""
with ops.op_scope([elems], name, "foldl") as name:
if not callable(fn):
raise TypeError("fn must be callable.")
# Convert elems to tensor array.
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False)
elems_ta = elems_ta.unpack(elems)
if initializer is None:
a = elems_ta.read(0)
i = constant_op.constant(1)
else:
a = ops.convert_to_tensor(initializer)
i = constant_op.constant(0)
def compute(i, a):
a = fn(a, elems_ta.read(i))
return [i + 1, a]
_, r_a = While(lambda i, a: i < n, compute, [i, a])
return r_a
def foldr(fn, elems, initializer=None, name=None):
"""The foldr operator operator on the unpacked tensors of a tensor.
This foldr operator applies the function `fn` to a sequence of elements
from right to left. The elements are made of the tensors unpacked from
`elems`. If `initializer` is None, `elems` must contain at least one
element.
Args:
fn: The function to be performed.
elems: A tensor that is unpacked into a sequence of tensors to apply `fn`.
initializer: (optional) The initial value for the accumulator.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor resulting from applying `fn` consecutively on each
element/slice of `elems`, from right to left.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
sum = foldr(lambda a, x: a + x, elems)
```
"""
with ops.op_scope([elems], name, "foldr") as name:
if not callable(fn):
raise TypeError("fn must be callable.")
# Convert elems to tensor array.
n = array_ops.shape(elems)[0]
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=n,
dynamic_size=False)
elems_ta = elems_ta.unpack(elems)
if initializer is None:
i = n - 1
a = elems_ta.read(i)
else:
i = n
a = ops.convert_to_tensor(initializer)
def compute(i, a):
i -= 1
a = fn(a, elems_ta.read(i))
return [i, a]
_, r_a = While(lambda i, a: i > 0, compute, [i, a])
return r_a
def map(fn, elems, dtype=None, name=None):
"""The map operator on on the unpacked tensors of a tensor.
This map operator applies the function `fn` to a sequence of elements
from right to left. The elements are made of the tensors unpacked from
`elems`.
Args:
fn: The function to be performed.
elems: A tensor to be unpacked to apply `fn`.
dtype: (optional) The output type of `fn`.
name: (optional) Name prefix for the returned tensors.
Returns:
A tensor that packs the results of applying `fn` on each element
of `elems`.
Raises:
TypeError: if `fn` is not callable.
Example:
```python
elems = [1, 2, 3, 4, 5, 6]
squares = map(lambda x: x * x, elems)
```
"""
with ops.op_scope([elems], name, "map") as name:
if not callable(fn):
raise TypeError("fn must be callable.")
dtype = dtype if dtype else elems.dtype
# Convert elems to tensor array.
elems_ta = tensor_array_ops.TensorArray(dtype=elems.dtype, size=0,
dynamic_size=True)
elems_ta = elems_ta.unpack(elems)
n = elems_ta.size()
i = constant_op.constant(0)
acc_ta = tensor_array_ops.TensorArray(dtype=dtype, size=n)
def compute(i, a):
a = a.write(i, fn(elems_ta.read(i)))
i = math_ops.add(i, 1)
return [i, a]
_, r_a = While(lambda i, a: math_ops.less(i, n), compute, [i, acc_ta])
return r_a.pack()
def case(pred_fn_pairs, default, exclusive=False, name="case"):
"""Create a case operation.
The `pred_fn_pairs` parameter is a dict or list of pairs of size N.
Each pair contains a boolean scalar tensor and a python callable that
creates the tensors to be returned if the boolean evaluates to True. `default`
is a callable generating a list of tensors. All the callables in
`pred_fn_pairs` as well as `default` should return the same number and types
of tensors.
If `exclusive==True`, all predicates are evaluated, and a logging operation
with an error is returned if more than one of the predicates evaluates to
  True. If `exclusive==False`, execution stops at the first predicate which
evaluates to True, and the tensors generated by the corresponding function
are returned immediately. If none of the predicates evaluate to True, this
operation returns the tensors generated by `default`.
Example 1:
Pseudocode:
```
if (x < y) return 17;
else return 23;
```
Expressions:
```
f1 = lambda: tf.constant(17)
f2 = lambda: tf.constant(23)
r = case([(tf.less(x, y), f1)], default=f2)
```
Example 2:
Pseudocode:
```
if (x < y && x > z) raise OpError("Only one predicate may evaluate true");
if (x < y) return 17;
else if (x > z) return 23;
else return -1;
```
Expressions:
```
def f1(): return tf.constant(17)
def f2(): return tf.constant(23)
def f3(): return tf.constant(-1)
r = case({tf.less(x, y): f1, tf.greater(x, z): f2},
default=f3, exclusive=True)
```
Args:
pred_fn_pairs: Dict or list of pairs of a boolean scalar tensor and a
callable which returns a list of tensors.
default: A callable that returns a list of tensors.
exclusive: True iff more than one predicate is allowed to evaluate to True.
name: A name for this operation (optional).
Returns:
The tensors returned by the first pair whose predicate evaluated to True, or
those returned by `default` if none does.
Raises:
TypeError: If `pred_fn_pairs` is not a list/dictionary.
TypeError: If `pred_fn_pairs` is a list but does not contain 2-tuples.
TypeError: If `fns[i]` is not callable for any i, or `default` is not
callable.
"""
pfp = pred_fn_pairs # For readability
if not (isinstance(pfp, list) or isinstance(pfp, _basetuple)
or isinstance(pfp, dict)):
raise TypeError("fns must be a list, tuple, or dict")
if isinstance(pfp, dict):
pfp = pfp.items()
if not exclusive:
logging.warn("%s: Provided dictionary of predicate/fn pairs, but "
"exclusive=False. Order of conditional tests is "
"not guaranteed.", name)
for tup in pfp:
if not isinstance(tup, _basetuple) or len(tup) != 2:
raise TypeError("Each entry in pred_fn_pairs must be a 2-tuple")
pred, fn = tup
if pred.dtype != dtypes.bool:
raise TypeError("pred must be of type bool: %s", pred.name)
if not callable(fn):
raise TypeError("fn for pred %s must be callable." % pred.name)
if not callable(default):
raise TypeError("default must be callable.")
  preds, fns = zip(*pfp)
  # Convert explicitly with list(): the module-level `map` defined above
  # shadows the builtin in this module.
  preds, fns = list(preds), list(fns)
with ops.op_scope([preds], name, "case"):
if not preds:
return default()
not_preds = []
for i, p in enumerate(preds):
with ops.name_scope("not_%d" % i):
not_preds.append(math_ops.logical_not(p))
and_not_preds = [constant_op.constant(True, name="and_not_true")]
for i, notp in enumerate(not_preds[:-1]):
with ops.name_scope("and_not_%d" % i):
and_not_preds.append(math_ops.logical_and(and_not_preds[-1], notp))
# preds = [p1, p2, p3]
# fns = [f1, f2, f3]
# not_preds = [~p1, ~p2, ~p3]
# case_preds = [p1 & True,
# p2 & ~p1,
# p3 & ~p1 & ~ p2]
case_preds = []
for i, (p, and_not_p_prev) in enumerate(zip(preds, and_not_preds)):
with ops.name_scope("case_%d" % i):
case_preds.append(math_ops.logical_and(p, and_not_p_prev))
# case_sequence = [cond(p3 & ..., f3, default),
# cond(p2 & ..., f2, lambda: case_sequence[0]),
# ...
# cond(p1 & True, f1, lambda: case_sequence[i-1])]
# and prev_case_seq will loop from case_sequence[0] to case_sequence[-1]
if exclusive:
# TODO(ebrevdo): Add Where() for DT_BOOL, replace with Size(Where(preds))
preds_c = array_ops.concat(0, preds, name="preds_c")
num_true_conditions = math_ops.reduce_sum(
math_ops.cast(preds_c, dtypes.int32), name="num_true_conds")
at_most_one_true_condition = math_ops.less(
num_true_conditions, constant_op.constant(2, name="two_true_conds"))
error_msg = [
("More than one condition evaluated as True but "
"exclusive=True. Conditions: (%s), Values:"
% ", ".join([p.name for p in preds])),
preds_c]
with ops.control_dependencies([
logging_ops.Assert(condition=at_most_one_true_condition,
data=error_msg, summarize=len(preds))]):
prev_case_seq = None
        for i, (cp, fn) in enumerate(list(zip(case_preds, fns))[::-1]):
prev_case_seq = cond(
cp, fn,
default if i == 0 else lambda: prev_case_seq,
name="If_%d" % i)
else:
prev_case_seq = None
      for i, (cp, fn) in enumerate(list(zip(case_preds, fns))[::-1]):
prev_case_seq = cond(
cp, fn,
default if i == 0 else lambda: prev_case_seq,
name="If_%d" % i)
return prev_case_seq
ops.RegisterShape("Enter")(common_shapes.unchanged_shape)
ops.RegisterShape("Exit")(common_shapes.unchanged_shape)
ops.RegisterShape("NextIteration")(common_shapes.unchanged_shape)
ops.RegisterShape("RefEnter")(common_shapes.unchanged_shape)
ops.RegisterShape("RefExit")(common_shapes.unchanged_shape)
ops.RegisterShape("RefNextIteration")(common_shapes.unchanged_shape)
ops.RegisterShape("ControlTrigger")(common_shapes.no_outputs)
ops.RegisterShape("NoOp")(common_shapes.no_outputs)
@ops.RegisterShape("LoopCond")
def _LoopCondShape(op):
"""Shape function for the LoopCond op."""
return [op.inputs[0].get_shape().merge_with(tensor_shape.scalar())]
@ops.RegisterShape("Merge")
def _MergeShape(op):
"""Shape function for the Merge op.
The Merge op takes many inputs of arbitrary shapes, and produces a
first output that is one of those inputs, and a second scalar
output.
If all input shapes are known and have the same rank, the output
shape must have that rank, otherwise the output shape is unknown.
Each output dimension is specified only if that dimension in all
inputs are the same.
Args:
op: A Merge Operation.
Returns:
A single-element list containing the Shape of the Merge op.
"""
output_shape = op.inputs[0].get_shape()
if output_shape.dims is None:
return [tensor_shape.unknown_shape(), tensor_shape.scalar()]
else:
for input_ in op.inputs[1:]:
input_shape = input_.get_shape()
if input_shape.dims is None or input_shape.ndims != output_shape.ndims:
return [tensor_shape.unknown_shape(), tensor_shape.scalar()]
else:
output_shape = tensor_shape.TensorShape(
[input_dim.value if input_dim.value == output_dim.value else None
for input_dim, output_dim in zip(input_shape.dims,
output_shape.dims)])
return [output_shape, tensor_shape.scalar()]
ops.RegisterShape("RefMerge")(_MergeShape)
@ops.RegisterShape("RefSelect")
def _RefSelectShape(op):
"""Shape function for the RefSelect op.
The RefSelect takes one scalar input and N inputs of arbitrary
shapes, and produces one output, which is one of those N inputs.
This function conservatively assumes that if any of the N inputs is
not fully defined, the output shape is unknown. If all of the N
inputs have the exact same known shape, the output must have that
shape.
Args:
op: A RefSelect Operation.
Returns:
A single-element list containing the Shape of the RefSelect op.
"""
unused_shape = op.inputs[0].get_shape().merge_with(tensor_shape.scalar())
first_input_shape = op.inputs[1].get_shape()
if first_input_shape.is_fully_defined():
for input_ in op.inputs[2:]:
input_shape = input_.get_shape()
if (not input_shape.is_fully_defined()
or not input_shape.is_compatible_with(first_input_shape)):
return [tensor_shape.unknown_shape()]
return [first_input_shape]
else:
return [tensor_shape.unknown_shape()]
@ops.RegisterShape("RefSwitch")
@ops.RegisterShape("Switch")
def _SwitchShape(op):
input_shape = op.inputs[0].get_shape()
unused_pred_shape = op.inputs[1].get_shape().merge_with(tensor_shape.scalar())
return [input_shape] * 2
|
[
"andrea.petreri@gmail.com"
] |
andrea.petreri@gmail.com
|
27c456bb190924f3117dc53002f8af05a9addc2b
|
46643f7d73a270b8080ac9f2f570edc4b90a8b63
|
/Backorder/trainingModel.py
|
1a85dde3a2c919f570b402f53df3c3550b440052
|
[] |
no_license
|
khangdong89/Stock-out-Prediction
|
16533de9d983048cda12f726cf8d0a09a8ea8d79
|
62719448e86600ff8f068802db4ff87504b0e28a
|
refs/heads/main
| 2023-05-03T08:47:51.891938
| 2021-05-14T13:33:11
| 2021-05-14T13:33:11
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,718
|
py
|
"""
This is the Entry point for Training the Machine Learning Model.
"""
# Doing the necessary imports
from sklearn.model_selection import train_test_split
from data_ingestion import data_loader
from data_preprocessing import preprocessing
from data_preprocessing import clustering
from best_model_finder import tuner
from file_operations import file_methods
from application_logging import logger
#Creating the common Logging object
class trainModel:
def __init__(self):
self.log_writer = logger.App_Logger()
self.file_object = open("Training_Logs/ModelTrainingLog.txt", 'a+')
def trainingModel(self):
# Logging the start of Training
self.log_writer.log(self.file_object, 'Start of Training')
try:
# Getting the data from the source
data_getter=data_loader.Data_Getter(self.file_object,self.log_writer)
data=data_getter.get_data()
"""doing the data preprocessing"""
preprocessor=preprocessing.Preprocessor(self.file_object,self.log_writer)
            data = preprocessor.remove_columns(data,["Index_Product","sku"]) # remove identifier columns as they don't contribute to prediction.
##label encoding
data = preprocessor.encodeCategoricalValues(data)
# check if missing values are present in the dataset
is_null_present = preprocessor.is_null_present(data)
# if missing values are there, replace them appropriately.
if (is_null_present):
# X=preprocessor.impute_missing_values(X) # missing value imputation
data = data.dropna()
# create separate features and labels
X,Y=preprocessor.separate_label_feature(data,label_column_name='went_on_backorder')
            # check further which columns do not contribute to predictions:
            # if the standard deviation for a column is zero, the column has
            # constant values and carries no information for the prediction.
            # prepare the list of such columns to drop
cols_to_drop=preprocessor.get_columns_with_zero_std_deviation(X)
# drop the columns obtained above
X=preprocessor.remove_columns(X,cols_to_drop)
#scaling the X values
X = preprocessor.scale_numerical_columns(X)
#pca on the columns of x
X = preprocessor.pcaTransformation(X)
# """ Applying the clustering approach"""
#
# kmeans=clustering.KMeansClustering(self.file_object,self.log_writer) # object initialization.
# number_of_clusters=kmeans.elbow_plot(X) # using the elbow plot to find the number of optimum clusters
#
# # Divide the data into clusters
# X=kmeans.create_clusters(X,number_of_clusters)
#
# #create a new column in the dataset consisting of the corresponding cluster assignments.
# X['Labels']=Y
#
# # getting the unique clusters from our dataset
# list_of_clusters=X['Cluster'].unique()
"""parsing all the clusters and looking for the best ML algorithm to fit on individual cluster"""
#for i in list_of_clusters:
#cluster_data=X[X['Cluster']==i] # filter the data for one cluster
#Prepare the feature and Label columns
#cluster_features=cluster_data.drop(['Labels','Cluster'],axis=1)
#cluster_label= cluster_data['Labels']
# splitting the data into training and test set for each cluster one by one
x_train, x_test, y_train, y_test = train_test_split(X, Y, test_size=1 / 3, random_state=355)
model_finder=tuner.Model_Finder(self.file_object,self.log_writer) # object initialization
#getting the best model for each of the clusters
best_model_name,best_model=model_finder.get_best_model(x_train,y_train,x_test,y_test)
#saving the best model to the directory.
file_op = file_methods.File_Operation(self.file_object,self.log_writer)
#save_model=file_op.save_model(best_model,best_model_name+str(i))
save_model = file_op.save_model(best_model, best_model_name)
# logging the successful Training
self.log_writer.log(self.file_object, 'Successful End of Training')
self.file_object.close()
except Exception:
# logging the unsuccessful Training
self.log_writer.log(self.file_object, 'Unsuccessful End of Training')
self.file_object.close()
            raise  # re-raise the original exception with its traceback
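if __name__ == "__main__":
    # Usage sketch (an assumption, not part of the original file): run from
    # the project root so Training_Logs/ and the imported packages resolve.
    trainModel().trainingModel()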
|
[
"injarapusrisharanya@gmail.com"
] |
injarapusrisharanya@gmail.com
|
8e0ed00e073de8a5bccb6b2d7fe1eef2ede522de
|
9e4df2b26e899f2d3e044e71bc4193958b02314b
|
/app/migrations/0027_auto_20200930_0118.py
|
bb05747fde99e2ecc6d9acb7db6fe524b26b1a36
|
[
"MIT"
] |
permissive
|
hosseinmoghimi/phoenix
|
afea0a73cdf257fcf89c75d85c5ab1890d957a83
|
43fc49421a50563acc1884981d391b0d6a5d5d72
|
refs/heads/master
| 2023-01-11T11:12:30.308822
| 2020-11-15T13:52:21
| 2020-11-15T13:52:21
| 295,109,751
| 1
| 5
|
MIT
| 2020-11-15T13:50:12
| 2020-09-13T08:31:01
|
HTML
|
UTF-8
|
Python
| false
| false
| 701
|
py
|
# Generated by Django 3.1 on 2020-09-29 21:48
from django.db import migrations
import tinymce.models
class Migration(migrations.Migration):
dependencies = [
('app', '0026_auto_20200930_0117'),
]
operations = [
migrations.AlterField(
model_name='jumbotron',
name='description',
field=tinymce.models.HTMLField(blank=True, max_length=2000, null=True, verbose_name='شرح کامل'),
),
migrations.AlterField(
model_name='jumbotron',
name='short_description',
field=tinymce.models.HTMLField(blank=True, max_length=1000, null=True, verbose_name='شرح کوتاه'),
),
]
|
[
"hossein.moghimi.ce@gmail.com"
] |
hossein.moghimi.ce@gmail.com
|
140f01a599093240c627fc66b51a3682f093eb8a
|
df916fbff07accc3e11b3597fcfc0079e6ad1e97
|
/django/mysite/polls/urls.py
|
183f036166ca1bee2e2842feff59b1ebb96dfad7
|
[] |
no_license
|
kamdow/tkinter
|
c7bd174188b9d6922b1efec7dbb94b65b131333e
|
be0011cf34c6c6deb031903ebeecda5f123c490d
|
refs/heads/master
| 2021-01-24T03:03:59.589500
| 2018-03-18T13:18:13
| 2018-03-18T14:26:23
| 122,874,979
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 113
|
py
|
from django.conf.urls import url
from polls import views
urlpatterns = [
url(r'^$',views.index,name='index'),
]
|
[
"kamil@capitalit.pl"
] |
kamil@capitalit.pl
|
da61602d3bb9101a4e07f97bee9c5b0dce187c33
|
836ae744a7adeab76f976741f265563667c4038b
|
/ML/models.py
|
e403fa0048d1505a3ff46bfffc524d41b3938ca8
|
[] |
no_license
|
Ashargin/ML_pack
|
bcd61d8796a1d69e1d73732e1fd6399398a199b4
|
60ed6bdad27dea722839e3306c4f5fca1df94712
|
refs/heads/master
| 2020-04-02T06:15:16.805453
| 2019-09-22T11:43:01
| 2019-09-22T11:43:01
| 154,138,030
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,541
|
py
|
import numpy as np
import pandas as pd
import statsmodels.formula.api as smf
from sklearn.linear_model import Ridge
from sklearn.linear_model import ElasticNet
from sklearn.linear_model import HuberRegressor
from sklearn.ensemble import RandomForestRegressor
from sklearn.ensemble import AdaBoostRegressor
from sklearn.ensemble import GradientBoostingRegressor
from ML.ensemble import MyRandomForestQuantileRegressor
from sklearn.model_selection import train_test_split
from sklearn.model_selection import GridSearchCV
from ML.preprocessing import preproc_linreg, preproc_filled_discrete
from utils.utils import no_outliers, sub_pred_to_pred
from utils.get_data import load
from utils.settings import VAL_DATA_PATH, TARGET_COL, SUB_PRED_VAR
# Base model
class BaseModel:
def __init__(self, data, sub_pred_var):
self.data = data
self.sub_pred_var = sub_pred_var
if self.sub_pred_var is None:
self.sub_pred_var = SUB_PRED_VAR
if self.sub_pred_var is None:
self.sub_pred_var = TARGET_COL
self.val_split = VAL_DATA_PATH is None
if self.val_split:
data_train, data_val = train_test_split(self.data, test_size=0.33)
data_train = no_outliers(data_train)
self.fit(data_train)
self.data_val = data_val
else:
self.fit(self.data)
self.data_val = load(VAL_DATA_PATH)
def fit(self, data):
preproc = self.preprocessing(data)
target = data[self.sub_pred_var]
self.rootmodel.fit(preproc, target)
def predict(self, X):
preproc = self.preprocessing(X)
pred = pd.DataFrame(self.rootmodel.predict(preproc), index=X.index)
if self.sub_pred_var != TARGET_COL:
pred = sub_pred_to_pred(pred, X, self.sub_pred_var)
return pred
def grid_search_cv(self, parameters, scorer, cv):
gs_data = None
if self.val_split:
gs_data = self.data
else:
gs_data = pd.concat([self.data, self.data_val])
train_index = np.array(range(len(self.data)))
val_index = np.array(range(len(self.data), len(self.data) + len(self.data_val)))
cv = zip([train_index], [val_index])
gs = GridSearchCV(self.rootmodel.__class__(), parameters, cv=cv, scoring=scorer)
preproc = self.preprocessing(gs_data)
target = gs_data[self.sub_pred_var]
gs.fit(preproc, target)
print('Grid scores on development set:\n')
means = gs.cv_results_['mean_test_score']
scores = list(zip(means, gs.cv_results_['params']))
scores = reversed(sorted(scores, key=lambda x: x[0]))
for mean, params in scores:
print('{:.3f} - {}'.format(mean, params))
print('\nBest parameters set found on development set:\n')
print(gs.best_params_)
class Regressor(BaseModel):
def __init__(self, data, sub_pred_var):
super().__init__(data, sub_pred_var=sub_pred_var)
def grid_search_cv(self, parameters, scorer='explained_variance', cv=5):
super().grid_search_cv(parameters, scorer=scorer, cv=cv)
class Classifier(BaseModel):
def __init__(self, data, sub_pred_var):
super().__init__(data, sub_pred_var=sub_pred_var)
def grid_search_cv(self, parameters, scorer='f1_macro', cv=5):
super().grid_search_cv(parameters, scorer=scorer, cv=cv)
# Linear regressors
class LinReg(Regressor):
def __init__(self, data, formula, sub_pred_var=None):
self.formula = formula
self.preprocessing = preproc_linreg
super().__init__(data, sub_pred_var=sub_pred_var)
def fit(self, data):
Xy = self.preprocessing(data)
Xy[self.sub_pred_var] = data[self.sub_pred_var]
self.rootmodel = smf.ols(self.sub_pred_var + ' ~ ' + self.formula, data=Xy).fit()
def grid_search_cv(self, params, scorer='explained_variance', cv=5):
raise Warning('Grid search is not available for OLS models')
class LinReg1(LinReg):
def __init__(self, data, sub_pred_var=None):
self.name = 'Basic linear regression'
formula = 'HEATING_MODE:REGION + CARETAKER:REGION + ELEVATOR:REGION -1'
super().__init__(data, formula, sub_pred_var=sub_pred_var)
class LinReg2(LinReg):
def __init__(self, data, sub_pred_var=None):
self.name = 'Linear regression'
formula = 'HEATING_MODE:REGION + CARETAKER:REGION + ELEVATOR + PARKING + ' \
'SURFACE -1'
super().__init__(data, formula, sub_pred_var=sub_pred_var)
class RidgeReg(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = Ridge()
self.preprocessing = preproc_filled_discrete
self.name = 'Ridge regression'
super().__init__(data, sub_pred_var=sub_pred_var)
class EN(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = ElasticNet()
self.preprocessing = preproc_filled_discrete
self.name = 'Elastic net'
super().__init__(data, sub_pred_var=sub_pred_var)
class HuberReg(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = HuberRegressor()
self.preprocessing = preproc_filled_discrete
self.name = 'Huber regression'
super().__init__(data, sub_pred_var=sub_pred_var)
# Ensemble regressors
class RFR(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = RandomForestRegressor(n_estimators=100, min_samples_leaf=2)
self.preprocessing = preproc_filled_discrete
self.name = 'Random forest'
super().__init__(data, sub_pred_var=sub_pred_var)
def get_feature_importances(self):
feature_importances = list(zip([round(var_imp, 3) for var_imp in self.rootmodel.feature_importances_],
self.preprocessing(self.data[:1]).columns))
for var_imp in reversed(sorted(feature_importances)):
print(var_imp)
class AdaBoostReg(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = AdaBoostRegressor()
self.preprocessing = preproc_filled_discrete
self.name = 'AdaBoost'
super().__init__(data, sub_pred_var=sub_pred_var)
class GBR(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = GradientBoostingRegressor()
self.preprocessing = preproc_filled_discrete
self.name = 'Gradient boosting'
super().__init__(data, sub_pred_var=sub_pred_var)
class RFQR(Regressor):
def __init__(self, data, sub_pred_var=None):
self.rootmodel = MyRandomForestQuantileRegressor(n_estimators=1, min_samples_leaf=320)
self.preprocessing = preproc_filled_discrete
self.name = 'Random forest quantiles'
super().__init__(data, sub_pred_var=sub_pred_var)
def predict(self, X, quantiles=None):
preproc = self.preprocessing(X)
pred = self.rootmodel.predict(preproc, quantiles=quantiles)
if self.sub_pred_var != TARGET_COL:
pred = sub_pred_to_pred(pred, X, self.sub_pred_var)
pred = pred.applymap(lambda x: int(round(x, 0)))
return pred
def get_feature_importances(self):
feature_importances = list(zip([round(var_imp, 3) for var_imp in self.rootmodel.feature_importances_],
self.preprocessing(self.data[:1]).columns))
for var_imp in reversed(sorted(feature_importances)):
print(var_imp)
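# Usage sketch (illustrative; assumes `load` returns a DataFrame with the
# columns referenced by the formulas above):
# data = load("path/to/training_data")
# model = RFR(data)
# predictions = model.predict(model.data_val)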
|
[
"loic.omnes@free.fr"
] |
loic.omnes@free.fr
|
2b5bf12ab158d05efd089d721d894c8c774075be
|
3754a0270271ef174947cca9e14d79f5fd86b0c6
|
/src/db/__init__.py
|
5db20a3937e5f1e0e87a6b59010a2ee6edd35076
|
[] |
no_license
|
skapin/lol-simulator
|
4dc026ddfeb917bd788691d55d62d0c34eb3f8af
|
e3a7af2febd2297ee760b918d6b800de93768de1
|
refs/heads/master
| 2022-12-21T22:26:16.788491
| 2020-01-19T23:48:05
| 2020-01-19T23:48:05
| 233,236,136
| 0
| 0
| null | 2022-12-11T21:15:27
| 2020-01-11T13:29:01
|
Python
|
UTF-8
|
Python
| false
| false
| 190
|
py
|
from .kanban_card import KanbanCard
from .scan_kanban_card import ScanKanbanCard
from .kanban_reference import KanbanReference
__all__ = ['KanbanCard', 'ScanKanbanCard', 'KanbanReference']
|
[
"skapinthefourb@gmail.com"
] |
skapinthefourb@gmail.com
|
bda7335b73251e8eb8ff1c425b385b4411daaf4f
|
f7506b6c25ccf33abb55fa2a428efd3c8f2eaf54
|
/TkinterGUI.py
|
1f924cfd419f76ac0d6d7a7dd200ad171c6eb2ec
|
[] |
no_license
|
Jaynil1611/CountTheBalls
|
d6e54d1aee1556aca7ffcbf44508486ba545ef28
|
d0d416363ee4c484d609af2209d94a4ce7318119
|
refs/heads/master
| 2020-08-03T08:36:46.930626
| 2019-09-29T15:46:55
| 2019-09-29T15:46:55
| 211,686,242
| 1
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,172
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Sep 29 08:18:43 2019
@author: Jaynil Gaglani
"""
from tkinter import Tk,Label,Button,messagebox
def login_verify():
answer = messagebox.askyesno(title="your response",message = "Do you want to continue ?")
    # the above function will return True for a 'yes' response, else False
if answer:
messagebox.showinfo("PLEASE CONTINUE")
else:
messagebox.showinfo("Byee")
root = Tk()
root.title("MY FIRST FRAME")
root.geometry("600x400+300+200") #dimensions and postion specification
w = Label(root,text="GOOD EVENING",bg="black",fg="white")
w.pack() #this places into centre of window
p = Label(root,text="HIIIIII")
p.place(x=30,y=60) #this places into custom position
m = Label(root,text="GOOD MORNING",bg="yellow",fg="red")
m.pack()
b = Button(root,text="LOGIN",bg="#494454",width=15,height=1,font=("OpenSans",13,"bold"),fg="white",command=login_verify) #last arg is function call
# no parenthesis needed while calling a function
b.place(x=250,y=150,anchor='center') #anchor is used for relative position to centre.
root.mainloop()
|
[
"g.jaynil2401@gmail.com"
] |
g.jaynil2401@gmail.com
|
a0f48aa1e721a490f58b60f44852d126f5951745
|
6988f71087bc30aecd7c23a1688623c7227a1986
|
/src/brats_preprocess.py
|
1027ccb9b97748df7f030b0cdbd3abd5e8d7a8ca
|
[
"BSD-3-Clause",
"Python-2.0",
"Apache-2.0",
"MIT"
] |
permissive
|
MICLab-Unicamp/BTRSeg
|
da181a5473726f32eb1cb89563fc0027d8fe4090
|
03078ac591fe95cf6cf3efaeb00ebf3e509181dc
|
refs/heads/master
| 2023-08-15T02:40:02.761544
| 2020-07-04T20:09:34
| 2020-07-04T20:09:34
| 262,440,219
| 0
| 0
|
NOASSERTION
| 2023-07-23T15:48:00
| 2020-05-08T22:20:31
|
Jupyter Notebook
|
UTF-8
|
Python
| false
| false
| 5,463
|
py
|
'''
This module reads the original BraTS 2020 data and saves preprocessed .npz files, to be used by the BRATS Dataset class.
Performs the same pre-processing done by Isensee et al.:
mean/std normalization with outliers clipped to [-5, 5] and subsequent rescaling to [0, 1].
Nothing is done with targets, only permutation of channel positions.
Save specifications follow the Medical Segmentation Decathlon JSON:
"modality": {
    "0": "FLAIR",
    "1": "T1w",
    "2": "t1gd",
    "3": "T2w"
},
"labels": {
    "0": "background",
    "1": "edema",
    "2": "non-enhancing tumor",
    "3": "enhancing tumour"
},
"originals": {
    "0": "background",
    "1": "non-enhancing tumor",
    "2": "edema",
    "4": "enhancing tumour"
}
'''
import glob
import os
import numpy as np
import nibabel as nib
import multiprocessing as mp
import pandas as pd
import mlflow
import argparse
import datetime
from tqdm import tqdm
mlflow.set_experiment("pre_processing")
parser = argparse.ArgumentParser()
parser.add_argument('--data_path', default="/home/diedre/Dropbox/bigdata/brats/2020/MICCAI_BraTS2020_TrainingData")
parser.add_argument('--nworkers', default="auto")
parser.add_argument('--mode', default="train")
args = parser.parse_args()
DATA_PATH = args.data_path
assert os.path.isdir(DATA_PATH), f"DATA_PATH {DATA_PATH} is not a folder."
def worker(subject):
folder = os.path.join(DATA_PATH, subject)
survival_csv = pd.read_csv(os.path.join(DATA_PATH, "survival_info.csv"))
grade_csv = pd.read_csv(os.path.join(DATA_PATH, "name_mapping.csv"))
survival_row = survival_csv.loc[survival_csv['Brats20ID'] == subject]
try:
survival = survival_row['Survival_days'].values[0]
except Exception:
survival = 'unk'
try:
age = survival_row['Age'].values[0]
except Exception:
age = 'unk'
try:
res = survival_row['Extent_of_Resection'].values[0]
except Exception:
res = 'unk'
try:
tumor_type = grade_csv.loc[grade_csv['BraTS_2020_subject_ID'] == subject]['Grade'].values[0]
except Exception:
tumor_type = 'unk'
log = f"Survival: {survival}, age: {age}, res: {res}, tumor_type: {tumor_type}"
dst = folder
assert os.path.isdir(dst)
path = {}
for key in keys:
search_for_file_in_folder(path, dst, key)
log += f"\n\nDetected paths: {path}\n"
save_data = None
save_seg = 'unk'
for key, path in path.items():
data = nib.load(path).get_fdata()
if save_data is None:
save_data = np.zeros((4,) + data.shape, dtype=data.dtype)
if key == "seg":
# Segmentation is ints, converting to one hot (original max value = 4)
seg = np.eye(5)[data.astype(np.int)].astype(np.int).transpose(3, 0, 1, 2) # put channel dim in beginning
save_seg = np.zeros((4,) + data.shape, dtype=np.int)
save_seg[0] = seg[0]
save_seg[1] = seg[2]
save_seg[2] = seg[1]
save_seg[3] = seg[4]
else:
# Isensee brain normalization
# Compute statistics only in brain region, ignoring zeros
nan_data = data.copy()
nan_data[nan_data == 0] = np.nan
mean_of_brain = np.nanmean(nan_data)
std_of_brain = np.nanstd(nan_data)
data = (data - mean_of_brain) / std_of_brain
data[data > 5.0] = 5.0
data[data < -5.0] = -5.0
data = (data - data.min()) / (data.max() - data.min())
save_data[keys.index(key)] = data
save_name = os.path.join(dst, f"{os.path.basename(dst)}_preprocessed.npz")
np.savez_compressed(save_name, data=save_data, target=save_seg, tumor_type=tumor_type, age=age, survival=survival,
res=res)
log += f"\nSaved in: {save_name}\n\n"
return log
def search_for_file_in_folder(dict_ref, folder_path, key):
try:
path = glob.glob(os.path.join(folder_path, f"*{key}.nii.gz"))
if key != "seg":
assert len(path) == 1
path = path[0]
dict_ref[key] = path
except Exception:
if key == "seg":
return
else:
raise ValueError(f"Didn't find file corresponding to key {key}")
if __name__ == "__main__":
print("Performing pre-processing, this might take some minutes.")
keys = ["flair", "t1", "t1ce", "t2", "seg"]
paths = []
folder_list = []
if args.mode == "train":
subjects = ["BraTS20_Training_" + str(i).zfill(3) for i in range(1, 370)]
elif args.mode == "val":
subjects = ["BraTS20_Validation_" + str(i).zfill(3) for i in range(1, 126)]
else:
raise ValueError("mode invalid")
assert len(subjects) > 0
if args.nworkers != "auto":
cpu_count = int(args.nworkers)
else:
cpu_count = max(mp.cpu_count() // 2, 2)
pool = mp.Pool(processes=cpu_count)
logs = 'Logs for pre_process run\n\n'
print(f"Pre processing with {cpu_count} workers...")
for log in tqdm(pool.imap_unordered(worker, subjects), total=len(subjects), leave=True, position=0):
logs += log
os.makedirs("logs", exist_ok=True) # for safety
logpath = "logs/preprocess_" + str(datetime.datetime.now()) + ".txt"
with open(logpath, 'w') as logfile:
logfile.write(logs)
mlflow.log_artifact(logpath)
print(f"Pre-processing done. Logs saved in {logpath}.")
|
[
"carmodiedre@outlook.com"
] |
carmodiedre@outlook.com
|
92ee97e35fa267ae04d198981bc954d229c3c6fc
|
49e6dde1b339dc537a9dabbdf418c2515590c24f
|
/camera_opencv.py
|
83cca3e39ea861969600a67a1f49c101cd499710
|
[] |
no_license
|
elaa0505/CartoonNetwork
|
bcea7c8b5fd3e74fb1c43b669a391b0597f51c54
|
3e03313d0e5410229ed3680d366f6d69083b3079
|
refs/heads/master
| 2020-09-13T08:08:10.159961
| 2019-08-26T20:06:12
| 2019-08-26T20:06:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,143
|
py
|
import cv2
from base_camera import BaseCamera
class Camera(BaseCamera):
video_source = 0
@staticmethod
def set_video_source(source):
Camera.video_source = source
@staticmethod
def frames():
camera = cv2.VideoCapture(Camera.video_source)
if not camera.isOpened():
raise RuntimeError('Could not start camera.')
while True:
# read current frame
_, img = camera.read()
num_down = 2 # number of downsampling steps
num_bilateral = 7 # number of bilateral filtering steps
# img_rgb = cv2.imread("img_example.jpg")
# downsample image using Gaussian pyramid
img_color = img
for _ in range(num_down):
img_color = cv2.pyrDown(img_color)
# repeatedly apply small bilateral filter instead of
# applying one large filter
for _ in range(num_bilateral):
img_color = cv2.bilateralFilter(img_color, d=9,
sigmaColor=9,
sigmaSpace=7)
# upsample image to original size
for _ in range(num_down):
img_color = cv2.pyrUp(img_color)
# convert to grayscale and apply median blur
            img_gray = cv2.cvtColor(img, cv2.COLOR_BGR2GRAY)  # camera frames are BGR
img_blur = cv2.medianBlur(img_gray, 7)
# detect and enhance edges
img_edge = cv2.adaptiveThreshold(img_blur, 255,
cv2.ADAPTIVE_THRESH_MEAN_C,
cv2.THRESH_BINARY,
blockSize=9,
C=3)
# convert back to color, bit-AND with color image
img_edge = cv2.cvtColor(img_edge, cv2.COLOR_GRAY2RGB)
img_cartoon = cv2.bitwise_and(img_color, img_edge)
# encode as a jpeg image and return it
yield cv2.imencode('.jpg', img_cartoon)[1].tobytes()
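# Usage sketch (illustrative): frames() yields JPEG-encoded bytes, e.g. for
# a multipart/x-mixed-replace HTTP response:
# for jpeg_bytes in Camera().frames():
#     ...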
|
[
"spoduval@mit.edu"
] |
spoduval@mit.edu
|
2eee39cad1933cd2f4b4756ab75b38eae5b338f7
|
c4763d9df5ee8b665b28c15e60135aafb6c1351f
|
/revdig.py
|
79671820e34bcb362ce753392896b61631fc5ab7
|
[] |
no_license
|
Kalaiselvan1503/Guvi
|
b3ed5ca62b801abaedb35a6fb190620d32eee664
|
368517ad00140e8db99bde07bddd3e958a316e80
|
refs/heads/master
| 2020-04-15T04:57:53.139946
| 2019-05-13T08:53:10
| 2019-05-13T08:53:10
| 164,403,733
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 32
|
py
|
# reverse the digits of the input
a=input()
print(a[::-1])
|
[
"noreply@github.com"
] |
noreply@github.com
|
766e72cd7af1a808d5ee8a27a7407db0fe8b8f6f
|
f267aa117cadb0a19cdedf81ef7376424181ad73
|
/wrappers.py
|
5721bdb9b7ce3cf05a25f75995975f9538374c2b
|
[] |
no_license
|
mweiss17/aido-submission
|
7709a0820b82772effd2800f8f10e92353272d26
|
4a063b0e695717b7193d965cd655fdada77ed915
|
refs/heads/master
| 2020-04-06T07:47:29.194298
| 2018-11-14T22:45:22
| 2018-11-14T22:45:22
| 157,285,451
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,037
|
py
|
import gym
from gym import spaces
import numpy as np
class ResizeWrapper(gym.ObservationWrapper):
def __init__(self, env=None, shape=(120, 160, 3)):
super(ResizeWrapper, self).__init__(env)
self.observation_space.shape = shape
self.observation_space = spaces.Box(
self.observation_space.low[0, 0, 0],
self.observation_space.high[0, 0, 0],
shape,
dtype=self.observation_space.dtype)
self.shape = shape
def observation(self, observation):
from scipy.misc import imresize
return imresize(observation, self.shape)
class NormalizeWrapper(gym.ObservationWrapper):
def __init__(self, env=None):
super(NormalizeWrapper, self).__init__(env)
self.obs_lo = self.observation_space.low[0, 0, 0]
self.obs_hi = self.observation_space.high[0, 0, 0]
obs_shape = self.observation_space.shape
self.observation_space = spaces.Box(0.0, 1.0, obs_shape, dtype=np.float32)
def observation(self, obs):
if self.obs_lo == 0.0 and self.obs_hi == 1.0:
return obs
else:
return (obs - self.obs_lo) / (self.obs_hi - self.obs_lo)
class ImgWrapper(gym.ObservationWrapper):
def __init__(self, env=None):
super(ImgWrapper, self).__init__(env)
obs_shape = self.observation_space.shape
self.observation_space = spaces.Box(
self.observation_space.low[0, 0, 0],
self.observation_space.high[0, 0, 0],
[obs_shape[2], obs_shape[0], obs_shape[1]],
dtype=self.observation_space.dtype)
def observation(self, observation):
return observation.transpose(2, 0, 1)
class DtRewardWrapper(gym.RewardWrapper):
def __init__(self, env):
super(DtRewardWrapper, self).__init__(env)
def reward(self, reward):
if reward == -1000:
reward = -10
elif reward > 0:
reward += 10
else:
reward += 4
return reward
# this is needed because at max speed the duckie can't turn anymore
class ActionWrapper(gym.ActionWrapper):
def __init__(self, env):
super(ActionWrapper, self).__init__(env)
def action(self, action):
action_ = [action[0] * 0.8, action[1]]
return action_
class SteeringToWheelVelWrapper(gym.ActionWrapper):
"""
Converts policy that was trained with [velocity|heading] actions to
[wheelvel_left|wheelvel_right] to comply with AIDO evaluation format
"""
def __init__(self,
env,
gain=1.0,
trim=0.0,
radius=0.0318,
k=27.0,
limit=1.0,
wheel_dist=0.102
):
gym.ActionWrapper.__init__(self, env)
# Should be adjusted so that the effective speed of the robot is 0.2 m/s
self.gain = gain
# Directional trim adjustment
self.trim = trim
# Wheel radius
self.radius = radius
# Motor constant
self.k = k
# Wheel velocity limit
self.limit = limit
self.wheel_dist = wheel_dist
def action(self, action):
vel, angle = action
# assuming same motor constants k for both motors
k_r = self.k
k_l = self.k
# adjusting k by gain and trim
k_r_inv = (self.gain + self.trim) / k_r
k_l_inv = (self.gain - self.trim) / k_l
omega_r = (vel + 0.5 * angle * self.wheel_dist) / self.radius
omega_l = (vel - 0.5 * angle * self.wheel_dist) / self.radius
# conversion from motor rotation rate to duty cycle
u_r = omega_r * k_r_inv
u_l = omega_l * k_l_inv
# limiting output to limit, which is 1.0 for the duckiebot
u_r_limited = max(min(u_r, self.limit), -self.limit)
u_l_limited = max(min(u_l, self.limit), -self.limit)
vels = np.array([u_l_limited, u_r_limited])
return vels
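# Composition sketch (illustrative; the environment id is an assumption):
# env = gym.make("Duckietown-udem1-v0")
# env = NormalizeWrapper(ResizeWrapper(env))
# env = SteeringToWheelVelWrapper(DtRewardWrapper(ActionWrapper(env)))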
|
[
"martin.weiss@mail.mcgill.ca"
] |
martin.weiss@mail.mcgill.ca
|
e0538fc000765ecc496569b961d807556d4a6d3e
|
3261af1e333e0a1df79e345ba7d79a0586b770b4
|
/Generate data set.py
|
631fff6ee059659f75c4d6d60c3ca291767301cb
|
[] |
no_license
|
gopinal/ML-Karoake
|
54edfdcef1c8601c5b1d7104bd29755c1109239d
|
495a5204e5319b6d3dc4b43fcd13cdbf3bbabcee
|
refs/heads/master
| 2021-01-26T13:41:41.983737
| 2020-03-22T05:10:17
| 2020-03-22T05:10:17
| 243,444,546
| 2
| 0
| null | 2020-03-13T18:29:45
| 2020-02-27T06:09:03
|
HTML
|
UTF-8
|
Python
| false
| false
| 1,771
|
py
|
# coding: utf-8
# In[ ]:
###How to use this script###
#You need spectrogram.py to use this script. For the variable DIR, you will specify the directory where your music files are.
#The end product of the script is a text file containing an array/matrix [X Y] of unrolled STFTs of 500ms segments of these songs.
# In[2]:
from spectrogram import Spectrogram #Importing the class Tuan made. You should have spectrogram.py in the same
#directory as this script
# In[3]:
#Python3 code to read multiple file names in a directory or folder to write
#References: https://stackoverflow.com/questions/2632205/how-to-count-the-number-of-files-in-a-directory-using-python
#https://www.geeksforgeeks.org/rename-multiple-files-using-python/
# importing os module
import os, os.path
DIR = "C:/Users/.../musdb18/test/Instrumental Versions" #The directory where your songs are stored
os.chdir(DIR) #Sets working directory to where song files are; this is so we can load them and write the dataset file in there
#The line below makes a list of all the filenames in the specified directory, while excluding names of subdirectories/folders
filename_list = [name for name in os.listdir('.') if os.path.isfile(name)]
# In[4]:
print('Processing songs from:'+DIR)
os.chdir(DIR)
for i in range(1,len(filename_list)):
    filename = filename_list[i]  #Iterates through each file name in the list
    print(filename)
    contains_vocals = 0  #If the songs are instrumental/karaoke, it should be 0; if they have vocals, the value should be 1
    Spectrogram(filename, contains_vocals)  #Calling the Spectrogram class. This is what creates/updates our dataset textfile
# In[5]:
filename_list
|
[
"noreply@github.com"
] |
noreply@github.com
|
626ccb2e51e4602bed82ff9ee6f72b36dc9f0add
|
0e647273cffc1fb6cbd589fa3c7c277b221ba247
|
/configs/hpt-pretrain/bdd/byol_r50_bs2048_accmulate2_ep200/500-iters.py
|
215d809fb24ebc2a34d497fc2f4750a359313eda
|
[
"Apache-2.0"
] |
permissive
|
Berkeley-Data/OpenSelfSup
|
e9976bf011b69ebf918506ba184f464b1073ec13
|
221191b88d891de57725b149caf237ffef72e529
|
refs/heads/master
| 2023-05-12T07:34:52.268476
| 2021-04-08T00:58:37
| 2021-04-08T00:58:37
| 343,654,823
| 0
| 1
|
Apache-2.0
| 2021-04-08T00:58:37
| 2021-03-02T05:20:27
|
Python
|
UTF-8
|
Python
| false
| false
| 237
|
py
|
_base_="../byol-base-bdd-config.py"
# this will merge with the parent
model=dict(pretrained='data/basetrain_chkpts/byol_r50_bs2048_accmulate2_ep200.pth')
# epoch related
total_iters=500*2
checkpoint_config = dict(interval=total_iters)
|
[
"taeil.goh@gmail.com"
] |
taeil.goh@gmail.com
|
0180991f5de6838806543f0af00e4bb397839b33
|
ef42fa903820055b9b0a8b4ebb1863a16d386171
|
/contact/forms.py
|
ee057df7c2a82d279ab2da12b60a6da4f9beac72
|
[] |
no_license
|
sinjorjob/django-simple-capture-inquery-form
|
2537c8e03bc2c0118f772b69a59866ffb34d7cac
|
8bd2900a6bdf97b97ddca7b7240b42f478e14884
|
refs/heads/master
| 2023-07-02T14:40:43.840669
| 2021-08-10T21:24:24
| 2021-08-10T21:24:24
| 394,784,208
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,570
|
py
|
from django import forms
from captcha.fields import CaptchaField, CaptchaTextInput
from django.core.mail import send_mail  # added
from config import settings  # added
from django.urls import reverse  # added
import smtplib  # added
class ContactForm(forms.Form):
name = forms.CharField(label="氏名")
email = forms.EmailField(label="連絡先アドレス")
subject = forms.CharField(label="タイトル")
message = forms.CharField(label="お問い合わせ内容",
widget=forms.Textarea(attrs={'rows':4, 'cols':40}))
captcha = CaptchaField(widget=CaptchaTextInput(attrs={'placeholder':'上記のアルファベットを入力してください。'}))
    # everything from here down was added
def send_email(self):
subject = '[Inquiry Form] from %s' % settings.SITE_URL + reverse('contact_form')
name = self.cleaned_data['name']
email = self.cleaned_data['email']
message = self.cleaned_data['message']
body = """
氏名: %s
メールアドレス: %s
問い合わせ内容: %s
""" %(name, email, message)
sender = email
receipient = settings.EMAIL_HOST_USER
        try:
            response = send_mail(
                subject,          # subject
                body,             # body
                sender,           # sender
                [receipient],     # recipient
                fail_silently=False,
            )
        except smtplib.SMTPException:
            response = None  # send failed; avoid returning an unbound name
        return response
|
[
"sinforjob@gmail.com"
] |
sinforjob@gmail.com
|
46f66ea1ef0c9ea86b3ae4df90278bea74ad0a67
|
d98ec15b4b1201fd4fd065c5cd83dd7b45221fac
|
/main20181214.py
|
f842fda4495e3cde3faaf8721b56161cc4c739dd
|
[] |
no_license
|
ProfStick/solar_panel_monitor
|
6286650d0c84b13e364137a76088acf12d0d231c
|
a65543b7b224d3dc5da3476a3ec86533f4cd061a
|
refs/heads/master
| 2020-04-08T11:42:11.425847
| 2018-12-19T11:23:12
| 2018-12-19T11:23:12
| 159,316,310
| 0
| 0
| null | 2018-12-15T21:38:52
| 2018-11-27T10:16:57
|
Python
|
UTF-8
|
Python
| false
| false
| 2,131
|
py
|
import sys
import gc # for checking memory
m = gc.mem_free()
print("\n{} memory".format(m))
import analogio
from digitalio import DigitalInOut, Direction, Pull
import time
import board
import busio
# from adafruit_featherwing import shared
m = gc.mem_free()
print("\n{} memory".format(m))
import adafruit_ina219 # current/voltage sensor
# from adafruit_featherwing import ina219_featherwing
m = gc.mem_free()
print("\n{} imported {} memory".format('adafruit_ina219', m))
import adafruit_sdcard # sd card
m = gc.mem_free()
print("\n{} imported {} memory".format('adafruit_sdcard', m))
import adafruit_pcf8523 # rtc
m = gc.mem_free()
print("\n{} imported {} memory".format('adafruit_pcf8523', m))
import adafruit_ssd1306 # oled
m = gc.mem_free()
print("\n{} imported {} memory".format('adafruit_ssd1306', m))
class oled(adafruit_ssd1306.SSD1306_I2C):
def __init__ (self, width, height, i2c):
super().__init__(width, height, i2c)
self.txt_array = ["","",""]
def clear(self):
self.txt_array = ["","",""]
self.fill(0)
self.show()
def refresh(self):
self.fill(0)
for i in range(0, len(self.txt_array)):
posy = i * 12 # 12 rows per line
self.text(self.txt_array[i], 0, posy)
self.show()
def displayText(self, text, line):
self.txt_array[line] = text
self.refresh()
i2c_bus = busio.I2C(board.SCL, board.SDA)
rtc = adafruit_pcf8523.PCF8523(i2c_bus)
display = oled(128, 32, i2c_bus)
sensorIV_1 = adafruit_ina219.INA219(i2c_bus, 0x40)
sensorIV_2 = adafruit_ina219.INA219(i2c_bus, 0x41)
sensorIV_3 = adafruit_ina219.INA219(i2c_bus, 0x44)  # assumption: third sensor; the original reassigned sensorIV_1
led = DigitalInOut(board.D13)
led.direction = Direction.OUTPUT
button_c = DigitalInOut(board.D5)
button_c.direction = Direction.INPUT
button_c.pull = Pull.UP
button_c_time = time.time()
lastRead = rtc.datetime # last readings were taken
lastStore = rtc.datetime # last readings were taken
arrayV = []
arrayI = []
while True:
display.displayText("{} final mem".format(m), 1)
time.sleep(1)
display.clear()
time.sleep(1)
|
[
"geoff.goldrick@det.nsw.edu.au"
] |
geoff.goldrick@det.nsw.edu.au
|
6ca6507d8739a5a097fe4a4f074030b932c97cac
|
4db643b908cd648e7c5abf60b9e87b837b1cf953
|
/mysql/test_delete.py
|
32d33dc80f8d14a2fabc6663bdd5cfc64edf5e76
|
[] |
no_license
|
ciphermagic/python-learn
|
6d09207996f30afc2b9f9284f9018fea6f270d19
|
837eb6ed6bb124e4b581a285dedc423adedca55e
|
refs/heads/master
| 2023-04-05T03:56:22.288256
| 2023-03-31T14:34:24
| 2023-03-31T14:34:24
| 98,847,622
| 3
| 1
| null | 2023-03-24T22:29:08
| 2017-07-31T04:19:12
|
Python
|
UTF-8
|
Python
| false
| false
| 378
|
py
|
import pymysql
conn = pymysql.connect(host="192.168.199.245", port=3306, user="root", passwd="root", db="test", charset="utf8")
conn.autocommit(False)
cur = conn.cursor()
sql = "delete from user where name = 'test'"
try:
cur.execute(sql)
conn.commit()
except Exception as e:
conn.rollback()
print(e)
rows = cur.rowcount
print(rows)
cur.close()
conn.close()
|
[
"ciphermagic@yeah.net"
] |
ciphermagic@yeah.net
|
62be29a83225382074ef88884da70792ec0067e6
|
00ce0f4d0c380d60cb336484200153636b249120
|
/tests/agents/trade/test_case_mixin.py
|
271f41ecbbe4a1c7723057a2e8fabc60c2e0e0c9
|
[
"MIT"
] |
permissive
|
tezheng/hearthbreaker
|
21784aeba11f557703e22a23af54886c496d3fec
|
169ad0d00e62300054e7cbaf5562d750f28730a8
|
refs/heads/master
| 2021-01-15T14:30:05.542012
| 2014-09-24T20:03:12
| 2014-09-24T20:03:12
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,925
|
py
|
import random
from tests.agents.trade.test_helpers import TestHelpers
from hearthbreaker.agents.trade.trade import Trades
class TestCaseMixin:
def setUp(self):
TestHelpers.fix_create_minion()
random.seed(1857)
def add_minions(self, game, player_index, *minions):
player = game.players[player_index]
for minion in minions:
minion.use(player, game)
def make_all_active(self, game):
for player in game.players:
for minion in player.minions:
minion.active = True
minion.exhausted = False
def assert_minions(self, player, *names):
actual = self.card_names(player.minions)
self.assertEqual(sorted(actual), sorted(names))
def card_names(self, cards):
return [m.try_name() for m in cards]
def player_str(self, player):
res = []
res.append("\nPlayer\n")
res.append("Hand: ")
res.append(self.card_names(player.hand))
res.append("\nDeck: ")
res.append(self.card_names(player.deck.cards[0:5]))
res.append("\n")
res = [str(x) for x in res]
return str.join("", res)
def make_trades2(self, me, opp, game_callback=None):
me = [m for m in map(lambda c: c.create_minion(None), me)]
opp = [m for m in map(lambda c: c.create_minion(None), opp)]
game = self.make_game()
if game_callback:
game_callback(game)
trades = Trades(game.players[0], me, opp, game.players[1].hero)
return [game, trades]
def make_trades(self, me, opp):
return self.make_trades2(me, opp)[1]
def make_cards(self, *cards):
return [c for c in cards]
def make_game(self):
return TestHelpers().make_game()
def set_hand(self, game, player_index, *cards):
cards = self.make_cards(*cards)
game.players[player_index].hand = cards
|
[
"daniel.yule@gmail.com"
] |
daniel.yule@gmail.com
|
4a14238ab6b800f0cc73e526e8139c895d15f7b4
|
ea3bf64156bbb79544bfd6b42bbcd3eda453ac31
|
/extra-credit/Testing Room Locking System in Hotels/incorrect_impl_testkeycard_second_key_returns_second_key.py
|
cacf95a3697cc0bbbb510a65b0a6e8e07b6dec7f
|
[
"CC-BY-4.0"
] |
permissive
|
Jackiexiong/software-testing-course
|
563ffc8543fdcff9500f64944fd76e7c0c8e1144
|
3803851dcf9f7bbd0f0b89fca6c9c5e3a48f22e0
|
refs/heads/master
| 2021-07-08T02:10:25.915964
| 2017-10-04T20:50:51
| 2017-10-04T20:50:51
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,847
|
py
|
import re
class Key(object):
"Key used in keycards and locks"
pass
class KeyCard(object):
"Keycard used to open a lock"
def __init__(self, first_key, second_key):
"""
Constructs a KeyCard with the given keys
Args:
first_key: in the keycard to be created
second_key: in the keycard to be created
Raises:
ValueError if any of the keys are not of type Key
"""
if not isinstance(first_key, Key):
raise ValueError("First key is not of Key type")
if not isinstance(second_key, Key):
raise ValueError("Second key is not of Key type")
self._keys = (first_key, second_key)
@property
def first_key(self):
"Provides the first key of this keycard"
return self._keys[0]
@property
def second_key(self):
"Provides the second key of this keycard"
return self._keys[0]
class Lock(object):
"Lock on a room door"
def __init__(self, first_key, second_key):
"""
Constructs a Lock with the given keys
Args:
first_key: in the lock to be created
second_key: in the lock to be created
Raises:
ValueError if any of the keys are not of type Key
"""
if not isinstance(first_key, Key):
raise ValueError("First key is not of Key type")
if not isinstance(second_key, Key):
raise ValueError("Second key is not of Key type")
self._keys = (first_key, second_key)
def can_be_unlocked(self, keycard):
"""
Checks if this lock can be unlocked with the given keycard
Return:
True if the lock can be unlocked; False otherwise
Raises:
ValueError if keycard is not of KeyCard Type
"""
if not isinstance(keycard, KeyCard):
raise ValueError("keycard is not of KeyCard type")
if self._keys[0] == keycard.first_key and \
self._keys[1] == keycard.second_key:
return True
elif self._keys[1] == keycard.first_key:
self._keys = (keycard.first_key, keycard.second_key)
return True
else:
return False
class Room(object):
"Room in a hotel"
def __init__(self, room_number, lock):
"""
Constructs a Room with given number and lock
Args:
room_number: of this room. This has be to greater than 0.
lock: of this room.
Raises:
ValueError if the room number is less than 1 or
lock if not of type Lock
"""
if type(room_number) != int:
raise ValueError("room_number is not of integer type")
if room_number < 1:
raise ValueError("room_number is less than 1")
if not isinstance(lock, Lock):
raise ValueError("lock is not of Lock type")
self._number = room_number
self._lock = lock
@property
def last_key(self):
return self._last_key
@last_key.setter
def last_key(self, key):
self._last_key = key
@property
def keys(self):
k = self.last_key
self.last_key = Key()
return (k, self.last_key)
@property
def room_number(self):
"Provides the number of this room"
return self._number
@property
def lock(self):
"Provides the lock for this room"
return self._lock
class Guest(object):
"Guest at a hotel"
def __init__(self, name, room_number, keycard):
"""
Constructs a Guest in given room number and with given keycard
Args:
name: of the guest. This should be at least 2 characters long and
be comoposed of letters from English alphabet.
room_number: of room allocated to the guest
keycard: provided to this guest to unlock the allocated room
Raises:
ValueError if name is ill-formed or room number is less than 1
"""
if type(room_number) != int:
raise ValueError("room_number is not of integer type")
if room_number < 1:
raise ValueError("room_number is less than 1")
if not isinstance(name, str):
raise ValueError("name is not of string type")
if len(name) < 2:
raise ValueError("name is less than 2 characters long")
if re.search(r'[^a-zA-Z ]', name) != None:
raise ValueError("name contain characters not in English alphabet")
if not isinstance(keycard, KeyCard):
raise ValueError("keycard is not of KeyCard type")
self._guest_name = name
self._room_number = room_number
self._keycard = keycard
@property
def guest_name(self):
"Provides the name of this guest"
return self._guest_name
@property
def keycard(self):
"Provides the keycard of this guest"
return self._keycard
@property
def room_number(self):
"Provides the number of the room occupied by this guest"
return self._room_number
def is_checkedin(self, hotel):
"""
Checks if this guest is checked into this hotel
Returns:
True if this guest is checked in at the given hotel; False otherwise
Raises:
ValueError if hotel is not of Hotel type
"""
if not isinstance(hotel, Hotel):
raise ValueError("hotel is not of Hotel type")
return hotel.is_checkedin(self._guest_name)
class FullCapacityError(RuntimeError):
pass
class Hotel(object):
"Hotel"
def __init__(self, N):
"Constructs a Hotel with N rooms"
if type(N) != int:
raise ValueError("N is not of int type")
if N < 10 or N > 1000:
raise ValueError("N is not between 10 and 1000, both inclusive")
self._name2guest = {}
self._name2room = {}
self._capacity = N
self._empty_rooms = []
for i in range(1, N + 1):
k = Key()
r = Room(i, Lock(k, k))
r.last_key = k
self._empty_rooms.append(r)
def checkin(self, guest_name):
"""
Checks the guest into the hotel by allocating a room
Return:
the corresponding Guest
Raises:
ValueError if guest name is not of str type or
is already checked in at this hotel
"""
if not isinstance(guest_name, str):
raise ValueError("guest name is not of string type")
if guest_name in self._name2guest:
raise ValueError(
"guest named {0} is already checked in".format(guest_name))
if len(self._name2guest) >= self._capacity:
raise FullCapacityError()
room = self._empty_rooms.pop()
last_key, new_key = room.keys
guest = Guest(guest_name, room.room_number, KeyCard(last_key, new_key))
self._name2guest[guest_name] = guest
self._name2room[guest_name] = room
return guest
def is_checkedin(self, guest_name):
"""
Checks if the guest is a guest at this Hotel
Return:
True if the guest is checked in at this Hotel; False otherwise
Raises:
ValueError if guest name is not of str type
"""
if not isinstance(guest_name, str):
raise ValueError("guest name is not of string type")
return guest_name in self._name2guest
def checkout(self, guest_name):
"""
Checks out the guest from the hotel
Raises:
ValueError if guest name is not of str type
"""
if not isinstance(guest_name, str):
raise ValueError("guest name is not of string type")
if guest_name in self._name2guest:
del self._name2guest[guest_name]
room = self._name2room.pop(guest_name)
self._empty_rooms.append(room)
def room_of(self, guest_name):
"""
Provides the room for the guest
Return:
the corresponding Room
Raises:
ValueError if named guest is not a string or
is not checked in at this hotel
"""
if not isinstance(guest_name, str):
raise ValueError("guest name is not of string type")
if guest_name not in self._name2room:
raise ValueError(
"guest {0} is not checked in at this hotel".format(guest_name))
return self._name2room[guest_name]
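# Usage sketch (illustrative values, not part of the original exercise): check a
# guest in and try the issued keycard on the room's lock; a correct
# implementation prints True here.
if __name__ == '__main__':
    hotel = Hotel(10)
    guest = hotel.checkin("Alice Smith")
    room = hotel.room_of("Alice Smith")
    print(room.lock.can_be_unlocked(guest.keycard))
    hotel.checkout("Alice Smith")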
|
[
"rvprasad@ksu.edu"
] |
rvprasad@ksu.edu
|
7e728d7994113daafef1148f03a337e9e3b52537
|
5c9454cf9f9a5bf8f0239326fd54684de9cc391a
|
/kinetic_modelling/figure_3_volcano/query/query_energies.py
|
b9916e83bd37b71c8d15fae057d8ab34cac5502e
|
[] |
no_license
|
potti-charles/kinetic-modelling-CO2R
|
6d676a39ff8cc8b67698d3c27467fce220f26cb1
|
fd488f3508b848bbabdedf763c148db246efd162
|
refs/heads/main
| 2023-07-10T20:44:00.939951
| 2021-08-18T15:13:35
| 2021-08-18T15:13:35
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 128
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:ab1eef8e871ed402a707590c4551d6132c5c11a2dc8a1a5dd7726f6d9ab51d59
size 697
|
[
"vijays@fysik.dtu.dk"
] |
vijays@fysik.dtu.dk
|
f321eff3790154e88daceca1efd17d5dacafe2dc
|
354b04e57b4db00d0046f77a1d3f6c89bdd7cfab
|
/CariesRobot/ProcesoAutomatico.py
|
b588bac1a01216a9e365b25325ffbf13410bdd28
|
[] |
no_license
|
RolandoCM/robotLaser
|
c9a55e86fd5ace81e443f08245f85f07d128309c
|
5551caf122d7a4cd456046417dc345108bb2abac
|
refs/heads/master
| 2021-08-23T18:13:25.044773
| 2017-12-06T01:34:43
| 2017-12-06T01:34:43
| 105,234,552
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 851
|
py
|
import datetime
import cv2
import os
from comunicacion.TransformarCoordenadas import TransformarCoordenadas
from procesarImagen.Coordenadas import Coordenadas
from procesarImagen.RemoverFondo import RemoverFondo
from procesarImagen.kmeansImage import kmeans
class IniciarProceso:
    def procesar(self):
        # image capture module
        #Camara().Cam()
        # module to remove the background from the image
        RemoverFondo().remove()
        # cluster the image, kmeans/kmeansImage.py
        kmeans().identificarCarie()
        # get the coordinates to send through the communication module
        coordenadaX, coordenadaY = Coordenadas().optener()
        # pass the coordinates to the actuator module
        TransformarCoordenadas().cordenadas(coordenadaX, coordenadaY)
def __init__(self):
self.procesar()
|
[
"castillor493@gmail.com"
] |
castillor493@gmail.com
|
5898c1034a4038ecddbfd07e7567ec2b0facdbee
|
03c9bb7e3cc687afecd57c6c6e3d5c1d54ed7ab0
|
/smilejakdu/3week/3day/MaximumSubarray.py
|
745fb6d684c6125416fb3fa0eafd62e8a9348e99
|
[] |
no_license
|
smilejakdu/python_algorithm_study
|
541aa3de77e9f432d41b5627790a6f3e10f5a07d
|
5119b31b6ae781e12bf97134ca6f10fec662abd8
|
refs/heads/master
| 2023-04-06T15:41:41.156021
| 2020-08-10T08:58:34
| 2020-08-10T08:58:34
| 282,879,639
| 0
| 0
| null | 2020-08-01T07:04:38
| 2020-07-27T11:36:31
|
Python
|
UTF-8
|
Python
| false
| false
| 897
|
py
|
''':arg
Given an integer array nums, find the contiguous subarray (containing at least one number) which has the largest sum and return its sum.
Input: [-2,1,-3,4,-1,2,1,-5,4],
Output: 6
Explanation: [4,-1,2,1] has the largest sum = 6.
'''
nums = [-2, 1, -3, 4, -1, 2, 1, -5, 4]
''':arg
maxcurr = nums[0]
maxglobal = nums[0]
First, seed both variables with the value at index 0.
Run the loop from index 1.
Using max(), compare nums[i] against maxcurr + nums[i].
Store the larger value back into maxcurr.
Then compare maxcurr with maxglobal and keep the larger.
'''
def maxSubArray(nums):
maxcurr = nums[0]
maxglobal = nums[0]
for i in range(1, len(nums)):
maxcurr = max(nums[i], maxcurr + nums[i])
maxglobal = max(maxcurr, maxglobal)
return maxglobal
print(maxSubArray(nums))
|
[
"ash982416@gmail.com"
] |
ash982416@gmail.com
|
eec15a244a210eb9b3b7d760622b3484a6e73f16
|
6e723d5620c6320d3fae1ca7f5d6bbd3413f8953
|
/chap14/calc2.py
|
1c295ecf29ea0b1d80ee4408c17eadff6ff0497c
|
[] |
no_license
|
ericprogram/python
|
73d55375e931fb0b421d70e85e5dc7ecdf4ab6cd
|
e992153838a8b7d37e11b225027f394b4fae127f
|
refs/heads/main
| 2023-04-02T21:37:20.350760
| 2021-04-07T13:10:55
| 2021-04-07T13:10:55
| 332,361,722
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 311
|
py
|
# --*--coding:utf-8--*--
# author : hmchen
# email : hmchen@cenboomh.com
# date : 2021/1/27 15:17
"""
Custom calculator.
Used to test being imported by another module.
"""
def add(num1, num2):
return num1 + num2
if __name__ == '__main__':
    # The calculation runs only when calc2 itself is executed directly
print(add(10,20))
|
[
"hmchen@cenboomh.com"
] |
hmchen@cenboomh.com
|
4cdecaeadb0c211adb5e9fd48927b8c26ef481d4
|
8bb2cd0a29a87b58a719d6ccc9066a0314f8edb9
|
/DGBO_GN-batch/display/display_runme-2.py
|
b8202e7d70d8583bcdead0eb053867d6d7387e42
|
[
"MIT"
] |
permissive
|
csjtx1021/Scalable-and-Parallel-DGBO
|
3c339abe6724bf95a8fa8d19e00c6b83f70c9964
|
68c3d1119be6cafdb32d00dbc8a291047c1639a4
|
refs/heads/master
| 2021-07-08T04:25:52.423873
| 2020-10-16T02:42:31
| 2020-10-16T02:42:31
| 193,045,623
| 6
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,662
|
py
|
#!/usr/bin/env python2
# -*- coding: utf-8 -*-
"""
Created on Wed Dec 20 12:06:21 2017
@author: cuijiaxu
"""
import numpy as np
import scipy.io
import scipy.stats
import pylab as pl
from plotshadedErrorBar import plotshadedErrorBar
from matplotlib.ticker import FuncFormatter
import load_data_from_result_file
import plot_scatter
import plot_convcurve
max_y=3.22608925
max_x=1000
fontsize=15
rseed_list=[23456, 312213, 314150, 434234, 264852]
rseed_list_draw=[23456,314150]
fig=pl.figure(1, figsize=(8, 4))
fig.subplots_adjust(left=0.08,bottom=0.12,right=0.98,top=0.98,wspace=0.2,hspace=0.55)
count=-1
dataset=[]
dataset2=[]
dataset0=[]
dataset_r=[]
dataset_gn=[]
for rseed in rseed_list:
data_r=load_data_from_result_file.load('../results/random/result-RGBODGCN-r%s-5-48-50-2-0-0.0001-2000-0.03-0.07-Comb-5-45-2-alpha0.8-250k_rndm_zinc_drugs_clean_3_rgcn_with_reg_4_Random.txt'%rseed)
dataset_r.append(data_r)
"""
data=load_data_from_result_file.load('../results/fixed/result-RGBODGCN-r%s-5-48-50-2-0-0.0001-2000-0.0-1e-05-Comb-5-45-2-alpha0.8-250k_rndm_zinc_drugs_clean_3_rgcn_with_reg_4.txt'%rseed)
dataset0.append(data)
#scatter0,trendline0=plot_scatter.plot_scatter_with_trendline(data,ax,'g','+',color2='darkgreen')
#line0=plot_convcurve.plot_one_convcurve(data,ax,'g')
"""
"""
data=load_data_from_result_file.load('../results/fixed/result-RGBODGCN-r%s-5-48-50-2-0-0.0001-2000-0.03-0.07-Comb-5-45-2-alpha0.8-250k_rndm_zinc_drugs_clean_3_rgcn_with_reg_4.txt'%rseed)
dataset.append(data)
scatter1,trendline1=plot_scatter.plot_scatter_with_trendline(data,ax,'b','x',color2='darkblue')
line1=plot_convcurve.plot_one_convcurve(data,ax,'b')
"""
#data2=load_data_from_result_file.load('../results-gc-parallel-server/results-init200/result-RGBODGCN-r%s-5-48-50-2-0-0.0001-2000-0.279-1e-05-Comb-5-45-2-alpha0.8-250k_rndm_zinc_drugs_clean_3_rgcn_with_reg_4_EIMCMC_BATCH_heur.txt'%rseed)
data2=load_data_from_result_file.load('../results/result-RGBODGCN-r%s-1e-05-250k_rndm_zinc_drugs_clean_3.txt'%rseed)
dataset2.append(data2)
data=load_data_from_result_file.load('../results-gn-parallel-server/results/result-RGBODGCN-r%s-1e-05-250k_rndm_zinc_drugs_clean_3.txt'%rseed)
dataset_gn.append(data)
if rseed in rseed_list_draw:
count+=1
ax = fig.add_subplot(1,2,count+1)
scatter_r,trendline_r=plot_scatter.plot_scatter_with_trendline(data_r,ax,'k','o',color2='k')
line_r=plot_convcurve.plot_one_convcurve(data_r,ax,'k')
scatter2,trendline2=plot_scatter.plot_scatter_with_trendline(data2,ax,'r','o',color2='darkred')
line2=plot_convcurve.plot_one_convcurve(data2,ax,'r')
scatter_gn,trendline_gn=plot_scatter.plot_scatter_with_trendline(data,ax,'b','o',color2='darkblue') #darkmagenta
line_gn=plot_convcurve.plot_one_convcurve(data,ax,'b')
lineopt,=ax.plot([1,data.shape[0]],[max_y,max_y],'k:')
pl.ylabel("y",fontsize=fontsize)
pl.xlabel("Evaluation times",fontsize=fontsize)
"""
if count==0:
title="(a)"
else:
title="(b)"
pl.title('%s'%title,fontsize=fontsize)
"""
"""
if (count+1)==len(rseed_list):
ax.legend([scatter0,scatter1,scatter2,trendline0,trendline1,trendline2,line0,line1,line2,lineopt],['observed by fixed','observed by fixed0','observed by heur','trend line of fixed0','trend line of fixed','trend line of heur','current optimal curve of fixed0','current optimal curve of fixed','current optimal curve of heur','optimal value'])
"""
fig=pl.figure(2, figsize=(6, 4))
fig.subplots_adjust(left=0.12,bottom=0.12,right=0.98,top=0.98,wspace=0.28,hspace=0.55)
ax = fig.add_subplot(1,1,1)
line_r=plot_convcurve.plot_multi_convcurves(dataset_r,max_x,ax,'k','-',max_y=max_y)
#line0=plot_convcurve.plot_multi_convcurves(dataset0,max_x,ax,'g','-',max_y=max_y)
#line1=plot_convcurve.plot_multi_convcurves(dataset,max_x,ax,'b','-',max_y=max_y)
line2=plot_convcurve.plot_multi_convcurves(dataset2,max_x,ax,'r','-',max_y=max_y)
line_gn=plot_convcurve.plot_multi_convcurves(dataset_gn,max_x,ax,'b','-',max_y=max_y)
ax.plot([1,max_x],[max_y,max_y],'k:')
pl.ylabel("y",fontsize=fontsize)
pl.xlabel("Evaluation times",fontsize=fontsize)
pl.ylim(1.2,max_y+0.1)
pl.legend([r"Random",r"PDGBO$_{GC}$",r"PDGBO$_{GN}$"],fontsize=fontsize)
#pl.title('mean')
#ax.legend([line0,line1,line2],['fixed-0-1e-5','fixed-0.03-0.07','heur'])
#pl.savefig("scatters-fixed-0.03-0.07.pdf")
#pl.savefig("scatters-heuristic-fixed-0.03-0.07-0-1e-5.pdf")
pl.show()
|
[
"774197840@qq.com; jxcui16@mails.jlu.edu.cn"
] |
774197840@qq.com; jxcui16@mails.jlu.edu.cn
|
9f6df83ef0c85235a7b23d535ab01164ba9d8afa
|
3c437c9ad69317115d5fa9dfc72d22b2e415c0ac
|
/extra/py/setup.py
|
6accd7fbf2dd525abf3a5b7b2ade65a41b66bcb8
|
[
"MIT"
] |
permissive
|
SpartanJ/bmpanel2
|
e2bcda8eac8385f718fbaa51aa078ac816a46aee
|
9d67757d915c7b707cc48a50c8cf97aa7c7a3513
|
refs/heads/master
| 2021-07-25T19:30:36.182477
| 2021-01-12T15:02:43
| 2021-01-12T15:02:43
| 11,990,899
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 253
|
py
|
from distutils.core import setup
setup(name='bmpanel2cfg',
version='1.0',
description='Bmpanel2 Config',
author='nsf',
author_email='no.smile.face@gmail.com',
scripts=['bmpanel2cfg'],
py_modules=['bmpanel2']
)
|
[
"no.smile.face@gmail.com"
] |
no.smile.face@gmail.com
|
43d5673a4d29d135e5649d6880fe12ba92999459
|
58442c17f5b0be577258b519be7dc6cefa9b9a8a
|
/exercises/for-statement.py
|
9b68e74cce49ba7b9faed0088901cdfb23707606
|
[] |
no_license
|
LucasMonteiroi/python-course
|
ef4af5ffb53502cfdbfcb6f6e9f9e6022689823e
|
49bbea2cc41320baa1d756dd7605419249ac160b
|
refs/heads/master
| 2022-11-18T19:07:28.336186
| 2020-07-14T18:18:32
| 2020-07-14T18:18:32
| 277,085,859
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 77
|
py
|
print('My name is ')
for i in range(0, 5):
print('Machine nº' + str(i))
|
[
"lmsupport@outlook.com"
] |
lmsupport@outlook.com
|
85cb80c6b44a3d07bee31bad87c5d4102559bde4
|
5cb98473ea9972d0a9a0278cde9b6ee8264f9bac
|
/01. Jump to python/chap05/mod3_driver.py
|
5994fc46b1cc9b334106b3ade0789c1d901e08d6
|
[] |
no_license
|
libus1204/bigdata2019
|
fd85dbcd8c89db991ab5c3efa11ff85466a823f8
|
5e9a6fa2c340c1fcd2840889ba40c7b805926558
|
refs/heads/master
| 2020-04-21T10:56:33.519490
| 2019-04-15T05:28:19
| 2019-04-15T05:28:19
| 169,503,607
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 45
|
py
|
from mod2 import mod3
print(mod3.sum2(1, 2))
|
[
"libus1204@naver.com"
] |
libus1204@naver.com
|
6248744912b7244e7fe7c4a642bb338ce2d833f1
|
35cc9b6b42ec55707a0e92cf7c0d7545b5d553a4
|
/Pancake Problem Algorithm/SearchStrategy.py
|
1694f94d63d8d37101f2eac326c895e55a7298ab
|
[
"MIT"
] |
permissive
|
MrTee99/Pancake-Problem-Algorithm
|
6d11cb1ac0238dfd94e9d6834f884638e4d794b8
|
eb2474d070aa875323b174b3dadc51c68de3af82
|
refs/heads/main
| 2023-03-02T17:01:33.819341
| 2021-02-16T19:02:48
| 2021-02-16T19:02:48
| 339,476,846
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 583
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon Oct 28 00:10:56 2019
@author: MrTee
"""
from abc import abstractmethod
class SearchStrategy(object):
@abstractmethod
def __init__(self): pass
@abstractmethod
def IsEmpty(self): pass
    # This will be used to check whether the fringe list is empty or not
@abstractmethod
def AddNode(self, node): pass
# This will be used to add a node in the fringe list
@abstractmethod
def RemoveNode(self): pass
    # This will be used to remove a node from the fringe list
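# Illustrative subclass (an assumption, not part of the original project): a
# FIFO fringe yields breadth-first expansion order.
from collections import deque
class BFSStrategy(SearchStrategy):
    def __init__(self):
        self.fringe = deque()
    def IsEmpty(self):
        return len(self.fringe) == 0
    def AddNode(self, node):
        self.fringe.append(node)      # enqueue at the back
    def RemoveNode(self):
        return self.fringe.popleft()  # dequeue from the front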
|
[
"noreply@github.com"
] |
noreply@github.com
|
87b430e954513c448fbbdd1e6e42af70c656a460
|
328d20201ba9b4080c107781994281a2f05e3dba
|
/AI/similarity/Similarity.py
|
3a069679b615f0aa2a8893442e830214e1eab91a
|
[] |
no_license
|
zzpp2333/BUAA-SoftwareEngineing-Feedback
|
08c4687b79089cf58103fb6124482f2fc17e787a
|
d8fdb71e86c716e455e1fd395bd7a165380577b1
|
refs/heads/master
| 2022-05-18T09:51:55.469865
| 2019-06-02T19:39:27
| 2019-06-02T19:39:27
| 189,846,423
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,204
|
py
|
from sklearn.feature_extraction.text import TfidfVectorizer
import numpy as np
from scipy.linalg import norm
from jieba import analyse
import json
tfidf = analyse.extract_tags
class Similarity_function:
def __init__(self, stopwords_file, questions):
self.stf = stopwords_file
self.ques = questions
self.__init_getstopwords__()
def __init_getstopwords__(self):
with open(self.stf, encoding='UTF-8') as f:
self.stop_words = set([l.strip() for l in f])
def getkeywords(self, str):
#tfidf=analyse.extract_tags
tfidf = analyse.textrank
        keywords = tfidf(str, 3, False, allowPOS=('n', 'i', 'l', 'nr', 'ns', 'nt', 'nz',))
        # filter stop words without mutating the list while iterating over it
        keywords = [k for k in keywords if k not in self.stop_words]
        print("keywords:")
        for keyword in keywords:
            print(keyword + "/")
        print('\n')
def getmostsimilar(self, quesid, str):
def add_space(s):
return ' '.join(list(s))
threshold=0.5
s1=add_space(str)
lst=[]
#with open(self.quf) as f:
# self.questions = set([l.strip() for l in f])
#strJson = self.data
#print(strJson)
for q in self.ques:
if quesid == q['id']:
continue
s2 = add_space(q['title'])
cv = TfidfVectorizer(tokenizer=lambda s: s.split())
corpus = [s1, s2]
vectors = cv.fit_transform(corpus).toarray()
similarity=np.dot(vectors[0], vectors[1]) / (norm(vectors[0]) * norm(vectors[1]))
if similarity > threshold:
dic={}
dic["id"]=q['id']
dic["title"]=q['title']
lst.append(dic)
result = {"similar": []}
result["similar"]=lst
return result
#
# file = open('test.json', encoding='utf-8')
# simi_model = Similarity_function('hlp_stop_words.txt', json.load(file))
# simi_model.getkeywords("请问冒泡排序和快速排序在选用时有什么讲究?")
# print(simi_model.getmostsimilar("请问冒泡排序和快速排序在选用时有什么讲究?"))
|
[
"1132066026@qq.com"
] |
1132066026@qq.com
|
90ad81a8add0ce7a814e2dcef7127af1c6b70464
|
7b9161359e24f396481599e48e72ad3ff4ff3d6c
|
/mqttSamples/mqttShadowForProject.py
|
6fda3e299cd7e7a93b595c25ebda6fc5a35ef91a
|
[] |
no_license
|
cedriclec/postureFixer
|
9471b93583074f3648fde7fa9bad2dc3fcc4baa2
|
4e345d32a90dfe914191ecec401d63f7c9867e62
|
refs/heads/master
| 2021-08-30T01:27:57.023903
| 2017-12-15T14:05:05
| 2017-12-15T14:05:05
| 110,058,545
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,002
|
py
|
'''
/*
* Copyright 2010-2017 Amazon.com, Inc. or its affiliates. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License").
* You may not use this file except in compliance with the License.
* A copy of the License is located at
*
* http://aws.amazon.com/apache2.0
*
* or in the "license" file accompanying this file. This file is distributed
* on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either
* express or implied. See the License for the specific language governing
* permissions and limitations under the License.
*/
'''
from AWSIoTPythonSDK.MQTTLib import AWSIoTMQTTShadowClient
import logging
import time
import json
import argparse
from readSensorOnce import readSensorDistance
# Custom Shadow callback
def customShadowCallback_Update(payload, responseStatus, token):
# payload is a JSON string ready to be parsed using json.loads(...)
# in both Py2.x and Py3.x
if responseStatus == "timeout":
print("Update request " + token + " time out!")
if responseStatus == "accepted":
payloadDict = json.loads(payload)
print("~~~~~~~~~~~~~~~~~~~~~~~")
print("Update request with token: " + token + " accepted!")
#print("property: " + str(payloadDict["state"]["desired"]["property"]))
print("property: " + str(payloadDict["state"]["desired"]))
print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
if responseStatus == "rejected":
print("Update request " + token + " rejected!")
def customShadowCallback_Delete(payload, responseStatus, token):
if responseStatus == "timeout":
print("Delete request " + token + " time out!")
if responseStatus == "accepted":
print("~~~~~~~~~~~~~~~~~~~~~~~")
print("Delete request with token: " + token + " accepted!")
print("~~~~~~~~~~~~~~~~~~~~~~~\n\n")
if responseStatus == "rejected":
print("Delete request " + token + " rejected!")
# Shadow JSON schema:
#
# Name: distanceDevice
# {
# "state": {
# "Top":{
# "value":<INT VALUE>
# },
# "Bottom":{
# "value":<INT VALUE>
# }
# }
# }
#JSONexample = '{"state":{"desired":{"property":' + str(loopCount) + '}}}'
def createJsonSensorsDistance(distanceTop, distanceBottom):
    # Limit to three digits after the decimal point
distanceTop = round(distanceTop, 3)
distanceBottom = round(distanceBottom, 3)
#jsonSensors = json.dumps({"state": { "Top" : { "distance" : distanceTop}, "Bottom" : { "distance" : distanceBottom} } })
#jsonSensors = json.dumps({"state": { "Top" : distanceTop, "Bottom" : distanceBottom} })
#jsonSensors = json.dumps({"state":{"desired":{"property": distanceTop} } } )
jsonSensors = json.dumps({"state":{"desired":{ "Top" : distanceTop, "Bottom" : distanceBottom} } } )
    print(jsonSensors)
return jsonSensors
host = "all6qkgnylmz8.iot.us-west-2.amazonaws.com" #args.host
rootCAPath = "key/root-CA.crt" #args.rootCAPath
certificatePath = "key/509e2f9bc0-certificate.pem.crt" #args.certificatePath
privateKeyPath = "key/509e2f9bc0-private.pem.key" #args.privateKeyPath
useWebsocket = False #args.useWebsocket
thingName = "distanceDevice" #args.thingName
clientId = "postureUser" #args.clientId
# Configure logging
logger = logging.getLogger("AWSIoTPythonSDK.core")
logger.setLevel(logging.DEBUG)
streamHandler = logging.StreamHandler()
formatter = logging.Formatter('%(asctime)s - %(name)s - %(levelname)s - %(message)s')
streamHandler.setFormatter(formatter)
logger.addHandler(streamHandler)
# Init AWSIoTMQTTShadowClient
myAWSIoTMQTTShadowClient = None
if useWebsocket:
myAWSIoTMQTTShadowClient = AWSIoTMQTTShadowClient(clientId, useWebsocket=True)
myAWSIoTMQTTShadowClient.configureEndpoint(host, 443)
myAWSIoTMQTTShadowClient.configureCredentials(rootCAPath)
else:
myAWSIoTMQTTShadowClient = AWSIoTMQTTShadowClient(clientId)
myAWSIoTMQTTShadowClient.configureEndpoint(host, 8883)
myAWSIoTMQTTShadowClient.configureCredentials(rootCAPath, privateKeyPath, certificatePath)
# AWSIoTMQTTShadowClient configuration
myAWSIoTMQTTShadowClient.configureAutoReconnectBackoffTime(1, 32, 20)
myAWSIoTMQTTShadowClient.configureConnectDisconnectTimeout(10) # 10 sec
myAWSIoTMQTTShadowClient.configureMQTTOperationTimeout(5) # 5 sec
# Connect to AWS IoT
myAWSIoTMQTTShadowClient.connect()
# Create a deviceShadow with persistent subscription
deviceShadowHandler = myAWSIoTMQTTShadowClient.createShadowHandlerWithName(thingName, True)
# Delete shadow JSON doc
deviceShadowHandler.shadowDelete(customShadowCallback_Delete, 5)
# Update shadow in a loop
#while True:
i = 1
while i:
    # Limit to one pass to avoid sending too many messages
distanceTOP = readSensorDistance("TOP")
distanceDOWN = 0
# distanceDOWN = readSensorDistance("DOWN")
JSONPayload = createJsonSensorsDistance(distanceTOP, distanceDOWN)
print(JSONPayload)
deviceShadowHandler.shadowUpdate(JSONPayload, customShadowCallback_Update, 5)
time.sleep(5)
i = 0
|
[
"cedric.lecuyer580@gmail.com"
] |
cedric.lecuyer580@gmail.com
|
4ad61973e9a63ca5158e9c049f29b04ecff7bd8c
|
fd198117c4f7fb9f0634c6241defb4278ff1fb00
|
/ali_recommend/tianchi_liyumeng/util/3expandfeature.py
|
7957f338c0e41e33539a3f3ee336795d8f13c375
|
[] |
no_license
|
ajoeajoe/dut_tianchi_mobile_recommend_train
|
acf08d2065cc13514c86db983b49d0370a0925e4
|
ddf529bacf9530c94d1c75afc0ac326bc1f22175
|
refs/heads/master
| 2021-01-19T12:04:08.968171
| 2017-04-19T06:01:31
| 2017-04-19T06:01:31
| 88,015,889
| 0
| 0
| null | 2017-04-12T06:08:53
| 2017-04-12T06:08:53
| null |
UTF-8
|
Python
| false
| false
| 2,198
|
py
|
# coding=utf-8
"""
This script expands the feature set with division (ratio) features.
"""
import sys
# Configuration
# Input file path; must be a file that has not been normalized yet
filename = ur'..\dutir_tianchi_recommend_data.csv.mysql.samp.csv'
# End of configuration
if len(sys.argv) > 1:
src = sys.argv[1]
else:
src = filename
# Output file (note: str.rstrip strips a character set, not a suffix, so slice instead)
dst = (src[:-len('.csv')] if src.endswith('.csv') else src) + '.expand.csv'
# ui in columns 4~39, u in 44~79, i in 111~146, c in 151~186, uc in 218~253
ui_range = range(4, 40)
u_range = range(44, 80)
i_range = range(111, 147)
c_range = range(151, 187)
uc_range = range(218, 254)
def divide(a, b):
if b > 0 or b < 0:
return 1.0 * a / b
else:
return 0
dst_f = open(dst, 'w')
with open(src, 'r') as f:
header = f.readline()
tail = []
    for a, b in zip(ui_range, u_range):  # ui in 4~39, u in 44~79, i in 111~146, c in 151~186, uc in 218~253
tail.append("%d_%d" % (a, b))
for a, b in zip(uc_range, c_range):
tail.append("%d_%d" % (a, b))
for a, b in zip(ui_range, uc_range):
tail.append("%d_%d" % (a, b))
for a, b in zip(i_range, c_range):
tail.append("%d_%d" % (a, b))
for a, b in zip(uc_range, u_range):
tail.append("%d_%d" % (a, b))
header = header.rstrip('\n') + ','.join(tail) + '\n'
print 'headers:', header
length = len(header.rstrip('\n').rstrip(',').split(','))
print 'count:', length
dst_f.write(header)
t = 0
# ui in 4~39, u in 44~79, i in 111~146, c in 151~186, uc in 218~253
alist = ui_range + uc_range + ui_range + i_range + uc_range
blist = u_range + c_range + uc_range + c_range + u_range
ziplen = len(alist)
ziplist = zip(range(ziplen), alist, blist)
tail = [0] * ziplen
for tmp in f:
items = [item for item in tmp.rstrip('\n').rstrip(',').split(',')]
for i, a, b in ziplist:
tail[i] = divide(float(items[a]), float(items[b]))
dst_f.write(tmp.rstrip('\n').rstrip(','))
dst_f.write(',')
dst_f.write(','.join(map(str, tail)))
dst_f.write('\n')
t += 1
if t % 1000 == 0:
print t
dst_f.close()
print 'Done generating division features; rows written:', t
print 'Output path:', dst
|
[
"noreply@github.com"
] |
noreply@github.com
|
2527d268820449444eac5831b8e75750f7df3a50
|
61ecf9a14ac978d4d1241a7f4f453c9274e0a057
|
/Question8.py
|
5b9ac32cf545fd7de56cb1fa000ba790c2f4c8f4
|
[] |
no_license
|
nikuthapa/Python-AssignmentII
|
578f53379ce6fcc69ce7a16c23eec62da5f0ae39
|
fd46abf7801f1b408f35fa77d5d3ac7caf76a5a7
|
refs/heads/master
| 2022-11-10T21:52:42.941359
| 2020-07-05T08:33:58
| 2020-07-05T08:33:58
| 277,256,897
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 499
|
py
|
"""
Write a function, is_prime, that takes an integer and returns True if
the number is prime and False if the number is not prime.
"""
def is_prime(num1):
if num1 == 1:
return False
elif num1 == 2:
return True
else:
if num1>1:
for x in range(2, num1):
if (num1 % x) == 0:
return False
return True
else:
return False
number = int(input("Enter a number:"))
print(is_prime(number))
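# Hedged variant (illustrative, not part of the original assignment): trial
# division only needs to test divisors up to the square root of the number.
import math
def is_prime_fast(num1):
    if num1 < 2:
        return False
    for x in range(2, int(math.sqrt(num1)) + 1):
        if num1 % x == 0:
            return False
    return True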
|
[
"thapaniku12@gmail.com"
] |
thapaniku12@gmail.com
|
c5e6565284afe0245f00a0f1092a9ca3943fd09d
|
5ad4d4ff6060f67e262e42f0d6a24496efa87235
|
/7_10_dream_vacation.py
|
c71f940d6bea95d15bc43aca76eae0a503133f57
|
[] |
no_license
|
silasfelinus/PythonProjects
|
779bba4b508e2110510a1607e44c3edbf8a232ea
|
4474e03c9e21e35c100bfb524a86a35d1b59820d
|
refs/heads/master
| 2022-12-08T12:34:42.987932
| 2020-08-27T21:10:30
| 2020-08-27T21:10:30
| 290,848,514
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 503
|
py
|
responses = {}
polling_active = True
while polling_active:
    name = input("\nWhat is your name? ")
response = input("Where is your dream vacation? ")
responses[name] = response
repeat = input("Would you like to let another person respond? (yes/no) ")
if repeat == 'no':
polling_active = False
#Polling done, print results
print("\n--- Poll results ---")
for name, response in responses.items():
print(name + " would like to visit " + response + ".")
end = input("Press Enter to end: ")
|
[
"silasfelinus@gmail.com"
] |
silasfelinus@gmail.com
|
30ede00e9d10dd52241df086f9b16f32c6aa467a
|
6b01a1743bb58d079f957971d83209896ba45369
|
/budgets/migrations/0003_auto_20190624_2108.py
|
f4b30283890df33fbbeb0c350028a1898c53d289
|
[
"MIT"
] |
permissive
|
saraxboyd/BalancedBudget
|
a0bf0645bfebd8414f5655751c153df4177cda20
|
b5509ef1ee482c9e766dd5749bf19b39e23b6e78
|
refs/heads/master
| 2020-06-10T21:53:35.772910
| 2019-07-10T02:00:25
| 2019-07-10T02:00:25
| 193,762,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 361
|
py
|
# Generated by Django 2.1.7 on 2019-06-24 21:08
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('budgets', '0002_budget_current'),
]
operations = [
migrations.RenameField(
model_name='category',
old_name='category',
new_name='name',
),
]
|
[
"saraxboyd@gmail.com"
] |
saraxboyd@gmail.com
|
67db908fcf4ac7295182923fcb904759672d6542
|
7fc1a2dc206f32974610c316f3d35eeea08fd669
|
/AIEngine/transformation/medi_mapping.py
|
708a3bffad11071d6bc7bcb57324a84d4c0d0f0b
|
[] |
no_license
|
WendellTeam/AICoreEngine
|
8ab30e2eca9e1ca34daceb5683203bfc8efb0520
|
2a61b6a41177ca4197614cdaeda86d5decbf85dc
|
refs/heads/master
| 2023-05-10T03:27:56.678992
| 2021-05-28T13:17:03
| 2021-05-28T13:17:03
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,818
|
py
|
from transformation.medi_file_manipulation import file_manipulation,all_excel_move_to_archive,create_path,createfolder,move_file,get_all_File
from transformation.medi_medi import Medi_Mos
from transformation.medi_bdx_manipulation import bdx_automation
from transformation.medi_update_dcm import update_to_dcm
from transformation.medi_generate_dc import generate_dc
from pandas.io.json import json_normalize
from utils.Session import session
from utils.guid import get_guid
from utils.audit_trail import audit_log
from utils.logging import logging
from extraction.marc.authenticate_marc_access import get_marc_access
import pandas as pd
import traceback,os
from utils.notification import send
from datetime import datetime
import xlrd
uploadfile = r"C:\Users\CHUNKIT.LEE\Desktop\test"
disbursementMaster = r"C:\Users\CHUNKIT.LEE\Desktop\test\Disbursement Claims Running No 2020.xls"
#disbursementClaim
disbursementClaim = r"C:\Users\CHUNKIT.LEE\Desktop\test\MCLXXXXX.xls"
# Bordereaux Listing
bordereauxListing = r"C:\Users\CHUNKIT.LEE\Desktop\test\AETNA11324-2019-09 WEB.xls"
def medi_mapping(disbursementClaim,bordereauxListing):
wb = xlrd.open_workbook(disbursementClaim)
df = pd.read_excel(wb)
df.to_excel(r"C:\Users\CHUNKIT.LEE\Desktop\test\testw.xlsx")
getDC = df.iloc[28, 3]
wb = xlrd.open_workbook(disbursementMaster)
df1 = pd.read_excel(wb,skiprows=[0])
new_header = df1.iloc[0] #grab the first row for the header
df1 = df1[1:] #take the data less the header row
df1.columns = new_header #set the header row as the df header
    mybordnum = 'TPAAY-0001-202001'
    # check whether this bordereaux number already exists in the master sheet
    exists = (df1['Bord No'] == mybordnum).any()
    if exists:
        print('exists')
    else:
        print('not exists')
|
[
"helo.aizek@gmail.com"
] |
helo.aizek@gmail.com
|
651cce70a1b9079a9d3c2ff36e374e7e07518a62
|
aa1b99de303ae4c092f77dab0c928e4962d14f3e
|
/project/models.py
|
7f0fefacc983dffec833e44816e98ce87fc9b00f
|
[] |
no_license
|
dionissqq/test
|
980dd260c70ea69463a31265b77f2d6661f9d05f
|
3c547a0ef6a8d96cbe6af862dfcdc7e372ef60dd
|
refs/heads/main
| 2023-03-12T21:52:51.720926
| 2021-02-11T10:36:46
| 2021-02-11T10:36:46
| 338,022,911
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 220
|
py
|
from . import db
# class User():
# email = db.StringField()
# password = db.StringField()
# name = db.StringField()
# class Token(db.Document):
# value = db.StringField()
# userID = db.StringField()
|
[
"dendenysyk@gmail.com"
] |
dendenysyk@gmail.com
|
87d413d7af90828f2782af0f4e847016caecc553
|
b403c7fe56209472855dff451f0b6283d5471008
|
/Supplemental_Material/PythonProjects/myFunctions/isItOdd.py
|
14037a63dbb500f808f9316903acca319e7bc678
|
[] |
no_license
|
Sandbox4KidsTM/Python_Basics
|
842bde52796896e913fdb5cc349034c52092555f
|
68c95547ec1567958fc8069e6a4bb119e436211a
|
refs/heads/master
| 2020-03-23T01:06:29.363196
| 2018-08-10T04:32:58
| 2018-08-10T04:32:58
| 140,901,128
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 173
|
py
|
# checks whether a user-entered number is odd
a = int(input("enter a num: "))
if a % 2 == 0:  # % is the modulus (remainder) operator
print("number is EVEN")
else:
print("number is ODDDDD")
|
[
"mitchslabrenz@gmail.com"
] |
mitchslabrenz@gmail.com
|
f18208cbe2c56461d40b39d71cffbfaf1b0fee2b
|
6af6a6fb7d0759be524f2592a470d91947e0e2bc
|
/RandomForest/src/dataset/sp_010_1e2.py
|
699dc20994db4aa94c5f33202f7ef75e147f7653
|
[] |
no_license
|
wasit7/ImageSearch
|
5094e56db46af0d05cf76e5b5110c5b92d5198fd
|
3cd7ab3fa3c89873c0b49b1311ed5e7c5f4b8939
|
refs/heads/master
| 2020-05-17T01:12:24.616821
| 2015-08-10T07:26:44
| 2015-08-10T07:26:44
| 22,672,379
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,887
|
py
|
"""
Contain class that provide spiral dataset to random forest.
@author: Krerkkiat
updated by Wasit
"""
import numpy as np
class SpiralDataset:
'''
Provide Spiral Dataset to Random Forest
'''
def __init__(self, clmax, spc):
'''
Initial routine.
Parameter(s):
clmax: int - Maximum number of class.
spc: int - Size of data per class per client.
'''
self.clmax = clmax # class max of dataset
self.spc = spc # q size per class per client
self.dimension = 2 # it is axis x and y
        self.I = np.zeros([self.dimension, 0], dtype=np.float) # np.ndarray row vector, holds features
self.L = np.array([], dtype=np.int) # np.array, hold label
# create I
for x in range(self.clmax):
theta = np.linspace(0, 2*np.pi, self.spc)+np.random.randn(self.spc)*0.4*np.pi/clmax + 2*np.pi*x/clmax
r = np.linspace(0.1, 1, self.spc)
self.I = np.append(self.I, [r*np.cos(theta), r*np.sin(theta)], axis=1)
            self.L = np.append(self.L, np.ones(self.spc, dtype=np.int)*x, axis=0)  # labels are 1-D, so append along axis 0
def getL(self, x):
'''
        Lookup database for a label of data at x.
Parameter(s):
x: int or numpy.array - Index or indexes of data that you need to get label.
Return(s):
label: int - Label of data at x.
'''
return self.L[x]
def getI(self, theta, x):
'''
Lookup table by theta for tau (splitting parameter or threshold) at index x.
Parameter(s):
theta: int - theta that will use for lookup.
x: int - Index of data.
Return(s):
tau: float - tau or raw data of data at index x with dimension theta.
'''
return self.I[theta, x]
def getX(self):
'''
Make a list of index that will use when initial root node at Client side
Return(s):
idx_list: list - List of index of data.
'''
return np.arange(0, self.clmax * self.spc)
def getParam(self, X):
'''
Random theta and then get tau from that randomed theta at index x.
Parameter(s):
x: list - List of index that will use to get tau.
Return(s):
theta: list - List of randomed theta.
tau: list - List of tau with lookup by theta and x.
'''
theta = np.random.randint(self.dimension, size=len(X))
tau = self.getI(theta, X)
return theta, tau
def __str__(self):
'''
        Nothing special, use when debugging.
Return:
txt: str - String that represent this class.
'''
return 'clmax: {cm}, spc: {ql}'.format(cm=self.clmax, ql=self.spc)
if __name__ == '__main__':
clmax = 10
spc = int(1e2)
dataset = SpiralDataset(clmax, spc)
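    # Illustrative driver (an assumption, not part of the original project):
    # evaluate one candidate split over the whole index set.
    X = dataset.getX()                  # indexes of all clmax * spc points
    thetas, taus = dataset.getParam(X)  # one random axis and threshold per point
    theta0, tau0 = thetas[0], taus[0]   # take the first candidate split
    left = X[dataset.getI(theta0, X) < tau0]
    print('left child would receive %d of %d points' % (len(left), len(X)))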
|
[
"wasit7@gmail.com"
] |
wasit7@gmail.com
|
09260fd8d838c70817614bb78ecb29b4e3fa11f3
|
57fb2f548e594b1ac0fe56d04f7efdf9f48c86ff
|
/Projects/feature_selection/find_signature.py
|
2a61c7487162db1e7d93f1c5c1afb0d7d86ed733
|
[] |
no_license
|
altrome/UD120
|
63d68640ff83b180b25084b65f1078239713faa9
|
9c6696d59729804a8b3e392fff2eba9a0e9787cc
|
refs/heads/master
| 2021-01-10T14:28:25.337822
| 2016-01-21T10:21:54
| 2016-01-21T10:21:54
| 49,486,883
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,907
|
py
|
#!/usr/bin/python
import pickle
import numpy
numpy.random.seed(42)
### The words (features) and authors (labels), already largely processed.
### These files should have been created from the previous (Lesson 10)
### mini-project.
words_file = "../text_learning/your_word_data.pkl"
authors_file = "../text_learning/your_email_authors.pkl"
word_data = pickle.load( open(words_file, "r"))
authors = pickle.load( open(authors_file, "r") )
### test_size is the percentage of events assigned to the test set (the
### remainder go into training)
### feature matrices changed to dense representations for compatibility with
### classifier functions in versions 0.15.2 and earlier
from sklearn import cross_validation
features_train, features_test, labels_train, labels_test = cross_validation.train_test_split(word_data, authors, test_size=0.1, random_state=42)
from sklearn.feature_extraction.text import TfidfVectorizer
vectorizer = TfidfVectorizer(sublinear_tf=True, max_df=0.5,
stop_words='english')
features_train = vectorizer.fit_transform(features_train)
features_test = vectorizer.transform(features_test).toarray()
### a classic way to overfit is to use a small number
### of data points and a large number of features;
### train on only 150 events to put ourselves in this regime
features_train = features_train[:150].toarray()
labels_train = labels_train[:150]
### your code goes here
from sklearn import tree
clf = tree.DecisionTreeClassifier()
clf = clf.fit(features_train, labels_train)
pred = clf.predict(features_test)
from sklearn.metrics import accuracy_score
acc = accuracy_score(pred, labels_test)
def submitAccuracy():
return acc
#print len(features_test)
print(submitAccuracy())
features_imp = clf.feature_importances_
cnt = 0
for feat in features_imp:
if feat > 0.2:
print feat, vectorizer.get_feature_names()[cnt]
cnt += 1
|
[
"alextrejo@onsanity.com"
] |
alextrejo@onsanity.com
|
4ac2070081bd649a4703983d4b5dd9f1a05f510b
|
1964d5bbb0cf82e24c485ffffe1b692033e500b5
|
/vpg.py
|
27ab60814a920af526ecdf06eae267e869d399f1
|
[] |
no_license
|
rashmi-iyer/RISE
|
264dc4a4736a85d9d38f5c0a3f37b325061cd69c
|
5ffff3559fd2c9d5e5306312b441cb6c8ab9286a
|
refs/heads/master
| 2020-06-28T00:51:26.959856
| 2019-08-03T21:37:08
| 2019-08-03T21:37:08
| 200,098,583
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19,196
|
py
|
import numpy as np
import tensorflow as tf
import gym
import gym_maze
import time
import spinup.algos.vpg.core as core
from spinup.utils.logx import EpochLogger
from spinup.utils.mpi_tf import MpiAdamOptimizer, sync_all_params
from spinup.utils.mpi_tools import mpi_fork, mpi_avg, proc_id, mpi_statistics_scalar, num_procs
class VPGBuffer:
"""
A buffer for storing trajectories experienced by a VPG agent interacting
with the environment, and using Generalized Advantage Estimation (GAE-Lambda)
for calculating the advantages of state-action pairs.
"""
def __init__(self, obs_dim, act_dim, size, gamma=0.99, lam=0.95, hierstep=3):
self.obs_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32)
self.init_obs_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32)
self.goal_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32)
self.count_buf = np.zeros(core.combined_shape(size, act_dim), dtype=np.float32)
self.act_buf = np.zeros(core.combined_shape(size, act_dim), dtype=np.float32)
self.adv_buf = np.zeros(size, dtype=np.float32)
self.rew_buf = np.zeros(size, dtype=np.float32)
self.ret_buf = np.zeros(size, dtype=np.float32)
self.val_buf = np.zeros(size, dtype=np.float32)
self.logp_buf = np.zeros(size, dtype=np.float32)
self.obshi_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32)
self.acthi_buf = np.zeros(core.combined_shape(size, obs_dim), dtype=np.float32)
self.advhi_buf = np.zeros(size, dtype=np.float32)
self.rewhi_buf = np.zeros(size, dtype=np.float32)
self.rethi_buf = np.zeros(size, dtype=np.float32)
self.valhi_buf = np.zeros(size, dtype=np.float32)
self.logphi_buf = np.zeros(size, dtype=np.float32)
self.gamma, self.lam, self.hierstep = gamma, lam, hierstep
self.ptr, self.path_start_idx, self.max_size = 0, 0, size
self.ptrhi, self.path_start_idxhi = 0, 0
def store(self, obs, init_obs, goal, count, act, rew, val, logp):
"""
Append one timestep of agent-environment interaction to the buffer.
"""
assert self.ptr < self.max_size # buffer has to have room so you can store
self.obs_buf[self.ptr] = obs
self.init_obs_buf[self.ptr] = init_obs
self.goal_buf[self.ptr] = goal
self.count_buf[self.ptr] = count
self.act_buf[self.ptr] = act
self.rew_buf[self.ptr] = rew
self.val_buf[self.ptr] = val
self.logp_buf[self.ptr] = logp
self.ptr += 1
def storehi(self, obshi, acthi, rewhi, valhi, logphi):
assert self.ptrhi < self.max_size
self.obshi_buf[self.ptrhi] = obshi
self.acthi_buf[self.ptrhi] = acthi
self.rewhi_buf[self.ptrhi] = rewhi
self.valhi_buf[self.ptrhi] = valhi
self.logphi_buf[self.ptrhi] = logphi
self.ptrhi += 1
def finish_path(self, last_val=0):
path_slice = slice(self.path_start_idx, self.ptr)
rews = np.append(self.rew_buf[path_slice], last_val)
vals = np.append(self.val_buf[path_slice], last_val)
# the next two lines implement GAE-Lambda advantage calculation
deltas = rews[:-1] + self.gamma * vals[1:] - vals[:-1]
self.adv_buf[path_slice] = core.discount_cumsum(deltas, self.gamma * self.lam)
# the next line computes rewards-to-go, to be targets for the value function
self.ret_buf[path_slice] = core.discount_cumsum(rews, self.gamma)[:-1]
self.path_start_idx = self.ptr
def finish_path_hi(self, last_val=0):
path_slice = slice(self.path_start_idxhi, self.ptrhi)
rews = np.append(self.rewhi_buf[path_slice], last_val)
vals = np.append(self.valhi_buf[path_slice], last_val)
# the next two lines implement GAE-Lambda advantage calculation
deltas = rews[:-1] + self.gamma ** self.hierstep * vals[1:] - vals[:-1]
self.advhi_buf[path_slice] = core.discount_cumsum(deltas, self.gamma * self.lam)
# the next line computes rewards-to-go, to be targets for the value function
self.rethi_buf[path_slice] = core.discount_cumsum(rews, self.gamma ** self.hierstep)[:-1]
self.path_start_idxhi = self.ptrhi
def get(self):
"""
Call this at the end of an epoch to get all of the data from
the buffer, with advantages appropriately normalized (shifted to have
mean zero and std one). Also, resets some pointers in the buffer.
"""
assert self.ptr == self.max_size # buffer has to be full before you can get
self.ptr, self.path_start_idx = 0, 0
# the next two lines implement the advantage normalization trick
#adv_mean = np.mean(self.adv_buf)
#adv_std = np.std(self.adv_buf)
adv_mean, adv_std = mpi_statistics_scalar(self.adv_buf)
self.adv_buf = (self.adv_buf - adv_mean) / adv_std
return [self.obs_buf, self.init_obs_buf, self.goal_buf, np.expand_dims(self.count_buf, axis=1),
self.act_buf, self.adv_buf, self.ret_buf, self.logp_buf]
def gethi(self):
"""
Call this at the end of an epoch to get all of the data from
the buffer, with advantages appropriately normalized (shifted to have
mean zero and std one). Also, resets some pointers in the buffer.
"""
# the next two lines implement the advantage normalization trick
#adv_mean = np.mean(self.advhi_buf[:self.ptrhi])
#adv_std = np.std(self.advhi_buf[:self.ptrhi])
adv_mean, adv_std = mpi_statistics_scalar(self.advhi_buf)
int_num = int(self.ptrhi)
self.ptrhi, self.path_start_idxhi = 0, 0
return [self.obshi_buf[:int_num], self.acthi_buf[:int_num],
(self.advhi_buf[:int_num] - adv_mean) / adv_std, self.rethi_buf[:int_num], self.logphi_buf[:int_num]]
"""
Vanilla Policy Gradient
(with GAE-Lambda for advantage estimation)
"""
def vpg(env_fn, actor_critic=core.mlp_actor_critic, ac_kwargs=dict(), seed=0,
steps_per_epoch=4000, epochs=50, gamma=0.99, pi_lr=3e-4,
vf_lr=1e-3, train_v_iters=80, lam=0.97, max_ep_len=1000,
logger_kwargs=dict(), save_freq=10, c=3):
"""
Args:
env_fn : A function which creates a copy of the environment.
The environment must satisfy the OpenAI Gym API.
actor_critic: A function which takes in placeholder symbols
for state, ``x_ph``, and action, ``a_ph``, and returns the main
outputs from the agent's Tensorflow computation graph:
=========== ================ ======================================
Symbol Shape Description
=========== ================ ======================================
``pi`` (batch, act_dim) | Samples actions from policy given
| states.
``logp`` (batch,) | Gives log probability, according to
| the policy, of taking actions ``a_ph``
| in states ``x_ph``.
``logp_pi`` (batch,) | Gives log probability, according to
| the policy, of the action sampled by
| ``pi``.
``v`` (batch,) | Gives the value estimate for states
| in ``x_ph``. (Critical: make sure
| to flatten this!)
=========== ================ ======================================
ac_kwargs (dict): Any kwargs appropriate for the actor_critic
function you provided to VPG.
seed (int): Seed for random number generators.
steps_per_epoch (int): Number of steps of interaction (state-action pairs)
for the agent and the environment in each epoch.
epochs (int): Number of epochs of interaction (equivalent to
number of policy updates) to perform.
gamma (float): Discount factor. (Always between 0 and 1.)
pi_lr (float): Learning rate for policy optimizer.
vf_lr (float): Learning rate for value function optimizer.
train_v_iters (int): Number of gradient descent steps to take on
value function per epoch.
lam (float): Lambda for GAE-Lambda. (Always between 0 and 1,
close to 1.)
max_ep_len (int): Maximum length of trajectory / episode / rollout.
        c (int): hierarchical step length.
"""
logger = EpochLogger(**logger_kwargs)
logger.save_config(locals())
seed += 10000 * proc_id()
print("HELLLLLOOOOOOOOOOOOOOOOO")
print(seed)
tf.set_random_seed(seed)
np.random.seed(seed)
env = env_fn()
obs_dim = env.observation_space.shape
act_dim = env.action_space.shape
# Share information about action space with policy architecture
#ac_kwargs['action_space'] = env.action_space
# Inputs to computation graph
x_ph, x_initial_ph, g_ph, a_ph = core.placeholders_from_spaces(env.observation_space,
env.observation_space,
env.observation_space,
env.action_space)
count_ph = core.placeholder(1)
adv_ph, ret_ph, logp_old_ph = core.placeholders(None, None, None)
xhi_ph, ahi_ph = core.placeholders_from_spaces(env.observation_space, env.observation_space)
advhi_ph, rethi_ph, logphi_old_ph = core.placeholders(None, None, None)
# Main outputs from computation graph
x_concat = tf.concat([x_ph, x_initial_ph, g_ph, count_ph], 1)
pi, logp, logp_pi, v = actor_critic(x_concat, a_ph, action_space=env.action_space)
pihi, logphi, logphi_pi, vhi = actor_critic(xhi_ph, ahi_ph, action_space=env.observation_space)
# Need all placeholders in *this* order later (to zip with data from buffer)
all_phs = [x_ph, x_initial_ph, g_ph, count_ph, a_ph, adv_ph, ret_ph, logp_old_ph]
allhi_phs = [xhi_ph, ahi_ph, advhi_ph, rethi_ph, logphi_old_ph]
# Every step, get: action, value, and logprob
get_action_ops = [pi, v, logp_pi]
gethi_action_ops = [pihi, vhi, logphi_pi]
# Experience buffer
local_steps_per_epoch = int(steps_per_epoch / num_procs())
buf = VPGBuffer(obs_dim, act_dim, local_steps_per_epoch, gamma, lam)
# Count variables
var_counts = tuple(core.count_vars(scope) for scope in ['pi', 'v'])
logger.log('\nNumber of parameters: \t pi: %d, \t v: %d\n'%var_counts)
# VPG objectives
pi_loss = -tf.reduce_mean(logp * adv_ph)
v_loss = tf.reduce_mean((ret_ph - v) ** 2)
pihi_loss = -tf.reduce_mean(logphi * advhi_ph)
vhi_loss = tf.reduce_mean((rethi_ph - vhi) ** 2)
# Info (useful to watch during learning)
approx_kl = tf.reduce_mean(logp_old_ph - logp) # a sample estimate for KL-divergence, easy to compute
approx_ent = tf.reduce_mean(-logp) # a sample estimate for entropy, also easy to compute
approx_klhi = tf.reduce_mean(logphi_old_ph - logphi) # a sample estimate for KL-divergence, easy to compute
approx_enthi = tf.reduce_mean(-logphi) # a sample estimate for entropy, also easy to compute
# Optimizers
train_pi = tf.train.AdamOptimizer(learning_rate=pi_lr).minimize(pi_loss)
train_v = tf.train.AdamOptimizer(learning_rate=vf_lr).minimize(v_loss)
trainhi_pi = tf.train.AdamOptimizer(learning_rate=pi_lr).minimize(pihi_loss)
trainhi_v = tf.train.AdamOptimizer(learning_rate=vf_lr).minimize(vhi_loss)
sess = tf.Session()
sess.run(tf.global_variables_initializer())
# Sync params across processes
sess.run(sync_all_params())
# Setup model saving
logger.setup_tf_saver(sess, inputs={'x': x_ph}, outputs={'pi': pi, 'v': v})
def update():
inputs = {k: valbuf for k, valbuf in zip(all_phs, buf.get())}
inputshi = {k: valbuf for k, valbuf in zip(allhi_phs, buf.gethi())}
pi_l_old, v_l_old, ent = sess.run([pi_loss, v_loss, approx_ent], feed_dict=inputs)
pihi_l_old, vhi_l_old, enthi = sess.run([pihi_loss, vhi_loss, approx_enthi], feed_dict=inputshi)
# Policy gradient step
sess.run(train_pi, feed_dict=inputs)
sess.run(trainhi_pi, feed_dict=inputshi)
# Value function learning
for _ in range(train_v_iters):
sess.run(train_v, feed_dict=inputs)
sess.run(trainhi_v, feed_dict=inputshi)
# Log changes from update
pi_l_new, v_l_new, kl = sess.run([pi_loss, v_loss, approx_kl], feed_dict=inputs)
pihi_l_new, vhi_l_new, klhi = sess.run([pihi_loss, vhi_loss, approx_klhi], feed_dict=inputshi)
logger.store(LossPi=pi_l_old, LossV=v_l_old,
KL=kl, Entropy=ent,
DeltaLossPi=(pi_l_new - pi_l_old),
DeltaLossV=(v_l_new - v_l_old),
LossPiHi=pihi_l_old, LossVHi=vhi_l_old,
KLHi=klhi, EntropyHi=enthi,
DeltaLossPiHi=(pihi_l_new - pihi_l_old),
DeltaLossVHi=(vhi_l_new - vhi_l_old))
start_time = time.time()
reset = env.reset()
o, x_init, count, r, rhi, r_intr, d, ep_ret, ep_len = reset, reset, 0, 0, 0, 0, False, 0, 0
g, vhi_t, logphi_t = sess.run(gethi_action_ops, feed_dict={xhi_ph: x_init.reshape(1, -1)})
buf.storehi(x_init, np.squeeze(g, axis=0), rhi, vhi_t, logphi_t)
# Main loop: collect experience in env and update/log each epoch
for epoch in range(epochs):
for t in range(steps_per_epoch):
a, v_t, logp_t = sess.run(get_action_ops,
feed_dict={x_ph: o.reshape(1, -1), x_initial_ph: x_init.reshape(1, -1),
g_ph: g.reshape(1, -1), count_ph: np.expand_dims([count%c], axis=1)})
buf.store(o, x_init, g, count%c, a, r_intr, v_t, logp_t)
logger.store(VVals=v_t)
o, r, d, _ = env.step(a[0])
ep_ret += r
ep_len += 1
r_intr = -np.linalg.norm(o - g, ord=2) #low level reward calculation via simple euclidian distance
rhi += r
count += 1
if count % c == 0 and buf.ptrhi < buf.max_size:
buf.finish_path(r_intr)
x_init = o
g, vhi_t, logpihi_t = sess.run(gethi_action_ops, feed_dict={xhi_ph: x_init.reshape(1, -1)})
buf.storehi(x_init, np.squeeze(g, axis=0), rhi, vhi_t, logpihi_t)
logger.store(VValsHi=vhi_t)
rhi = 0
terminal = d or (count == max_ep_len)
if terminal or (t == local_steps_per_epoch - 1):
                if not terminal:
                    print('Warning: trajectory cut off by epoch at %d steps.' % count)
# if trajectory didn't reach terminal state, bootstrap value target
last_val = r_intr if d else sess.run(v,
feed_dict={x_ph: o.reshape(1, -1), x_initial_ph: x_init.reshape(1, -1),
g_ph: g.reshape(1, -1),
count_ph: np.expand_dims([count%c], axis=1)})
if count%c != 0:
buf.finish_path(last_val)
if terminal:
# only save EpRet / EpLen if trajectory finished
logger.store(EpRet=ep_ret, EpLen=ep_len)
lasthi_val = rhi if d else sess.run(vhi, feed_dict={xhi_ph: o.reshape(1, -1)})
buf.finish_path_hi(lasthi_val)
reset = env.reset()
o, x_init, count, r, rhi, r_intr, d, ep_ret, ep_len = reset, reset, 0, 0, 0, 0, False, 0, 0
g, vhi_t, logpihi_t = sess.run(gethi_action_ops, feed_dict={xhi_ph: x_init.reshape(1, -1)})
buf.storehi(x_init, np.squeeze(g, axis=0), rhi, vhi_t, logpihi_t)
logger.store(VValsHi=vhi_t)
# Save model
if (epoch % save_freq == 0) or (epoch == epochs-1):
logger.save_state({'env': env}, None)
# Perform VPG update!
update()
# Log info about epoch
logger.log_tabular('Epoch', epoch)
logger.log_tabular('EpRet', with_min_and_max=True)
logger.log_tabular('EpLen', average_only=True)
logger.log_tabular('VVals', with_min_and_max=True)
logger.log_tabular('VValsHi', with_min_and_max=True)
logger.log_tabular('TotalEnvInteracts', (epoch+1)*steps_per_epoch)
logger.log_tabular('LossPi', average_only=True)
logger.log_tabular('LossV', average_only=True)
logger.log_tabular('DeltaLossPi', average_only=True)
logger.log_tabular('DeltaLossV', average_only=True)
logger.log_tabular('Entropy', average_only=True)
logger.log_tabular('KL', average_only=True)
logger.log_tabular('LossPiHi', average_only=True)
logger.log_tabular('LossVHi', average_only=True)
logger.log_tabular('DeltaLossPiHi', average_only=True)
logger.log_tabular('DeltaLossVHi', average_only=True)
logger.log_tabular('EntropyHi', average_only=True)
logger.log_tabular('KLHi', average_only=True)
logger.log_tabular('Time', time.time()-start_time)
logger.dump_tabular()
if __name__ == '__main__':
import argparse
parser = argparse.ArgumentParser()
parser.add_argument('--env', type=str, default='MountainCar-v0')
parser.add_argument('--hid', type=int, default=64)
parser.add_argument('--l', type=int, default=2)
parser.add_argument('--gamma', type=float, default=0.99)
parser.add_argument('--seed', '-s', type=int, default=0)
parser.add_argument('--cpu', type=int, default=4)
parser.add_argument('--steps', type=int, default=4000)
parser.add_argument('--epochs', type=int, default=50)
parser.add_argument('--exp_name', type=str, default='vpg')
    parser.add_argument('--c', type=int, default=3)
args = parser.parse_args()
mpi_fork(args.cpu) # run parallel code with mpi
from spinup.utils.run_utils import setup_logger_kwargs
logger_kwargs = setup_logger_kwargs(args.exp_name, args.seed)
vpg(lambda: gym.make(args.env), actor_critic=core.mlp_actor_critic,
ac_kwargs=dict(hidden_sizes=[args.hid] * args.l), gamma=args.gamma,
seed=args.seed, steps_per_epoch=args.steps, epochs=args.epochs, logger_kwargs=logger_kwargs,
c=args.c)
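# Example invocation (a sketch; assumes the spinup package, gym, and MPI are
# installed, and that this script is saved as e.g. vpg_hier.py -- the filename
# is illustrative):
#   python vpg_hier.py --env MountainCar-v0 --cpu 4 --steps 4000 --epochs 50 --c 3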
|
[
"noreply@github.com"
] |
noreply@github.com
|
c7ffad90b31577fe2d51742bfeff3c95081eaeb5
|
b875a17848ab0a604c54556e2afe18a83650c4b0
|
/api/views.py
|
13faed2eff31b5d8363ac0dae1e51ad9015a1df0
|
[] |
no_license
|
isaacampah222/alma_server
|
2cd62e32fd2700cc7d5af64f1abfd3960f947234
|
3b7acb2b48291a9528e62bbf1204e14b718891be
|
refs/heads/master
| 2022-11-24T09:45:52.767773
| 2020-07-30T18:53:12
| 2020-07-30T18:53:12
| 283,631,620
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,823
|
py
|
from rest_framework.generics import GenericAPIView
from rest_framework import mixins, permissions
from rest_framework.response import Response
from knox.models import AuthToken
from django.contrib.auth import login
from rest_framework.authtoken.serializers import AuthTokenSerializer
from .models import SingleOrder
from knox.views import LoginView as KnoxLoginView
from .serializers import SingleOrderSerializer,UserSerializer,RegisterSerializer
class SingleOrderView(GenericAPIView, mixins.CreateModelMixin, mixins.ListModelMixin,
                      mixins.RetrieveModelMixin, mixins.DestroyModelMixin):
    # CRUD endpoint for SingleOrder: GET lists (or retrieves by id), POST creates, DELETE removes
queryset = SingleOrder.objects.all()
serializer_class = SingleOrderSerializer
lookup_field = 'id'
    def get(self, request, id=None):
if id:
return self.retrieve(request, id)
else:
return self.list(request)
def post(self, request):
return self.create(request)
def delete(self, request, id):
return self.destroy(request, id)
class RegisterAPI(GenericAPIView):
serializer_class = RegisterSerializer
def post(self, request, *args, **kwargs):
        serializer = self.get_serializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.save()
return Response({
"user": UserSerializer(user, context = self.get_serializer_context()).data,
"token": AuthToken.objects.create(user)[1]
})
class LoginAPI(KnoxLoginView):
permission_classes = (permissions.AllowAny, )
    def post(self, request, format=None):
        serializer = AuthTokenSerializer(data=request.data)
serializer.is_valid(raise_exception=True)
user = serializer.validated_data['user']
login(request, user)
return super(LoginAPI, self).post(request, format=None)
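# A minimal sketch of how these views might be routed (assumptions: the project
# exposes an /api/ prefix and uses knox's stock LogoutView; the URL names below
# are illustrative, not taken from this repo):
#
# from django.urls import path
# from knox import views as knox_views
# from .views import SingleOrderView, RegisterAPI, LoginAPI
#
# urlpatterns = [
#     path('api/orders/', SingleOrderView.as_view()),
#     path('api/orders/<int:id>/', SingleOrderView.as_view()),
#     path('api/auth/register/', RegisterAPI.as_view()),
#     path('api/auth/login/', LoginAPI.as_view()),
#     path('api/auth/logout/', knox_views.LogoutView.as_view()),
# ]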
|
[
"ijampah@localhost.localdomain"
] |
ijampah@localhost.localdomain
|
96568d5b7e0d8b7c45731111f658cc000e273b1a
|
8885b07ee5fd98bbd9d0d9232f6539816879faca
|
/reinforcement/widgets/tests/agents/random/test_random_agent.py
|
def506e5da3ff27fc4055ff3772f4ba98783e3bd
|
[] |
no_license
|
gbaptista/orange3-reinforcement
|
d66db1eefd9914fb33b54f0f014a7d359c501be5
|
e08910899d110b75afa99f25e01ced87ff94a30b
|
refs/heads/master
| 2020-03-28T21:11:55.181459
| 2018-10-05T23:11:35
| 2018-10-05T23:11:35
| 149,137,134
| 5
| 0
| null | 2018-10-06T00:37:26
| 2018-09-17T14:21:52
|
Python
|
UTF-8
|
Python
| false
| false
| 414
|
py
|
from ....agents.random.random_agent import RandomAgent
def test_train_episode():
environment_id = 'FrozenLake-v0'
random_agent = RandomAgent(environment_id)
assert random_agent.name == 'Random Agent'
result_keys = list(random_agent.train_episode().keys())
assert result_keys == ['steps_to_finish',
'total_reward',
'last_action_info']
|
[
"guilhermebaptistasilva@gmail.com"
] |
guilhermebaptistasilva@gmail.com
|
e8187f4393ff43fc5d05a2a836249ecab831a4e3
|
2ad1b46515e07561fc45a724e0c890e073bd4553
|
/my_max.py
|
995b5a761346509e17bb47f38be67cc35510e452
|
[] |
no_license
|
MariaKrepko/my_python
|
4b96910de31b43ab3f0d55de69735cbab5b5ff9d
|
36663a6608deaa57dd513d1da34c4dad1d109198
|
refs/heads/master
| 2020-04-23T22:47:57.191051
| 2019-05-28T22:22:34
| 2019-05-28T22:22:34
| 171,514,159
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 86
|
py
|
# input() returns strings, and "9" > "10" lexicographically;
# convert to int so the numbers compare numerically.
num1 = int(input())
num2 = int(input())
if num1 > num2:
    print(num1)
else:
    print(num2)
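# Equivalent one-liner using the built-in: print(max(num1, num2))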
|
[
"noreply@github.com"
] |
noreply@github.com
|
8a163c061b4c1c372383efd313451dd7bc59d983
|
0f0440c398ce75044c0e54b12d6c0bc5d1e7a167
|
/sitepr/votacao/migrations/0001_initial.py
|
f6ce911ba968c07fa36a303a8f23ddb43814cd6b
|
[] |
no_license
|
ElSulphur/DIAM
|
8511b15681861c5198479bfdf18455656a5b60ba
|
726f4df785ee5b7b6c58d961b4bb6621de55052f
|
refs/heads/master
| 2023-03-31T20:23:38.345012
| 2021-04-10T22:57:46
| 2021-04-10T22:57:46
| 356,336,623
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,077
|
py
|
# Generated by Django 3.1.7 on 2021-03-18 10:26
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Questao',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('questao_texto', models.CharField(max_length=200)),
('pub_data', models.DateTimeField(verbose_name='data de publicacao')),
],
),
migrations.CreateModel(
name='Opcao',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('opcao_texto', models.CharField(max_length=200)),
('votos', models.IntegerField(default=0)),
('questao', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='votacao.questao')),
],
),
]
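# For reference, a models.py sketch that this initial migration corresponds to
# (field names and types are taken directly from the operations above; the rest
# is standard Django boilerplate):
#
# from django.db import models
#
# class Questao(models.Model):
#     questao_texto = models.CharField(max_length=200)
#     pub_data = models.DateTimeField('data de publicacao')
#
# class Opcao(models.Model):
#     questao = models.ForeignKey(Questao, on_delete=models.CASCADE)
#     opcao_texto = models.CharField(max_length=200)
#     votos = models.IntegerField(default=0)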
|
[
"nascimento1188@gmail.com"
] |
nascimento1188@gmail.com
|
c0dd503b1a9ab64668c1bd73cb9fac6abcc20aaf
|
9c20f53c155a487b2af0110a7388f7b1ae8d6ac0
|
/JQKA/JQKA/spiders/myselector.py
|
13a243c755006de1367c0d53a52fc93853ace3af
|
[] |
no_license
|
xfzhu2003/github
|
b9f2f2c37b571b7019a2faf02deb5f8d1d5fafc9
|
2f135849023a89d1514dec236d086e4783aad3df
|
refs/heads/master
| 2020-08-08T14:59:03.860049
| 2018-03-20T01:52:37
| 2018-03-20T01:52:37
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 10,810
|
py
|
#-*- coding:utf-8 -*-
import re
import urllib.parse
#from itertools import chain
import datetime
#import random
from user_agent import generate_user_agent
from pdfminer.pdfparser import PDFParser,PDFDocument
from pdfminer.pdfinterp import PDFResourceManager, PDFPageInterpreter
from pdfminer.converter import PDFPageAggregator
from pdfminer.layout import LTTextBoxHorizontal,LAParams
from pdfminer.pdfinterp import PDFTextExtractionNotAllowed
import requests
import os
from io import BytesIO
from win32com import client as wc
#from imp import reload
s = requests.Session()
class Selector(object):
def __init__(self):
pass
@staticmethod
def pdfparse(url=None):
try:
if url:
res = s.get(url,headers = {"user-agent":generate_user_agent()})
res.encoding = 'utf-8'
f = BytesIO()
f.write(res.content)
f.seek(0)
# path2 = os.getcwd()+"\\%s.txt"%name.split(".")[0]
# print(path1)
praser = PDFParser(f)
doc = PDFDocument()
praser.set_document(doc)
doc.set_parser(praser)
doc.initialize()
if not doc.is_extractable:
raise PDFTextExtractionNotAllowed
else:
                    # Create a PDF resource manager to manage shared resources
# print("a")
rsrcmgr = PDFResourceManager()
                    # Create a PDF device object
laparams = LAParams()
device = PDFPageAggregator(rsrcmgr, laparams=laparams)
                    # Create a PDF interpreter object
interpreter = PDFPageInterpreter(rsrcmgr, device)
text = ''
                    # Loop over the page list, handling one page at a time
                    for page in doc.get_pages():  # doc.get_pages() yields the page list
interpreter.process_page(page)
                        # Fetch the LTPage result for this page
layout = device.get_result()
#text = "".join(map(lambda x:x.get_text().strip(" ") if x.get_text() else "",layout))
#print(text)
                        # layout is an LTPage object holding the objects parsed from this
                        # page (LTTextBox, LTFigure, LTImage, LTTextBoxHorizontal, etc.);
                        # get each object's text via its get_text() method.
for x in layout:
results = x.get_text()
if results:
text = text+results.strip('\n')
f.close()
return text
except Exception as e:
print(e)
@staticmethod
def docparse(url):
name = url.split("/")[-1]
try:
path1 = os.getcwd()+"\\%s.doc"%name.split(".")[0]
path2 = os.getcwd()+"\\%s.txt"%name.split(".")[0]
# print(path1,path2)
doc = s.get(url,headers = {"user-agent":generate_user_agent()})
word = wc.Dispatch('Word.Application')
with open(path1,"wb") as f:
f.write(doc.content)
            docment = word.Documents.Open(path1)
            docment.SaveAs(path2, 4)  # 4 = wdFormatText (save as plain text)
            docment.Close()
            word.Quit()  # release the Word COM instance
try:
with open(path2) as f:
workdoc = f.read()
except:
workdoc = ""
os.remove(path1)
os.remove(path2)
return workdoc
except Exception as e:
print(e)
@classmethod
def replace_all(self,content):
content = self.replace_html_tag(content)
content = self.replace_invalid_html_char(content)
content = self.replace_invalid_char(content)
return content
@staticmethod
def changdt(content,dt):
if dt == "int":
v = int(content) if hasattr(content,'replace') and content.isdigit() else content if isinstance(content,int) else None
return v
elif dt == "float":
try:
v = round(float(content),4)
return v
except:
return None
if dt == 'str':
try:
if content:
return str(content)
except:
return None
if dt == "date":
if content:
if re.match("\d{4}-\d+-\d+",content):
result = content.split("-")
return "{0:0>4}-{1:0>2}-{2:0>2}".format(result[0],result[1],result[2])
else:
return content
else:
return content
@staticmethod
def select_content(content,config,response=None):
selector_type = config['t']
tag = config['v']
try:
if hasattr(content,'text'):
body = content.text
else:
body = content
except Exception as e:
print(e)
try:
if selector_type == 'meta':
return response.meta[tag]
elif selector_type == "json":
for i in tag.split("/"):
if isinstance(content,dict):
pass
else:
raise TypeError("typeError")
content = content[i] if i in content else ''
v = content
return v
elif selector_type == "xpath":
return content.xpath(tag)
elif selector_type == 'xpathList':
return content.xpath(tag).extract()
elif selector_type == 'xpath_split':
v = content.xpath(tag).extract()
if v:
return ",".join(v)
elif selector_type == "xpath_first":
v = content.xpath(tag).extract_first()
return v
elif selector_type == "xpath_join":
v = content.xpath(tag).extract()
if v:
v = "".join(v)
else:
v = None
return v
elif selector_type == 'xpathSet':
v = content.xpath(tag).extract()
v = set(v)
return v
elif selector_type == "css":
v = content.css[tag]
if v:
return v
elif selector_type == "re_first":
v = re.search(tag,body)
if hasattr(v,"group"):
v = v.group(0)
else:
return ''
elif selector_type == "re_findall":
v = re.findall(tag,body)
return v
elif 'splitwith' in selector_type:
if hasattr(selector_type,'replace'):
b = selector_type.replace('splitwith','')
else:
raise AttributeError('%s has not attribute replace'%selector_type)
if hasattr(content,'split'):
try:
return content.split(b)[tag]
except IndexError as e:
print(e)
else:
raise AttributeError('%s has not attribute split'%content)
elif selector_type == "url":
if hasattr(response,"url"):
return response.url
else:
raise AttributeError("url is Not Method")
elif selector_type =="date":
#set tag = "%Y-%m-%d %H:%M:%S"
return datetime.datetime.now().strftime(tag)
elif selector_type =='abs':
return tag
elif selector_type == 'url_re':
v = re.search(tag,response.url)
if v:
return v.group(1)
elif selector_type == 'url_split':
if hasattr(response,"url"):
return response.url.split('/')[tag]
else:
raise AttributeError("url is Not Method")
elif selector_type == 'static':
return content
except Exception as e:
print(e)
@staticmethod
def replace_html_tag(content):
if hasattr(content, 'replace'):
return re.subn('<[\s\S]*?>','',content)[0]
return content
@staticmethod
def replace_invalid_char(content):
if hasattr(content, 'replace'):
invalid_chars = {'\t','\r','\n','[',']',' ','--','\u3000','\xa0',"'"}
for char in invalid_chars:
content = content.replace(char,'')
return content
@staticmethod
def replace_invalid_html_char(content):
try:
if hasattr(content, 'replace'):
chars = {'nbsp': ' ','160': ' ',
'lt': '<', '60':'<',
'gt': '>', '62': '>',
'amp': '&', '38': '&',
'quot': '"', '34': '"',
}
re_char_entity = re.compile(r'&#?(?P<name>\w+);')
sz = re_char_entity.search(content)
while sz:
key = sz.group('name')
try:
content = re_char_entity.sub(chars[key], content, 1)
sz = re_char_entity.search(content)
except KeyError:
content = re_char_entity.sub('', content, 1)
sz = re_char_entity.search(content)
        except Exception as e:
            print(e)
            # fall through and return whatever has been cleaned so far
return content
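    # e.g. Selector.replace_invalid_html_char('AT&amp;T, 3 &lt; 5') -> 'AT&T, 3 < 5'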
@staticmethod
def urljoin(path, url):
urlp = urllib.parse.urlparse(url)
return urlp.scheme+'://'+urlp.netloc+'/'+path
@staticmethod
def urljoin2(path, url):
urlp = urllib.parse.urlparse(url)
return urlp.scheme+'://'+urlp.netloc+path
@classmethod
def headers(self):
return {'User-Agent':generate_user_agent()}
if __name__ == "__main__":
pass
a = Selector.pdfparse("http://www.szse.cn/UpFiles/cfwj/2017-09-20_002638676.pdf")
print(a)
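    # A minimal select_content usage sketch (illustrative; assumes a scrapy
    # Response object named `response` obtained from some crawl):
    # config = {'t': 'xpath_first', 'v': '//title/text()'}
    # title = Selector.select_content(response, config, response)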
# a = Selector()
# a = a.headers()
# print(a)
# print(type(a))
# print(a)
# a = Selector.replace_all('''<td style="text-align:center">男</td>
# <td style="text-align:center">南山区
#
# </td>
# <td style="text-align:center">
#
# <a href="/lawfirm/12e61b22fa6045deb55ca13d8ac5777c" target="_blank">广东君言律师事务所</a>
#''')
# print(a)
|
[
"luopx@hffss.com"
] |
luopx@hffss.com
|
9109cf92a3874f5187986e1ec9f78049378ebeb5
|
273ab7fc98ef8a299daf56d9ce7a0d917453be6a
|
/BlueStakesETL_Old.py
|
819054d2145cf6c06d81f022744008513a3d89eb
|
[] |
no_license
|
Zee-Bee/tester
|
c5e6aa385ae62c80b2b1f00d051ac99346cc22ba
|
5793f97e2c0f108ea61842117a966bb1de81f185
|
refs/heads/master
| 2020-04-07T00:18:44.949542
| 2018-11-16T17:57:20
| 2018-11-16T17:57:20
| 157,898,351
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 58,163
|
py
|
import arcpy, datetime, time
from arcpy import env
from arcpy import da
#sgid10 = r'C:\ZBECK\BlueStakes\stagingBS.gdb\SGID10_GEOGRAPHIC'
#sgid10 = r'Database Connections\dc_agrc@SGID10@gdb10.agrc.utah.gov.sde'
sgid10 = r'C:\ZBECK\BlueStakes\testDB.gdb'
sgid10_GEO = r'C:\ZBECK\BlueStakes\stagingBS.gdb\SGID10_GEOGRAPHIC'
stageDB = r'C:\ZBECK\BlueStakes\stagingBS.gdb'
schemaDB = r'C:\ZBECK\BlueStakes\schemaBS.gdb'
outLoc = r'C:\ZBECK\BlueStakes\outBlueStakes'
env.workspace = sgid10
arcpy.env.overwriteOutput = True
clpCnty = sgid10_GEO + '\\Counties'
#arcpy.CopyFeatures_management('SGID10.BOUNDARIES.Counties', clpCnty)
fipsNum = ['49001', '49003', '49005', '49007', '49009', '49011', '49013', '49015', '49017', '49019', '49021', \
'49023', '49025', '49027', '49029', '49031', '49033', '49035', '49037', '49039', '49041', '49043', '49045', \
'49047', '49049', '49051', '49053', '49055', '49057']
fipsDict = {'Beaver': '49001', 'BoxElder': '49003', 'Cache': '49005', 'Carbon': '49007', 'Daggett': '49009', \
'Davis': '49011', 'Duchesne': '49013', 'Emery': '49015', 'Garfield': '49017', 'Grand': '49019', \
'Iron': '49021', 'Juab': '49023', 'Kane': '49025', 'Millard': '49027', 'Morgan': '49029', \
'Piute': '49031', 'Rich': '49033', 'SaltLake': '49035', 'SanJuan': '49037', 'Sanpete': '49039', \
'Sevier': '49041', 'Summit': '49043', 'Tooele': '49045', 'Uintah': '49047', 'Utah': '49049', \
'Wasatch': '49051', 'Washington': '49053', 'Wayne': '49055', 'Weber': '49057'}
typeList = ('ALY', 'AVE', 'BLVD', 'CIR', 'CT', 'CV', 'DR', 'EST', 'ESTS', 'EXPY', 'FWY', 'HWY', 'HOLW', \
'JCT', 'LN', 'LOOP', 'PKWY', 'PL', 'PLZ', 'PT', 'RAMP', 'RNCH', 'RD', 'RTE', 'RUN', 'RW', 'SQ', \
'ST', 'TER', 'TRL', 'WAY', 'HTS', 'COR')
typeList2 = ('ALLEY', 'AVENUE', 'BOULEVARD', 'CIRCLE', 'COURT', 'COVE', 'DRIVE', 'ESTATE', 'ESTATES', 'EXPRESSWAY', \
'FREEWAY', 'HEIGHTS', 'HIGHWAY', 'HOLLOW', 'JUNCTION', 'LANE', 'LOOP', 'PARKWAY', 'PLACE', 'PLAZA', \
'POINT', 'RAMP', 'RANCH', 'ROAD', 'ROUTE', 'RUN', 'ROW', 'SQUARE', 'STREET', 'TERRACE', 'TRAIL', 'WAY', 'CORNER')
typeDict = {'ALLEY': 'ALY', 'AVENUE': 'AVE', 'BOULEVARD': 'BLVD', 'CIRCLE': 'CIR', 'COURT': 'CT', 'COVE': 'CV', \
'DRIVE': 'DR', 'ESTATE': 'EST', 'ESTATES': 'ESTS', 'EXPRESSWAY': 'EXPY', 'FREEWAY': 'FWY', 'HIGHWAY': 'HWY', \
'HOLLOW': 'HOLW', 'JUNCTION': 'JCT', 'LANE': 'LN', 'LOOP': 'LOOP', 'PARKWAY': 'PKWY', 'PLACE': 'PL', \
'PLAZA': 'PLZ', 'POINT': 'PT', 'RAMP': 'RAMP', 'RANCH': 'RNCH', 'ROAD': 'RD', 'ROUTE': 'RTE', 'RUN': 'RUN', \
'ROW': 'RW', 'SQUARE': 'SQ', 'STREET': 'ST', 'TERRACE': 'TER', 'TRAIL': 'TRL', 'WAY': 'WAY', 'HEIGHTS': 'HTS', \
'CORNER': 'COR'}
dirList = ('N', 'S', 'E', 'W')
dirList2 = ('NORTH', 'SOUTH', 'EAST', 'WEST')
dirDict = {'NORTH': 'N', 'SOUTH': 'S', 'EAST': 'E', 'WEST': 'W'}
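# e.g. typeDict['STREET'] -> 'ST' and dirDict['NORTH'] -> 'N'; the lists and
# dicts above normalize spelled-out street types/directions to Blue Stakes
# abbreviations throughout the translators below.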
if not arcpy.Exists(stageDB):
arcpy.CreateFileGDB_management('C:\ZBECK\BlueStakes', 'stagingBS.gdb')
#-------------------------------------------------------------------------------------------------------------------------------------------
def parcels():
print 'Starting Parcels ' + str(datetime.datetime.now())
#-Check for parcels in staging DB, add it if missing, delete features if they exist
## for fips in fipsNum:
## parcelFC = stageDB + '\\' + 'par' + fips
##
## if not arcpy.Exists(parcelFC):
## arcpy.CopyFeatures_management(schemaDB + '\\parSSCCC_schema', stageDB + '\\par' + fips)
## print 'Copied par' + fips + ' to staging GDB'
## else:
## arcpy.DeleteFeatures_management(parcelFC)
## print 'Deleted existing features in ' + parcelFC
fc = r'C:\ZBECK\BlueStakes\testDB.gdb\Parcels_Garfield'
    fcBS = r'C:\ZBECK\BlueStakes\stagingBS.gdb\par49017'  # Garfield County is FIPS 49017 per fipsDict
srcFlds = ['PARCEL_ID', 'PARCEL_ADD', 'SHAPE@']
tarFlds = ['ADDR_NUMB', 'ADDR_FULL', 'FEDIRP', 'FENAME', 'FETYPE', 'FEDIRS', 'OWNER', 'SHAPE@']
## for fc in arcpy.ListFeatureClasses():
## #if fc[:23][-7:] == 'Parcels': #get SDE parcels
## if fc.split('_')[0] == 'Parcels':
##
## cnty = fc.split('_')[1]
## tarRows = arcpy.da.InsertCursor(stageDB + '\\par' + fipsDict[cnty], tarFlds)
tarRows = arcpy.da.InsertCursor(fcBS, tarFlds)
srcRows = arcpy.da.SearchCursor(fc, srcFlds)
for srcRow in srcRows:
srcPar_ParID = srcRow[0]
addFull = srcRow[1]
shp = srcRow[2]
if addFull != None and addFull.strip() != '':
#--Address Number-----------------------------------------
if addFull.split(' ')[0].isdigit():
addNum = addFull.split(' ')[0]
else:
addNum = ''
            #--Prefix Direction---------------------------------------
            parts = addFull.split(' ')
            preDirs = parts[1] if len(parts) > 1 else ''
            if preDirs.upper() in dirList:
                preDir = preDirs.upper()
            elif preDirs.upper() in dirList2:
                preDir = dirDict[preDirs.upper()]
            else:
                preDir = ''  # non-directional second word: no prefix direction
            #--Suffix Direction---------------------------------------
            sufDirs = addFull.split(' ')[-1]
            if sufDirs.upper() in dirList:
                sufDir = sufDirs.upper()
            elif sufDirs.upper() in dirList2:
                sufDir = dirDict[sufDirs.upper()]
            else:
                sufDir = ''
            #--Street Type--------------------------------------------
            sTypes = addFull.split(' ')[-1]
            if sTypes.upper() in typeList:
                sType = sTypes.upper()
            elif sTypes.upper() in typeList2:
                sType = typeDict[sTypes.upper()]
            else:
                sType = ''
            #---Street Name-------------------------------------------
            # Word-by-word state machine: skip the house number and any prefix
            # direction, then collect words until a street type or direction
            # word ends the street name.
            houseNumber = 'houseNumber'
            preDirection = 'preDirection'
            sName = 'sName'
            streetNameBegun = 'streetNameBegun'
            streetNameEnded = 'streetNameEnded'
            def checkWord(word, state):
                global streetName
                if state == houseNumber:
                    return preDirection
                elif state == preDirection:
                    if word in dirList or word in dirList2:
                        return sName
                    else:
                        streetName = streetName + word
                        return streetNameBegun
                elif state == sName:
                    streetName = word
                    return streetNameBegun
                elif state == streetNameBegun:
                    if word in typeList or word in dirList or word in typeList2 or word in dirList2:
                        return streetNameEnded
                    else:
                        streetName = streetName + ' ' + word
                        return streetNameBegun
                elif state == streetNameEnded:
                    return streetNameEnded
            def findStName(addFull):
                global streetName
                streetName = ''
                state = houseNumber
                for word in addFull.strip().split(' '):
                    state = checkWord(word, state)
                return streetName
            # e.g. findStName('123 S Main St') -> 'Main'
            stName = findStName(addFull)
        else:
            addNum = ''
            preDir = ''
            sType = ''
            sufDir = ''
            srcPar_ParID = ''
            stName = ''  # findStName is only defined when an address string exists
        # assumption: FEDIRS takes the computed suffix direction (sufDir)
        tarRows.insertRow((addNum, addFull, preDir, stName, sType, sufDir, srcPar_ParID, shp))
del tarRows
del srcRows
# print cnty + ' par' + fipsDict[cnty] + ' Done'
print 'Done Parcels ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def addressPoints():
print 'Starting Address Points ' + str(datetime.datetime.now())
addPts = sgid10_GEO + '\\AddressPoints'
addPtsBS = stageDB + '\\adr_StWide'
clpCnty = 'SGID10.BOUNDARIES.Counties'
#---Check for Address Points in SGID10_GEOGRAPHIC staging area
if arcpy.Exists(addPts):
arcpy.Delete_management(addPts)
arcpy.CopyFeatures_management(r'Database Connections\DC_Location@SGID10@gdb10.agrc.utah.gov.sde\SGID10.LOCATION.AddressPoints', addPts)
else:
arcpy.CopyFeatures_management(r'Database Connections\DC_Location@SGID10@gdb10.agrc.utah.gov.sde\SGID10.LOCATION.AddressPoints', addPts)
#---Check for statewide Address Points in BlueStakes schema
if not arcpy.Exists(addPtsBS):
arcpy.CopyFeatures_management(schemaDB + '\\adrSSCCC_schema', addPtsBS)
else:
arcpy.DeleteFeatures_management(addPtsBS)
srcFlds = ['ADDLABEL', 'ADDNBR', 'PRE_DIR', 'STREETNAME', 'STREETTYPE', 'SUF_DIR', 'SHAPE@']
tarFlds = ['ADDR_NUMB', 'ADDR_FULL', 'FEDIRP', 'FENAME', 'FETYPE', 'FEDIRS', 'OWNER', 'SHAPE@']
cntyFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(addPts, srcFlds)
tarRows = arcpy.da.InsertCursor(addPtsBS, tarFlds)
for srcRow in srcRows:
if srcRow[1] != None:
ADDR_NUMB = srcRow[1]
else:
ADDR_NUMB = ''
if srcRow[0] != None:
ADDR_FULL = srcRow[0]
else:
ADDR_FULL = ''
if srcRow[2] != None:
FEDIRP = srcRow[2]
else:
FEDIRP = ''
if srcRow[3] != None:
FENAME = srcRow[3]
else:
FENAME = ''
if srcRow[4] != None:
FETYPE = srcRow[4]
else:
FETYPE = ''
if srcRow[5] != None:
FEDIRS = srcRow[5]
else:
FEDIRS = ''
OWNER = ''
shp = srcRow[6]
tarRows.insertRow((ADDR_NUMB, ADDR_FULL, FEDIRP, FENAME, FETYPE, FEDIRS, OWNER, shp))
del tarRows
#---Copy State wide address points to Bluestakes root---------------
arcpy.CopyFeatures_management(addPtsBS, outLoc + '\\adr_StWide.shp')
#---Clip by county-------------------------------------------
clpFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
clpRows = arcpy.da.SearchCursor(clpCnty, clpFlds)
for row in clpRows:
clpFeat = row[2]
#----Delete shapefiles with no features----
clp = arcpy.Clip_analysis(addPtsBS, clpFeat, outLoc + '\\TGR' + row[1] + '\\adr' + row[1] + '.shp')
clpCount = int(arcpy.GetCount_management(clp).getOutput(0))
if clpCount < 1:
arcpy.Delete_management(clp)
print 'Done Translating Address Points ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def roads():
print 'Starting Roads ' + str(datetime.datetime.now())
## for fips in fipsNum:
## streetFC = stageDB + '\\TGR' + fips + 'lkA'
##
## if not arcpy.Exists(streetFC):
## arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCCLKA_schema', stageDB + '\\TGR' + fips + 'lkA')
## print 'Copied TGR' + fips + 'lkA to staging GDB'
## else:
## arcpy.DeleteFeatures_management(streetFC)
## print 'Deleted existing features in ' + streetFC
srcFlds = ['CARTOCODE', 'PREDIR', 'FULLNAME', 'STREETTYPE', 'SUFDIR', 'L_F_ADD', 'L_T_ADD', 'R_F_ADD', 'R_T_ADD', 'ALIAS1', \
'ALIAS2', 'ACSALIAS', 'ACSNAME', 'COFIPS', 'HWYNAME', 'MODIFYDATE', 'ADDR_SYS', 'STREETNAME', 'SHAPE@']
tarFlds = ['FEDIRP', 'FENAME', 'FETYPE', 'FEDIRS', 'CFCC', 'FRADDL', 'TOADDL', 'FRADDR', 'TOADDR', 'CFCC1', 'CFCC2', 'FULLNAME', \
'HASALT', 'ISALT', 'S_FIPS', 'AGRC_MDATE', 'ADDRESS_SY', 'SHAPE@']
rdFC = 'RoadsALL'
#rdFC = 'SLCRoads'
#tarRds = stageDB + '\\TGR_StWide_lkA'
tarRds = stageDB + '\\TGR49035lkA'
#----Remove empty spaces from roads----------------------------------------------------------------------------
with arcpy.da.UpdateCursor(rdFC, srcFlds) as rows:
for row in rows:
for fld in srcFlds:
fldX = srcFlds.index(fld)
if row[fldX] == ' ':
row[fldX] = None
rows.updateRow(row)
del rows
srcRows = arcpy.da.SearchCursor(rdFC, srcFlds)
tarRows = arcpy.da.InsertCursor(tarRds, tarFlds)
for srcRow in srcRows:
#----Prefix Direction----
if srcRow[1] == None:
FEDIRP = None
else:
FEDIRP = srcRow[1]
#----Root and Full Street Name----
if srcRow[2] != None:
if FEDIRP != None:
FULLNAME = (FEDIRP + ' ' + srcRow[2]).title()
else:
FULLNAME = srcRow[2].title()
else:
FULLNAME = ''
if srcRow[17] != None:
FENAME = srcRow[17].replace('HIGHWAY', 'HWY').title() + ' All'
else:
FENAME = ''
#----Street Type----
if srcRow[3] != None:
FETYPE = srcRow[3].title()
else:
FETYPE = ''
#----Sufix Direction----
if srcRow[4] != None:
FEDIRS = srcRow[4]
else:
FEDIRS = ''
        #----CFCC----
        # Map CARTOCODE to a Census CFCC; assumption: codes outside 1-12 (and
        # nulls) default to 'A41' (local road) so CFCC never carries over from
        # the previous row.
        cfccMap = {'1': 'A15', '2': 'A25', '3': 'A21', '4': 'A35', '5': 'A31', '6': 'A41',
                   '7': 'A20', '8': 'A31', '9': 'A41', '10': 'A41', '11': 'A41', '12': 'A41'}
        CFCC = cfccMap.get(srcRow[0], 'A41')
        CFCC1 = 'A'
        CFCC2 = CFCC[:2]
#----From Address Left----
if srcRow[5] != None:
FRADDL = str(srcRow[5]).split('.')[0]
else:
FRADDL = 0
#----To Address Left----
if srcRow[6] != None:
TOADDL = str(srcRow[6]).split('.')[0]
else:
TOADDL = 0
#----From Address Right----
if srcRow[7] != None:
FRADDR = str(srcRow[7]).split('.')[0]
else:
FRADDR = 0
#----To Address Right----
if srcRow[8] != None:
TOADDR = str(srcRow[8]).split('.')[0]
else:
TOADDR = 0
#----FIPS----
if srcRow[13] != None:
S_FIPS = srcRow[13]
else:
S_FIPS = ''
#----AGRC M Date----
if srcRow[15] != None:
AGRC_MDATE = srcRow[15]
else:
AGRC_MDATE = '1/1/1000'
#----Address System----
if srcRow[16] != None:
ADDRESS_SY = srcRow[16]
else:
ADDRESS_SY = ''
shp = srcRow[18]
#----Has Alt Name----
if srcRow[9] != None:
HASALT = 1
ISALT = 0
elif srcRow[12] != None:
HASALT = 1
ISALT = 0
else:
HASALT = 0
ISALT = 0
tarRows.insertRow((FEDIRP, FENAME, FETYPE, FEDIRS, CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, HASALT, \
ISALT, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
#----Add Duplicate Interstates----
if srcRow[0] == '1':
usFENAME = srcRow[14]
if FEDIRP != None:
FULLNAME = FEDIRP + ' ' + usFENAME
else:
FULLNAME = usFENAME
tarRows.insertRow((FEDIRP, usFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, \
0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
#----Add Duplicate US Highways----
        if srcRow[0] == '2' or srcRow[0] == '3':
            if srcRow[14] != None:  # guard: a null HWYNAME cannot be concatenated
                usFENAME = srcRow[14] + ' US Z'
                if FEDIRP != None:
                    FULLNAME = FEDIRP + ' ' + usFENAME
                else:
                    FULLNAME = usFENAME
                tarRows.insertRow((FEDIRP, usFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, \
                                   0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
                if srcRow[14].split()[0] == 'US':
                    hwyFENAME = 'Hwy ' + srcRow[14].split()[1] + ' US Z2'
                    if FEDIRP != None:
                        FULLNAME = FEDIRP + ' ' + hwyFENAME
                    else:
                        FULLNAME = hwyFENAME
                    tarRows.insertRow((FEDIRP, hwyFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, \
                                       0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
#----Add Duplicate State Highways----
if srcRow[0] == '4' or srcRow[0] == '5' or srcRow[0] == '6':
if srcRow[14] != None:
srFENAME = srcRow[14] + ' SR Z'
if FEDIRP != None:
FULLNAME = FEDIRP + ' ' + srFENAME
else:
FULLNAME = srFENAME
tarRows.insertRow((FEDIRP, srFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, \
0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
if srcRow[14].split()[0] == 'SR':
if srcRow[14].split()[1] != '201':
hwyFENAME = 'Hwy ' + srcRow[14].split()[1] + ' SR Z2'
if FEDIRP != None:
FULLNAME = FEDIRP + ' ' + hwyFENAME
else:
FULLNAME = hwyFENAME
tarRows.insertRow((FEDIRP, hwyFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, \
0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
#----Add Duplicate Alias and ACSAlias----
        if srcRow[9] != None:
            if srcRow[9] != '':
                if srcRow[9][:7] != 'HIGHWAY':
                    alsFENAME = srcRow[9] + ' ALS Z'
                    if FEDIRP != None:
                        FULLNAME = FEDIRP + ' ' + alsFENAME + ' ' + FEDIRS
                    else:
                        FULLNAME = alsFENAME + ' ' + FEDIRS
                    tarRows.insertRow((FEDIRP, alsFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, \
                                       FULLNAME, 0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
if srcRow[12] != None:
acsFENAME = srcRow[12] + ' ACS Z'
if FEDIRP != None:
FULLNAME = FEDIRP + ' ' + acsFENAME + ' ' + FEDIRS
else:
FULLNAME = acsFENAME + ' ' + FEDIRS
tarRows.insertRow((FEDIRP, acsFENAME, '', '', CFCC, FRADDL, TOADDL, FRADDR, TOADDR, CFCC1, CFCC2, FULLNAME, \
0, 1, S_FIPS, AGRC_MDATE, ADDRESS_SY, shp))
del tarRows
del srcRows
#---Copy Roads to Blues Stakes root level-----------------
# arcpy.CopyFeatures_management(tarRds, outLoc + '\\TGR_StWide_lka.shp')
#---Clip Blue Stakes Roads-----------------------------------------------------------
# clip(tarRds, 'lkA.shp');
print 'Done Translating Roads ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def municipalities():
print 'Starting Municipalities ' + str(datetime.datetime.now())
muni = sgid10_GEO + '\\Municipalities'
muniBS = stageDB + '\\TGR_StWide_plc00'
clpCnty = 'SGID10.BOUNDARIES.Counties'
#---Check for Municipalites in SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.BOUNDARIES.Municipalities', muni)
#---Check for statewide municipalities BlueStakes schema
if not arcpy.Exists(muniBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCCplc00_schema', muniBS)
else:
arcpy.DeleteFeatures_management(muniBS)
srcFlds = ['NAME', 'SHAPE@']
tarFlds = ['NAME', 'SHAPE@']
cntyFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(muni, srcFlds)
tarRows = arcpy.da.InsertCursor(muniBS, tarFlds)
for srcRow in srcRows:
NAME = srcRow[0]
shp = srcRow[1]
tarRows.insertRow((NAME, shp))
del tarRows
#---Copy Municipalities to Blues Stakes root level
arcpy.CopyFeatures_management(muniBS, outLoc + '\\TGR_StWide_plc00.shp')
#---Clip Blue Stakes Municipalities-----------------------------------------------------------
    clip(muniBS, 'plc00.shp')
print 'Done Translating Municipalities ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def mileposts():
print 'Starting Mileposts ' + str(datetime.datetime.now())
arcpy.env.overwriteOutput = True
milePosts = sgid10_GEO + '\\UDOTMilePosts'
exits = sgid10_GEO + '\\Roads_FreewayExits'
milePostsBS = stageDB + '\\Hwy_MPM'
#---Copy new Exits and Mileposts to Staging DB
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.Roads_FreewayExits', exits)
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.UDOTMileposts', milePosts)
print 'Copied SGID10.TRANSPORTATION.Roads_FreewayExits to staging DB'
print 'Copied SGID10.TRANSPORTATION.UDOTMileposts to staging DB'
#---Check for Mileposts BlueStakes schema
if not arcpy.Exists(milePostsBS):
arcpy.CopyFeatures_management(schemaDB + '\\Hwy_MPM', milePostsBS)
else:
arcpy.DeleteFeatures_management(milePostsBS)
srcMP_Flds = ['RT_NAME', 'MILEPOST', 'CARTO', 'SHAPE@']
srcEX_Flds = ['EXITNAME', 'SHAPE@']
tarFlds = ['Type', 'Label_Name', 'SHAPE@']
srcMP_Rows = arcpy.da.SearchCursor(milePosts, srcMP_Flds)
srcEX_Rows = arcpy.da.SearchCursor(exits, srcEX_Flds)
tarRows = arcpy.da.InsertCursor(milePostsBS, tarFlds)
#----Add Milepost Records--------------------------------------------------------
for srcMP_Row in srcMP_Rows:
Type = 'mpm'
hwyDig1 = srcMP_Row[0][3:4]
hwyDig2 = srcMP_Row[0][2:4]
hwyDig3 = srcMP_Row[0][1:4]
if srcMP_Row[2] == '1':
Label_Name = 'I-{0} milepost {1}'.format(hwyDig2, srcMP_Row[1])
else:
if srcMP_Row[0][2] == '0':
Label_Name = 'Hwy {0} milepost {1}'.format(hwyDig1, srcMP_Row[1])
elif srcMP_Row[0][1] == '0':
Label_Name = 'Hwy {0} milepost {1}'.format(hwyDig2, srcMP_Row[1])
else:
Label_Name = 'Hwy {0} milepost {1}'.format(hwyDig3, srcMP_Row[1])
shp = srcMP_Row[3]
tarRows.insertRow((Type, Label_Name, shp))
#----Add Exit Records-------------------------------------------------------------
for srcEX_Row in srcEX_Rows:
Type = 'epm'
if srcEX_Row[0].split()[0] == 'SR':
Label_Name = 'Hwy ' + ' '.join(srcEX_Row[0].split()[1:])
elif srcEX_Row[0].split()[0] == 'US':
Label_Name = 'Hwy ' + ' '.join(srcEX_Row[0].split()[1:])
else:
Label_Name = srcEX_Row[0]
shp = srcEX_Row[1]
tarRows.insertRow((Type, Label_Name, shp))
del tarRows
#----Copy Mileposts to shapefile--------------------------------------------------
if arcpy.Exists(outLoc + '\\Hwy_MPM.shp'):
arcpy.Delete_management(outLoc + '\\Hwy_MPM.shp')
arcpy.FeatureClassToShapefile_conversion(milePostsBS, outLoc)
else:
arcpy.FeatureClassToShapefile_conversion(milePostsBS, outLoc)
print 'Done Translating Mileposts ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------
def landownershipLarge():
print 'Starting Large Landownership ' + str(datetime.datetime.now())
landown = sgid10_GEO + '\\LandOwnership'
parks = sgid10_GEO + '\\Parks'
cemeteries = sgid10_GEO + '\\Cemeteries'
golf = sgid10_GEO + '\\GolfCourses'
landownBS = stageDB + '\\TGR_StWide_lpy'
clpCnty = 'SGID10.BOUNDARIES.Counties'
#---Add new Landownership, Parks, and Cemeteries to SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.CADASTRE.LandOwnership', landown)
arcpy.CopyFeatures_management('SGID10.RECREATION.ParksLocal', parks)
arcpy.CopyFeatures_management('SGID10.SOCIETY.Cemeteries_Poly', cemeteries)
arcpy.CopyFeatures_management('SGID10.RECREATION.GolfCourses', golf)
#---Check for statewide Large Landownership BlueStakes schema
if not arcpy.Exists(landownBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCClpy_schema', landownBS)
else:
arcpy.DeleteFeatures_management(landownBS)
srcLnd_Flds = ['OWNER', 'DESIG', 'LABEL_STATE', 'LABEL_FEDERAL', 'STATE_LGD', 'SHAPE@']
srcPrk_Flds = ['NAME', 'SHAPE@']
srcCem_Flds = ['Name', 'SHAPE@']
srcGlf_Flds = ['NAME', 'SHAPE@']
tarFlds = ['CFCC', 'LANDNAME', 'SHAPE@']
cntyFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcLnd_Rows = arcpy.da.SearchCursor(landown, srcLnd_Flds)
srcPrk_Rows = arcpy.da.SearchCursor(parks, srcPrk_Flds)
srcCem_Rows = arcpy.da.SearchCursor(cemeteries, srcCem_Flds)
srcGlf_Rows = arcpy.da.SearchCursor(golf, srcGlf_Flds)
tarRows = arcpy.da.InsertCursor(landownBS, tarFlds)
#----Add LandOwn features-------------------------------------
for srcLnd_Row in srcLnd_Rows:
if srcLnd_Row[0] == 'Tribal':
CFCC = 'D40'
if srcLnd_Row[3] != None:
LANDNAME = srcLnd_Row[3]
else:
LANDNAME = srcLnd_Row[4]
shp = srcLnd_Row[5]
tarRows.insertRow((CFCC, LANDNAME, shp))
if srcLnd_Row[1] == 'Military':
CFCC = 'D10'
if srcLnd_Row[3] != None:
LANDNAME = srcLnd_Row[3]
else:
LANDNAME = srcLnd_Row[4]
shp = srcLnd_Row[5]
tarRows.insertRow((CFCC, LANDNAME, shp))
        if srcLnd_Row[1] == 'National Historic Site' or srcLnd_Row[1] == 'National Monument' \
           or srcLnd_Row[1] == 'National Park' or srcLnd_Row[1] == 'National Recreation Area':
CFCC = 'D83'
if srcLnd_Row[3] != None:
LANDNAME = srcLnd_Row[3]
else:
LANDNAME = srcLnd_Row[4]
shp = srcLnd_Row[5]
tarRows.insertRow((CFCC, LANDNAME, shp))
if srcLnd_Row[1] == 'National Forest':
CFCC = 'D84'
if srcLnd_Row[3] != None:
LANDNAME = srcLnd_Row[3]
else:
LANDNAME = srcLnd_Row[4]
shp = srcLnd_Row[5]
tarRows.insertRow((CFCC, LANDNAME, shp))
if srcLnd_Row[1] == 'Primitive Area' or srcLnd_Row[1] == 'Wilderness' or srcLnd_Row[1] == 'Wildlife Reserve/Management Area' \
or srcLnd_Row[1] == 'National Wildlife Refuge':
CFCC = 'D89'
if srcLnd_Row[3] != None:
LANDNAME = srcLnd_Row[3]
else:
LANDNAME = srcLnd_Row[4]
shp = srcLnd_Row[5]
tarRows.insertRow((CFCC, LANDNAME, shp))
if srcLnd_Row[1] == 'Parks and Recreation':
CFCC = 'D85'
if srcLnd_Row[3] != None:
LANDNAME = srcLnd_Row[3]
else:
LANDNAME = srcLnd_Row[4]
shp = srcLnd_Row[5]
tarRows.insertRow((CFCC, LANDNAME, shp))
#----Add Parks--------------------------------------------
for srcPrk_Row in srcPrk_Rows:
CFCC = 'D85'
if srcPrk_Row[0] != None:
LANDNAME = srcPrk_Row[0]
else:
LANDNAME = ''
shp = srcPrk_Row[1]
tarRows.insertRow((CFCC, LANDNAME, shp))
#----Add Cemeteries--------------------------------------------
for srcCem_Row in srcCem_Rows:
CFCC = 'D82'
if srcCem_Row[0] != None:
LANDNAME = srcCem_Row[0]
else:
LANDNAME = ''
shp = srcCem_Row[1]
tarRows.insertRow((CFCC, LANDNAME, shp))
    #----Add Golf Courses (assumption: written like parks with CFCC 'D85';
    #    the golf cursor is opened above but its rows were never inserted)----
    for srcGlf_Row in srcGlf_Rows:
        CFCC = 'D85'
        if srcGlf_Row[0] != None:
            LANDNAME = srcGlf_Row[0]
        else:
            LANDNAME = ''
        shp = srcGlf_Row[1]
        tarRows.insertRow((CFCC, LANDNAME, shp))
    del tarRows
arcpy.CopyFeatures_management(landownBS, outLoc + '\\TGR_StWide_lpy.shp')
#---Clip Blue Stakes Misc Transportation-----------------------------------------------------------
    clip(landownBS, 'lpy.shp')
print 'Done Translating Large Landownership ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------
def waterPoly():
print 'Starting Lakes ' + str(datetime.datetime.now())
lakes = sgid10_GEO + '\\LakesNHDHighRes'
lakesBS = stageDB + '\\TGR_StWide_wat'
#---Copy lakesNHD to SGID10_GEOGRAPHIC staging area
# arcpy.CopyFeatures_management('SGID10.WATER.LakesNHDHighRes', lakes)
#---Check for statewide lakes BlueStakes schema
if not arcpy.Exists(lakesBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCCWAT_schema', lakesBS)
else:
arcpy.DeleteFeatures_management(lakesBS)
srcFlds = ['FCode', 'GNIS_Name', 'InUtah', 'SHAPE@']
tarFlds = ['CFCC', 'LANDNAME', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(lakes, srcFlds)
tarRows = arcpy.da.InsertCursor(lakesBS, tarFlds)
for srcRow in srcRows:
        if srcRow[2] == 1:
            CFCC = None  # only rows with a recognized FCode get inserted
if srcRow[0] == 36100:
CFCC = 'H32'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Playa'
if srcRow[0] == 39001:
CFCC = 'H32'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Intermittent Salt Lake/Pond'
if srcRow[0] == 39004 or srcRow[0] == 39005 or srcRow[0] == 39006 or srcRow[0] == 39009 or srcRow[0] == 39010:
CFCC = 'H30'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Lake/Pond'
if srcRow[0] == 39012 or srcRow[0] == 43600 or srcRow[0] == 43601 or srcRow[0] == 43607:
CFCC = 'H40'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Reservoir'
if srcRow[0] == 43612:
CFCC = 'H40'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Sewage Treatment Pond'
if srcRow[0] == 43613:
CFCC = 'H40'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Covered Reservoir'
if srcRow[0] == 43616 or srcRow[0] == 43619 or srcRow[0] == 43623 or srcRow[0] == 43624 or srcRow[0] == 43625:
CFCC = 'H40'
if srcRow[1] != None:
LANDNAME = srcRow[1]
else:
LANDNAME = 'Reservoir'
            shp = srcRow[3]
            if CFCC:
                tarRows.insertRow((CFCC, LANDNAME, shp))
    del tarRows
    del srcRows
    #---Copy Lakes to Blue Stakes root level---------------
arcpy.CopyFeatures_management(lakesBS, outLoc + '\\TGR_StWide_WAT.shp')
#---Clip Blue Stakes Misc Transportation-----------------------------------------------------------
    clip(lakesBS, 'WAT.shp')
print 'Done Translating Lakes ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def waterLines():
print 'Starting Rivers ' + str(datetime.datetime.now())
rivers = sgid10_GEO + '\\StreamsNHD'
riversBS = stageDB + '\\TGR_StWide_lkH'
arcpy.CopyFeatures_management('SGID10.WATER.StreamsNHDHighRes', rivers)
#---Check for Rivers BlueStakes schema
if not arcpy.Exists(riversBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCClkH_schema', riversBS)
else:
arcpy.DeleteFeatures_management(riversBS)
srcFlds = ['GNIS_Name', 'FCode', 'InUtah', 'SHAPE@']
tarFlds = ['FENAME', 'CFCC2', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(rivers, srcFlds)
tarRows = arcpy.da.InsertCursor(riversBS, tarFlds)
for srcRow in srcRows:
if srcRow[2] == 1:
if srcRow[1] == 46003:
CFCC2 = 'H2'
if srcRow[0] != None:
FENAME = srcRow[0]
else:
FENAME = 'unknown'
            else:  # every other FCode is treated as a stream (CFCC2 'H1')
CFCC2 = 'H1'
if srcRow[0] != None:
FENAME = srcRow[0]
else:
FENAME = 'unknown'
shp = srcRow[3]
tarRows.insertRow((FENAME, CFCC2, shp))
del tarRows
#---Copy Rivers to Blue Stakes root level---------------
arcpy.CopyFeatures_management(riversBS, outLoc + '\\TGR_StWide_lkH.shp')
#---Clip Blue Stakes Misc Transportation-----------------------------------------------------------
    clip(riversBS, 'lkH.shp')
print 'Done Translating Rivers ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------
def rail():
print 'Starting Railroads ' + str(datetime.datetime.now())
rail = sgid10_GEO + '\\Railroads'
railLt = sgid10_GEO + '\\LightRail_UTA'
railLt_new = sgid10_GEO + '\\LightRailNewRoutes_UTA'
railCommut = sgid10_GEO + '\\CommuterRailRoute_UTA'
railCommut_new = sgid10_GEO + '\\CommuterRailNewRoutes_UTA'
railBS = stageDB + '\\TGR_StWide_lkB'
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.Railroads', rail)
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.LightRail_UTA', railLt)
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.LightRailNewRoutes_UTA', railLt_new)
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.CommuterRailRoute_UTA', railCommut)
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.CommuterRailNewRoutes_UTA', railCommut_new)
#---Check for statewide railroad BlueStakes schema
if not arcpy.Exists(railBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCClkB_schema', railBS)
else:
arcpy.DeleteFeatures_management(railBS)
srcRail_Flds = ['RAILROAD', 'SHAPE@']
srcRailLt_Flds = ['SHAPE@']
srcRailLtNew_Flds = ['SHAPE@']
srcRailCommut_Flds = ['SHAPE@']
srcRailCommutNew_Flds = ['SHAPE@']
tarFlds = ['FENAME', 'CFCC2', 'SHAPE@']
cntyFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcRail_Rows = arcpy.da.SearchCursor(rail, srcRail_Flds)
srcRailLt_Rows = arcpy.da.SearchCursor(railLt, srcRailLt_Flds)
srcRailLtNew_Rows = arcpy.da.SearchCursor(railLt_new, srcRailLtNew_Flds)
srcRailCommut_Rows = arcpy.da.SearchCursor(railCommut, srcRailCommut_Flds)
srcRailCommutNew_Rows = arcpy.da.SearchCursor(railCommut_new, srcRailCommutNew_Flds)
tarRows = arcpy.da.InsertCursor(railBS, tarFlds)
#---Add Railroads---------------------------------
for srcRail_Row in srcRail_Rows:
if srcRail_Row[0] != 'UTA' and srcRail_Row[0] != 'UT Transit Auth':
FENAME = srcRail_Row[0]
CFCC2 = 'B1'
shp = srcRail_Row[1]
tarRows.insertRow((FENAME, CFCC2, shp))
#----Add Light Rail------------------------------------
for srcRailLt_Row in srcRailLt_Rows:
FENAME = 'UTA Trax light rail'
CFCC2 = 'B1'
shp = srcRailLt_Row[0]
tarRows.insertRow((FENAME, CFCC2, shp))
#----Add Light New Rail------------------------------------
for srcRailLtNew_Row in srcRailLtNew_Rows:
FENAME = 'UTA Trax light rail'
CFCC2 = 'B1'
shp = srcRailLtNew_Row[0]
tarRows.insertRow((FENAME, CFCC2, shp))
#----Add Commuter Rail------------------------------------
for srcRailCommut_Row in srcRailCommut_Rows:
FENAME = 'UTA Frontrunner railroad'
CFCC2 = 'B1'
shp = srcRailCommut_Row[0]
tarRows.insertRow((FENAME, CFCC2, shp))
#----Add Commuter New Rail------------------------------------
for srcRailCommutNew_Row in srcRailCommutNew_Rows:
FENAME = 'UTA Frontrunner railroad'
CFCC2 = 'B1'
shp = srcRailCommutNew_Row[0]
tarRows.insertRow((FENAME, CFCC2, shp))
del tarRows
#---Copy Railroads to Blue Stakes root level----------------------
arcpy.CopyFeatures_management(railBS, outLoc + '\\TGR_StWide_lkB.shp')
#---Clip Blue Stakes Airstrips-----------------------------------------------------------
    clip(railBS, 'lkB.shp')
print 'Done Translating Railroads ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def airstrips():
print 'Starting Airstrips ' + str(datetime.datetime.now())
airstrips = sgid10_GEO + '\\Airports'
airstripsBS = stageDB + '\\TGR_StWide_lkD'
clpCnty = 'SGID10.BOUNDARIES.Counties'
arcpy.CopyFeatures_management('SGID10.TRANSPORTATION.Airports', airstrips)
#---Check for statewide airports BlueStakes schema
if not arcpy.Exists(airstripsBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCClkD_schema', airstripsBS)
else:
arcpy.DeleteFeatures_management(airstripsBS)
srcFlds = ['FAC_TYPE', 'FULLNAME', 'SHAPE@']
tarFlds = ['FENAME', 'CFCC2', 'SHAPE@']
cntyFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(airstrips, srcFlds)
tarRows = arcpy.da.InsertCursor(airstripsBS, tarFlds)
    for srcRow in srcRows:
        if srcRow[0] == 'AIRPORT':
            # keyword checks use the facility name (srcRow[1]); FAC_TYPE
            # (srcRow[0]) is always 'AIRPORT' inside this branch
            if srcRow[1].find('AIRFIELD') != -1:
                FENAME = srcRow[1].replace('MUNI', 'MUNICIPAL')
            elif srcRow[1].find('BASE') != -1:
                FENAME = srcRow[1]
            else:
                FENAME = srcRow[1].replace('MUNI', 'MUNICIPAL') + ' ' + srcRow[0]
        else:
            FENAME = srcRow[1]
CFCC2 = 'D5'
shp = srcRow[2]
tarRows.insertRow((FENAME, CFCC2, shp))
del tarRows
#---Copy Airstrips to Blue Stakes root level-------------------------
    arcpy.CopyFeatures_management(airstripsBS, outLoc + '\\TGR_StWide_lkD.shp')
#---Clip Blue Stakes Airstrips-----------------------------------------------------------
    clip(airstripsBS, 'lkD.shp')
print 'Done Translating Airstrips ' + str(datetime.datetime.now())
#-------------------------------------------------------------------------------------------------------------------------------------------
def miscTransportation():
print 'Starting Misc Transportation ' + str(datetime.datetime.now())
miscTrans = sgid10_GEO + '\\SkiLifts'
miscTransBS = stageDB + '\\TGR_StWide_lkC'
clpCnty = 'SGID10.BOUNDARIES.Counties'
arcpy.CopyFeatures_management('SGID10.RECREATION.SkiLifts', miscTrans)
#---Check for statewide municipalities BlueStakes schema
if not arcpy.Exists(miscTransBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCClkC_schema', miscTransBS)
else:
arcpy.DeleteFeatures_management(miscTransBS)
srcFlds = ['LIFT_NAME', 'SHAPE@']
tarFlds = ['FENAME', 'CFCC2', 'SHAPE@']
cntyFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(miscTrans, srcFlds)
tarRows = arcpy.da.InsertCursor(miscTransBS, tarFlds)
for srcRow in srcRows:
        FENAME = (srcRow[0] or '') + ' Ski Lift'  # tolerate a null LIFT_NAME
CFCC2 = 'C3'
shp = srcRow[1]
tarRows.insertRow((FENAME, CFCC2, shp))
del tarRows
#---Copy Misc Trans to Blue Stakes root level---------------
arcpy.CopyFeatures_management(miscTransBS, outLoc + '\\TGR_StWide_lkC.shp')
#---Clip Blue Stakes Misc Transportation-----------------------------------------------------------
    clip(miscTransBS, 'lkC.shp')
print 'Done Translating Misc Transportation ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------------
def townships():
print 'Starting Townships ' + str(datetime.datetime.now())
twnShips = sgid10_GEO + '\\PLSSTownships'
twnShipsBS = stageDB + '\\UT_TR'
#---Move Townships in SGID10_GEOGRAPHIC staging area
## if arcpy.Exists(muni):
## arcpy.Delete_management(muni)
arcpy.CopyFeatures_management('SGID10.CADASTRE.PLSSTownships_GCDB', twnShips)
## else:
## arcpy.CopyFeatures_management('SGID10.BOUNDARIES.Municipalities', muni)
#---Check for statewide township BlueStakes schema
if not arcpy.Exists(twnShipsBS):
arcpy.CopyFeatures_management(schemaDB + '\\UT_TR_schema', twnShipsBS)
else:
arcpy.DeleteFeatures_management(twnShipsBS)
srcFlds = ['BASEMERIDIAN', 'TWNSHPLAB', 'SHAPE@']
tarFlds = ['NAME', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(twnShips, srcFlds)
tarRows = arcpy.da.InsertCursor(twnShipsBS, tarFlds)
for srcRow in srcRows:
NAME = ("SL" if srcRow[0] == "26" else "UI") + " " + srcRow[1]
shp = srcRow[2]
tarRows.insertRow((NAME, shp))
del tarRows
#---Export to shapefile-------------------------------------------
outTwnshps = outLoc + '\\UT_TR.shp'
arcpy.CopyFeatures_management(twnShipsBS, outTwnshps)
flds = arcpy.ListFields(outTwnshps)
for fld in flds:
if fld.name == 'Shape_Area':
arcpy.DeleteField_management(outTwnshps, 'Shape_Area')
if fld.name == 'Shape_Leng':
arcpy.DeleteField_management(outTwnshps, 'Shape_Leng')
print 'Done Translating Townships ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------------
def sections():
print 'Starting Sections ' + str(datetime.datetime.now())
sections = sgid10_GEO + '\\PLSSSections'
sectionsBS = stageDB + '\\UT_TRS'
#---Move Sections to SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.CADASTRE.PLSSSections_GCDB', sections)
#---Check for statewide BlueStakes sections
if not arcpy.Exists(sectionsBS):
arcpy.CopyFeatures_management(schemaDB + '\\UT_TRS_schema', sectionsBS)
else:
arcpy.DeleteFeatures_management(sectionsBS)
srcFlds = ['SNUM', 'SHAPE@']
tarFlds = ['NAME', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(sections, srcFlds)
tarRows = arcpy.da.InsertCursor(sectionsBS, tarFlds)
for srcRow in srcRows:
NAME = srcRow[0]
shp = srcRow[1]
tarRows.insertRow((NAME, shp))
del tarRows
#---Export to shapefile-------------------------------------------
outSections = outLoc + '\\UT_TRS.shp'
arcpy.CopyFeatures_management(sectionsBS, outSections)
flds = arcpy.ListFields(outSections)
for fld in flds:
if fld.name == 'Shape_Area':
arcpy.DeleteField_management(outSections, 'Shape_Area')
if fld.name == 'Shape_Leng':
arcpy.DeleteField_management(outSections, 'Shape_Leng')
print 'Done Translating Sections ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------------
def deciPoints():
print 'Starting Deci Points (GNIS) ' + str(datetime.datetime.now())
deciPts = sgid10_GEO + '\\GNIS2010'
deciPtsBS = stageDB + '\\TGR_StWide_deci'
#---Move GNIS to SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.LOCATION.PlaceNamesGNIS2010', deciPts)
#---Check for statewide Deci Points BlueStakes schema
if not arcpy.Exists(deciPtsBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCCdeci_schema', deciPtsBS)
else:
arcpy.DeleteFeatures_management(deciPtsBS)
srcFlds = ['NAME', 'SHAPE@']
tarFlds = ['NAME', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(deciPts, srcFlds)
tarRows = arcpy.da.InsertCursor(deciPtsBS, tarFlds)
for srcRow in srcRows:
if srcRow[0] != None:
NAME = srcRow[0]
else:
NAME = ''
shp = srcRow[1]
tarRows.insertRow((NAME, shp))
del tarRows
#---Clip Blue Stakes Deci Points-----------------------------------------------------------
    clip(deciPtsBS, 'deci.shp')
print 'Done Translating Deci Points (GNIS) ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------------
def addedPoints():
print 'Starting Added Points ' + str(datetime.datetime.now())
correctionsPts = sgid10_GEO + '\\CorrectionalFacilities'
fireStnPts = sgid10_GEO + '\\FireStations'
libraryPts = sgid10_GEO + '\\Libraries'
liquorPts = sgid10_GEO + '\\LiquorStores'
churchPts = sgid10_GEO + '\\PlacesOfWorship'
policePts = sgid10_GEO + '\\PoliceStations'
postOfficePts = sgid10_GEO + '\\PostOffices'
schoolPts = sgid10_GEO + '\\Schools'
mallPts = sgid10_GEO + '\\ShoppingMalls'
healthCarePts = sgid10_GEO + '\\HealthCareFacilities'
addedPtsBS = stageDB + '\\TGR_StWide_added'
#---Move Points to SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.SOCIETY.CorrectionalFacilities', correctionsPts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.FireStations', fireStnPts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.Libraries', libraryPts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.LiquorStores', liquorPts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.PlacesOfWorship', churchPts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.PoliceStations', policePts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.PostOffices', postOfficePts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.Schools', schoolPts)
arcpy.CopyFeatures_management('SGID10.SOCIETY.ShoppingMalls', mallPts)
arcpy.CopyFeatures_management('SGID10.HEALTH.HealthCareFacilities', healthCarePts)
print 'Done copying features from SGID10 to staging area'
#---Check for statewide Deci Points BlueStakes schema
if not arcpy.Exists(addedPtsBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCCdeci_schema', addedPtsBS)
else:
arcpy.DeleteFeatures_management(addedPtsBS)
tarFlds = ['NAME', 'SHAPE@']
tarRows = arcpy.da.InsertCursor(addedPtsBS, tarFlds)
pointFC_List = [correctionsPts, fireStnPts, libraryPts, churchPts, mallPts, healthCarePts]
#---Loop through feature classes that have common fields-------
for pointFC in pointFC_List:
srcFlds = ['NAME', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(pointFC, srcFlds)
for srcRow in srcRows:
if srcRow[0] != None:
if len(srcRow[0]) > 79:
NAME = ' '.join(srcRow[0].split()[:-1]).title()
else:
NAME = srcRow[0].title()
else:
NAME = ''
shp = srcRow[1]
tarRows.insertRow((NAME, shp))
print 'Added ' + pointFC
liquorFlds = ['TYPE', 'SHAPE@']
policeFlds = ['NAME', 'SHAPE@']
postOfficeFlds = ['TOWN', 'STREET', 'SHAPE@']
schoolFlds = ['INSTITUTION_NAME', 'SHAPE@']
liquorRows = arcpy.da.SearchCursor(liquorPts, liquorFlds)
policeRows = arcpy.da.SearchCursor(policePts, policeFlds)
postOfficeRows = arcpy.da.SearchCursor(postOfficePts, postOfficeFlds)
schoolRows = arcpy.da.SearchCursor(schoolPts, schoolFlds)
for liquorRow in liquorRows:
if liquorRow[0] != None:
NAME = 'Liquor ' + liquorRow[0]
else:
NAME = 'Liquor Store'
shp = liquorRow[1]
tarRows.insertRow((NAME, shp))
print 'Added ' + liquorPts
for policeRow in policeRows:
if policeRow[0] != None:
if policeRow[0] == 'UNITED STATES FISH AND WILDLIFE SERVICE - OFFICE OF LAW ENFORCEMENT - BEAR RIVER MIGRATORY BIRD REFUGE':
NAME = 'U.S. Fish And Wildlife Service - Law Enforcement - Bear River Bird Refuge'
else:
NAME = (policeRow[0].title().replace('United States', 'U.S.'))
else:
NAME = ''
shp = policeRow[1]
tarRows.insertRow((NAME, shp))
print 'Added ' + policePts
    for postOfficeRow in postOfficeRows:
        if postOfficeRow[0] != None:
            NAME = postOfficeRow[0] + ' Post Office'
        else:
            NAME = 'Post Office'
        shp = postOfficeRow[2]  # SHAPE@ is the third field in postOfficeFlds
        tarRows.insertRow((NAME, shp))
print 'Added ' + postOfficePts
for schoolRow in schoolRows:
if schoolRow[0] != None:
NAME = schoolRow[0].title()
else:
NAME = ''
shp = schoolRow[1]
tarRows.insertRow((NAME, shp))
print 'Added ' + schoolPts
del tarRows
#---Clip Blue Stakes Deci Points-----------------------------------------------------------
    clip(addedPtsBS, 'added.shp')  # assumption: the added-points layer gets its own 'added' suffix rather than re-clipping the GNIS 'deci' layer
print 'Done Translating Added Points ' + str(datetime.datetime.now())
#----------------------------------------------------------------------------------------------------------------------------------------------
def counties():
print 'Starting Counties ' + str(datetime.datetime.now())
cnty = sgid10_GEO + '\\Counties'
utah = sgid10_GEO + '\\Utah'
cntyBS = stageDB + '\\TGRSSCCCcty00'
cntyBS_All = stageDB + '\\CO49_D90'
stateBS = stageDB + '\\ST49_D00'
#---Move Counties to SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.BOUNDARIES.Counties', cnty)
arcpy.CopyFeatures_management('SGID10.BOUNDARIES.Utah', utah)
#---Check for County BlueStakes schema
if not arcpy.Exists(cntyBS):
arcpy.CopyFeatures_management(schemaDB + '\\TGRSSCCCcty00_schema', cntyBS)
if not arcpy.Exists(cntyBS_All):
arcpy.CopyFeatures_management(schemaDB + '\\CO49_D90_schema', cntyBS_All)
if not arcpy.Exists(stateBS):
arcpy.CopyFeatures_management(schemaDB + '\\ST49_D00_schema', stateBS)
else:
arcpy.DeleteFeatures_management(cntyBS)
arcpy.DeleteFeatures_management(cntyBS_All)
arcpy.DeleteFeatures_management(stateBS)
srcFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
srcFldsUT = ['STATE', 'SHAPE@']
cntyFlds = ['COUNTY', 'SHAPE@']
cntyAllFlds = ['NAME', 'ST', 'CO', 'SHAPE@']
stFlds = ['NAME', 'STATE', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(cnty, srcFlds)
srcRowsUT = arcpy.da.SearchCursor(utah, srcFldsUT)
cntyRows = arcpy.da.InsertCursor(cntyBS, cntyFlds)
cntyAllRows = arcpy.da.InsertCursor(cntyBS_All, cntyAllFlds)
stRows = arcpy.da.InsertCursor(stateBS, stFlds)
#---Create individual county shapefiles--------------------------------------------------------
for srcRow in srcRows:
if srcRow[0] != None:
COUNTY = srcRow[0]
else:
COUNTY = ''
shp = srcRow[2]
cntyRows.insertRow((COUNTY, shp))
del cntyRows
#---Copy each county to Bluestakes folder----------
cntyBSRows = arcpy.da.SearchCursor(cntyBS, cntyFlds)
for cntyBSRow in cntyBSRows:
cntyName = ''.join(cntyBSRow[0].title().split())
fldrPrefix = '\\TGR'
outFldr = outLoc + '\\TGR' + fipsDict[cntyName]
outCntyShp = fldrPrefix + fipsDict[cntyName] + 'cty00.shp'
cntyFL = arcpy.MakeFeatureLayer_management(cntyBS, cntyName + '_FL', " \"COUNTY\" = '{0}' ".format(cntyBSRow[0]))
arcpy.CopyFeatures_management(cntyFL, outFldr + outCntyShp)
flds = arcpy.ListFields(outFldr + outCntyShp)
for fld in flds:
if fld.name == 'Shape_Area':
arcpy.DeleteField_management(outFldr + outCntyShp, 'Shape_Area')
print 'should have deleted area fld'
if fld.name == 'Shape_Leng':
arcpy.DeleteField_management(outFldr + outCntyShp, 'Shape_Leng')
print 'should have deleted leng fld'
    #---Create Statewide County Shapefile----------------------------------------------------------
    srcRows.reset()  # the cursor was exhausted by the per-county loop above
    for srcRow in srcRows:
NAME = srcRow[0]
ST = '49'
CO = srcRow[1][-3:]
shp = srcRow[2]
cntyAllRows.insertRow((NAME, ST, CO, shp))
del cntyAllRows
cntyBS_All_shp = outLoc + '\\CO49_D90.shp'
arcpy.CopyFeatures_management(cntyBS_All, cntyBS_All_shp)
flds = arcpy.ListFields(cntyBS_All_shp)
for fld in flds:
if fld.name == 'Shape_Area':
arcpy.DeleteField_management(cntyBS_All_shp, 'Shape_Area')
if fld.name == 'Shape_Leng':
arcpy.DeleteField_management(cntyBS_All_shp, 'Shape_Leng')
#---Create State shapfile--------------------------------------------------------------------
for srcRowUT in srcRowsUT:
if srcRowUT[0] == 'Utah':
NAME = 'Utah'
STATE = '49'
shp = srcRowUT[1]
stRows.insertRow((NAME, STATE, shp))
del stRows
stateBS_shp = outLoc + '\\ST49_D00.shp'
arcpy.CopyFeatures_management(stateBS, stateBS_shp)
flds = arcpy.ListFields(stateBS_shp)
for fld in flds:
if fld.name == 'Shape_Area':
arcpy.DeleteField_management(stateBS_shp, 'Shape_Area')
if fld.name == 'Shape_Leng':
arcpy.DeleteField_management(stateBS_shp, 'Shape_Leng')
print 'Done Translating Counties ' + str(datetime.datetime.now())
#---------------------------------------------------------------------------------------------------------------------
def addressZones():
print 'Starting Address Zones ' + str(datetime.datetime.now())
addZones = sgid10_GEO + '\\AddressSystemQuadrants'
addZonesBS = stageDB + '\\addrsys'
#---Add Address Zones to SGID10_GEOGRAPHIC staging area
arcpy.CopyFeatures_management('SGID10.LOCATION.AddressSystemQuadrants', addZones)
#---Check for Address Zones BlueStakes schema
if not arcpy.Exists(addZonesBS):
arcpy.CopyFeatures_management(schemaDB + '\\addrsys_schema', addZonesBS)
else:
arcpy.DeleteFeatures_management(addZonesBS)
srcFlds = ['GRID_NAME', 'QUADRANT', 'SHAPE@']
tarFlds = ['NAME', 'SHAPE@']
srcRows = arcpy.da.SearchCursor(addZones, srcFlds)
tarRows = arcpy.da.InsertCursor(addZonesBS, tarFlds)
for srcRow in srcRows:
if srcRow[0] != None:
if srcRow[1] != None:
NAME = srcRow[0] + ' ' + srcRow[1]
else:
NAME = srcRow[0]
shp = srcRow[2]
tarRows.insertRow((NAME, shp))
del tarRows
#---Copy Address Zones to Blues Stakes root level
arcpy.CopyFeatures_management(addZonesBS, outLoc + '\\addrsys_StWide.shp')
    #---Clip by county-------------------------------------------
    clpCnty = sgid10_GEO + '\\Counties'
    clpFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
    clpRows = arcpy.da.SearchCursor(clpCnty, clpFlds)
for row in clpRows:
clpFeat = row[2]
#----Delete shapefiles with no features----
clp = arcpy.Clip_analysis(addZonesBS, clpFeat, outLoc + '\\TGR' + row[1] + '\\addrsys' + row[1][-2:] + '.shp')
clpCount = int(arcpy.GetCount_management(clp).getOutput(0))
        if clpCount < 1:
            arcpy.Delete_management(clp)
        else:
            flds = arcpy.ListFields(clp)
            for fld in flds:
                if fld.name == 'Shape_Area':
                    arcpy.DeleteField_management(clp, 'Shape_Area')
                if fld.name == 'Shape_Leng':
                    arcpy.DeleteField_management(clp, 'Shape_Leng')
    print 'Done Translating Address Zones ' + str(datetime.datetime.now())
#---Clip Blue Stakes output, delete empty shapefiles, delete Shape_Leng field-----------------------------------------
def clip(clipMe, outNameSuffix):
clpCnty = sgid10_GEO + '\\Counties'
arcpy.CopyFeatures_management('SGID10.BOUNDARIES.Counties', clpCnty)
clpFlds = ['NAME', 'FIPS_STR', 'SHAPE@']
clpRows = arcpy.da.SearchCursor(clpCnty, clpFlds)
fldrPrefix = '\\TGR'
for row in clpRows:
clpFeat = row[2]
#----Delete shapefiles with no features----
clp = arcpy.Clip_analysis(clipMe, clpFeat, outLoc + fldrPrefix + row[1] + fldrPrefix + row[1] + outNameSuffix)
clpCount = int(arcpy.GetCount_management(clp).getOutput(0))
        if clpCount < 1:
            arcpy.Delete_management(clp)
        else:
            flds = arcpy.ListFields(clp)
            for fld in flds:
                if fld.name == 'Shape_Area':
                    arcpy.DeleteField_management(clp, 'Shape_Area')
                if fld.name == 'Shape_Leng':
                    arcpy.DeleteField_management(clp, 'Shape_Leng')
#parcels();
roads();
#municipalities();
#mileposts();
#landownershipLarge();
#waterPoly();
#waterLines();
#rail();
#airstrips();
#miscTransportation();
#addressPoints();
#townships();
#sections();
#deciPoints();
#addedPoints();
#counties();
#addressZones();
|
[
"zachybeck"
] |
zachybeck
|
5595eba708e5354b7677d7ac6638394192648dee
|
6322fcae7302c4fe60b14c36a2dfbc5ba7432d8b
|
/profiles_project/profiles_api/migrations/0002_profilefeeditem.py
|
8e2bf057cda238c513914d1d003d70ca9b7b03c2
|
[
"MIT"
] |
permissive
|
Ali1995Askar/profiles-rest-api
|
abb2b580c6ab451159f3164acc3377a62597da3a
|
3e1cd11dfc2cd1fc879b3f1adbe720c0ad38999c
|
refs/heads/master
| 2023-06-12T13:14:32.028640
| 2021-07-08T15:22:06
| 2021-07-08T15:22:06
| 320,111,987
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 789
|
py
|
# Generated by Django 3.2.4 on 2021-07-08 14:57
from django.conf import settings
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('profiles_api', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='ProfileFeedItem',
fields=[
('id', models.BigAutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('status_text', models.CharField(max_length=255)),
('created_on', models.DateTimeField(auto_now=True)),
('user_profile', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to=settings.AUTH_USER_MODEL)),
],
),
]
|
[
"ali1995askar@gmail.com"
] |
ali1995askar@gmail.com
|
6e4be48ebd16b05af14ae97508e703148073511e
|
24b43b336689530ccb9e3b07ff74d21f124c9ebe
|
/Testing/plot_bound.py
|
a0a8178372a74a9f40b5ade861aaf06307a0ef5c
|
[] |
no_license
|
kemalmahmuljin/Pear_Project
|
3f04a429fd14dfd479a3351d139914dbfbed5cd9
|
97b34d2f85b47046028a405cb7cc047d12745fca
|
refs/heads/master
| 2022-04-19T18:18:13.809935
| 2020-04-22T08:23:04
| 2020-04-22T08:23:04
| 240,002,420
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,021
|
py
|
import matplotlib.pyplot as plt
import numpy as np
from io import StringIO
coords_file = open("coords", 'r')
coords_str = coords_file.read()
coords_str = coords_str[coords_str.find('\n') + 1:]
coords_stream = StringIO(coords_str)
coords = np.loadtxt(coords_stream)
boundaries_file = open("boundaries", 'r')
bound_str = boundaries_file.read()
bound_stream = StringIO(bound_str)
boundaries = np.loadtxt(bound_stream)
ax = plt.axes()
for elem in boundaries:
x = [coords[int(elem[0])][0], coords[int(elem[1])][0]]
y = [coords[int(elem[0])][1], coords[int(elem[1])][1]]
if elem[2] == 0:
color = 'r'
else:
color = 'b'
ax.arrow(coords[int(elem[0])][0],coords[int(elem[0])][1],
coords[int(elem[1])][0]-coords[int(elem[0])][0],
coords[int(elem[1])][1]-coords[int(elem[0])][1],
head_width=0.002, length_includes_head=True, lw=0.00001,
color=color)
plt.xlim(0,0.050)
plt.ylim(0,0.120)
plt.title("Working boundaries 05/03/20")
plt.show()
|
[
"gcapittini@localhost.localdomain"
] |
gcapittini@localhost.localdomain
|
7ed4ad3d5e2ca344f1887ae726701a3e87e4c6ae
|
41615e2198cd99fdf0c8e473918138cedd05dde4
|
/flamingoTest/settings.py
|
d60f4c3f4df0dc035ea6ba3cce3b299b814274ba
|
[] |
no_license
|
mansi02/flamingoTestBackend
|
3b08c7d2e0de40a21e1f584663b73dc91d89ccbc
|
0ffeb7624d174789e2f3975d1bde89a21386b466
|
refs/heads/main
| 2023-06-18T20:58:54.357352
| 2021-06-29T09:57:01
| 2021-06-29T09:57:01
| 381,314,255
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,565
|
py
|
"""
Django settings for flamingoTest project.
Generated by 'django-admin startproject' using Django 3.2.4.
For more information on this file, see
https://docs.djangoproject.com/en/3.2/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/3.2/ref/settings/
"""
import os.path
from pathlib import Path
# Build paths inside the project like this: BASE_DIR / 'subdir'.
BASE_DIR = Path(__file__).resolve().parent.parent
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/3.2/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'django-insecure-m4reov^nx_q7%=xmvytj3ulnp6m!7tanc9#b93v)9781(h5)44'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['localhost:4200', 'http://localhost:4200', '*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'corsheaders',
'rest_framework',
'rest_framework.authtoken',
'custom_user'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'flamingoTest.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
CORS_ORIGIN_ALLOW_ALL = True
WSGI_APPLICATION = 'flamingoTest.wsgi.application'
# Database
# https://docs.djangoproject.com/en/3.2/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': BASE_DIR / 'db.sqlite3',
}
}
AUTH_USER_MODEL = 'custom_user.User'
# Password validation
# https://docs.djangoproject.com/en/3.2/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/3.2/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/3.2/howto/static-files/
STATIC_URL = '/static/'
MEDIA_URL = os.path.join(BASE_DIR, 'static/images/')
# Default primary key field type
# https://docs.djangoproject.com/en/3.2/ref/settings/#default-auto-field
DEFAULT_AUTO_FIELD = 'django.db.models.BigAutoField'
|
[
"mansi0201@gmail.com"
] |
mansi0201@gmail.com
|
9f2b66930fa1680db4ac6e886f4ab820898c1c59
|
5e97883ffb0baf1aa837050245afb253d4ad97b1
|
/backend/blog/models.py
|
da9f90ea7b43ccf7eec727478bd0ea01c3a41ea6
|
[
"ISC"
] |
permissive
|
DamQuangKhoa/interview-home-work
|
1d421710da3bf29c3da3cc3ca54babca8222b792
|
39bdfa20aba77af2946cc3e796b92fb343584c66
|
refs/heads/master
| 2020-12-05T14:32:42.258884
| 2020-01-08T15:40:40
| 2020-01-09T19:52:22
| 232,140,207
| 0
| 0
| null | 2020-01-06T16:26:55
| 2020-01-06T16:26:54
| null |
UTF-8
|
Python
| false
| false
| 3,592
|
py
|
from django.db import models
from django.utils import timezone
from django.contrib.auth.models import User
from django.urls import reverse
from taggit.managers import TaggableManager
from django.contrib.postgres.fields import ArrayField
class PublishedManager(models.Manager):
def get_queryset(self):
return super(PublishedManager,
self).get_queryset()\
.filter(status='published')
class PostBlog(models.Model):
title = models.CharField(max_length=250)
    slug = models.SlugField(max_length=250)
author = models.ForeignKey("UserBlog",
related_name='posts',
on_delete=models.CASCADE)
content = models.TextField()
created_at = models.CharField(max_length=250)
tags = ArrayField(models.CharField(max_length=80, blank=True), size=5)
def __str__(self):
return self.title
class UserBlog(models.Model):
username = models.CharField(unique=True, max_length=50, blank=True, null=True)
password = models.CharField(max_length=50, blank=True, null=True)
name = models.CharField(max_length=250)
created_at = models.CharField(max_length=250)
dob = models.CharField(max_length=250)
# list_display = [field.name for field in this._meta.get_fields()]
class CommentBlog(models.Model):
owner = models.ForeignKey(UserBlog,
related_name='comments',
on_delete=models.CASCADE)
post = models.ForeignKey(PostBlog,
related_name='posts',
on_delete=models.CASCADE)
content = models.CharField(max_length=250)
created_at = models.CharField(max_length=250)
class Post(models.Model):
# Our custom manager.
tags = TaggableManager()
STATUS_CHOICES = (
('draft', 'Draft'),
('published', 'Published')
)
objects = models.Manager() # The default manager.
published = PublishedManager()
title = models.CharField(max_length=250)
slug = models.SlugField(max_length=250,
unique_for_date='publish')
author = models.ForeignKey(User,
related_name='blog_posts',
on_delete=models.CASCADE)
body = models.TextField()
publish = models.DateTimeField(default=timezone.now)
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
status = models.CharField(max_length=10,
choices=STATUS_CHOICES,
default='draft')
def get_absolute_url(self):
return reverse('blog:post_detail',
args=[self.publish.year,
self.publish.strftime('%m'),
self.publish.strftime('%d'),
self.slug])
class Meta:
ordering = ('-publish',)
def __str__(self):
return self.title
class Comment(models.Model):
post = models.ForeignKey(PostBlog, related_name='comments',on_delete=models.CASCADE)
name = models.CharField(max_length=80)
email = models.EmailField()
body = models.TextField()
created = models.DateTimeField(auto_now_add=True)
updated = models.DateTimeField(auto_now=True)
active = models.BooleanField(default=True)
class Meta:
ordering = ('created',)
def __str__(self):
return 'Comment by {} on {}'.format(self.name, self.post)
|
[
"damquangkhoa02@gmail.com"
] |
damquangkhoa02@gmail.com
|
067fa51eb4fa0e09a6621a97ebe17ee42860681d
|
6b9851216acc29a63fba94a633c9370ed48006ad
|
/product/migrations/0009_auto_20210501_2216.py
|
a47acdf6beda36c035bf5093d83079ee96e302ed
|
[] |
no_license
|
mustafabayarr/Django-E-Commerce
|
dc07ecc7b417decf6de0cd92b915be9c2e7a9b7a
|
de2e46f0793f445ca546ed7b2a484092b4a8e024
|
refs/heads/main
| 2023-05-30T20:01:44.250290
| 2021-06-30T19:05:15
| 2021-06-30T19:05:15
| 347,505,444
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 373
|
py
|
# Generated by Django 3.1.7 on 2021-05-01 19:16
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('product', '0008_comment'),
]
operations = [
migrations.AlterField(
model_name='category',
name='slug',
field=models.SlugField(unique=True),
),
]
|
[
"bayarmustafa151@gmail.com"
] |
bayarmustafa151@gmail.com
|
55312306f0b0073a60e7739ef1e8dcfcfae8c28c
|
b7fdc9df049029ab043073b06648375da0693737
|
/accounts/api/urls.py
|
10dd92abd9b34d381cc753cb4f45cadc0d924160
|
[] |
no_license
|
karn21/task-manager
|
c640a6209e20b429ffa4f2307273509184f8396f
|
77cacd1e6f83de5eba83ec6b350ba71160fc161c
|
refs/heads/master
| 2023-08-01T13:42:41.747043
| 2020-06-06T09:58:17
| 2020-06-06T09:58:17
| 263,887,690
| 0
| 0
| null | 2021-09-22T19:01:03
| 2020-05-14T10:45:29
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 381
|
py
|
from django.urls import path, include
from knox.views import LogoutView
from .views import UserAPIView, RegisterAPIView,LoginAPIView
urlpatterns = [
path('', include('knox.urls')),
path('user', UserAPIView.as_view()),
path('register', RegisterAPIView.as_view()),
path('login', LoginAPIView.as_view()),
path('logout', LogoutView.as_view(), name='knox_logout')
]
|
[
"karn212000@gmail.com"
] |
karn212000@gmail.com
|
89e353022fef9fffa9f5835f74ae7501b8c1d990
|
3960fa9721ff97c8da99d010e27118ab0bc1201d
|
/tests/storage/fake_storage.py
|
c1437e781c494d82c715effbb93b4b9fafedaf40
|
[
"Apache-2.0"
] |
permissive
|
iamjoshbinder/plaso
|
d3ebbc216b4d89c8f8f6ab50f059b6db7bcca599
|
762aa1d1eb17760ef5e2708a48dff2acad7001ea
|
refs/heads/master
| 2021-08-08T13:23:10.146862
| 2017-11-09T10:44:09
| 2017-11-09T10:44:09
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,362
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
"""Tests for the fake storage."""
import unittest
from plaso.containers import errors
from plaso.containers import event_sources
from plaso.containers import reports
from plaso.containers import sessions
from plaso.containers import tasks
from plaso.lib import definitions
from plaso.storage import fake_storage
from tests.storage import test_lib
class FakeStorageWriterTest(test_lib.StorageTestCase):
"""Tests for the fake storage writer object."""
def testAddAnalysisReport(self):
"""Tests the AddAnalysisReport function."""
session = sessions.Session()
analysis_report = reports.AnalysisReport(
plugin_name=u'test', text=u'test report')
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
storage_writer.AddAnalysisReport(analysis_report)
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.AddAnalysisReport(analysis_report)
def testAddError(self):
"""Tests the AddError function."""
session = sessions.Session()
extraction_error = errors.ExtractionError(
message=u'Test extraction error')
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
storage_writer.AddError(extraction_error)
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.AddError(extraction_error)
def testAddEvent(self):
"""Tests the AddEvent function."""
session = sessions.Session()
test_events = self._CreateTestEvents()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
event = None
for event in test_events:
storage_writer.AddEvent(event)
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.AddEvent(event)
def testAddEventSource(self):
"""Tests the AddEventSource function."""
session = sessions.Session()
event_source = event_sources.EventSource()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
storage_writer.AddEventSource(event_source)
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.AddEventSource(event_source)
def testAddEventTag(self):
"""Tests the AddEventTag function."""
session = sessions.Session()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
test_events = self._CreateTestEvents()
for event in test_events:
storage_writer.AddEvent(event)
event_tag = None
test_event_tags = self._CreateTestEventTags(test_events)
for event_tag in test_event_tags:
storage_writer.AddEventTag(event_tag)
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.AddEventTag(event_tag)
def testOpenClose(self):
"""Tests the Open and Close functions."""
session = sessions.Session()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
storage_writer.Close()
storage_writer.Open()
storage_writer.Close()
storage_writer = fake_storage.FakeStorageWriter(
session, storage_type=definitions.STORAGE_TYPE_TASK)
storage_writer.Open()
storage_writer.Close()
storage_writer.Open()
with self.assertRaises(IOError):
storage_writer.Open()
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.Close()
def testGetEvents(self):
"""Tests the GetEvents function."""
session = sessions.Session()
test_events = self._CreateTestEvents()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
event = None
for event in test_events:
storage_writer.AddEvent(event)
events = list(storage_writer.GetEvents())
self.assertEqual(len(events), len(test_events))
storage_writer.Close()
# TODO: add tests for GetEventSources.
# TODO: add tests for GetEventTags.
# TODO: add tests for GetFirstWrittenEventSource and
# GetNextWrittenEventSource.
def testGetSortedEvents(self):
"""Tests the GetSortedEvents function."""
session = sessions.Session()
test_events = self._CreateTestEvents()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
event = None
for event in test_events:
storage_writer.AddEvent(event)
events = list(storage_writer.GetSortedEvents())
self.assertEqual(len(events), len(test_events))
storage_writer.Close()
# TODO: add test with time range.
def testWriteSessionStartAndCompletion(self):
"""Tests the WriteSessionStart and WriteSessionCompletion functions."""
session = sessions.Session()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
storage_writer.WriteSessionStart()
storage_writer.WriteSessionCompletion()
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.WriteSessionStart()
with self.assertRaises(IOError):
storage_writer.WriteSessionCompletion()
storage_writer = fake_storage.FakeStorageWriter(
session, storage_type=definitions.STORAGE_TYPE_TASK)
storage_writer.Open()
with self.assertRaises(IOError):
storage_writer.WriteSessionStart()
with self.assertRaises(IOError):
storage_writer.WriteSessionCompletion()
storage_writer.Close()
def testWriteTaskStartAndCompletion(self):
"""Tests the WriteTaskStart and WriteTaskCompletion functions."""
session = sessions.Session()
task = tasks.Task(session_identifier=session.identifier)
storage_writer = fake_storage.FakeStorageWriter(
session, storage_type=definitions.STORAGE_TYPE_TASK, task=task)
storage_writer.Open()
storage_writer.WriteTaskStart()
storage_writer.WriteTaskCompletion()
storage_writer.Close()
with self.assertRaises(IOError):
storage_writer.WriteTaskStart()
with self.assertRaises(IOError):
storage_writer.WriteTaskCompletion()
storage_writer = fake_storage.FakeStorageWriter(session)
storage_writer.Open()
with self.assertRaises(IOError):
storage_writer.WriteTaskStart()
with self.assertRaises(IOError):
storage_writer.WriteTaskCompletion()
storage_writer.Close()
if __name__ == '__main__':
unittest.main()
|
[
"joachim.metz@gmail.com"
] |
joachim.metz@gmail.com
|
e365f14b049d2372740d1366505b2ea8cab392c1
|
aa520d389a91e68fa61170caeb4ec8f76fad40d3
|
/testCases/conftest.py
|
05895d257f4acd64a7f20400fa6650496457a1c2
|
[] |
no_license
|
arumugam85/Selenium_Python_Allure
|
5703046b98fcd350b90a50f7ad4a5e5c1b513026
|
9ec65e79c910daec75d51fb7c96fc52a1c309cef
|
refs/heads/master
| 2023-09-04T02:05:00.446419
| 2021-11-01T02:41:34
| 2021-11-01T02:41:34
| 423,153,205
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,370
|
py
|
import datetime
import pytest
from selenium import webdriver
from webdriver_manager.chrome import ChromeDriverManager
class BaseClass:
def __init__(self, driver):
self.driver = driver
@pytest.fixture()
def test_setup(self):
self.driver.implicitly_wait(10)
self.driver.maximize_window()
yield
self.driver.quit()
# if browser == 'chrome':
# # driver = webdriver.Chrome()
# driver = webdriver.Chrome(ChromeDriverManager().install())
# driver.maximize_window()
#
# elif browser == 'firefox':
# driver = webdriver.Firefox()
# driver.maximize_window()
#
# return driver
def teardown(self):
        if self.driver is not None:
            self.logger.info("***********Test is Destroyed*************")
            self.logger.info("Test destroyed at " + str(datetime.datetime.now()))
# driver.close()
yield
self.driver.quit()
def pytest_addoption(parser):
parser.addoption("--browser")
@pytest.fixture()
def browser(request):
return request.config.getoption("--browser")
def pytest_configure(config):
config._metadata['Project Name'] = 'ECommerce Application'
config._metadata['Framework'] = 'PyTest Framework'
config._metadata['Language'] = 'Python'
config._metadata['Author'] = 'Aru'
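# Minimal sketch (not part of the original file) of how the --browser
# option above is consumed from a test; the test module name is an
# illustrative assumption:
#
#     # test_smoke.py
#     def test_browser_option(browser):
#         # 'browser' is the fixture defined above; it returns whatever
#         # was passed on the command line, e.g.  pytest --browser chrome
#         assert browser in ('chrome', 'firefox')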
|
[
"rarumugambe@gmail.com"
] |
rarumugambe@gmail.com
|
4fd9bed4328f8591ad62960574eed263df888ec7
|
f618cb7a1b1f49c02396a2bb969cc7518fd163ab
|
/doc/_gallery/1_3_1_noisy_chirp_wv.py
|
ba10a534a72d30bbb4a32f5780d048b7422177fb
|
[] |
no_license
|
kingjr/pytftb
|
b968b8e2fc294a19cec8bf63e7d289f368ddf194
|
0bcacf5eef46bd173d90a23c00a7f4b8ee284b22
|
refs/heads/master
| 2021-01-16T22:27:05.587174
| 2015-06-25T05:16:02
| 2015-06-25T05:16:02
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 572
|
py
|
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# vim:fenc=utf-8
#
# Copyright © 2015 jaidev <jaidev@newton>
#
# Distributed under terms of the MIT license.
"""
"""
from tftb.generators import fmlin, sigmerge, noisecg
from tftb.processing.cohen import WignerVilleDistribution
# Generate a chirp signal
n_points = 128
fmin, fmax = 0.0, 0.5
signal, _ = fmlin(n_points, fmin, fmax)
# Noisy chirp
noisy_signal = sigmerge(signal, noisecg(128), 0)
# Wigner-Ville spectrum of noisy chirp.
wvd = WignerVilleDistribution(noisy_signal)
wvd.run()
wvd.plot(kind='contour')
|
[
"deshpande.jaidev@gmail.com"
] |
deshpande.jaidev@gmail.com
|
872d338f6ea05a63f4fdf3aa24e2179ae2df3125
|
0e039e8aacd1e89b122ae0d5aa0b58da04ae13af
|
/Python2/matrix_addition1.py
|
a268fa45c0560cdcda10921358e81e3358326fac
|
[] |
no_license
|
jaredStevens/python-exercises
|
0e4013667dd4409c734219558e444bfbf1eb3f06
|
f5aba9c9db2219a258218457960f841ba58a4f6b
|
refs/heads/master
| 2021-09-10T08:29:40.268170
| 2018-03-22T23:20:44
| 2018-03-22T23:20:44
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 216
|
py
|
arr1 = [1, 3, 2, 4]
arr2 = [5, 2, 1, 0]
def addVectors(arr1, arr2):
addList = []
for i in range(0, len(arr1)):
addList.append(arr1[i] + arr2[i])
return addList
print(addVectors(arr1, arr2))
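# Equivalent vectorized one-liner using numpy (an illustrative alternative,
# not part of the original exercise):
#
#     import numpy as np
#     print((np.array(arr1) + np.array(arr2)).tolist())  # [6, 5, 3, 4]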
|
[
"jared.b.stevens@gmail.com"
] |
jared.b.stevens@gmail.com
|
f23a0c46bc7f06f519c46dda30322d02e934272d
|
9ec431f50fd0e8f3949ea792aa14b11ad1712144
|
/Modulo 01/exercicos/d008.py
|
5694b1497cd605daec992565afbbfb0a9f916636
|
[
"MIT"
] |
permissive
|
euyag/python-cursoemvideo
|
c3d42ae332e81a9fc9dfc66743df4acee439bff3
|
d2f684854d926e38ea193816a6c7d2c48d25aa3d
|
refs/heads/main
| 2023-08-02T14:32:02.266366
| 2021-10-01T03:47:40
| 2021-10-01T03:47:40
| 379,093,140
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 200
|
py
|
print('===== CHALLENGE 08 =====')
m = int(input('enter a value in meters: '))
cm = m * 100
mi = m * 1000
print(f'{m}m equals {cm}cm in centimeters')
print(f'{m}m equals {mi}mm in millimeters')
|
[
"clancysla@gmail.com"
] |
clancysla@gmail.com
|
2655406fbba0bc1f5d4ec29aaa0d0d04a1e907b4
|
bd0d510a9117c65552c9e813fdfcc115dc36d752
|
/app/__init__.py
|
3e56b439bba39e686c211fdcd571392e348a4fdd
|
[] |
no_license
|
grigorevma/test
|
81a42b1e9f84d0860b915402ef264efef20cc2a8
|
d1342d909b5359f865df2390611ccc95eacf5143
|
refs/heads/master
| 2022-12-21T12:28:16.709713
| 2019-07-18T16:09:45
| 2019-07-18T16:09:45
| 197,398,989
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 476
|
py
|
from flask import Flask
from flask_bootstrap import Bootstrap
from flask_sqlalchemy import SQLAlchemy
from app.config import config
bootstrap = Bootstrap()
db = SQLAlchemy()
def create_app(config_name):
from app.main import main as main_blueprint
app = Flask(__name__)
app.config.from_object(config[config_name])
config[config_name].init_app(app)
bootstrap.init_app(app)
db.init_app(app)
app.register_blueprint(main_blueprint)
return app
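# Minimal usage sketch (assumes the config dict in app/config.py defines a
# 'default' key, which is a common convention but not confirmed here):
#
#     from app import create_app
#
#     app = create_app('default')
#     app.run(debug=True)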
|
[
"grigorevma@bk.ru"
] |
grigorevma@bk.ru
|
685bb881ba505735cab4af01cfd4fd30bc4c0bbd
|
3592225aaddee6265cb7170e0bea9dc606ee6a7e
|
/Documents/UCSB_classes/loadtest/bin/rstpep2html.py
|
6a2a4a3aec25464fb92d7fe7b599b8cc417b3c52
|
[] |
no_license
|
larssbr/AI_ovinger
|
d9c059762b1ffa2c7ebe174ebc0298a677507cec
|
3d2e16c0f291ac0ec2df23cffeb6ef57dd042f56
|
refs/heads/master
| 2021-01-20T05:31:45.768631
| 2015-10-11T16:47:18
| 2015-10-11T16:47:18
| 13,295,085
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 717
|
py
|
#!/Users/larsbrusletto/Documents/UCSB_classes/loadtest/bin/python
# $Id: rstpep2html.py 4564 2006-05-21 20:44:42Z wiemann $
# Author: David Goodger <goodger@python.org>
# Copyright: This module has been placed in the public domain.
"""
A minimal front end to the Docutils Publisher, producing HTML from PEP
(Python Enhancement Proposal) documents.
"""
try:
import locale
locale.setlocale(locale.LC_ALL, '')
except:
pass
from docutils.core import publish_cmdline, default_description
description = ('Generates (X)HTML from reStructuredText-format PEP files. '
+ default_description)
publish_cmdline(reader_name='pep', writer_name='pep_html',
description=description)
|
[
"larsbrusletto@gmail.com"
] |
larsbrusletto@gmail.com
|
a23e2e8bc24dcfdc8da9c27890d774c829e57699
|
4494d8b45191f4e323d35ea5e1a65533db41a3b8
|
/booking/management/commands/startbot.py
|
658f66b586f6394f49c117e5ff6dbedfe6c7c91a
|
[] |
no_license
|
AVKashtanov/chatbot
|
18dd104cd3a477a91befc1306adab6dc6a5a1875
|
e111e54e6c4129ab08558b410b11e1c3cd74edd2
|
refs/heads/master
| 2023-08-16T20:31:34.713813
| 2021-08-23T01:14:15
| 2021-08-23T01:14:15
| 397,475,252
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,304
|
py
|
from django.core.management.base import BaseCommand
import telebot
from telebot import types
from chatbot.settings import TELEGRAM_TOKEN
from booking.enums import BookingStages, SocialNetworks
from booking.bot import AbstractBot
class TelegramBot(AbstractBot):
def do_bot(self, **kwargs):
bot = telebot.TeleBot(kwargs['token'])
print(bot.get_me())
@bot.message_handler(func=lambda message: self.get_current_stage(
message.chat.id) == BookingStages.START.value)
def text_start(message):
            bot.send_message(
                message.chat.id, "To make a booking, send /start")
self.set_stage(message.chat.id, BookingStages.COUNT.value)
@bot.message_handler(commands=["start"])
def cmd_start(message):
self.save_profile(message.chat.id)
state = self.get_current_stage(message.chat.id)
if state == BookingStages.COUNT.value:
bot.send_message(message.chat.id, "Введите количество гостей.")
elif state == BookingStages.TIME.value:
                bot.send_message(
                    message.chat.id, "Enter a time in the HH:MM format.")
else:
                bot.send_message(
                    message.chat.id,
                    "Hi! To make a booking, enter the number of guests!")
self.set_stage(message.chat.id, BookingStages.COUNT.value)
@bot.message_handler(commands=["reset"])
def cmd_reset(message):
            bot.send_message(
                message.chat.id, "Starting over: enter the number of guests.")
self.set_stage(message.chat.id, BookingStages.COUNT.value)
@bot.message_handler(func=lambda message: self.get_current_stage(
message.chat.id) == BookingStages.COUNT.value)
def get_count(message):
if self.save_count(message.text):
                bot.send_message(
                    message.chat.id, "Enter a time in the HH:MM format.")
self.set_stage(message.chat.id, BookingStages.TIME.value)
else:
                bot.send_message(
                    message.chat.id, 'Digits only, please.')
@bot.message_handler(func=lambda message: self.get_current_stage(
message.chat.id) == BookingStages.TIME.value)
def get_time(message):
if self.save_time(message.text):
keyboard = types.InlineKeyboardMarkup()
                key_yes = types.InlineKeyboardButton(text='Yes',
callback_data='yes')
keyboard.add(key_yes)
                key_no = types.InlineKeyboardButton(text='No',
callback_data='no')
keyboard.add(key_no)
                question = 'Confirm the booking?'
bot.send_message(
message.chat.id, text=question, reply_markup=keyboard)
self.set_stage(message.chat.id, BookingStages.START.value)
else:
                bot.send_message(
                    message.chat.id,
                    'Invalid time, please enter it in the HH:MM format.')
@bot.callback_query_handler(func=lambda call: True)
def callback_worker(call):
if call.data == "yes":
self.save_reservation()
                bot.send_message(
                    call.message.chat.id,
                    'Your table has been booked!')
elif call.data == "no":
                bot.send_message(
                    call.message.chat.id, "We'll be happy to see you another time!")
bot.delete_message(call.message.chat.id, call.message.message_id)
bot.polling(none_stop=True, interval=0)
class Command(BaseCommand):
    help = 'Chat bot'
def handle(self, *args, **kwargs):
TelegramBot(
token=TELEGRAM_TOKEN,
social_network=SocialNetworks.TELEGRAM.value
)
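# Usage sketch: as a Django management command this is normally started with
#     python manage.py startbot
# or programmatically via the standard management API:
#
#     from django.core.management import call_command
#     call_command('startbot')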
|
[
"tema.nkkav@mail.ru"
] |
tema.nkkav@mail.ru
|
e8cff7eef8976183de24a8c81236fe19a69fbaa1
|
56b7bef4ff86fb30289807d6ac80feb7580fd972
|
/replay_buffer.py
|
f351533f56527a36624df1994b976c2d521f065a
|
[] |
no_license
|
Akshai/rl_collaboration_and_competition
|
9f80cb51e5eab816e89b585efdc59d6a34a06485
|
38e818d9a6401ca2e3fdb3d5b451efdd9129d2fb
|
refs/heads/main
| 2023-01-13T14:07:53.733535
| 2020-11-14T03:59:40
| 2020-11-14T03:59:40
| 310,808,829
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,028
|
py
|
from collections import namedtuple, deque
import numpy as np
import torch
import random
device = torch.device("cuda:0" if torch.cuda.is_available() else "cpu")
class ReplayBuffer():
"""Fixed-size buffer to store experience tuples."""
def __init__(self, action_size, buffer_size, batch_size, seed):
"""Initialize a ReplayBuffer object.
Params
======
action_size (int): dimension of each action
buffer_size (int): maximum size of buffer
batch_size (int): size of each training batch
seed (int): Random seed
"""
random.seed(seed)
np.random.seed(seed)
self.action_size = action_size
self.memory = deque(maxlen=buffer_size)
self.batch_size = batch_size
self.experience = namedtuple("Experience", field_names=["state", "action", "reward", "next_state", "done"])
def add(self, state, action, reward, next_state, done):
"""Add a new experience to memory."""
e = self.experience(state, action, reward, next_state, done)
self.memory.append(e)
def sample(self):
"""Randomly sample a batch of experiences from memory."""
experiences = random.sample(self.memory, k=self.batch_size)
states = torch.from_numpy(np.vstack([e.state for e in experiences if e is not None])).float().to(device)
actions = torch.from_numpy(np.vstack([e.action for e in experiences if e is not None])).float().to(device)
rewards = torch.from_numpy(np.vstack([e.reward for e in experiences if e is not None])).float().to(device)
next_states = torch.from_numpy(np.vstack([e.next_state for e in experiences if e is not None])).float().to(device)
dones = torch.from_numpy(np.vstack([e.done for e in experiences if e is not None]).astype(np.uint8)).float().to(device)
return (states, actions, rewards, next_states, dones)
def __len__(self):
"""Return the current size of internal memory."""
return len(self.memory)
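# Minimal usage sketch; the 24-dimensional state below is an illustrative
# assumption, not something this module prescribes.
if __name__ == "__main__":
    buffer = ReplayBuffer(action_size=2, buffer_size=1000, batch_size=4, seed=0)
    for _ in range(8):
        state = np.random.rand(24)       # hypothetical observation vector
        action = np.random.rand(2)
        next_state = np.random.rand(24)
        buffer.add(state, action, 1.0, next_state, False)
    states, actions, rewards, next_states, dones = buffer.sample()
    print(states.shape, actions.shape)   # torch.Size([4, 24]) torch.Size([4, 2])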
|
[
"akshai@AKSHAIs-MacBook-Pro.local"
] |
akshai@AKSHAIs-MacBook-Pro.local
|
678ecff1e87f853037d4016cce7439c1431d7128
|
b0e31216fcf986eaaa51adb973404c766e8042db
|
/build/search_and_rescue_sim/catkin_generated/pkg.develspace.context.pc.py
|
916300b574a077774f2edf0e962f289fd546643f
|
[] |
no_license
|
mateoguaman/RAPDR
|
8a3f56cc68de17b44e292262e18b3bf204d3dd97
|
431e2d59b062c900163f55e0cf68d55f927feebf
|
refs/heads/master
| 2020-03-28T19:51:30.447935
| 2018-09-16T14:36:08
| 2018-09-16T14:36:08
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 379
|
py
|
# generated from catkin/cmake/template/pkg.context.pc.in
CATKIN_PACKAGE_PREFIX = ""
PROJECT_PKG_CONFIG_INCLUDE_DIRS = "".split(';') if "" != "" else []
PROJECT_CATKIN_DEPENDS = "".replace(';', ' ')
PKG_CONFIG_LIBRARIES_WITH_PREFIX = "".split(';') if "" != "" else []
PROJECT_NAME = "search_and_rescue_sim"
PROJECT_SPACE_DIR = "/home/Mateo/ros_ws/devel"
PROJECT_VERSION = "0.0.0"
|
[
"Evana13G@gmail.com"
] |
Evana13G@gmail.com
|
8b26447125e32014c72172e771be247c148428e0
|
4e5ddba389409b4b62444a4eac9903635b57e230
|
/rastervision/backend/fastai_utils.py
|
1df6d269c2d6389ed6483fbf20a6dceac6cbdc25
|
[
"Apache-2.0",
"LicenseRef-scancode-generic-cla"
] |
permissive
|
blessings-h/raster-vision
|
cc4804e09f8396f861e1fff8bff3e0a4c4f3d048
|
65647c710e668ba59951081faa5f379397185d67
|
refs/heads/master
| 2021-06-29T18:59:23.823567
| 2021-01-27T02:05:01
| 2021-01-27T02:05:01
| 210,014,893
| 0
| 0
| null | 2019-09-21T16:09:08
| 2019-09-21T16:09:07
| null |
UTF-8
|
Python
| false
| false
| 10,285
|
py
|
import os
from os.path import join
import zipfile
from typing import Any
import warnings
from fastai.callbacks import CSVLogger, Callback, SaveModelCallback, TrackerCallback
from fastai.metrics import add_metrics
from fastai.torch_core import dataclass, torch, Tensor, Optional, warn
from fastai.basic_train import Learner
from torch.utils.tensorboard import SummaryWriter
from rastervision.utils.files import (sync_to_dir)
class SyncCallback(Callback):
"""A callback to sync from_dir to to_uri at the end of epochs."""
def __init__(self, from_dir, to_uri, sync_interval=1):
self.from_dir = from_dir
self.to_uri = to_uri
self.sync_interval = sync_interval
def on_epoch_end(self, **kwargs):
if (kwargs['epoch'] + 1) % self.sync_interval == 0:
sync_to_dir(self.from_dir, self.to_uri, delete=True)
class ExportCallback(TrackerCallback):
""""Exports the model when monitored quantity is best.
The exported model is the one used for inference.
"""
def __init__(self, learn:Learner, model_path:str, monitor:str='valid_loss', mode:str='auto'):
self.model_path = model_path
super().__init__(learn, monitor=monitor, mode=mode)
def on_epoch_end(self, epoch:int, **kwargs:Any)->None:
current = self.get_monitor_value()
if (epoch == 0 or
(current is not None and self.operator(current, self.best))):
print(f'Better model found at epoch {epoch} with {self.monitor} value: {current}.')
self.best = current
print(f'Exporting to {self.model_path}')
self.learn.export(self.model_path)
class MySaveModelCallback(SaveModelCallback):
"""Saves the model after each epoch to potentially resume training.
Modified from fastai version to delete the previous model that was saved
to avoid wasting disk space.
"""
def on_epoch_end(self, epoch:int, **kwargs:Any)->None:
"Compare the value monitored to its best score and maybe save the model."
if self.every=="epoch":
self.learn.save(f'{self.name}_{epoch}')
prev_model_path = self.learn.path/self.learn.model_dir/f'{self.name}_{epoch-1}.pth'
if os.path.isfile(prev_model_path):
os.remove(prev_model_path)
else: #every="improvement"
current = self.get_monitor_value()
if current is not None and self.operator(current, self.best):
print(f'Better model found at epoch {epoch} with {self.monitor} value: {current}.')
self.best = current
self.learn.save(f'{self.name}')
class MyCSVLogger(CSVLogger):
"""Logs metrics to a CSV file after each epoch.
Modified from fastai version to:
- flush after each epoch
- append to log if already exists
"""
def __init__(self, learn, filename='history'):
super().__init__(learn, filename)
def on_train_begin(self, **kwargs):
if self.path.exists():
self.file = self.path.open('a')
else:
super().on_train_begin(**kwargs)
def on_epoch_end(self, epoch, smooth_loss, last_metrics, **kwargs):
out = super().on_epoch_end(
epoch, smooth_loss, last_metrics, **kwargs)
self.file.flush()
return out
# The following are a set of metric callbacks that have been modified from the
# original version in fastai to support semantic segmentation, which doesn't
# have the class dimension in position -1. It also adds an ignore_idx
# which is used to ignore pixels with class equal to ignore_idx. These
# would be good to contribute back upstream to fastai -- however we should
# wait for their upcoming refactor of the callback architecture.
@dataclass
class ConfusionMatrix(Callback):
"Computes the confusion matrix."
# The index of the dimension in the output and target arrays which ranges
# over the different classes. This is -1 (the last index) for
# classification, but is 1 for semantic segmentation.
clas_idx:int=-1
def on_train_begin(self, **kwargs):
self.n_classes = 0
def on_epoch_begin(self, **kwargs):
self.cm = None
def on_batch_end(self, last_output:Tensor, last_target:Tensor, **kwargs):
preds = last_output.argmax(self.clas_idx).view(-1).cpu()
targs = last_target.view(-1).cpu()
if self.n_classes == 0:
self.n_classes = last_output.shape[self.clas_idx]
self.x = torch.arange(0, self.n_classes)
cm = ((preds==self.x[:, None]) & (targs==self.x[:, None, None])).sum(dim=2, dtype=torch.float32)
if self.cm is None: self.cm = cm
else: self.cm += cm
def on_epoch_end(self, **kwargs):
self.metric = self.cm
@dataclass
class CMScores(ConfusionMatrix):
"Base class for metrics which rely on the calculation of the precision and/or recall score."
average:Optional[str]="binary" # `binary`, `micro`, `macro`, `weighted` or None
pos_label:int=1 # 0 or 1
eps:float=1e-9
# If ground truth label is equal to the ignore_idx, it should be ignored
# for the sake of evaluation.
ignore_idx:int=None
def _recall(self):
rec = torch.diag(self.cm) / self.cm.sum(dim=1)
rec[rec != rec] = 0 # removing potential "nan"s
if self.average is None: return rec
else:
if self.average == "micro": weights = self._weights(avg="weighted")
else: weights = self._weights(avg=self.average)
return (rec * weights).sum()
def _precision(self):
prec = torch.diag(self.cm) / self.cm.sum(dim=0)
prec[prec != prec] = 0 # removing potential "nan"s
if self.average is None: return prec
else:
weights = self._weights(avg=self.average)
return (prec * weights).sum()
def _weights(self, avg:str):
if self.n_classes != 2 and avg == "binary":
avg = self.average = "macro"
warn("average=`binary` was selected for a non binary case. Value for average has now been set to `macro` instead.")
if avg == "binary":
if self.pos_label not in (0, 1):
self.pos_label = 1
warn("Invalid value for pos_label. It has now been set to 1.")
if self.pos_label == 1: return Tensor([0,1])
else: return Tensor([1,0])
else:
if avg == "micro": weights = self.cm.sum(dim=0) / self.cm.sum()
if avg == "macro": weights = torch.ones((self.n_classes,)) / self.n_classes
if avg == "weighted": weights = self.cm.sum(dim=1) / self.cm.sum()
if self.ignore_idx is not None and avg in ["macro", "weighted"]:
weights[self.ignore_idx] = 0
weights /= weights.sum()
return weights
class Recall(CMScores):
"Compute the Recall."
def on_epoch_end(self, last_metrics, **kwargs):
return add_metrics(last_metrics, self._recall())
class Precision(CMScores):
"Compute the Precision."
def on_epoch_end(self, last_metrics, **kwargs):
return add_metrics(last_metrics, self._precision())
@dataclass
class FBeta(CMScores):
"Compute the F`beta` score."
beta:float=2
def on_train_begin(self, **kwargs):
self.n_classes = 0
self.beta2 = self.beta ** 2
self.avg = self.average
if self.average != "micro": self.average = None
def on_epoch_end(self, last_metrics, **kwargs):
prec = self._precision()
rec = self._recall()
metric = (1 + self.beta2) * prec * rec / (prec * self.beta2 + rec + self.eps)
metric[metric != metric] = 0 # removing potential "nan"s
if self.avg: metric = (self._weights(avg=self.avg) * metric).sum()
return add_metrics(last_metrics, metric)
def on_train_end(self, **kwargs): self.average = self.avg
def zipdir(dir, zip_path):
"""Create a zip file from a directory.
The zip file contains the contents of dir, but not dir itself.
Args:
dir: (str) the directory with the content to place in zip file
zip_path: (str) path to the zip file
"""
with zipfile.ZipFile(zip_path, 'w', zipfile.ZIP_DEFLATED) as ziph:
for root, dirs, files in os.walk(dir):
for file in files:
ziph.write(join(root, file),
join('/'.join(dirs),
os.path.basename(file)))
# This code was adapted from
# https://github.com/Pendar2/fastai-tensorboard-callback/blob/master/fastai_tensorboard_callback/tensorboard_cb.py
@dataclass
class TensorboardLogger(Callback):
learn:Learner
run_name:str
histogram_freq:int=100
path:str=None
def __post_init__(self):
self.path = self.path or os.path.join(self.learn.path, "logs")
self.log_dir = os.path.join(self.path, self.run_name)
def on_train_begin(self, **kwargs):
self.writer = SummaryWriter(log_dir=self.log_dir)
def on_epoch_end(self, **kwargs):
iteration = kwargs["iteration"]
metrics = kwargs["last_metrics"]
metrics_names = ["valid_loss"] + [o.__class__.__name__ for o in self.learn.metrics]
for val, name in zip(metrics, metrics_names):
self.writer.add_scalar(name, val, iteration)
def on_batch_end(self, **kwargs):
iteration = kwargs["iteration"]
loss = kwargs["last_loss"]
self.writer.add_scalar("learning_rate", self.learn.opt.lr, iteration)
self.writer.add_scalar("momentum", self.learn.opt.mom, iteration)
self.writer.add_scalar("loss", loss, iteration)
if iteration%self.histogram_freq == 0:
for name, param in self.learn.model.named_parameters():
self.writer.add_histogram(name, param, iteration)
def on_train_end(self, **kwargs):
try:
with warnings.catch_warnings():
warnings.simplefilter("ignore")
dummy_input = next(iter(self.learn.data.train_dl))[0]
self.writer.add_graph(self.learn.model, tuple(dummy_input))
except Exception as e:
print("Unable to create graph.")
print(e)
self.writer.close()
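# Wiring sketch (illustrative, not part of the original module): 'learn' is
# any fastai v1 Learner, and the paths below are placeholder assumptions.
#
#     callbacks = [
#         MyCSVLogger(learn, filename='history'),
#         ExportCallback(learn, model_path='export.pkl', monitor='valid_loss'),
#         MySaveModelCallback(learn, every='epoch', name='model'),
#         SyncCallback(from_dir='/opt/data/train-scratch',
#                      to_uri='s3://my-bucket/train', sync_interval=1),
#     ]
#     learn.fit_one_cycle(10, callbacks=callbacks)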
|
[
"lewfish@gmail.com"
] |
lewfish@gmail.com
|
2f60ba606f3f3ff16f6ce61b7441c7944a9a3939
|
15f365dc711f2230073391687642498305286321
|
/Figure plotting/FIG_3.9c)_maximal allowable radial offset.py
|
d3495aa3b0b50f74b8be071296dbfa7a96ad2f13
|
[] |
no_license
|
Isabelliuqin/Optical_Levitation_Master_project_final
|
16d177ee0852361745286d4a5af8eea84aad5845
|
0ebe133a08a84e3c8521b06c6e9eec2584e0b3cc
|
refs/heads/master
| 2023-01-03T13:35:05.753240
| 2020-11-01T10:13:59
| 2020-11-01T10:13:59
| 309,067,970
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,693
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Sun Nov 1 16:31:57 2020
@author: liuqi
"""
import scipy as sp
import numpy as np
from mpl_toolkits.mplot3d import axes3d
import matplotlib.pylab as plt
import scipy.integrate as spi
from scipy.integrate import quad
import seaborn
from scipy.integrate import odeint
from scipy.integrate import dblquad
import Will_Module_addwdep as TQ
import Module_table_parameter as MTP
import time
integration_method = 'manual' # 'manual' or 'integrated'
grid_size = 100
plt.close('all')
###########################
#Our sphere
g = 9.8
c = 3 * 10**8
w_0 = 0.85 * 10 ** (-6)
Lambda = 1.064 * 10**(-6)
z_R = np.pi* w_0 ** 2 / Lambda
rho = 30 * 10 ** (-6)
n_0 = 1
n_s_n = 0.04
k = 7.6097
n_s = n_s_n - k*1j
sig_s = 10.49 * 10 ** 3 * (( 3 ** 3 - 2.25 ** 3) / 3 ** 3 ) #density of sphere in kg/m^3
sig_0 = 0 #density of medium in kg/m^3
m = 4/3 * np.pi * rho ** 3 * ( sig_s - sig_0 )
Permittivity = 8.85 * 10**(-12)
#P = 0.5 * c * n_0 * Permittivity #total power of the LG01 beam
P = 12 #optimal power required to levitate at w0 = 0.85um
############################################
#FIG 3.9c) maximal allowable radial offset
############################################
#x-axis: x-axis radial offset
#y-axis: Qx trapping efficiency
#key function: TQ.F_total_manual_integration
rho_0x = np.linspace(0,2*rho,100)
rho_0 = [0,0]
w = np.sqrt(2) * rho #optimal beam radius
Qoplist = []
for rho_0xe in rho_0x:
F_op = TQ.F_total_manual_integration(rho_0xe,rho_0[1], rho, n_0, n_s, w_0, w, z_R, P , target = "reflective", coordinate = 'x', grid_size = grid_size)['force_total'] #compute Qx at optimal beam radius wop, various radial offsets
Q_op = F_op * c / ( n_0 * P )
Qoplist.append(Q_op)
plt.plot(rho_0x/rho, np.array(Qoplist), lw=2, c="c", label="w/(sqrt(2)rho) = 1")
print ((rho_0x/rho)[np.argmin(abs(np.array(Qoplist)))]) #print the inflection point
new_ticks1 = np.linspace(0, 2 , 5) # plot axis
print(new_ticks1)
plt.xticks(new_ticks1,fontsize=20)
plt.yticks(np.linspace(-0.1, 0.05, 4),fontsize=20)
plt.rc('xtick',labelsize=20)
plt.rc('ytick',labelsize=20)
ax = plt.gca()
ax.spines['top'].set_color('none')
ax.spines['right'].set_color('none')
ax.xaxis.set_ticks_position('bottom')
ax.yaxis.set_ticks_position('left')
ax.spines['left'].set_position(('data', 0))
ax.spines['bottom'].set_position(('data',0))
plt.legend(loc=1,fontsize=16)
plt.xlabel('rho_0x/rho',fontsize=20)
plt.ylabel('Qx',fontsize=20)
plt.title('rho = 30um, w0 = 0.85um',fontsize=20)
plt.grid()
plt.show()
|
[
"qin.liu16@imperial.ac.uk"
] |
qin.liu16@imperial.ac.uk
|
ddc582e23efc50fe80bbdbcbe3f5ee3bd6e77b02
|
e9a7b9815e217759e2ff23a0abd8384d571f322f
|
/pyenlone/tasks/__init__.py
|
e4c6462a242397d4bcdbe96743f7af9996a840ed
|
[
"MIT"
] |
permissive
|
QPotato/pyenlone
|
7a1ca0150d474acf092666ac1f29df51289e7512
|
b3bb9cc708e1ff247243e9ae48fb62ee284e4b94
|
refs/heads/master
| 2020-04-28T01:41:39.820679
| 2019-03-10T18:59:39
| 2019-03-10T18:59:39
| 174,867,027
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,473
|
py
|
"""
Implements Tasks API methods.
More info on: https://wiki.enl.one/doku.php?id=t_basic_documentation
"""
from typing import List, Optional
from .operation import Operation, OpID, OpType, _fix_op_params
from .task import Task, TaskID, TaskType, TaskStatus, PortalID, \
_fix_task_params
from .._proxy import TokenProxy, KeyProxy
from ..enloneexception import NotImplementedByBackendException
__all__ = ["Operation", "OpID", "OpType",
"Task", "TaskID", "TaskType", "TaskStatus", "PortalID",
"Tasks"]
class Tasks:
"""
The main Tasks object.
You should create and get operation using it's methods.
"""
def __init__(self,
apikey: Optional[str] = None,
voauth: Optional[str] = None,
rocks: Optional[str] = None,
enlio: Optional[str] = None,
google: Optional[str] = None,
firebase: Optional[str] = None,
cache: int = 0):
"""
Create the Tasks instance with only one auth method token.
"""
url = "https://tasks.enl.one"
if apikey:
self._proxy = KeyProxy(url + "/api", apikey, cache=cache)
elif voauth:
self._proxy = TokenProxy(url + "/oauth",
"VOAuth " + voauth, cache=cache)
elif rocks:
self._proxy = TokenProxy(url + "/rocks",
"Rocks " + rocks, cache=cache)
elif enlio:
self._proxy = TokenProxy(url + "/enlio",
"EnlIO " + enlio, cache=cache)
elif google:
self._proxy = TokenProxy(url + "/gapi",
"Google " + google, cache=cache)
elif firebase:
self._proxy = TokenProxy(url + "/firebase",
"FirebaseJWT " + firebase, cache=cache)
def get_operation(self, id: OpID):
"""
Retrive Operation.
"""
return Operation(self._proxy, self._proxy.get("/op/" + str(id)))
def get_operations(self, **filters) -> List[Operation]:
"""
Get all operations user is owner, operator or can see.
"""
_fix_op_params(filters)
return [Operation(self._proxy, api_res) for api_res
in self._proxy.get("/ops", filters)]
def new_operation(self, name: str, op_type: OpType, **params) -> Operation:
"""
Add new operation.
Required parameters are name and type.
        Additional initializing arguments can be passed as keyword arguments.
"""
params["name"] = name
params["type"] = op_type
_fix_op_params(params)
return Operation(self._proxy, self._proxy.post("/op", params))
def search_operations(self, lat: float, lon: float, km: int,
**filters) -> List[Operation]:
"""
Find all operations with tasks in a radius of km from lat/lon visible
to the user.
        Additional search filters can be passed as keyword arguments.
"""
_fix_op_params(filters)
return [Operation(self._proxy, api_res) for api_res
in self._proxy.get("/ops/search"
+ "/" + str(lat)
+ "/" + str(lon)
+ "/" + str(km),
filters)]
def get_tasks(self, **filters) -> List[Task]:
"""
Retrieve all tasks visible to the user,
from all operations.
"""
_fix_task_params(filters)
return [Task(self._proxy, api_res) for api_res
in self._proxy.get("/tasks", filters)]
def search_tasks(self, lat: float, lon: float, km: float,
**filters) -> List[Task]:
"""
Find all tasks in a radius of km from lat/lon visible to the user,
from all operations.
        Additional search filters can be passed as keyword arguments.
"""
_fix_task_params(filters)
raise NotImplementedByBackendException
return [Task(self._proxy, api_res) for api_res
in self._proxy.get("/tasks/search"
+ "/" + str(lat)
+ "/" + str(lon)
+ "/" + str(km),
filters)]
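# Minimal usage sketch (the API key is a placeholder; any one of the auth
# tokens accepted by the constructor works):
#
#     tasks = Tasks(apikey="YOUR-V-APIKEY")
#     ops = tasks.get_operations()                       # everything visible to you
#     nearby = tasks.search_operations(52.52, 13.40, 10) # ops within 10 km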
|
[
"federicobadaloni@hotmail.com"
] |
federicobadaloni@hotmail.com
|
531ed097cb48be9a70ee62b910a57f827acfa168
|
5209b4ad1f64b41886ab2f53595185eea1fe3dab
|
/laba 6/13_2.py
|
24692784933843814f255c0fea9801b841282376
|
[] |
no_license
|
elochka99/LAbs
|
74b8b7a68bcdbc8ad7a2de3d9e9eab848564410e
|
6d7745a298f0dee055d95839b1f979fa14ca219f
|
refs/heads/master
| 2021-01-09T06:27:50.622635
| 2018-04-03T09:32:22
| 2018-04-03T09:32:22
| 80,990,384
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,523
|
py
|
# Gorokhova Elena KNIT 16-A
# Given a text file f. Write the components of file f to file g
# in reverse order, without repeated components.
import random
while True:
    try:
        b = int(input("Enter the number of elements in the file: "))
        file_f = open('f.txt', 'w')
        h = list()
        for i in range(1, b+1):
            k = str(random.randint(0, 100))
            file_f.write(k + ",")
            h.append(k)
        file_f.close()
        file_f = open('f.txt', 'r')
        file_g = open('g.txt', 'w')
        str_file = file_f.readlines()
        h2 = []
        for i in h:
            if i not in h2:
                h2.append(i)
        print(h)
        print(h2)
        h2.reverse()
        file_g.write(str(h2) + ",")
        file_g.flush()
        file_g.close()
        file_f.flush()
        file_f.close()
        file_f = open('f.txt', 'r')
        file_g = open('g.txt', 'r')
        print(file_g.read())
        file_g.flush()
        file_f.flush()
        file_f.close()
        file_g.close()
        w = input('\nDo you want to run the program again? [1 - yes]: ')
        if w == '1':
            print()
            continue
        else:
            print("bye!")
            break
    except (ValueError, IndexError, MemoryError):
        print("please enter valid input! ")
|
[
"elochka150899@gmail.com"
] |
elochka150899@gmail.com
|
3f5c239ed4dab8d99cfb4a67e61997dd3bdeab99
|
1020e655f91b5f3ae5056f3a3006858ddaa92d3a
|
/modelo2/Script_LoadFiles.py
|
8c58f59627bb177295813873948282f0b48f04b6
|
[] |
no_license
|
ivomota/Olho-Passarinho
|
578200e3c12a4c94c399b0455ef33ed61ca7619c
|
5b531aebac3c4914f90586f4f163b42773a5b31d
|
refs/heads/master
| 2021-01-18T11:25:50.284361
| 2014-06-28T13:24:19
| 2014-06-28T13:24:19
| 15,495,323
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,376
|
py
|
import pickle
import numpy as np
from bson import json_util
import json
import itertools
def load(filename, verbose=False):
# Open file
if verbose : print("Loading %s" % filename)
pkl_file = open(filename, 'rb')
# Load from Pickle file.
data = pickle.load(pkl_file)
pkl_file.close()
return data
# file1 = load('data0.pkl')
# file2 = load('data1.pkl')
# print len(file1)
# print len(file2)
# file = file1 + file2
# file = load('data_teste.pkl')
file = load('tweets_instagram.pkl')
print len(file)
# Remove duplicates without itertools
seen_values = set()
new_file = []
other = []
for d in file:
value = d["id_str"]
if value not in seen_values:
new_file.append(d)
seen_values.add(value)
else:
        other.append(value)  # for debugging
# print other
# print len(other)
# f = open('debug.txt', 'w')
# f.write(str(other))
# f.close()
# Remove duplicates with itertools (does not remove non-adjacent duplicates)
# new_file = [g.next() for k,g in itertools.groupby(file, lambda x: x['id_str'])]
#debug
# seen_values = set()
# other = []
# for d in new_file:
# value = d["id_str"]
# if value not in seen_values:
# seen_values.add(value)
# else:
# other.append(value)
# print other
print len(new_file)
f = open('tweets_instagram.json', 'w')
f_data = json.dumps(new_file, default=json_util.default)
f.write(f_data)
f.close()
|
[
"ifvmota@gmail.com"
] |
ifvmota@gmail.com
|
9ec1e3c261193a232d7e2a9c5348f74283e9ee2d
|
679d4a8966fb516f09b4fd53ffb481159836178a
|
/Linux/python/bases/BlockList/malwareblacklist/malwareblacklist.py
|
40b7caf5974f8997f8aba2fc01ad20cc2e9f0d35
|
[] |
no_license
|
dmitry-demin/docus
|
b72f58becbca0f8e1fb0fd35328f7176dd87705e
|
4e64f54ec5b61511cf3130550ca5769b1ab14825
|
refs/heads/master
| 2021-09-15T02:20:20.505399
| 2018-05-24T09:59:09
| 2018-05-24T09:59:09
| 104,602,839
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,880
|
py
|
import os
import requests
from bs4 import BeautifulSoup
from datetime import datetime
RESULT = []
def geturltext(url):
try:
r = requests.get(url)
except:
return ''
else:
return r.text
def process(text):
soup = BeautifulSoup(text, 'lxml')
table = soup.find("table", {"class" : "Table"})
mlwr = table.find_all('tr')[1:]
for ml in mlwr:
rows = ml.find_all('td')
if(len(rows) > 8):
date = rows[0].text
url = rows[1].text
registrar = rows[2].text
ip = rows[3].text
asn = rows[4].text
hosting = rows[5].text
try:
cn = rows[6].find('img').get('title')
except:
cn = '-'
dl = rows[7].text
submitted = rows[8].text
RESULT.append([date, url, registrar, ip, asn, hosting, cn, dl, submitted])
def savetofile():
name = datetime.now().strftime("%Y-%m-%d_%H_%M") + '.csv'
f = open(name, 'w')
for line in RESULT:
f.write((line[0] +', '+ line[1] +', '+ line[2] + ', '+
line[3] +', '+ line[4] +', '+ line[5] + ', '+
line[6] +', '+ line[7] +', '+ line[8]).encode('utf-8').strip() + '\n')
f.close()
def getTotalPage(text):
try:
soup = BeautifulSoup(text, 'html.parser')
table = soup.find('table')
td = table.find_all('a')
return int(td[1].text)
except:
return 0
def main():
url = 'http://www.malwareblacklist.com/showAllMalwareURL.php?pageNo='
firstPageText = geturltext(url+'1')
totalPage = getTotalPage(firstPageText)
for x in range(totalPage):
pageText = geturltext(url + str(x+1))
process(pageText)
savetofile()
if(__name__ == '__main__'):
main()
|
[
"d.demin@avsw.ru"
] |
d.demin@avsw.ru
|
7213a6bdcd1534875dc8bcf1f6be7e772b3ff2a7
|
11c331eca50308526ab603bc8ae7ec92f79f2527
|
/05/script.py
|
b611ff79b1b15a6b08e10538c56f39de21f82cb1
|
[] |
no_license
|
ascheets/barba12stepModule
|
929e5dbe023c9c118fffbe9310f3473583004e1e
|
f145b6a423ed0ceb0f218622112ea6714cd96583
|
refs/heads/master
| 2021-01-10T13:58:19.501494
| 2016-03-08T06:36:19
| 2016-03-08T06:36:19
| 52,310,441
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,358
|
py
|
import numpy as np
import sympy
#from sympy import init_printing #include these when working with a notebook
#init_printing(use_latex=True)
from sympy.utilities.lambdify import lambdify
import matplotlib.pyplot as plt
#setting up initial conditions, little more complicated this time...
#setting up symbolic variables
x, nu, t = sympy.symbols('x nu t')
phi = sympy.exp(-(x-4*t)**2/(4*nu*(t+1))) + sympy.exp(-(x-4*t-2*np.pi)**2/(4*nu*(t+1)))
phiprime = phi.diff(x)
u = -2*nu*(phiprime/phi)+4
ufunc = lambdify((t,x,nu),u)
#domain declarations
nx = 101
nt = 100
dx = 2*np.pi/(nx-1)
nu = 0.07
dt = dx*nu
x = np.linspace(0, 2*np.pi, nx)
#u = np.empty(nx)
un = np.empty(nx)
t = 0
u = np.asarray([ufunc(t,x0,nu) for x0 in x])
plt.figure(figsize=(11,7), dpi=100)
plt.plot(x,u, marker='o', lw=2)
plt.xlim([0,2*np.pi])
plt.ylim([0,10])
plt.show()
for n in range(nt):
un = u.copy()
for i in range(nx-1):
u[i] = un[i] -un[i] * dt/dx * (un[i] - un[i-1]) + nu*dt/dx**2*(un[i+1]-2*un[i]+un[i-1])
u[-1] = un[-1] - un[-1] * dt/dx * (un[-1] - un[-2]) + nu*dt/dx**2*(un[0]-2*un[-1]+un[-2])
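# the loop above advances Burgers' equation u_t + u*u_x = nu*u_xx with a
# backward difference for the convective term and a central difference for the
# diffusive term; the last line of each step applies the periodic boundary at x = 2*pi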
u_analytical = np.asarray([ufunc(nt*dt, xi, nu) for xi in x])
plt.figure(figsize=(11,7), dpi=100)
plt.plot(x,u,marker='o',lw=2,label='Computational')
plt.plot(x,u_analytical,label='Analytical')
plt.xlim([0,2*np.pi])
plt.ylim([0,10])
plt.legend()
plt.show()
|
[
"ascheets@carthage.edu"
] |
ascheets@carthage.edu
|
f7ab33d7427455b944e63de21bfa2223d505df3c
|
edde86a5daa8c91d33e0c79854b476213eb7d2ab
|
/NeuralNetwork/NN.py
|
e9545983759855283bb3402c5d14f4e49f3fcad6
|
[] |
no_license
|
tsaxena/DataMining
|
98d732ba5f8307248af355caed096454ed533b6c
|
062653419edb9f6e6741ab856943b04f67736ad2
|
refs/heads/master
| 2021-01-22T09:32:24.329194
| 2014-03-24T10:31:17
| 2014-03-24T10:31:17
| 17,099,349
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,787
|
py
|
# Back-Propagation Neural Networks
#
# Written in Python. See http://www.python.org/
import math
import random
import string
random.seed(0)
# calculate a random number where: a <= rand < b
def rand(a, b):
return (b-a)*random.random() + a
# Make a matrix (we could use NumPy to speed this up)
def makeMatrix(I, J, fill=0.0):
m = []
for i in range(I):
m.append([fill]*J)
return m
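# a NumPy equivalent would be a one-liner (sketch, assuming numpy imported as np):
# m = np.full((I, J), fill).tolist()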
# our sigmoid function, tanh is a little nicer than the standard 1/(1+e^-x)
def sigmoid(x):
return math.tanh(x)
# derivative of our sigmoid function, in terms of the output (i.e. y)
def dsigmoid(y):
return 1.0 - y**2
class NN:
def __init__(self, ni, nh, no):
# number of input, hidden, and output nodes
self.ni = ni + 1 # +1 for bias node
self.nh = nh
self.no = no
# activations for nodes
self.ai = [1.0]*self.ni
self.ah = [1.0]*self.nh
self.ao = [1.0]*self.no
# create weights
self.wi = makeMatrix(self.ni, self.nh)
self.wo = makeMatrix(self.nh, self.no)
        # set them to random values
for i in range(self.ni):
for j in range(self.nh):
self.wi[i][j] = rand(-0.2, 0.2)
for j in range(self.nh):
for k in range(self.no):
self.wo[j][k] = rand(-2.0, 2.0)
# last change in weights for momentum
self.ci = makeMatrix(self.ni, self.nh)
self.co = makeMatrix(self.nh, self.no)
def update(self, inputs):
if len(inputs) != self.ni-1:
raise ValueError, 'wrong number of inputs'
# input activations
for i in range(self.ni-1):
#self.ai[i] = sigmoid(inputs[i])
self.ai[i] = inputs[i]
# hidden activations
for j in range(self.nh):
summ = 0.0
for i in range(self.ni):
summ = summ + self.ai[i] * self.wi[i][j]
self.ah[j] = sigmoid(summ)
# output activations
for k in range(self.no):
summ = 0.0
for j in range(self.nh):
summ = summ + self.ah[j] * self.wo[j][k]
self.ao[k] = sigmoid(summ)
return self.ao[:]
def backPropagate(self, targets, N, M):
if len(targets) != self.no:
raise ValueError, 'wrong number of target values'
# calculate error terms for output
output_deltas = [0.0] * self.no
for k in range(self.no):
error = targets[k]-self.ao[k]
output_deltas[k] = dsigmoid(self.ao[k]) * error
# calculate error terms for hidden
hidden_deltas = [0.0] * self.nh
for j in range(self.nh):
error = 0.0
for k in range(self.no):
error = error + output_deltas[k]*self.wo[j][k]
hidden_deltas[j] = dsigmoid(self.ah[j]) * error
# update output weights
for j in range(self.nh):
for k in range(self.no):
change = output_deltas[k]*self.ah[j]
self.wo[j][k] = self.wo[j][k] + N*change + M*self.co[j][k]
self.co[j][k] = change
#print N*change, M*self.co[j][k]
# update input weights
for i in range(self.ni):
for j in range(self.nh):
change = hidden_deltas[j]*self.ai[i]
self.wi[i][j] = self.wi[i][j] + N*change + M*self.ci[i][j]
self.ci[i][j] = change
# calculate error
error = 0.0
for k in range(len(targets)):
error = error + 0.5*(targets[k]-self.ao[k])**2
return error
def test(self, patterns):
for p in patterns:
print p[0], '->', self.update(p[0])
def weights(self):
print 'Input weights:'
for i in range(self.ni):
print self.wi[i]
print
print 'Output weights:'
for j in range(self.nh):
print self.wo[j]
def train(self, patterns, iterations=1000, N=0.5, M=0.1):
# N: learning rate
# M: momentum factor
for i in xrange(iterations):
error = 0.0
for p in patterns:
inputs = p[0]
targets = p[1]
self.update(inputs)
error = error + self.backPropagate(targets, N, M)
if i % 100 == 0:
pass #print 'error %-14f' % error
def demo():
# Teach network XOR function
pat = [
[[0,0], [0]],
[[0,1], [1]],
[[1,0], [1]],
[[1,1], [0]]
]
# create a network with two input, two hidden, and one output nodes
n = NN(2, 2, 1)
# train it with some patterns
n.train(pat)
# test it
n.test(pat)
if __name__ == '__main__':
demo()
|
[
"tripti.saxena@gmail.com"
] |
tripti.saxena@gmail.com
|
6e55fa66ccf8d5e6285a3c09ed9d349ae202f5b5
|
e8e72708f435459d60fe38bdaf528b8c8be1c10e
|
/ACDNE_codes/ACDNE_test_citation.py
|
9d2e4441ac77e8ca3c342772226dcd12cdf5fca7
|
[] |
no_license
|
shenxiaocam/ACDNE
|
2842710360aef172505a7d9dc1cfe152c87f9729
|
b3cafed35a75c5c087d5b1ad5667a6bfaee19992
|
refs/heads/master
| 2023-08-25T04:37:39.084778
| 2021-10-31T12:57:57
| 2021-10-31T12:57:57
| 278,086,067
| 16
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,974
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Mon May 27 18:16:14 2019
@author: SHEN xiao
Please cite our paper as:
"Xiao Shen, Quanyu Dai, Fu-lai Chung, Wei Lu, and Kup-Sze Choi. Adversarial Deep Network Embedding for Cross-Network Node Classification. In Proceedings of AAAI Conference on Artificial Intelligence (AAAI), pages 2991-2999, 2020."
"""
import numpy as np
import tensorflow as tf
import utils
from evalModel import train_and_evaluate
import scipy.io
from scipy.sparse import lil_matrix
tf.set_random_seed(0)
np.random.seed(0)
source= 'citationv1'
target = 'dblpv7'
emb_filename=str(source)+'_'+str(target)
Kstep=3
####################
# Load source data
####################
A_s, X_s, Y_s= utils.load_network('./input/'+str(source)+'.mat')
'''compute PPMI'''
A_k_s=utils.AggTranProbMat(A_s, Kstep)
PPMI_s=utils.ComputePPMI(A_k_s)
n_PPMI_s=utils.MyScaleSimMat(PPMI_s) # row normalized PPMI
X_n_s=np.matmul(n_PPMI_s,lil_matrix.toarray(X_s))  # neighbors' attribute matrix
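# i.e. each row of X_n_s is the PPMI-weighted average of the attributes of
# that node's neighbors within Kstep transition steps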
####################
# Load target data
####################
A_t, X_t, Y_t = utils.load_network('./input/'+str(target)+'.mat')
'''compute PPMI'''
A_k_t=utils.AggTranProbMat(A_t, Kstep)
PPMI_t=utils.ComputePPMI(A_k_t)
n_PPMI_t=utils.MyScaleSimMat(PPMI_t) # row normalized PPMI
X_n_t=np.matmul(n_PPMI_t,lil_matrix.toarray(X_t))  # neighbors' attribute matrix
##input data
input_data=dict()
input_data['PPMI_S']=PPMI_s
input_data['PPMI_T']=PPMI_t
input_data['attrb_S']=X_s
input_data['attrb_T']=X_t
input_data['attrb_nei_S']=X_n_s
input_data['attrb_nei_T']=X_n_t
input_data['label_S']=Y_s
input_data['label_T']=Y_t
###model config
config=dict()
config['clf_type'] = 'multi-label'
config['dropout'] = 0.5
config['num_epoch'] = 30 #maximum training iteration
config['batch_size'] = 100
config['n_hidden'] = [512,128] #dimensionality for each k-th hidden layer of FE1 and FE2
config['n_emb'] = 128 #embedding dimension d
config['l2_w'] = 1e-3 #weight of L2-norm regularization
config['net_pro_w'] = 0.1 #weight of pairwise constraint
config['emb_filename'] =emb_filename #output file name to save node representations
config['lr_ini'] = 0.02 #initial learning rate
numRandom=5
microAllRandom=[]
macroAllRandom=[]
print ('source and target networks:',str(source),str(target))
for random_state in range(numRandom):
print("%d-th random initialization " %(random_state+1))
micro_t,macro_t=train_and_evaluate(input_data, config, random_state)
microAllRandom.append(micro_t)
macroAllRandom.append(macro_t)
'''avg F1 scores over 5 random splits'''
micro=np.mean(microAllRandom)
macro=np.mean(macroAllRandom)
micro_sd=np.std(microAllRandom)
macro_sd=np.std(macroAllRandom)
print("The avergae micro and macro F1 scores over %d random initializations are: %f +/- %f and %f +/- %f: " %(numRandom, micro, micro_sd, macro, macro_sd))
|
[
"noreply@github.com"
] |
noreply@github.com
|
68669534022a93c0233112c770390a258c0a4e33
|
7802a0f4753c0e40075613ba51da680a61bcaf57
|
/first_half/4main.py
|
605940334833a2134f07726555f0e82e30bf54ba
|
[] |
no_license
|
SHOGO0727/NTTcom_MobProgramming_1215
|
cd65052b762aeba76c7aa96197cbb4eac160e096
|
6862ceb6e4cd616b181ffbddf2758914988ae25e
|
refs/heads/master
| 2020-11-24T14:06:17.875744
| 2019-12-15T13:01:59
| 2019-12-15T13:01:59
| 228,184,336
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 858
|
py
|
import csv
# sample hv_list.csv rows (CPU,Mem per hypervisor):
# 16,16
# 32,128
CPU_host = []
Mem_host = []
with open("./hv_list.csv") as f:
reader = csv.reader(f)
for r in reader:
CPU_host.append(int(r[0]))
Mem_host.append(int(r[1]))
print("CPU_host", CPU_host)
print("Mem_host", Mem_host)
while True:
CPU_vm,Mem_vm = map(int,input().split(" "))
print("CPU_vm", CPU_vm)
print("Mem_vm", Mem_vm)
    # this is wrong
CPU_max = max(CPU_host)
print("CPU_max", CPU_max)
#
CPU_max_index = CPU_host.index(CPU_max);
print("CPU_max_index", CPU_max_index)
if CPU_vm <= CPU_host[CPU_max_index] and Mem_vm <= Mem_host[CPU_max_index]:
print("OK")
print("allocate to HV"+str(CPU_max_index))
CPU_host[CPU_max_index] -= CPU_vm
Mem_host[CPU_max_index] -= Mem_vm
print("CPU_host", CPU_host)
print("Mem_host", Mem_host)
else:
print("NG")
|
[
"noreply@github.com"
] |
noreply@github.com
|
3c78c0879f7c9609622997bcf931015ec8e28650
|
245bb90e89d479b27927a06fb3d0fbae29ce6d08
|
/JLS731/JLS731/LoadRawData.py
|
2e047eae6c3241c61e360743a2ddd2c45eb62013
|
[] |
no_license
|
jls731/final_project
|
5745b79648f4ac922a702c8797d4f34d48fcb58f
|
4e895bd0d15bfdbad58ed39e8c972d544e7b8164
|
refs/heads/master
| 2020-12-03T02:13:47.576953
| 2015-05-12T01:49:44
| 2015-05-12T01:49:44
| 35,075,441
| 0
| 0
| null | 2015-05-05T03:23:17
| 2015-05-05T03:23:17
| null |
UTF-8
|
Python
| false
| false
| 1,008
|
py
|
'''
Created by: Joseph Song
Last version on: 5/11/2015
Description: Has the functions needed to load csv, xls, and FRED data.
'''
import pandas.io.data as web
import pandas as pd
import datetime
def loadfreddata(ticker):
'''Loads the FRED Data from the St. Louis Fed Website as a dataframe'''
start = datetime.datetime(1998,12,1)
end = datetime.datetime(2015,3,1)
data = web.DataReader(ticker, "fred", start, end)
return data
def loadcsvdata(filename, datecol):
'''Loads the CSV Data as a dataframe'''
parsedate = lambda x: pd.to_datetime(x)
dataset = pd.read_csv(filename, parse_dates = datecol, date_parser = parsedate)
return dataset
def loadxlsdata(filename):
'''Loads the excel files as a dataframe'''
data = pd.read_excel(filename, sheetname = 0)
return data
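# example use of the FRED loader (hypothetical series id):
# nfp_employment = loadfreddata('PAYEMS')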
'''
import urllib2
import numpy as np
x = loadcsvdata('NFP.csv', ['Date', 'NFP_Release'])
print x
x = loadcsvdata('NF.csv', ['Date', 'NFP_Release'])
print x.info()
print x
'''
|
[
"jls731@nyu.edu"
] |
jls731@nyu.edu
|
d5a12a01f55e04a1cee8ab7374e30fd2d4ec25df
|
7f9961edb9113250a7890c6a174fca39d6eeafa6
|
/read_xls.py
|
f5ca1becbc37773ee6396a230395f176e5694348
|
[] |
no_license
|
memgoon/shelf_map_test
|
523179ff2c806884857bdb41bb47757e95061b11
|
51a9b44cf9d84288b75fa73f71ec7c76a165a15c
|
refs/heads/master
| 2021-01-23T20:38:46.906833
| 2015-11-21T07:40:01
| 2015-11-21T07:40:01
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 752
|
py
|
import xlrd
import os
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'shelf_map.settings')
import django
django.setup()
import shelf_map.models
from django.core.exceptions import ValidationError
def main():
workbook = xlrd.open_workbook('static/1자료실.xlsx')
workbook_index = workbook.sheet_by_index(0)
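    # rows come in pairs: row 2*i holds the major shelf ids,
    # row 2*i+1 the matching minor ids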
for i in range(0, int(workbook_index.nrows/2)):
major_id_list = workbook_index.row_values(2*i)
minor_id_list = workbook_index.row_values(2*i+1)
for j in range(0, len(major_id_list)):
try:
shelf_map.models.create_shelf(chr(j+65), str(i/2+1), major_id_list[j], minor_id_list[j])
except ValidationError:
continue
if __name__ == '__main__':
main()
|
[
"darkdakku@gmail.com"
] |
darkdakku@gmail.com
|
88c3528812a6cced3d9be4f57629574a5bd8e20a
|
e501c170759e929742eb366a65b1bc5a1d3cb915
|
/automan/api/projects/storages/models.py
|
bda66593e4bfe533f40fe03ba4706d9e7745cbac
|
[] |
no_license
|
tier4/AutomanTools
|
75e8062d6ac2cd7c6cfc530ad7d079a9d65f9ca1
|
8053e9a2c2dffc1f444088a3ff7c156f12fe8152
|
refs/heads/master
| 2023-01-24T12:52:35.286041
| 2021-01-05T08:07:25
| 2021-01-05T08:07:25
| 192,694,079
| 65
| 31
| null | 2023-01-16T17:01:17
| 2019-06-19T08:45:18
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 449
|
py
|
# -*- coding: utf-8 -*-
from django.db import models
from django.utils import timezone
from projects.models import Projects
class Storage(models.Model):
storage_type = models.CharField(max_length=45)
storage_config = models.CharField(max_length=511)
created_at = models.DateTimeField(default=timezone.now)
updated_at = models.DateTimeField(default=timezone.now)
project = models.ForeignKey(Projects, on_delete=models.CASCADE)
|
[
"eiji.sekiya.0326@gmail.com"
] |
eiji.sekiya.0326@gmail.com
|
7c9dbe37ca506ea5cece40cc3421204f71a0c55f
|
a9c5d348a96fa960ce17f7c7e8b61062ff1e5a85
|
/Day5/Day5Activities.py
|
41570900630c99eb627c24027162983f375201cb
|
[
"BSD-3-Clause"
] |
permissive
|
cdcox/PythonClassScripts
|
f094155bb25f7bd065b01002203989eafb58db36
|
77c1c22d68f342a0bb380fbda3abb7a903b2e1f7
|
refs/heads/master
| 2020-05-17T14:38:35.430980
| 2013-07-24T19:58:59
| 2013-07-24T19:58:59
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,414
|
py
|
# -*- coding: utf-8 -*-
"""
Making pretty python:
0. To get use to the def function return thing,
write a program that asks a user for a number.
Then make function called add_fifty
have it take the number as an input and return
the number as an output.
print the number.
remember to have an if __name__=='__main__':
at the bottom.
Bonus: Try running the function by calling it externally (go to
the command line and type python (your file's path and name))
Windows users can set a program to run with python as the default program
try that out!
1. Make a program that finds the 1001st prime number!
to do this we need the following elements:
I've written out the code to do it below, break out the chunk of code
from the for loop to the append line into a function,
remember you have to give it previous_list and your number
and return previous_list and the number
set the rest of the code under an if __name__=='__main__':
(remember only one of these per program!)
i=4
previous_list=[2,3]
while len(previous_list)<1001:
i=i+1
checking_number=0
for items in previous_list:
if i%items==0:
checking_number=1
if checking_number==0:
previous_list.append(i)
2. Below this code you will find two lists of numbers
Control and experimental
make a function strip_low_values that goes
through both lists and turns numbers below zero into 0
(use an indexing for loop for this)
after this make a
if __name__=='__main__' that runs through this with each list
and returns the new list. Then perform a ttest
under scipy.stats.ttest_ind on the two output lists.
3. Thanks for sticking out the intro weeks! Go to your console
and type in:
import this
be sure to tell me what you'd like to see:
Things I came up with:
cell counting
text document parsing (for numbers)
neural network stuff
character recognition for pictures
graphing
image processing
We have many weeks ahead (after this quarter) so all ideas will
be taught, but I'll teach the ones with the most interest first,
so if any of the above sound good, tell me!
"""
"""
control=[ 0.29892391, 0.29390532, 0.44596463, 0.4905357 , -0.49486145,
-0.29503334, -0.03309252, 0.43755501, 0.13342393, -0.27069969]
experimental= [ 1.37913615, 0.23513822, -0.0032897 , 1.35233926, 0.85305913,
1.30169848, 0.29811183, -0.21212195, -0.09939539, 1.01793759]
"""
|
[
"cdcox1@gmail.com"
] |
cdcox1@gmail.com
|
d3dfcbccc7a497b25c9e010e916922ef1d20cdbe
|
2aecf1aa50078047b27589f56fbbff42b483b05b
|
/magico/juego.py
|
ee8ddd5cc2220ee0ca6b9df389fd10518d278fe1
|
[] |
no_license
|
tabris2015/intro_python_blumbit
|
e49f2ce760feaa8c2cbf170b78f91807acdb972b
|
33def8ddc9b308fa5461af801dd0aef74d8b233a
|
refs/heads/master
| 2022-10-28T00:36:00.706326
| 2020-06-13T22:30:17
| 2020-06-13T22:30:17
| 269,220,872
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,662
|
py
|
from dado import Dado
# dice game with the following <<rules>>
# 2-4 players
# each turn:
# the player rolls the dice
# the game ends when a player reaches 100 points
# function to check whether the game is over
def hay_ganador(dic_jugadores):
for jugador, puntos in dic_jugadores.items():
if puntos >= 100:
return True
return False
# ask for the number of players
n_jugadores = int(input('number of players (2 to 4)'))
if n_jugadores < 2 or n_jugadores > 4:
    print('invalid number of players')
exit()
dic_jugadores = {}
for i in range(n_jugadores):
dic_jugadores[f'Jugador{i+1}'] = 0
# if both dice are equal, the rolled value is added to the score
# if one die is 1, the other die is subtracted from the score
# if you get 3 on both dice, roll again (the score is added)
# if you get 5 on both dice, the score is not added and play passes to the next player
dado1 = Dado()
dado2 = Dado()
while not hay_ganador(dic_jugadores):
    # each player rolls and the points accumulate
    for jugador in dic_jugadores:
        input(f'{jugador}, roll the dice: ')  # pause for the roll
        dado1.lanzar()
        dado2.lanzar()
        print(f'result: {dado1} , {dado2}')
        # game logic
        if dado1 == dado2:
            if dado1.valor == 5:
                # nothing is added; play passes to the next player
                print('next player')
                continue
            if dado1.valor == 3:
                # add to the score
                print('added to the score')
                dic_jugadores[jugador] += dado1.valor*4
                # roll again
                input('roll again')
                dado1.lanzar()
                dado2.lanzar()
                print(f'result: {dado1} , {dado2}')
                if dado1 == dado2:
                    print('added to the score')
                    dic_jugadores[jugador] += dado1.valor*4
            else:
                # add to the score
                print('added to the score')
                dic_jugadores[jugador] += dado1.valor*4
        elif dado1.valor == 1:
            # subtract from the score
            print('subtracted from the score')
            dic_jugadores[jugador] -= dado2.valor
        elif dado2.valor == 1:
            print('subtracted from the score')
            dic_jugadores[jugador] -= dado1.valor
    print(f'scores: {dic_jugadores}')
max_puntos = 0
for jugador, puntos in dic_jugadores.items():
if puntos > max_puntos:
max_puntos = puntos
for jugador, puntos in dic_jugadores.items():
if puntos == max_puntos:
print(f"GANO {jugador}")
break
|
[
"eduardo.laruta@gmail.com"
] |
eduardo.laruta@gmail.com
|
f84fe04b1883d8c93b41284be4d6b8b5d5034602
|
ca739add7863f923013e7b6e1cf7b8bfbc8d8c0c
|
/source/complementos.py
|
043adc447b192e8c79bf9ad7c1e45a1d12489a56
|
[] |
no_license
|
PibeDx/plants-vs-zombies-clon
|
51992edd6239ecf305528ee64368818b07828097
|
7118a34f286573c0224ed2f15cdcb13bf27493f7
|
refs/heads/master
| 2021-01-18T08:52:19.661411
| 2012-07-05T06:58:17
| 2012-07-05T06:58:17
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 41,846
|
py
|
from pygame import locals
from engine import pytweener
import sqlite3
from datetime import datetime
from personal import Usuario
from unidades.defensores import *
class MenuPausa(object):
def __init__(self):
self.imagen = engine.cargar_imagen("pausa.png", True)
self.boton_sal = Boton(260, 586, engine.cargar_imagen("boton3.png", True), comando = self.salir)
self.tiempo_pausa = engine.pygame.time.get_ticks()
def dibujar(self, superficie):
superficie.blit(self.imagen, (0, 0))
self.boton_sal.dibujar(superficie)
def actualizar(self, tiempo):
self.boton_sal.actualizar(tiempo)
def verificar_eventos(self, evento):
self.boton_sal.verificar_eventos(evento)
def salir(self):
t = engine.pygame.time.get_ticks() - self.tiempo_pausa
engine.obtener_director().escena_actual.menu_pausa = None
for fila in engine.obtener_director().escena_actual.tablero:
for p in fila:
if p: p.sincronizar_cronos_pausa(t)
for s in engine.obtener_director().escena_actual.solsitos:
s.sincronizar_cronos_pausa(t)
class Confirmar(object):
def __init__(self, nombre):
self.imagen = engine.cargar_imagen("areyousure.png", True)
self.rect = engine.pygame.Rect(242, 183, 510, 380)
self.boton_yes = Boton(self.rect.x + 37, self.rect.y + 284, engine.cargar_imagen("boton10.png", True), comando = self.yes, nombre = "YES")
self.boton_no = Boton(self.rect.x + 258, self.rect.y + 284, engine.cargar_imagen("boton10.png", True), comando = self.no, nombre = "NO")
self.nombre = self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 34), "'" + nombre + "'", 1, (224, 187, 98))
if self.nombre.get_rect().width + 339 > 416:
self.msj_1 = self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 34), "This will permanently remove ", 1, (224, 187, 98))
self.msj_2 = self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 34), "'" + nombre + "' from the player roster!", 1, (224, 187, 98))
else:
self.msj_1 = self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 34), "This will permanently remove '" + nombre + "'", 1, (224, 187, 98))
self.msj_2 = self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 34), "from the player roster!", 1, (224, 187, 98))
self.rect_1 = self.msj_1.get_rect()
self.rect_2 = self.msj_2.get_rect()
self.rect_1.center = 494, 365
self.rect_2.center = 494, 394
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
self.boton_yes.dibujar(superficie)
self.boton_no.dibujar(superficie)
superficie.blit(self.msj_1, self.rect_1)
superficie.blit(self.msj_2, self.rect_2)
def verificar_eventos(self, evento):
self.boton_yes.verificar_eventos(evento)
self.boton_no.verificar_eventos(evento)
def actualizar(self, tiempo):
self.boton_yes.actualizar(tiempo)
self.boton_no.actualizar(tiempo)
self.boton_yes.rect.x = self.rect.x + 37
self.boton_yes.rect.y = self.rect.y + 284
self.boton_no.rect.x = self.rect.x + 258
self.boton_no.rect.y = self.rect.y + 284
def yes(self):
engine.obtener_director().escena_actual.cuadro_quien.delete()
engine.obtener_director().escena_actual.cuadro_quien.confirmar = None
def no(self):
engine.obtener_director().escena_actual.cuadro_quien.confirmar = None
class QuienEres(object):
def __init__(self):
self.imagen = engine.cargar_imagen("quieneres.png", True)
self.rect = engine.pygame.Rect(126, 58, 745, 628)
self.boton_rename = Boton(self.rect.x + 40, self.rect.y + 481, engine.cargar_imagen("boton9.png", True), comando = self.rename, nombre = "rename")
self.boton_delete = Boton(self.rect.x + 378, self.rect.y + 481, engine.cargar_imagen("boton9.png", True), comando = self.areyousure, nombre = "delete")
self.boton_ok = Boton(self.rect.x + 40, self.rect.y + 538, engine.cargar_imagen("boton9.png", True), comando = self.ok, nombre = "ok")
self.boton_cancel = Boton(self.rect.x + 378, self.rect.y + 538, engine.cargar_imagen("boton9.png", True), comando = self.cancel, nombre = "cancel")
self.imagen_seleccion = engine.pygame.Surface((568, 31))
self.imagen_seleccion.fill((0, 174, 0))
self.elementos = [Elemento(engine.pygame.Rect(self.rect.x + 84, self.rect.y + 175 + a, 568, 31), self.imagen_seleccion) for a in range(0, 248 , 31)]
self.rect_clic = None
self.imagen_crear = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 40), "(Create a New User)", 1, (246, 244, 177))
self.rect_crear = self.imagen_crear.get_rect()
self.confirmar = None
self.renombrar = None
self.cargar_usuarios()
if self.elementos[0].usuario: self.elementos[0].seleccionado = True
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
self.boton_rename.dibujar(superficie)
self.boton_delete.dibujar(superficie)
self.boton_ok.dibujar(superficie)
self.boton_cancel.dibujar(superficie)
for e in self.elementos:
e.dibujar(superficie)
if self.rect_clic: superficie.blit(self.imagen_crear, self.rect_crear)
if self.confirmar: self.confirmar.dibujar(superficie)
if self.renombrar: self.renombrar.dibujar(superficie)
def actualizar(self, tiempo):
if self.confirmar: self.confirmar.actualizar(tiempo)
elif self.renombrar: self.renombrar.actualizar(tiempo)
else:
for e in self.elementos:
e.actualizar(tiempo)
self.boton_ok.actualizar(tiempo)
self.boton_cancel.actualizar(tiempo)
self.boton_delete.actualizar(tiempo)
self.boton_rename.actualizar(tiempo)
def verificar_eventos(self, evento):
if not self.confirmar and not self.renombrar:
self.boton_rename.verificar_eventos(evento)
self.boton_delete.verificar_eventos(evento)
self.boton_ok.verificar_eventos(evento)
self.boton_cancel.verificar_eventos(evento)
for e in self.elementos:
e.verificar_eventos(evento)
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
if self.rect_clic and self.rect_clic.collidepoint(evento.pos[0], evento.pos[1]):
engine.obtener_director().escena_actual.cuadro_creacion = CuadroCreacion()
elif self.confirmar: self.confirmar.verificar_eventos(evento)
elif self.renombrar: self.renombrar.verificar_eventos(evento)
def rename(self):
s = None
for e in self.elementos:
if e.seleccionado == True: s = e.usuario
self.renombrar = Renombre(s)
def ok(self):
s = None
for e in self.elementos:
if e.seleccionado == True: s = e.usuario
if s != engine.obtener_usuario().nombre:
conn = sqlite3.connect("recursos/data.db")
cursor = conn.cursor()
cursor.execute("select id from usuario where nombre = ?", (s,))
id_user = cursor.fetchone()[0]
cursor.execute("insert into sesion values (null,?,?)", (id_user, str(datetime.now())))
conn.commit()
plantas = []
objetos = []
cursor.execute("select * from carta where id in (select id_carta from usuario_carta where id_usuario = ?);", (id_user,))
for row in cursor:
clase = None
clasebase = None
exec("clase = {}".format(row[3]))
exec("clasebase = {}".format(row[10]))
plantas.append(Carta(engine.pygame.Rect(row[1], row[2], 62, 87), clase, row[4], row[5], row[6], row[7], row[8], row[9], clasebase))
cursor.execute("select * from objeto where id in (select id_objeto from usuario_objeto where id_usuario = ?);", (id_user,))
for row in cursor:
objetos.append(row[1])
engine.definir_usuario(Usuario(id_user, s, plantas, objetos))
conn.close()
engine.obtener_director().escena_actual.cuadro_quien = None
def areyousure(self):
s = None
for e in self.elementos:
if e.seleccionado == True: s = e.usuario
self.confirmar = Confirmar(s)
def delete(self):
s = None
i = 0
for e in self.elementos:
if e.seleccionado == True:
s = e.usuario
e.seleccionado = False
break
i += 1
if i == 0:
if self.elementos[1].usuario == None:
engine.definir_usuario(None)
engine.obtener_director().escena_actual.cuadro_creacion = CuadroCreacion()
else:
self.elementos[0].seleccionado = True
conn = sqlite3.connect("recursos/data.db")
cursor = conn.cursor()
cursor.execute("select id from usuario where nombre = ?;", (self.elementos[1].usuario,))
id_user = cursor.fetchone()[0]
plantas = []
objetos = []
cursor.execute("select * from carta where id in (select id_carta from usuario_carta where id_usuario = ?);", (id_user,))
for row in cursor:
clase = None
clasebase = None
exec("clase = {}".format(row[3]))
exec("clasebase = {}".format(row[10]))
plantas.append(Carta(engine.pygame.Rect(row[1], row[2], 62, 87), clase, row[4], row[5], row[6], row[7], row[8], row[9], clasebase))
cursor.execute("select * from objeto where id in (select id_objeto from usuario_objeto where id_usuario = ?);", (id_user,))
for row in cursor:
objetos.append(row[1])
engine.definir_usuario(Usuario(id_user, self.elementos[1].usuario, plantas, objetos))
conn.close()
else:
self.elementos[i - 1].seleccionado = True
conn = sqlite3.connect("recursos/data.db")
cursor = conn.cursor()
cursor.execute("select id from usuario where nombre = ?", (s,))
id_user = cursor.fetchone()[0]
cursor.execute("delete from usuario where id = ?", (id_user,))
cursor.execute("delete from sesion where id_usuario = ?", (id_user,))
cursor.execute("delete from usuario_carta where id_usuario = ?", (id_user,))
conn.commit()
for e in self.elementos:
e.usuario = None
self.cargar_usuarios()
def cancel(self):
engine.obtener_director().escena_actual.cuadro_quien = None
def cargar_usuarios(self):
if engine.obtener_usuario():
conn = sqlite3.connect("recursos/data.db")
cursor = conn.cursor()
cursor.execute("select nombre from usuario where nombre != ? order by nombre;", (engine.obtener_usuario().nombre,))
self.elementos[0].usuario = engine.obtener_usuario().nombre
i = 1
for row in cursor:
self.elementos[i].usuario = row[0]
i += 1
if i < 8:
self.rect_clic = self.elementos[i].rect
self.rect_crear.center = self.rect_clic.center
else:
self.rect_clic = None
else:
self.rect_clic = self.elementos[0].rect
self.rect_crear.center = self.rect_clic.center
self.elementos[0].seleccionado = True
class Elemento(object):
def __init__(self, rect, seleccion = None):
self.rect = rect
self.seleccion = seleccion
self.seleccionado = False
self.imagen_nombre = None
self.rect_nombre = None
self.__nombre = None
def dibujar(self, superficie):
if self.seleccionado: superficie.blit(self.seleccion, self.rect)
if self.imagen_nombre: superficie.blit(self.imagen_nombre, self.rect_nombre)
def actualizar(self, tiempo):
if self.imagen_nombre:
self.rect_nombre.center = self.rect.center
def verificar_eventos(self, evento):
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
if self.rect.collidepoint(evento.pos[0], evento.pos[1]):
for e in engine.obtener_director().escena_actual.cuadro_quien.elementos:
if e is not self and e.seleccionado:
if self.usuario: e.seleccionado = False
if self.usuario:
self.seleccionado = True
@property
def usuario(self):
return self.__nombre
@usuario.setter
def usuario(self, nombre):
if nombre:
self.__nombre = nombre
self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 40), self.__nombre, 1, (246, 244, 177))
self.rect_nombre = self.imagen_nombre.get_rect()
self.rect_nombre.center = self.rect.center
else:
self.__nombre = None
self.imagen_nombre = None
self.rect_nombre = None
class Bienvenida(object):
def __init__(self):
self.imagen = engine.cargar_imagen("bienvenida.png", True)
self.x = 30
self.y = -243
self.rect_clic = engine.pygame.Rect(58, self.y + 186, 334, 38)
self.__nombre = engine.obtener_usuario().nombre
self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 40), self.nombre, 1, (0, 176, 0))
self.rect_nombre = self.imagen_nombre.get_rect()
self.rect_nombre.center = 215, self.y + 120
def dibujar(self, superficie):
superficie.blit(self.imagen, (self.x, self.y))
superficie.blit(self.imagen_nombre, self.rect_nombre)
def actualizar(self, tiempo):
self.rect_nombre.centery = self.y + 120
self.rect_clic.centery = self.y + 186
self.nombre = engine.obtener_usuario().nombre if engine.usuario else ""
def verificar_eventos(self, evento):
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
if self.rect_clic.collidepoint(evento.pos[0], evento.pos[1]):
engine.obtener_director().escena_actual.cuadro_quien = QuienEres()
@property
def nombre(self):
return self.__nombre
@nombre.setter
def nombre(self, nombre):
if nombre != self.__nombre:
self.__nombre = nombre
self.imagen_nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), self.nombre, 1, (0, 176, 0))
self.rect_nombre = self.imagen_nombre.get_rect()
self.rect_nombre.center = 215, self.y + 120
class CuadroObligar(object):
def __init__(self):
self.imagen = engine.cargar_imagen("obligacion.png", True)
self.rect = engine.pygame.Rect(184, 182, 636, 382)
self.boton_ok = Boton(self.rect.x + 63, self.rect.y + 283, engine.cargar_imagen("boton11.png", True), comando = self.ok, nombre = "ok")
def ok(self):
engine.obtener_director().escena_actual.cuadro_creacion.cuadro_obligacion = None
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
self.boton_ok.dibujar(superficie)
def verificar_eventos(self, evento):
self.boton_ok.verificar_eventos(evento)
def actualizar(self, tiempo):
self.boton_ok.actualizar(tiempo)
class Renombre(object):
def __init__(self, old):
self.imagen = engine.cargar_imagen("rename.png", True)
self.rect = engine.pygame.Rect(184, 184, 624, 380)
self.boton_ok = Boton(223, 467, engine.cargar_imagen("boton8.png", True), comando = self.cambiar_usuario, nombre = "ok")
self.boton_cancel = Boton(500, 467, engine.cargar_imagen("boton8.png", True), comando = self.cancelar, nombre = "cancel")
self.entrada = Input(248, 376)
self.old = old
self.entrada.indice = len(self.old)
self.entrada.text = self.old
self.entrada.seleccionar()
def cambiar_usuario(self):
i = 0
while i < len(self.entrada.text):
if self.entrada.text[i] != " ":
break
i += 1
self.entrada.text = self.entrada.text[i:len(self.entrada.text)]
i = len(self.entrada.text) - 1
while i > 0:
if self.entrada.text[i] != " ":
break
i -= 1
self.entrada.text = self.entrada.text[0:i + 1]
if len(self.entrada.text.replace(" ", "")) != 0:
conn = sqlite3.connect('recursos/data.db')
cursor = conn.cursor()
cursor.execute("update usuario set nombre = ? where nombre = ?", (self.entrada.text, self.old))
conn.commit()
conn.close()
if self.old == engine.obtener_usuario().nombre: engine.obtener_usuario().nombre = self.entrada.text
engine.obtener_director().escena_actual.cuadro_quien.cargar_usuarios()
engine.obtener_director().escena_actual.cuadro_quien.renombrar = None
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
self.boton_ok.dibujar(superficie)
self.boton_cancel.dibujar(superficie)
self.entrada.dibujar(superficie)
def actualizar(self, tiempo):
self.entrada.actualizar(tiempo)
self.boton_ok.actualizar(tiempo)
self.boton_cancel.actualizar(tiempo)
def verificar_eventos(self, evento):
if evento.type == locals.KEYDOWN:
self.seleccion = None
if evento.key == 13:
self.cambiar_usuario()
elif evento.key == 275:
self.entrada.text = self.old
self.boton_ok.verificar_eventos(evento)
self.boton_cancel.verificar_eventos(evento)
self.entrada.verificar_eventos(evento)
def cancelar(self):
engine.obtener_director().escena_actual.cuadro_quien.renombrar = None
class CuadroCreacion(object):
def __init__(self):
self.imagen = engine.cargar_imagen("crearuser2.png", True)
self.rect = engine.pygame.Rect(184, 184, 624, 380)
self.boton_ok = Boton(223, 467, engine.cargar_imagen("boton8.png", True), comando = self.agregar_usuario, nombre = "ok")
self.boton_cancel = Boton(500, 467, engine.cargar_imagen("boton8.png", True), comando = self.cancelar, nombre = "cancel")
self.entrada = Input(248, 376)
self.cuadro_obligacion = None
def agregar_usuario(self):
i = 0
while i < len(self.entrada.text):
if self.entrada.text[i] != " ":
break
i += 1
self.entrada.text = self.entrada.text[i:len(self.entrada.text)]
i = len(self.entrada.text) - 1
while i > 0:
if self.entrada.text[i] != " ":
break
i -= 1
self.entrada.text = self.entrada.text[0:i + 1]
if len(self.entrada.text.replace(" ", "")) != 0:
conn = sqlite3.connect('recursos/data.db')
cursor = conn.cursor()
n = (self.entrada.text,)
cursor.execute("select * from usuario where nombre = ?", (self.entrada.text,))
if cursor.fetchall() != []:
print "Ya existe un usuario con ese nombre,elija otro"
else:
cursor.execute("insert into usuario values (null,?,0)", n)
cursor.execute("insert into sesion values (null,last_insert_rowid(),?)", (str(datetime.now()),))
conn.commit()
cursor.execute("select id from usuario where nombre = ?", (self.entrada.text,))
engine.definir_usuario(Usuario(cursor.fetchone()[0], self.entrada.text, [], []))
conn.close()
engine.obtener_director().escena_actual.cuadro_creacion = None
engine.obtener_director().escena_actual.cuadro_quien = None
engine.obtener_director().escena_actual.saludar()
else: self.cuadro_obligacion = CuadroObligar()
def cancelar(self):
if engine.usuario:
engine.obtener_director().escena_actual.cuadro_creacion = None
else:
self.cuadro_obligacion = CuadroObligar()
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
self.boton_ok.dibujar(superficie)
self.boton_cancel.dibujar(superficie)
self.entrada.dibujar(superficie)
if self.cuadro_obligacion: self.cuadro_obligacion.dibujar(superficie)
def actualizar(self, tiempo):
self.entrada.actualizar(tiempo)
self.boton_ok.actualizar(tiempo)
self.boton_cancel.actualizar(tiempo)
if self.cuadro_obligacion: self.cuadro_obligacion.actualizar(tiempo)
def verificar_eventos(self, evento):
if self.cuadro_obligacion: self.cuadro_obligacion.verificar_eventos(evento)
else:
if evento.type == locals.KEYDOWN and evento.key == 13:
self.agregar_usuario()
self.boton_ok.verificar_eventos(evento)
self.boton_cancel.verificar_eventos(evento)
self.entrada.verificar_eventos(evento)
class SlotCuadro(object):
def __init__(self, rect):
self.carta = None
self.rect = rect
self.usado = False
def dibujar(self, superficie):
if self.carta:
superficie.blit(engine.obtener_director().escena_actual.imagen_cartas, self.rect, self.carta.rect_origen_carta)
if self.usado:
superficie.blit(engine.obtener_director().escena_actual.imagen_charge, self.rect)
class CuadroELeccion(object):
def __init__(self):
self.imagen = engine.cargar_imagen("choose.png", True)
self.imagen_copia = engine.cargar_imagen("choose.png", True)
f1 = [SlotCuadro(engine.pygame.Rect(a + 28, 51, 62, 87)) for a in range(0, 528, 66)]
f2 = [SlotCuadro(engine.pygame.Rect(a + 28, 142, 62, 87)) for a in range(0, 528, 66)]
f3 = [SlotCuadro(engine.pygame.Rect(a + 28, 233, 62, 87)) for a in range(0, 528, 66)]
f4 = [SlotCuadro(engine.pygame.Rect(a + 28, 324, 62, 87)) for a in range(0, 528, 66)]
f5 = [SlotCuadro(engine.pygame.Rect(a + 28, 415, 62, 87)) for a in range(0, 528, 66)]
self.slots = f1 + f2 + f3 + f4 + f5
self.rect = engine.pygame.Rect(0, 109, 581, 662)
self.tweener = engine.pytweener.Tweener()
self.altura = 750
self.listo = Boton(194, 1325, engine.cargar_imagen("boton0.png", True), comando = self.bajar, nombre = "let's rock")
def aparecer(self):
self.tweener.addTween(self, altura = 109, tweenTime = 0.5, tweenType = engine.pytweener.Easing.Linear.easeIn)
def agregar_carta(self, carta):
for slot in self.slots:
if slot.carta == None:
slot.carta = carta
slot.carta.x, slot.carta.y = slot.rect.x, slot.rect.y + 109
slot.carta.slot_inicial = slot
break
def dibujar(self, superficie):
self.imagen.blit(self.imagen_copia, (0, 0))
for slot in self.slots:
slot.dibujar(self.imagen)
superficie.blit(self.imagen, (0, self.altura))
self.listo.dibujar(superficie)
def actualizar(self, tiempo):
if not self.listo.presionado: self.listo.rect.y = self.altura + 575
self.listo.actualizar(tiempo)
if self.tweener.hasTweens():
self.tweener.update(tiempo / 1000.0)
def verificar_eventos(self, evento):
self.listo.verificar_eventos(evento)
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
for slot in self.slots:
if slot.rect.collidepoint(evento.pos[0], evento.pos[1] - 109) and not slot.usado:
if slot.carta:
for s in engine.obtener_director().escena_actual.barra_control.slots:
if s.carta == None:
slot.usado = True
engine.obtener_director().escena_actual.cartas.append(slot.carta)
self.tweener.addTween(slot.carta, x = s.rect.x, y = s.rect.y, tweenTime = 0.2, tweenType = pytweener.Easing.Linear.easeIn, onCompleteFunction = slot.carta.empotrar)
break
def bajar(self):
engine.obtener_director().escena_actual.barra_control.eligiendo = 1
self.tweener.addTween(self, altura = 750, tweenTime = 0.3, tweenType = engine.pytweener.Easing.Linear.easeIn, onCompleteFunction = self.acabar)
def acabar(self):
engine.obtener_director().escena_actual.cuadro_eleccion = None
engine.obtener_director().escena_actual.tweener.addTween(engine.obtener_director().escena_actual, izquierda = 265, tweenTime = 1.5, tweenType = engine.pytweener.Easing.Linear.easeIn, onCompleteFunction = engine.obtener_director().escena_actual.modo_juego)
class CuadroLampa(object):
def __init__(self):
self.imagen = engine.cargar_imagen("slot_lampa.png", True)
self.imagen_lampa = engine.cargar_imagen("lampa.png", True)
self.rect_lampa = self.imagen_lampa.get_rect()
self.rect = self.imagen.get_rect()
self.rect.left = engine.obtener_director().escena_actual.barra_control.imagen.get_rect().width
self.rect_lampa.center = self.rect.center
self.usando = False
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
if not self.usando:
superficie.blit(self.imagen_lampa, self.rect_lampa)
def dibujar_lampa(self, superficie):
if self.usando:
superficie.blit(self.imagen_lampa, self.rect_lampa)
def actualizar(self, tiempo):
if self.usando:
self.rect_lampa.bottomleft = engine.pygame.mouse.get_pos()
else:
self.rect_lampa.center = self.rect.center
def verificar_eventos(self, evento):
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
if engine.obtener_director().escena_actual.barra_control.eligiendo == 2:
if self.rect.collidepoint(evento.pos[0], evento.pos[1]) and not self.usando:
self.usando = True
elif self.usando:
i = (evento.pos[1] - 120) / 104
j = (evento.pos[0] - 50) / 100
if 0 <= i <= 5 and 0 <= j <= 8 and engine.obtener_director().escena_actual.tablero[i][j] != None:
if engine.obtener_director().escena_actual.tablero[i][j].__class__ == Nenufar and engine.obtener_director().escena_actual.tablero[i][j].contenido:
engine.obtener_director().escena_actual.tablero[i][j].contenido = None
else:
engine.obtener_director().escena_actual.tablero[i][j] = None
self.usando = False
elif evento.type == locals.MOUSEBUTTONDOWN and evento.button == 3:
if self.usando: self.usando = False
class BarraControl(object):
def __init__(self):
self.imagen = engine.cargar_imagen("barra.png", True)
self.slots = [SlotBarra(engine.pygame.Rect(107 + a, 11, 62, 88)) for a in range(0, 438, 73)]
self.soles = 50
self.nro_soles = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 30), str(self.soles), 1, (0, 0, 0))
self.rect_soles = self.nro_soles.get_rect()
self.rect_soles.center = 49, 91
self.eligiendo = 0
self.tweener = engine.pytweener.Tweener()
def dibujar(self, superficie):
superficie.blit(self.imagen, (0, 0))
for slot in self.slots:
slot.dibujar(superficie)
superficie.blit(self.nro_soles, self.rect_soles)
def actualizar(self, tiempo):
for slot in self.slots:
slot.actualizar(tiempo)
self.nro_soles = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 30), str(self.soles), 1, (0, 0, 0))
self.rect_soles = self.nro_soles.get_rect()
self.rect_soles.center = 49, 91
if self.tweener.hasTweens():
self.tweener.update(tiempo / 1000.0)
def verificar_eventos(self, evento):
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
if self.eligiendo == 2:
hubo_clic = False
for slot in self.slots:
if slot.rect.collidepoint(evento.pos[0], evento.pos[1]):
hubo_clic = True
if slot.carta and engine.obtener_director().escena_actual.seleccion == None and slot.cargando == False:
if engine.obtener_director().escena_actual.barra_control.soles >= slot.carta.info['precio']:
engine.obtener_director().escena_actual.seleccion = PlantaSeleccionada(slot)
slot.oscurecer()
else:
if engine.obtener_director().escena_actual.seleccion: engine.obtener_director().escena_actual.seleccion.slot.aclarar()
engine.obtener_director().escena_actual.seleccion = None
break
if not hubo_clic and self.imagen.get_rect().collidepoint(evento.pos[0], evento.pos[1]) and engine.obtener_director().escena_actual.seleccion:
engine.obtener_director().escena_actual.seleccion.slot.aclarar()
engine.obtener_director().escena_actual.seleccion = None
elif self.eligiendo == 0:
for slot in self.slots:
if slot.rect.collidepoint(evento.pos[0], evento.pos[1]):
if slot.carta:
engine.obtener_director().escena_actual.cartas.append(slot.carta)
self.tweener.addTween(slot.carta, x = slot.carta.slot_inicial.rect.x, y = slot.carta.slot_inicial.rect.y + 109, tweenTime = 0.2, tweenType = pytweener.Easing.Linear.easeIn, onCompleteFunction = slot.carta.reempotrar)
slot.carta = None
self.reacomodar(slot)
break
def agregar_carta(self, carta):
for slot in self.slots:
if slot.carta == None:
slot.carta = carta
break
def reacomodar(self, slot_movido):
slot_ant = slot_movido
for slot in self.slots:
if slot.rect.centerx > slot_movido.rect.centerx and slot.carta:
engine.obtener_director().escena_actual.cartas.append(slot.carta)
self.tweener.addTween(slot.carta, x = slot_ant.rect.x, y = slot_ant.rect.y, tweenTime = 0.2, tweenType = pytweener.Easing.Linear.easeIn, onCompleteFunction = slot.carta.empotrar)
slot.carta = None
slot_ant = slot
class PlantaSeleccionada(object):
def __init__(self, slot):
self.slot = slot
self.carta = slot.carta
self.imagen = engine.cargar_imagen(self.carta.clase.url_imagen, True)
self.grilla = Grilla(self.carta.clase.url_imagen, self.carta.clase.cantidad[0], self.carta.clase.cantidad[1])
self.rect = engine.pygame.Rect(0, 0, self.grilla.ancho, self.grilla.alto)
self.rect_origen = self.grilla.obtener_cuadro(self.carta.clase.cuadro_alpha)
self.rect_fondo = engine.pygame.Rect(0, 0, self.grilla.ancho, self.grilla.alto)
self.plantable = 2
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect, self.rect_origen)
def verificar_eventos(self, evento):
if evento.type == locals.MOUSEMOTION:
x, y = evento.pos
i = (y - 120) / 104
j = (x - 50) / 100
if i in range(0, 6) and j in range(0, 9):
self.rect_fondo.right = 50 + engine.obtener_director().escena_actual.ancho_cuadro * (j + 1)
self.rect_fondo.bottom = 120 + engine.obtener_director().escena_actual.alto_cuadro * (i + 1)
if engine.obtener_director().escena_actual.lineas[i] == self.carta.info['campo']:
if ((engine.obtener_director().escena_actual.tablero[i][j] and engine.obtener_director().escena_actual.tablero[i][j].__class__ == self.carta.base) or (not engine.obtener_director().escena_actual.tablero[i][j] and self.carta.base == None)):
self.plantable = 0
if engine.obtener_director().escena_actual.tablero[i][j].__class__ == Nenufar and engine.obtener_director().escena_actual.tablero[i][j].contenido != None:
self.plantable = 2
else: self.plantable = 2
else:
if engine.obtener_director().escena_actual.tablero[i][j] and engine.obtener_director().escena_actual.tablero[i][j].__class__ == Nenufar and engine.obtener_director().escena_actual.tablero[i][j].contenido == None:
self.plantable = 1
else:
self.plantable = 2
else:
self.plantable = 2
def dibujar_posible(self, superficie):
if self.plantable == 0 or self.plantable == 1:
self.dibujar_alpha(superficie, self.imagen, self.rect_fondo, 130, self.rect_origen)
def dibujar_alpha(self, fondo, imagen, rect_fondo, opacidad, rect_origen):
temp = engine.pygame.Surface((rect_fondo.width, rect_fondo.height)).convert()
temp.blit(fondo, (0, 0), rect_fondo)
temp.blit(imagen, (0, 0), rect_origen)
temp.set_alpha(opacidad)
fondo.blit(temp, rect_fondo)
def actualizar(self, tiempo):
self.rect.centerx, self.rect.centery = engine.pygame.mouse.get_pos()
def verificar_bases(self):
for fila in engine.obtener_director().escena_actual.tablero:
for p in fila:
if p and p.__class__ == self.carta.clase.base:
print "encontre una base"
class Carta(object):
def __init__(self, rect_origen, cls, nombre, descripcion, precio, campo, tipo, tiempo_charge = 3, base = None):
self.rect_origen_carta = rect_origen
self.rect = engine.pygame.Rect(0, 0, 85, 121)
self.clase = cls
self.x = 0
self.y = 0
self.slot_inicial = None
self.info = {
'nombre':nombre,
'descripcion:':descripcion,
'precio':precio,
'campo':campo,
'tipo':tipo,
}
self.tiempo_charge = tiempo_charge
self.base = base
def dibujar(self, superficie):
superficie.blit(engine.obtener_director().escena_actual.imagen_cartas, self.rect, self.rect_origen_carta)
def actualizar(self, superficie):
self.rect.x, self.rect.y = self.x, self.y
def empotrar(self):
engine.obtener_director().escena_actual.barra_control.agregar_carta(self)
engine.obtener_director().escena_actual.cartas.pop(0)
def reempotrar(self):
self.slot_inicial.usado = False
engine.obtener_director().escena_actual.cartas.pop(0)
class SlotBarra(object):
def __init__(self, rect):
self.carta = None
self.rect = rect
self.cargando = False
self.rect_charge = engine.pygame.Rect(0, 0, self.rect.width, 0)
self.tweener = pytweener.Tweener()
self.falta_cargar = 0
def dibujar(self, superficie):
if self.carta:
superficie.blit(engine.obtener_director().escena_actual.imagen_cartas, self.rect, self.carta.rect_origen_carta)
if engine.obtener_director().escena_actual.barra_control.soles < self.carta.info['precio'] and engine.obtener_director().escena_actual.barra_control.eligiendo == 2: superficie.blit(engine.obtener_director().escena_actual.imagen_nosoles, self.rect)
superficie.blit(engine.obtener_director().escena_actual.imagen_charge, self.rect, self.rect_charge)
def actualizar(self, tiempo):
self.rect_charge.height = self.falta_cargar
if self.tweener.hasTweens():
self.tweener.update(tiempo / 1000.0)
def oscurecer(self):
self.falta_cargar = self.rect.height
def aclarar(self):
self.falta_cargar = 0
def cargar(self):
if self.carta:
self.tweener.addTween(self, falta_cargar = 0, tweenTime = self.carta.tiempo_charge, tweenType = pytweener.Easing.Linear.easeIn, onCompleteFunction = self.terminar_cargado)
self.cargando = True
def terminar_cargado(self):
self.cargando = False
self.falta_cargar = 0
class Input(object):
def __init__(self, x, y):
self.text = ""
self.palabra = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), self.text, 1, (255, 255, 255))
self.parpadeante = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), "|", 1, (255, 255, 255))
self.x, self.y = x, y
self.xp = self.x - 3
self.visible = True
self.indice = len(self.text)
self.seleccionado = False
def dibujar(self, superficie):
if self.seleccionado: superficie.blit(self.seleccion, (self.x, self.y))
if self.visible: superficie.blit(self.parpadeante, (self.xp, self.y))
superficie.blit(self.palabra, (self.x, self.y))
def actualizar(self, tiempo):
if not self.seleccionado: self.palabra = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), self.text, 1, (255, 255, 255))
else: self.palabra = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), self.text, 1, (0, 0, 0))
if 0 <= engine.pygame.time.get_ticks() % 1000 <= 500:
self.visible = False
else: self.visible = True
self.xp = self.x + engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), self.text[0:self.indice], 1, (255, 255, 255)).get_width() - 3
def verificar_eventos(self, evento):
if evento.type == locals.KEYDOWN:
if evento.key == 275 and self.indice < len(self.text):
self.indice += 1
elif evento.key == 275 and self.indice == len(self.text):
self.deseleccionar()
elif evento.key == 276 and self.indice > 0:
if not self.seleccionado: self.indice -= 1
else:
self.deseleccionar()
self.indice = 0
elif evento.key != 8 and evento.key != 13 and evento.key != 275 and evento.key != 276 and evento.key != 304 and len(self.text) < 15:
if self.seleccionado:
self.deseleccionar()
self.text = ""
self.indice = 0
self.text = self.text[0:self.indice] + evento.unicode + self.text[self.indice:len(self.text)]
self.indice += 1
elif evento.key == 8 and self.indice > 0:
if self.seleccionado:
self.deseleccionar()
self.text = ""
self.indice = 0
else:
self.text = self.text[0:self.indice - 1] + self.text[self.indice:len(self.text)]
self.indice -= 1
def seleccionar(self):
if len(self.text) > 0:
self.seleccionado = True
self.palabra = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 50), self.text, 1, (0, 0, 0))
self.seleccion = engine.pygame.Surface((self.palabra.get_width(), self.palabra.get_height()))
self.seleccion.fill((255, 255, 255))
self.indice = len(self.text)
def deseleccionar(self):
self.seleccionado = False
self.seleccion = None
class Boton(object):
def __init__(self, x, y, imagen, comando = None, nombre = None):
self.imagen = imagen
self.rect = self.imagen.get_rect()
self.mask = engine.pygame.mask.from_surface(self.imagen, 127)
self.rect.x = x
self.rect.y = y
self.comando = comando
self.presionado = False
self.nombre = engine.pygame.font.Font.render(engine.pygame.font.Font(None, 32), nombre.upper(), 1, (0, 174, 0)) if nombre else None
self.rect_nombre = self.nombre.get_rect() if self.nombre else None
if self.rect_nombre: self.rect_nombre.center = self.rect.center
def dibujar(self, superficie):
superficie.blit(self.imagen, self.rect)
if self.nombre: superficie.blit(self.nombre, self.rect_nombre)
def verificar_eventos(self, evento):
if evento.type == locals.MOUSEBUTTONDOWN and evento.button == 1:
if engine.pygame.sprite.spritecollideany(self, [engine.cursor], engine.pygame.sprite.collide_mask):
if not self.presionado:
self.presionado = True
self.rect.x = self.rect.x + 3
self.rect.y = self.rect.y + 3
elif evento.type == locals.MOUSEBUTTONUP and evento.button == 1 and self.presionado:
self.presionado = False
self.rect.x = self.rect.x - 3
self.rect.y = self.rect.y - 3
if self.rect_nombre: self.rect_nombre.center = self.rect.center
if engine.pygame.sprite.spritecollideany(self, [engine.cursor], engine.pygame.sprite.collide_mask):
if self.comando: self.comando()
def actualizar(self, tiempo):
if self.rect_nombre: self.rect_nombre.center = self.rect.center
|
[
"eyscode@gmail.com"
] |
eyscode@gmail.com
|
44f66ca709805c87ece0f5ff08c1d8643e76dc1f
|
87736a69e11ce7e5750d91e9891169090180ba18
|
/virtual/bin/epylint
|
e3fa4286e1d555ec29e1a75c7f2664337196e555
|
[] |
no_license
|
MutumaMutuma/soccer-scout
|
16e8cf98f435868b2cb28838fcd7376caa91cc57
|
299b4cb9b8473b94562b5b287e8dbadf5c035034
|
refs/heads/master
| 2020-03-28T22:36:53.552396
| 2018-09-20T12:48:45
| 2018-09-20T12:48:45
| 149,244,840
| 0
| 0
| null | 2018-09-18T07:15:12
| 2018-09-18T07:15:12
| null |
UTF-8
|
Python
| false
| false
| 261
|
#!/home/zamzam/Documents/soccer-scout/virtual/bin/python
# -*- coding: utf-8 -*-
import re
import sys
from pylint import run_epylint
if __name__ == '__main__':
sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
sys.exit(run_epylint())
|
[
"lewismutuma1000@gmail.com"
] |
lewismutuma1000@gmail.com
|
|
a0558eff96171575b90ef92a7b59d2a7abd7f87f
|
f07a42f652f46106dee4749277d41c302e2b7406
|
/Data Set/bug-fixing-5/a8ab1a0b200881f52f564d28db90f10730c1f0b5-<latest>-fix.py
|
34987d8922650e14b77fd72b4e1557dd3181ede0
|
[] |
no_license
|
wsgan001/PyFPattern
|
e0fe06341cc5d51b3ad0fe29b84098d140ed54d1
|
cc347e32745f99c0cd95e79a18ddacc4574d7faa
|
refs/heads/main
| 2023-08-25T23:48:26.112133
| 2021-10-23T14:11:22
| 2021-10-23T14:11:22
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,811
|
py
|
def latest(module, items, repoq, yum_basecmd, conf_file, en_repos, dis_repos, update_only, installroot='/'):
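    # classify each requested item as an install or an update, honor
    # check_mode by only reporting the planned changes, then run a single
    # yum transaction and fill in res['rc'], res['msg'] and res['results']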
res = {
}
res['results'] = []
res['msg'] = ''
res['changed'] = False
res['rc'] = 0
pkgs = {
}
pkgs['update'] = []
pkgs['install'] = []
updates = {
}
update_all = False
cmd = None
if ('*' in items):
update_all = True
(rc, out, err) = run_check_update(module, yum_basecmd)
if ((rc == 0) and update_all):
res['results'].append('Nothing to do here, all packages are up to date')
return res
elif (rc == 100):
updates = parse_check_update(out)
elif (rc == 1):
res['msg'] = err
res['rc'] = rc
module.fail_json(**res)
if update_all:
cmd = (yum_basecmd + ['update'])
will_update = set(updates.keys())
will_update_from_other_package = dict()
else:
will_update = set()
will_update_from_other_package = dict()
for spec in items:
if spec.startswith('@'):
pkgs['update'].append(spec)
will_update.add(spec)
continue
elif (spec.endswith('.rpm') and ('://' not in spec)):
if (not os.path.exists(spec)):
res['msg'] += ("No RPM file matching '%s' found on system" % spec)
res['results'].append(("No RPM file matching '%s' found on system" % spec))
res['rc'] = 127
module.fail_json(**res)
envra = local_envra(spec)
if (not is_installed(module, repoq, envra, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot)):
pkgs['install'].append(spec)
continue
elif ('://' in spec):
package = fetch_rpm_from_url(spec, module=module)
envra = local_envra(package)
if (not is_installed(module, repoq, envra, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot)):
pkgs['install'].append(package)
continue
elif (is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot) or update_only):
pkgs['update'].append(spec)
else:
pkgs['install'].append(spec)
pkglist = what_provides(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot)
if (not pkglist):
res['msg'] += ("No package matching '%s' found available, installed or updated" % spec)
res['results'].append(("No package matching '%s' found available, installed or updated" % spec))
res['rc'] = 126
module.fail_json(**res)
nothing_to_do = True
for pkg in pkglist:
if ((spec in pkgs['install']) and is_available(module, repoq, pkg, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot)):
nothing_to_do = False
break
(pkgname, _, _, _, _) = splitFilename(pkg)
if ((spec in pkgs['update']) and (pkgname in updates)):
nothing_to_do = False
will_update.add(spec)
if (spec != pkgname):
will_update_from_other_package[spec] = pkgname
break
if ((not is_installed(module, repoq, spec, conf_file, en_repos=en_repos, dis_repos=dis_repos, installroot=installroot)) and update_only):
res['results'].append(('Packages providing %s not installed due to update_only specified' % spec))
continue
if nothing_to_do:
res['results'].append(('All packages providing %s are up to date' % spec))
continue
conflicts = transaction_exists(pkglist)
if conflicts:
res['msg'] += ('The following packages have pending transactions: %s' % ', '.join(conflicts))
res['results'].append(('The following packages have pending transactions: %s' % ', '.join(conflicts)))
res['rc'] = 128
module.fail_json(**res)
if module.check_mode:
to_update = []
for w in will_update:
if w.startswith('@'):
to_update.append((w, None))
elif (w not in updates):
other_pkg = will_update_from_other_package[w]
to_update.append((w, ('because of (at least) %s-%s.%s from %s' % (other_pkg, updates[other_pkg]['version'], updates[other_pkg]['dist'], updates[other_pkg]['repo']))))
else:
to_update.append((w, ('%s.%s from %s' % (updates[w]['version'], updates[w]['dist'], updates[w]['repo']))))
res['changes'] = dict(installed=pkgs['install'], updated=to_update)
if (will_update or pkgs['install']):
res['changed'] = True
return res
if cmd:
(rc, out, err) = module.run_command(cmd)
res['changed'] = True
elif (pkgs['install'] or will_update):
cmd = (((yum_basecmd + ['install']) + pkgs['install']) + pkgs['update'])
lang_env = dict(LANG='C', LC_ALL='C', LC_MESSAGES='C')
(rc, out, err) = module.run_command(cmd, environ_update=lang_env)
out_lower = out.strip().lower()
if ((not out_lower.endswith('no packages marked for update')) and (not out_lower.endswith('nothing to do'))):
res['changed'] = True
else:
(rc, out, err) = [0, '', '']
res['rc'] = rc
res['msg'] += err
res['results'].append(out)
if rc:
res['failed'] = True
return res
|
[
"dg1732004@smail.nju.edu.cn"
] |
dg1732004@smail.nju.edu.cn
|
a8cf597841bdc78c1f56b1e0b73d9efdcca7b554
|
c55bca491632ef98dfd0e39e9e197f86d4ce94f0
|
/wcoa/migrations/0019_auto_20200922_1837.py
|
6a1b7fb208ec5b9d7b5906ffb04ffb52f40aa3af
|
[
"MIT"
] |
permissive
|
Ecotrust/wcoa
|
420b2e9f03219a72f79e435c1001b87a76233a8b
|
f6ad1e42fa93560d57043ebeb8464a320befef14
|
refs/heads/main
| 2023-08-03T21:02:01.013970
| 2023-07-28T22:56:03
| 2023-07-28T22:56:03
| 196,878,615
| 1
| 1
|
MIT
| 2021-12-09T19:29:37
| 2019-07-14T20:07:39
|
Python
|
UTF-8
|
Python
| false
| false
| 395
|
py
|
# Generated by Django 2.2.9 on 2020-09-22 18:37
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('wcoa', '0018_delete_masonrypage'),
]
operations = [
migrations.AlterField(
model_name='catalogiframepage',
name='source',
field=models.URLField(max_length=1999),
),
]
|
[
"ryan.d.hodges@gmail.com"
] |
ryan.d.hodges@gmail.com
|
07f603f42438f4bf7d83e60cfb810b8fe1dcd1f5
|
d3442664e61329eb7f78ec93c3c2f77344e48941
|
/assignment_3/q1/q1.py
|
63c8d4dc4d69aa4c176a41c75a63bc33135e6c44
|
[] |
no_license
|
pranav-sankhe/EE679-Speech-Processing
|
6b748a283eb5f634941741e4068ecaf572812b7a
|
1d9434006551c024f6846db42243c4a161b8672b
|
refs/heads/master
| 2022-01-25T03:58:33.081064
| 2019-08-26T15:03:27
| 2019-08-26T15:03:27
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,182
|
py
|
from scipy import signal
import numpy as np
import matplotlib.pyplot as plt
from scipy.io import wavfile
import params
F0 = params.F0
sounds = params.sounds
a_den = params.a_den
n_den = params.n_den
i_den = params.i_den
s_den = params.s_den
den = [a_den, n_den, i_den, s_den]
a_num = params.a_num
n_num = params.n_num
i_num = params.i_num
s_num = params.s_num
num = [a_num, n_num, i_num, s_num]
for i in range(len(sounds)):
duration = params.duration
samp_freq = params.samp_freq
t = np.linspace(0, duration, duration*samp_freq, endpoint=False)
sig = (1 + signal.square(2 * np.pi * F0 * t, duty=0.01))/2
if i == 3:
samp_freq = samp_freq*2
sig = np.random.normal(0, 1, int(duration*samp_freq))
result = signal.lfilter(num[i], den[i], sig)
result = signal.lfilter(np.asarray([1.0, 0.0]), np.asarray([1, -15.0/16.0]), result)
fig = plt.figure()
plt.plot(result[0:1000])
plt.title("Sound: " + sounds[i])
plt.xlabel('time')
plt.ylabel('signal')
fig.savefig("Sound: " + sounds[i] + ".pdf", bbox_inches='tight')
wavfile.write(sounds[i] + '.wav', samp_freq, np.int16(result/np.max(result)*32767))
|
[
"pranavsankhe40@gmail.com"
] |
pranavsankhe40@gmail.com
|
cf242458a24aa0e1d728ab2e04b7dd8fd4423492
|
29c823ca7aad7122a3ee4dd04360b1118928e34d
|
/raspberry/samples/feature_extration.py
|
ebe37ad883590365fe690bb04d7a6a9ce03efc5d
|
[] |
no_license
|
sdk115/CarAccidentDetect
|
9f405761e06cda25ab115b12fed17cdedb2937f0
|
bf0bc7794b584bc5333b230958b818e309ddd911
|
refs/heads/master
| 2021-08-26T08:39:16.800398
| 2017-11-22T17:26:14
| 2017-11-22T17:26:14
| 111,210,917
| 1
| 1
| null | 2017-11-18T16:21:50
| 2017-11-18T14:12:13
|
Arduino
|
UTF-8
|
Python
| false
| false
| 386
|
py
|
def average_pooling(sensor_list, window_size, stride):
ret = []
    for i in range(0, len(sensor_list) - window_size + 1, stride):
col_len = len(sensor_list[i])
temp = [0] * col_len
for j in range(col_len):
for k in range(window_size):
temp[j]+=sensor_list[i+k][j]
        temp = [total / float(window_size) for total in temp]  # mean over the window
ret.append(temp)
return ret
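# Minimal usage sketch (hypothetical sensor readings): with window_size=3 and
# stride=1, each output row is the per-column mean of three consecutive rows.
if __name__ == '__main__':
    readings = [[0.0, 3.0], [3.0, 3.0], [6.0, 3.0], [9.0, 3.0]]
    print(average_pooling(readings, window_size=3, stride=1))
    # -> [[3.0, 3.0], [6.0, 3.0]]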
|
[
"sdk11594@gmail.com"
] |
sdk11594@gmail.com
|
cf7c57e1e50ca9d690c84f42ae3f0258e355854a
|
3e2593161915670c5586bd600e7e076bbe1a0758
|
/dalaoyuanma.py
|
3b5d58af9bfc1da703aa11f8a91675643ef93cb4
|
[] |
no_license
|
qzylalala/WebSpider
|
19f0691b3b05b8650f2f152d36eaaa17e08a0712
|
b2d7f143dba6e54322f401251633488b9406fde4
|
refs/heads/master
| 2020-04-11T01:22:27.457676
| 2019-01-14T12:07:07
| 2019-01-14T12:07:07
| 161,413,126
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,496
|
py
|
import json
import requests
from requests.exceptions import RequestException
import re
import time
def get_one_page(url):
try:
        headers = {'User-Agent': 'Mozilla/5.0 (Macintosh; Intel Mac OS X '
                                 '10_13_3) AppleWebKit/537.36 (KHTML, like Gecko) '
                                 'Chrome/65.0.3325.162 Safari/537.36'}
response = requests.get(url, headers=headers)
if response.status_code == 200:
return response.text
return None
except RequestException:
return None
def parse_one_page(html):
    pattern = re.compile(
        r'<dd>.*?board-index.*?>(\d+)</i>.*?data-src="(.*?)".*?name"><a'
        r'.*?>(.*?)</a>.*?star">(.*?)</p>.*?releasetime">(.*?)</p>'
        r'.*?integer">(.*?)</i>.*?fraction">(.*?)</i>.*?</dd>', re.S)
items = re.findall(pattern, str(html))
for item in items:
yield {
'index': item[0],
'image': item[1],
'title': item[2],
'actor': item[3].strip()[3:],
'time': item[4].strip()[5:],
'score': item[5] + item[6]
}
def write_to_file(content):
with open('result1.txt', 'a', encoding='utf-8') as f:
f.write(json.dumps(content, ensure_ascii=False) + '\n')
def main(offset):
url = 'http://maoyan.com/board/4?offset=' + str(offset)
html = get_one_page(url)
for item in parse_one_page(html):
print(item)
write_to_file(item)
if __name__ == '__main__':
for i in range(10):
main(offset=i * 10)
time.sleep(1)
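# Each yielded record looks roughly like this (field values illustrative):
# {'index': '1', 'image': 'http://.../poster.jpg', 'title': '<movie title>',
#  'actor': '<actors>', 'time': '<release date>', 'score': '9.0'}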
|
[
"304228244@qq.com"
] |
304228244@qq.com
|
a7e0820a5a2af18ff40dff28e75c2e8cf876f08e
|
94fe6bd54ba15f44569641acf5f3655d8eda1783
|
/src/run_regressor.py
|
91b5d444406a4867d9736f7930cc5091a76c28eb
|
[
"MIT"
] |
permissive
|
grey-eye/gpt-2
|
e94915bff8badf9e188fd67745c3b5ab4bdbce8d
|
40030059768719eca0c700ab521adf70b9d0656c
|
refs/heads/master
| 2022-01-18T00:55:27.344689
| 2019-07-18T00:03:24
| 2019-07-18T00:03:24
| 192,074,933
| 0
| 0
| null | 2019-06-15T12:15:06
| 2019-06-15T12:15:06
| null |
UTF-8
|
Python
| false
| false
| 5,164
|
py
|
import functools
import argparse
import json
import subprocess
import tensorflow as tf
import os
import model
import encoder
import pandas as pd
import numpy as np
def generate_data(mode):
def pad(encoding):
padlen = args.len_seq-len(encoding)
encoding.extend([220]*padlen)
return encoding
enc = encoder.get_encoder(args.base_model_name, args.base_model_dir)
if mode == 'train':
path = os.path.join(args.data_dir, 'train.tsv')
elif mode == 'eval':
path = os.path.join(args.data_dir, 'dev.tsv')
else:
path = os.path.join(args.data_dir, 'test.tsv')
df = pd.read_csv(path, sep='\t', skiprows=1)
for idx, row in df.iterrows():
label = np.float32(row[0])
features = np.array(pad(enc.encode(row[1])[:args.len_seq]),
dtype=np.int32)
yield features, label
def input_fn(params, mode):
generator_fn = params['generator_fn']
ds = tf.data.Dataset.from_generator(generator_fn, (tf.int32, tf.float32),
None, (mode,))
ds = ds.shuffle(1000).repeat().batch(args.batch_size)
return ds
def my_model_fn(features, labels, params, mode):
hparams = model.default_hparams()
with open(os.path.join(args.base_model_dir, args.base_model_name, 'hparams.json')) as f:
hparams.override_from_dict(json.load(f))
features.set_shape([args.batch_size, args.len_seq])
net = model.model(hparams, features)
logits = net['logits']
dropout = tf.nn.dropout(logits, keep_prob=0.9)
avg_logits = tf.math.reduce_mean(dropout, axis=1)
predictions = tf.layers.dense(avg_logits, 1)
loss = tf.losses.mean_squared_error(labels=labels,
predictions=predictions)
metrics = {'mse': loss}
tf.summary.scalar('mse', loss)
optimizer = params['optimizer']
train_op = optimizer.minimize(loss, global_step=tf.train.get_global_step())
if mode == tf.estimator.ModeKeys.PREDICT:
predictions = {'prediction': predictions}
return tf.estimator.EstimatorSpec(mode, predictions=predictions)
if mode == tf.estimator.ModeKeys.EVAL:
return tf.estimator.EstimatorSpec(mode, loss=loss)
if mode == tf.estimator.ModeKeys.TRAIN:
return tf.estimator.EstimatorSpec(mode, loss=loss, train_op=train_op)
def build_tpu_config():
if args.use_tpu:
        my_project_name = subprocess.check_output(
            ['gcloud', 'config', 'get-value', 'project']).decode().strip()
        my_zone = subprocess.check_output(
            ['gcloud', 'config', 'get-value', 'compute/zone']).decode().strip()
        tpu_cluster_resolver = tf.contrib.cluster_resolver.TPUClusterResolver(
            tpu=[args.tpu_name],
            zone=my_zone,
            project=my_project_name)
master = tpu_cluster_resolver.get_master()
else:
master = ''
tpu_config = tf.estimator.tpu.RunConfig(master=master)
return tpu_config
def train_regressor():
tpu_config = build_tpu_config()
optimizer = tf.train.AdamOptimizer()
if args.use_tpu:
optimizer = tf.contrib.tpu.CrossShardOptimizer(optimizer)
regressor = tf.estimator.tpu.TPUEstimator(
model_fn = my_model_fn,
model_dir = args.model_dir,
params={
'optimizer': optimizer,
'generator_fn': generate_data,
},
use_tpu=args.use_tpu,
config=tpu_config
)
regressor.train(functools.partial(input_fn, mode='train'), steps=args.num_steps)
regressor.evaluate(functools.partial(input_fn, mode='eval'), steps=args.num_steps)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument('--data_dir', required=True, help='Path to data ' + \
'directory containing train, dev and test files')
parser.add_argument('--len_seq', required=False, type=int, default=40,
help='Input sequence length')
parser.add_argument('--base_model_dir', required=True, help='Path to ' + \
'directory containing model')
parser.add_argument('--base_model_name', required=False, default='117M',
help='Name of model')
parser.add_argument('--batch_size', required=False, default=8,
help='Sets the batch size', type=int)
parser.add_argument('--train', action='store_true',
required=False, help='Run training')
parser.add_argument('--test', action='store_true', required=False)
parser.add_argument('--num_steps', type=int, required=False, default=1000,
help='Number of train batches to run')
parser.add_argument('--model_dir', type=str, required=False,
default='modeldir',
help='Output directory for checkpoints')
parser.add_argument('--use_tpu', action='store_true',
required=False, default=False, help='Use TPU')
parser.add_argument('--tpu_name', required=False, type=str, default=None,
help='Name of TPU')
args = parser.parse_args()
if args.train:
train_regressor()
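# Example invocation (paths are hypothetical; flags are the ones defined above):
#   python run_regressor.py --data_dir data/sts --base_model_dir models \
#       --base_model_name 117M --train --num_steps 1000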
|
[
"niels@grey-eye.net"
] |
niels@grey-eye.net
|
4d60520fd2eac336a9b98ec30e875ee9fa9e75f8
|
74e75430e4ca2bf422017c7035580ae973c2c42e
|
/test/functional/p2p_time_offset.py
|
00912448b5da04489c15731b40645ded3020428e
|
[
"MIT"
] |
permissive
|
j00v/Lightcoin
|
9e55bad2d3e38f4c3781f62f915828cde0e51bc9
|
a8555320bebbf95545bc8c2841f1fadc38f5bd53
|
refs/heads/main
| 2023-05-09T07:13:28.031313
| 2021-06-08T22:11:45
| 2021-06-08T22:11:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,966
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2019-2020 The Lightcoin developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
import time
from test_framework.test_framework import LightcoinTestFramework
from test_framework.util import (
assert_equal,
connect_nodes,
set_node_times,
)
def connect_nodes_bi(nodes, a, b):
connect_nodes(nodes[a], b)
connect_nodes(nodes[b], a)
class TimeOffsetTest(LightcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 8
self.enable_mocktime()
def setup_network(self):
# don't connect nodes yet
self.setup_nodes()
def check_connected_nodes(self):
ni = [node.getnetworkinfo() for node in self.connected_nodes]
assert_equal([x['connections'] for x in ni], [2] * len(ni))
assert_equal([x['timeoffset'] for x in ni], [0] * len(ni))
def run_test(self):
# Nodes synced but not connected
self.mocktime = int(time.time())
set_node_times(self.nodes, self.mocktime)
ni = [node.getnetworkinfo() for node in self.nodes]
assert_equal([x['connections'] for x in ni], [0] * self.num_nodes)
self.log.info("Nodes disconnected from each other. Time: %d" % self.mocktime)
assert_equal([x['timeoffset'] for x in ni], [0] * self.num_nodes)
self.log.info("Nodes have nTimeOffset 0")
# Set node times.
# nodes [1, 5]: set times to +10, +15, ..., +30 secs
for i in range(1, 6):
self.nodes[i].setmocktime(self.mocktime + 5 * (i + 1))
# nodes [6, 7]: set time to -5, -10 secs
for i in range(6, 8):
self.nodes[i].setmocktime(self.mocktime - 5 * (i - 5))
# connect nodes 1 and 2
self.log.info("Connecting with node-1 (+10 s) and node-2 (+15 s)...")
connect_nodes_bi(self.nodes, 0, 1)
connect_nodes_bi(self.nodes, 0, 2)
self.log.info("--> samples = [+0, +10, (+10), +15, +15]")
ni = self.nodes[0].getnetworkinfo()
assert_equal(ni['connections'], 4)
assert_equal(ni['timeoffset'], 10)
self.connected_nodes = [self.nodes[1], self.nodes[2]]
self.check_connected_nodes()
self.log.info("Node-0 nTimeOffset: +%d seconds" % ni['timeoffset'])
# connect node 3
self.log.info("Connecting with node-3 (+20 s). This will print the warning...")
connect_nodes_bi(self.nodes, 0, 3)
self.log.info("--> samples = [+0, +10, +10, (+15), +15, +20, +20]")
ni = self.nodes[0].getnetworkinfo()
assert_equal(ni['connections'], 6)
assert_equal(ni['timeoffset'], 15)
self.connected_nodes.append(self.nodes[3])
self.check_connected_nodes()
self.log.info("Node-0 nTimeOffset: +%d seconds" % ni['timeoffset'])
# connect node 6
self.log.info("Connecting with node-6 (-5 s)...")
connect_nodes_bi(self.nodes, 0, 6)
self.log.info("--> samples = [-5, -5, +0, +10, (+10), +15, +15, +20, +20]")
ni = self.nodes[0].getnetworkinfo()
assert_equal(ni['connections'], 8)
assert_equal(ni['timeoffset'], 10)
self.connected_nodes.append(self.nodes[6])
self.check_connected_nodes()
self.log.info("Node-0 nTimeOffset: +%d seconds" % ni['timeoffset'])
# connect node 4
self.log.info("Connecting with node-4 (+25 s). This will print the warning...")
connect_nodes_bi(self.nodes, 0, 4)
self.log.info("--> samples = [-5, -5, +0, +10, +10, (+15), +15, +20, +20, +25, +25]")
ni = self.nodes[0].getnetworkinfo()
assert_equal(ni['connections'], 10)
assert_equal(ni['timeoffset'], 15)
self.connected_nodes.append(self.nodes[4])
self.check_connected_nodes()
self.log.info("Node-0 nTimeOffset: +%d seconds" % ni['timeoffset'])
# try to connect node 5 and check that it can't
self.log.info("Trying to connect with node-5 (+30 s)...")
connect_nodes_bi(self.nodes, 0, 5)
ni = self.nodes[0].getnetworkinfo()
assert_equal(ni['connections'], 10)
assert_equal(ni['timeoffset'], 15)
self.log.info("Not connected.")
self.log.info("Node-0 nTimeOffset: +%d seconds" % ni['timeoffset'])
# connect node 7
self.log.info("Connecting with node-7 (-10 s)...")
connect_nodes_bi(self.nodes, 0, 7)
self.log.info("--> samples = [-10, -10, -5, -5, +0, +10, (+10), +15, +15, +20, +20, +25, +25]")
ni = self.nodes[0].getnetworkinfo()
assert_equal(ni['connections'], 12)
assert_equal(ni['timeoffset'], 10)
        self.connected_nodes.append(self.nodes[7])
self.check_connected_nodes()
self.log.info("Node-0 nTimeOffset: +%d seconds" % ni['timeoffset'])
if __name__ == '__main__':
TimeOffsetTest().main()
|
[
"Lightcoindev@gmail.com"
] |
Lightcoindev@gmail.com
|
f140ce1797948b2c124c84af0dac0f6967d36be2
|
8d5523b62fe7459afec54c56614d1baf7815fff7
|
/main.py
|
a515bb4b99a5e88c241e3c62321ef01a4b642748
|
[] |
no_license
|
heavyii/python-httpserver
|
520c062d98223fdfa0d1aa820e60b5c250d78e51
|
3b0c05d55462188d7ca4389210e28247657ce1b6
|
refs/heads/main
| 2023-04-13T03:54:30.858675
| 2021-04-07T08:09:26
| 2021-04-07T08:09:26
| 355,463,617
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 892
|
py
|
#!/usr/bin/env python
from bottle import get, post, request, run, static_file, redirect
import json
"""
@apiDefine jsonRequest
@apiHeader {String} Content-Type=application/json
"""
## Home page
@get('/')
def index():
redirect('/static/index.html')
## Static files
@get('/static/<filepath:path>')
def server_static(filepath):
return static_file(filepath, root='./static_file')
"""
@apiDescription Description of this sample endpoint
@api {post} /hello/:name Sample endpoint
@apiName name
@apiGroup Test
@apiUse jsonRequest
@apiParam {String} [lastName] your lastName
@apiParam {String} [middleName] your middleName
"""
@post('/hello')
@post('/hello/<name>')
def hello(name = "tom"):
    # Route parameter name (optional); POST body parameters arrive in request.json
    # Serialize the response dict with json.dumps
return json.dumps({ 'name': name, 'request-Parameters': request.json })
run(host='localhost', port=8080)
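# Example request against the running server (hypothetical payload):
#   curl -X POST http://localhost:8080/hello/alice \
#        -H 'Content-Type: application/json' \
#        -d '{"lastName": "Smith"}'
# -> {"name": "alice", "request-Parameters": {"lastName": "Smith"}}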
|
[
"linruisheng@seeklane.com"
] |
linruisheng@seeklane.com
|
ad5d88daef36ed25257d10be732b9c0e3104e120
|
f46231564d2c4cdb44774935316a204f617731e4
|
/Week3/day1/single_riffle_shuffle.py
|
53e81704ae22e9fe83edc24d2dff2ea1df5c7449
|
[] |
no_license
|
shravan090/competitiveprograming
|
a99983e4351f19afbcfcec2d4f2dc5ae5138fdee
|
21cc064ba44b73cef7838f901bc47db743470f66
|
refs/heads/master
| 2020-03-21T15:49:26.402912
| 2018-07-21T09:32:31
| 2018-07-21T09:32:31
| 138,735,069
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,256
|
py
|
import unittest
def is_single_riffle(half1, half2, shuffled_deck):
# Check if the shuffled deck is a single riffle of the halves
h1 = 0
h2 = 0
for card in shuffled_deck:
if h1 < len(half1) and card == half1[h1]:
h1 += 1
elif h2 < len(half2) and card == half2[h2]:
h2 += 1
else:
return False
return True
# Tests
class Test(unittest.TestCase):
def test_both_halves_are_the_same_length(self):
result = is_single_riffle([1, 4, 5], [2, 3, 6], [1, 2, 3, 4, 5, 6])
self.assertTrue(result)
def test_halves_are_different_lengths(self):
result = is_single_riffle([1, 5], [2, 3, 6], [1, 2, 6, 3, 5])
self.assertFalse(result)
def test_one_half_is_empty(self):
result = is_single_riffle([], [2, 3, 6], [2, 3, 6])
self.assertTrue(result)
def test_shuffled_deck_is_missing_cards(self):
result = is_single_riffle([1, 5], [2, 3, 6], [1, 6, 3, 5])
self.assertFalse(result)
def test_shuffled_deck_has_extra_cards(self):
result = is_single_riffle([1, 5], [2, 3, 6], [1, 2, 3, 5, 6, 8])
self.assertFalse(result)
unittest.main(verbosity=2)
|
[
"noreply@github.com"
] |
noreply@github.com
|
7ee2ac071830c965bed30bdaaa123fe1578c40b9
|
d69bbe98028d75aa14892d1f74c7bb7fa6ed572a
|
/D3SManager.py
|
8bb9c06de5abc3319130f5ec87c5d233d5e2a75e
|
[] |
no_license
|
dvi31/PyDxS
|
0df055094b9c4f58f94ed9595b5b03241c265b5a
|
2cd4fb13d10dbc65c40c8ed8270dc1de71ff909b
|
refs/heads/master
| 2020-07-06T01:43:54.788171
| 2015-08-21T09:32:07
| 2015-08-21T09:32:07
| 40,010,322
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,147
|
py
|
#!/usr/bin/python2.7
import base64, uuid, hashlib, time, os
import logging
import D3SCaller
from suds.client import Client
from suds.sax.attribute import Attribute
from httpslib import HTTPSClientCertTransport
import urlparse, urllib
from suds.plugin import MessagePlugin
class D3SSecretManager():
def __init__(self, authCaller, stoCaller):
self.authCaller = authCaller
self.stoCaller = stoCaller
def write(self, applicantPath, motivation, boxPath, metadatas, data):
time2 = 0
result, time1, response = self.authCaller.grantWrite(applicantPath, motivation, boxPath)
if result:
result, time2, response = self.stoCaller.write(applicantPath, motivation, boxPath, response.securityToken,
response.certificates, metadatas, base64.encodestring(data))
wstime = time1 + time2
return result, wstime, response
def read(self, applicantPath, motivation, depositPath):
time2 = 0
result, time1, response = self.authCaller.grantRead(applicantPath, motivation, depositPath)
if result:
result, time2, response = self.stoCaller.read(response.securityToken, response.depositProof)
wstime = time1 + time2
return result, wstime, response
def delete(self, applicantPath, motivation, depositPath):
time2 = 0
result, time1, response = self.authCaller.grantDelete(applicantPath, motivation, depositPath)
if result:
result, time2, response = self.stoCaller.delete(applicantPath, motivation, depositPath,
response.securityToken)
wstime = time1 + time2
return result, wstime, response
def buildPasswordMetadatas(self, login, domain):
metadatas = self.stoCaller.buildMetadatasParameter()
metadata = [self.stoCaller.buildStrMetadataParameter("appLogin", login),
self.stoCaller.buildStrMetadataParameter("appDomainName", domain)]
metadatas.metadata = metadata
return metadatas
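# Hypothetical usage sketch (callers, paths, and payload are illustrative only):
#   mgr = D3SSecretManager(authCaller, stoCaller)
#   metadatas = mgr.buildPasswordMetadatas('alice', 'example.org')
#   ok, elapsed, resp = mgr.write('/apps/demo', 'store credential',
#                                 '/boxes/demo', metadatas, 'secret')
# Each operation first requests a grant from the authorization service, then
# performs the storage call with the returned security token.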
|
[
"i2165aq@FP216530.intra.bdf.local"
] |
i2165aq@FP216530.intra.bdf.local
|
3436db72180129f031c11512a0200f9269428f46
|
eaafd553be05babfb061737d0216fac86cfe6d12
|
/triangePascal.py
|
1b1af83096f09db5733cff66772c1f25ade1410e
|
[] |
no_license
|
margauxln/kata
|
ee2a9311a937a9626a559462fa5d8f08381c3fb0
|
a8a47dbb4d89d854f20790715ab680a0fdaaf189
|
refs/heads/master
| 2023-02-07T17:09:19.167942
| 2021-01-04T14:05:56
| 2021-01-04T14:05:56
| 326,702,964
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 507
|
py
|
lineNumber = 10
triangle = []
for j in range(lineNumber):
    newline = []
    for i in range(len(triangle) + 1):
        if i == 0 or i == len(triangle):
            newline.append(1)
        else:
            newline.append(triangle[-1][i - 1] + triangle[-1][i])
    triangle.append(newline)
for line in triangle:
    spaceNumber = lineNumber - len(line)
    for i in range(spaceNumber):
        print(" ", end="")
    for element in line:
        print(element, end=' ')
    print()
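# With lineNumber = 10 the script prints a left-padded triangle whose first
# rows look like:
#          1
#         1 1
#        1 2 1
#       1 3 3 1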
|
[
"margaux.lennebonety@gmail.com"
] |
margaux.lennebonety@gmail.com
|
b709bf38b7de50baf8f706feed37e75970d6808f
|
e090c02718e8df08579dd97929dc23f54d9009c3
|
/sample6.py
|
86148ae5781bdb308a9bfc85095aaac254f7b15b
|
[] |
no_license
|
devapandu/Python
|
1c23d84c72eacefc98f77daa46db7586286df706
|
6f753d6cdff5021943a99dd55ae4b8297111d104
|
refs/heads/master
| 2020-06-13T09:37:20.815726
| 2019-07-04T05:24:41
| 2019-07-04T05:24:41
| 194,615,780
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 87
|
py
|
year = int(input())
if year % 4 == 0 and (year % 100 != 0 or year % 400 == 0):
    print("yes")
else:
    print("no")
|
[
"noreply@github.com"
] |
noreply@github.com
|
f3eb20c56fcb02809c305f06878aaed2f5362d30
|
bb81c12c2d391a8d18073d8fef055a9893655e60
|
/funcif/exercise3.24.py
|
239c40b7f8a7d21937116e142cb7b0e77a932671
|
[] |
no_license
|
georgkuenze/ScientificComputingPython
|
77dc06a55e2daecb64c6d6a37d1235661993bbed
|
89b475578753660d48d868e37fa063076a176c1d
|
refs/heads/master
| 2021-01-10T11:19:55.508164
| 2016-02-03T23:48:12
| 2016-02-03T23:48:12
| 51,037,498
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 855
|
py
|
# -*- coding: utf-8 -*-
"""
Created on Wed Jul 08 09:18:01 2015
@author: Georg
"""
import numpy as np
import matplotlib.pyplot as plt
# Define smoothed heaviside function
def H_eps(x, eps):
if x < -eps:
return 0
elif -eps <= x <= eps:
return 0.5 + x/(2.*eps) + 1/(2*np.pi)*np.sin((np.pi*x)/eps)
else:
return 1
# Define smoothed heaviside test function
def test_H_eps():
eps = 0.01
a = -2.*eps
b = 2.*eps
n = 100
y = []
x = [((b-a)/float(n))*i + a for i in range(0, n+1)]
for i in range(len(x)):
y.append(H_eps(x[i], eps))
return x, y
# Run test
test_result_x, test_result_y = test_H_eps()
# Make plot of smoothed heaviside function
fig = plt.figure()
ax = fig.add_subplot(111)
ax.plot(test_result_x, test_result_y, 'r-')
ax.set_xlim(-0.02, 0.02)
ax.set_ylim(-0.2, 1.2)
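# Render the plot window (assumes an interactive matplotlib backend; use
# fig.savefig(...) instead when running headless).
plt.show()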
|
[
"georg.kuenze@vanderbilt.edu"
] |
georg.kuenze@vanderbilt.edu
|
870e8b785f33924cc5c5a2cc48d349e22e6060a0
|
f88e65488ecd08ea0da5372dc49efe6c5439a13c
|
/raspberry/GXclassify/ApiGateway-python-sdk-2.0.4/backend_signature.py
|
e5f06250490d24489159bef45ac31419f72367eb
|
[] |
no_license
|
Emoic/Garbage-classification
|
2eb5f9755ec505d3d9bf8d8536d5e4f62b504f76
|
9de75fd2dd36f917a6e822f7d5f8af8684a84b37
|
refs/heads/main
| 2023-03-26T12:18:45.541396
| 2021-03-24T07:58:56
| 2021-03-24T07:58:56
| 350,977,722
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,974
|
py
|
from flask import Flask
from flask import request
from functools import wraps
import re
from datetime import datetime
from datetime import timedelta
from apig_sdk import signer
app = Flask(__name__)
def requires_apigateway_signature():
def wrapper(f):
secrets = {
"signature_key1": "signature_secret1",
"signature_key2": "signature_secret2",
}
authorizationPattern = re.compile(
r'SDK-HMAC-SHA256\s+Access=([^,]+),\s?SignedHeaders=([^,]+),\s?Signature=(\w+)')
BasicDateFormat = "%Y%m%dT%H%M%SZ"
@wraps(f)
def wrapped(*args, **kwargs):
if "authorization" not in request.headers:
return 'Authorization not found.', 401
authorization = request.headers['authorization']
m = authorizationPattern.match(authorization)
if m is None:
return 'Authorization format incorrect.', 401
signingKey = m.group(1)
if signingKey not in secrets:
return 'Signing key not found.', 401
signingSecret = secrets[signingKey]
signedHeaders = m.group(2).split(";")
r = signer.HttpRequest()
r.method = request.method
r.uri = request.path
r.query = {}
for k in request.query_string.decode('utf-8').split('&'):
spl = k.split("=", 1)
if spl[0] != "":
if len(spl) < 2:
r.query[spl[0]] = ""
else:
r.query[spl[0]] = spl[1]
r.headers = {}
needbody = True
dateHeader = None
for k in signedHeaders:
if k not in request.headers:
return 'Signed header ' + k + ' not found', 401
v = request.headers[k]
if k.lower() == 'x-sdk-content-sha256' and v == 'UNSIGNED-PAYLOAD':
needbody = False
if k.lower() == 'x-sdk-date':
dateHeader = v
r.headers[k] = v
if needbody:
r.body = request.get_data()
if dateHeader is None:
return 'Header x-sdk-date not found.', 401
t = datetime.strptime(dateHeader, BasicDateFormat)
if abs(t - datetime.utcnow()) > timedelta(minutes=15):
return 'Signature expired.', 401
sig = signer.Signer()
sig.Key = signingKey
sig.Secret = signingSecret
if not sig.Verify(r, m.group(3)):
                return 'Verify authorization failed.', 401
return f(*args, **kwargs)
return wrapped
return wrapper
@app.route("/<id>", methods=['GET', 'POST', 'PUT', 'DELETE'])
@requires_apigateway_signature()
def hello(id):
return "Hello World!"
if __name__ == '__main__':
app.run(
host='0.0.0.0',
port=8080)
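# Client-side sketch (hypothetical; assumes the same apig_sdk signer package):
#   sig = signer.Signer()
#   sig.Key = "signature_key1"
#   sig.Secret = "signature_secret1"
#   r = signer.HttpRequest("GET", "http://127.0.0.1:8080/demo")
#   sig.Sign(r)  # expected to add the X-Sdk-Date and Authorization headers
#   # then send r.headers (and body) with any HTTP client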
|
[
"noreply@github.com"
] |
noreply@github.com
|
796fde6069383ef7c30b2222a559bbe8bc28b294
|
ab09c04f237cb83d581f7b50b6ff2ab1688497c9
|
/myROI_tracker.py
|
133cec9dea02daea9126f4792a46c79ddd40eb1e
|
[] |
no_license
|
minaf/HW1-assignment4
|
db283f32b63f40f374e5f8410c4b9edffc70409b
|
30405ab081a2699ac4150a0bc99d3e501343f7dc
|
refs/heads/master
| 2020-03-06T20:20:52.513557
| 2018-03-28T06:25:50
| 2018-03-28T06:25:50
| 127,050,901
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,456
|
py
|
import numpy as np
import cv2
from calculateKLT import calculateKLT
#cap = cv2.VideoCapture(0) #for tracking object from webcam
cap = cv2.VideoCapture('sailing_boat.mp4')
# Create some random colors
color = np.random.randint(0,255,(100,3))
# Take first frame and find corners in it
ret, old_frame = cap.read()
old_gray = cv2.cvtColor(old_frame, cv2.COLOR_BGR2GRAY)
#select ROI manually
r = cv2.selectROI(old_gray)
#select window size
window_size = np.array([21, 21])
# points of interest
p0 = np.empty([4, 1, 2], dtype=np.float32)
p0[0][0] = [r[0],r[1]]
p0[1][0] = [r[0]+r[2],r[1]]
p0[2][0] = [r[0],r[1]+r[3]]
p0[3][0] = [r[0]+r[2],r[1]+r[3]]
# Create a mask image for drawing purposes
mask = np.zeros_like(old_frame)
while(1):
#new frame
ret,frame = cap.read()
frame_gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
# calculate image alignment
p1 = calculateKLT(old_gray, frame_gray, p0, window_size, 30, 0.01)
#get new points to track
good_new = p1[0:4]
good_old = p0[0:4]
#drawing rectangle of tracked object
    cv2.rectangle(frame, (int(p1[0][0, 0]), int(p1[0][0, 1])),
                  (int(p1[3][0, 0]), int(p1[3][0, 1])), (0, 255, 0), 3)
img = cv2.add(frame,mask)
cv2.imshow('frame',img)
#if pressed ESC break
k = cv2.waitKey(30) & 0xff
if k == 27:
break
# Now update the previous frame and previous points
old_gray = frame_gray.copy()
p0 = good_new.reshape(-1,1,2)
cv2.destroyAllWindows()
cap.release()
|
[
"minaferizbegovic@gmail.com"
] |
minaferizbegovic@gmail.com
|