blob_id
stringlengths 40
40
| directory_id
stringlengths 40
40
| path
stringlengths 2
616
| content_id
stringlengths 40
40
| detected_licenses
listlengths 0
69
| license_type
stringclasses 2
values | repo_name
stringlengths 5
118
| snapshot_id
stringlengths 40
40
| revision_id
stringlengths 40
40
| branch_name
stringlengths 4
63
| visit_date
timestamp[us] | revision_date
timestamp[us] | committer_date
timestamp[us] | github_id
int64 2.91k
686M
โ | star_events_count
int64 0
209k
| fork_events_count
int64 0
110k
| gha_license_id
stringclasses 23
values | gha_event_created_at
timestamp[us] | gha_created_at
timestamp[us] | gha_language
stringclasses 213
values | src_encoding
stringclasses 30
values | language
stringclasses 1
value | is_vendor
bool 2
classes | is_generated
bool 2
classes | length_bytes
int64 2
10.3M
| extension
stringclasses 246
values | content
stringlengths 2
10.3M
| authors
listlengths 1
1
| author_id
stringlengths 0
212
|
|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|---|
c7d86bf851fda7dde5e28313f496c7c66a86cd6f
|
0c98435987df858aa25df6db6f6110076e46a800
|
/randomPassword.py
|
6f21f6b7db39fbe3cada73eaace9952108cf8933
|
[
"MIT"
] |
permissive
|
mk-knight23/hello-Mk
|
218df4ef0382353d0837d4943d6eafde1ede320e
|
51ed1af9ded630328aa2535c048f06a85dc9029d
|
refs/heads/main
| 2023-08-29T18:12:33.768112
| 2021-11-06T15:24:58
| 2021-11-06T15:24:58
| 335,338,799
| 1
| 2
|
MIT
| 2021-10-13T05:34:49
| 2021-02-02T15:42:20
|
HTML
|
UTF-8
|
Python
| false
| false
| 142
|
py
|
import random

# Candidate characters for the password (reproduced byte-for-byte from the
# original alphabet).
CHARS = "abcdefghijklmnopqrstuvwxyz123456789ABCDEFGHIJKLMNOPQRSTUVWXYZ#@!][}{?ยฟ%&"
# Fix: the original named this variable `len`, shadowing the builtin.
PASSWORD_LENGTH = 12
# random.sample picks PASSWORD_LENGTH *distinct* characters without
# replacement, so no character repeats in the result.
# NOTE(review): the `random` module is not cryptographically secure; for real
# passwords prefer `secrets.choice`.
password = "".join(random.sample(CHARS, PASSWORD_LENGTH))
print(password)
|
[
"noreply@github.com"
] |
mk-knight23.noreply@github.com
|
b3dbee07bc3f3d9638fb8808a02fe6c8648b9872
|
8aca460c3b6e25826a1d385d6b23f837dedfce7a
|
/dihash/hash_impl.py
|
819d2a2513d7545a8c6ae4bc590e91dbbdb9abd2
|
[
"MIT"
] |
permissive
|
calebh/dihash
|
24d2255de20cabd9c574843157bef0605121c088
|
5b5c13b295915fb582b334a0ca82246bdf75a04e
|
refs/heads/master
| 2023-06-09T14:56:30.112485
| 2023-05-24T23:15:35
| 2023-05-24T23:15:35
| 240,933,328
| 13
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,251
|
py
|
import hashlib
import networkx as nx
import pynauty
import math
# Convert a NetworkX graph to a nauty graph
# Input should be a NetworkX digraph with node labels represented as strings, stored in the 'label'
# field of the NetworkX node attribute dictionary
def nauty_graph(g):
    """Build a pynauty.Graph from a node-labeled NetworkX digraph.

    Returns (node_to_idx, nauty_g) where node_to_idx maps each NetworkX node
    to its integer index in the pynauty graph.
    """
    # Map each node to a natural number 0,...,n-1 in an arbitrary order. The number of a node is a node index
    node_to_idx = {n: i for (i, n) in enumerate(g.nodes)}
    # Convert the NetworkX adjacency information to use the node indices
    adj_dict = {node_to_idx[s]: [node_to_idx[t] for t in g.successors(s)] for s in g.nodes}
    # Dictionary mapping node labels to a set of node indices
    colorings_lookup = {}
    for n in g.nodes:
        label = g.nodes[n]['label']
        if label not in colorings_lookup:
            colorings_lookup[label] = set()
        colorings_lookup[label].add(node_to_idx[n])
    # It turns out that the order of the vertex_coloring passed to nauty is important,
    # so fix it by sorting the labels.
    ordered_labels = sorted(colorings_lookup.keys())
    # Convert the dictionary into a list of sets. Each set contains node indices with identical labels
    colorings = [colorings_lookup[label] for label in ordered_labels]
    # Construct the pynauty graph
    nauty_g = pynauty.Graph(g.order(), directed=True, adjacency_dict=adj_dict, vertex_coloring=colorings)
    # Return the node to index conversion function and the nauty graph
    return (node_to_idx, nauty_g)
# Returns a list of nodes, ordered in the canonical order
def canonize(idx_to_node, nauty_g):
    """Return the graph's nodes arranged in nauty's canonical labeling order."""
    canonical_indices = pynauty.canon_label(nauty_g)
    return [idx_to_node[idx] for idx in canonical_indices]
def escape(s):
    """Escape backslashes and double quotes so s can sit inside a quoted string."""
    # Backslashes first, so the escapes added for quotes are not re-escaped.
    doubled = s.replace('\\', '\\\\')
    return doubled.replace('"', '\\"')
def to_str(data):
    """Serialize nested lists/tuples/strings/ints into a canonical string form.

    Raises TypeError for any other type. Note bools serialize via the int
    branch (bool is a subclass of int).
    """
    if isinstance(data, list):
        inner = ",".join(to_str(item) for item in data)
        return "[{}]".format(inner)
    if isinstance(data, str):
        return '"{}"'.format(escape(data))
    if isinstance(data, tuple):
        inner = ",".join(to_str(item) for item in data)
        return "({})".format(inner)
    if isinstance(data, int):
        return str(data)
    raise TypeError("Unable to call to_str on " + str(data))
# Returns a list of lists of nodes, each list is an orbit
def orbits(idx_to_node, nauty_g):
    """Group the graph's nodes into automorphism orbits using nauty's autgrp."""
    # orbs gives the orbits of the graph. Two nodes i,j are in the same orbit if and only if orbs[i] == orbs[j]
    # NOTE: num_orbits is unpacked but unused.
    (_, _, _, orbs, num_orbits) = pynauty.autgrp(nauty_g)
    # orbits_lookup maps an orbit identifier to a list of nodes in that orbit
    orbits_lookup = {}
    for i in range(len(orbs)):
        orb_label = orbs[i]
        if orb_label not in orbits_lookup:
            orbits_lookup[orb_label] = []
        orbits_lookup[orb_label].append(idx_to_node[i])
    # Now dispose of the orbit identifier, we are only interested in the orbit groupings
    return list(orbits_lookup.values())
# Analyze a NetworkX graph, returning a list of nodes in canonical order and a list of orbits
def analyze_graph(g, compute_orbits):
    """Return (canonical_node_order, orbits-or-None) for a labeled digraph g.

    Orbits are only computed when compute_orbits is truthy (they require an
    extra nauty call via autgrp).
    """
    (node_to_idx, nauty_g) = nauty_graph(g)
    idx_to_node = invert_dict(node_to_idx)
    if compute_orbits:
        orbs = orbits(idx_to_node, nauty_g)
    else:
        orbs = None
    return (canonize(idx_to_node, nauty_g), orbs)
def compose_dicts(d2, d1):
    """Compose dicts as functions: result[k] = d2[d1[k]].

    Keys of d1 whose value is missing from d2 map to None (dict.get).
    """
    composed = {}
    for key, intermediate in d1.items():
        composed[key] = d2.get(intermediate)
    return composed
# Returns the number of bits needed to represent an integer input x
def num_to_bit_counts(x):
    """Bits required to represent the values 0..x; 0 for x == 0."""
    bits = math.log2(x + 1)
    return math.ceil(bits)
# Encodes a node-edge labeled digraph as a node labeled digraph via the conversion
# outlined in the nauty manual, section 14
def edge_labeled_digraph_to_digraph(g):
    """Layer-encode an edge-labeled digraph into a plain node-labeled digraph.

    Each edge label gets a bit pattern; layer i contains an edge iff bit i of
    the edge's pattern is set. Vertical edges thread the copies of each node
    together. Returns (g_out, edge_layers) where edge_layers is the sorted
    list of distinct edge labels.
    """
    edge_labels = set()
    for (s, t) in g.edges():
        edge_labels.add(g.edges[(s, t)]['label'])
    # The manual doesn't say that edges need to be sorted, but we've
    # experimentally verified that changing the edge label order changes
    # g_out due to propogated changes to the created layers. Fortunately
    # we can order our labels.
    edge_layers = sorted(list(edge_labels))
    num_layers = num_to_bit_counts(len(edge_layers))
    # Zero-padded binary format wide enough for all labels.
    format_str = '{0:0' + str(num_layers) + 'b}'
    # i + 1 so that no label encodes to all-zero bits (which would create no edges).
    edge_layer_to_bits = {label: format_str.format(i + 1) for (i, label) in enumerate(edge_layers)}
    g_out = nx.DiGraph()
    # Add the nodes for each layer, copying the original node label.
    for layer_i in range(num_layers):
        for n in g.nodes():
            g_out.add_node((layer_i, n))
            g_out.nodes[(layer_i, n)]['label'] = g.nodes[n]['label']
    # Create the edges in each layer
    for layer_i in range(num_layers):
        for (s, t) in g.edges():
            edge_label = g.edges[(s, t)]['label']
            # Index from the end of the bit string: layer 0 is the least significant bit.
            from_end_i = -(layer_i + 1)
            # Only add an edge if the bit corresponding to this layer, and the edge label
            # is set to 1
            if edge_layer_to_bits[edge_label][from_end_i] == '1':
                g_out.add_edge((layer_i, s), (layer_i, t))
    # Create the vertical threads for each node
    # Each node in the layer is connected in one direction to the node above it
    for layer_i in range(num_layers - 1):
        for n in g.nodes():
            g_out.add_edge((layer_i, n), (layer_i + 1, n))
    return (g_out, edge_layers)
# g is a MultiDiGraph. This function returns the maximum number of parallel edges in the MultiDiGraph
# If g has no edges, 1 is returned
def max_num_multiedges(g):
    """Maximum multiplicity of any (s, t) pair in MultiDiGraph g; 1 when edgeless."""
    if len(g.edges()) > 0:
        return max(g.number_of_edges(s,t) for (s, t) in g.edges())
    else:
        # An empty graph trivially has "no parallel edges".
        return 1
# Converts a multigraph to an edge labeled digraph. The edges are labeled with
# the number of edges between two nodes
def multigraph_to_edge_labeled_digraph(g):
    """Collapse parallel edges of g into single edges labeled with their count."""
    output = nx.DiGraph()
    for n in g.nodes:
        output.add_node(n)
        output.nodes[n]['label'] = g.nodes[n]['label']
    for (s, t) in g.edges():
        output.add_edge(s, t)
    # Second pass: label each collapsed edge with its original multiplicity.
    for (s, t) in g.edges():
        output.edges[(s, t)]['label'] = str(g.number_of_edges(s, t))
    return output
# Computes the quotient graph G/Orb
# Input can be either a MultiDiGraph or a DiGraph
# Return result is a MultiDiGraph
def quotient_graph(g):
    """Collapse each automorphism orbit of g to a single node.

    Returns (node_to_quotient_idx, output) where node_to_quotient_idx maps
    each original node to the index of its orbit's representative in output.
    """
    # NOTE(review): MultiDiGraph is a subclass of DiGraph, so this branch also
    # copies MultiDiGraph inputs — harmless, but worth confirming it is intended.
    if isinstance(g, nx.DiGraph):
        g = nx.MultiDiGraph(g)
    (g_digraph, _) = edge_labeled_digraph_to_digraph(multigraph_to_edge_labeled_digraph(g))
    (node_to_idx, nauty_g) = nauty_graph(g_digraph)
    idx_to_node = invert_dict(node_to_idx)
    # orbs is a list of orbits. Each orbit contains a list of nodes,
    # each node is encoded as (layer_i, node) where layer_i is the layer
    # of the edge encoding and node is a reference to a node in the original graph
    orbs = orbits(idx_to_node, nauty_g)
    output = nx.MultiDiGraph()
    node_to_quotient_idx = {}
    # Filter out the orbits of everything except the first layer
    orbs = [o for o in orbs if o[0][0] == 0]
    for (i, o) in enumerate(orbs):
        representative_idx = i
        output.add_node(representative_idx)
        # All nodes in an orbit share a label, so any member's label works.
        output.nodes[representative_idx]['label'] = g.nodes[o[0][1]]['label']
        for (_, node) in o:
            node_to_quotient_idx[node] = representative_idx
    for o in orbs:
        # Arbitrarily pick the first node in the orbit
        (_, representative) = o[0]
        quotient_representative_idx = node_to_quotient_idx[representative]
        for target in g.successors(representative):
            quotient_target_idx = node_to_quotient_idx[target]
            # Preserve edge multiplicity from the original multigraph.
            for i in range(g.number_of_edges(representative, target)):
                output.add_edge(quotient_representative_idx, quotient_target_idx)
    return (node_to_quotient_idx, output)
# Computes the quotient graph (G/Orb)/Orb... until a fixpoint is reached
# Input can be either a MultiDiGraph or a DiGraph
# Return result is a MultiDiGraph
def quotient_fixpoint(g):
    """Iterate quotient_graph until the node count stops shrinking.

    Returns (sigma, output): sigma maps original nodes to their node in the
    final quotient graph (built by composing the per-step mappings).
    """
    prev = g
    (sigma, output) = quotient_graph(g)
    # Each quotient step can only merge nodes, so the node count is strictly
    # decreasing until the fixpoint.
    while output.number_of_nodes() < prev.number_of_nodes():
        prev = output
        (sigma_prime, output) = quotient_graph(output)
        sigma = compose_dicts(sigma_prime, sigma)
    return (sigma, output)
def invert_dict(d):
    """Swap keys and values of d (assumes values are hashable and distinct)."""
    inverted = {}
    for key, value in d.items():
        inverted[value] = key
    return inverted
def invert_list(lst):
    """Map each element to its index; duplicates keep the last occurrence."""
    return {elem: idx for idx, elem in enumerate(lst)}
# Sort a set of orbits by the minimum canonical index
def sort_orbits(canonization_mapping, orbits):
    """Order orbits by the smallest canonical index among each orbit's nodes."""
    def smallest_index(orbit_nodes):
        return min(canonization_mapping[node] for node in orbit_nodes)
    return sorted(orbits, key=smallest_index)
# Map nodes to the index of their orbit
def canonical_orbits_mapping(sorted_orbits):
    """Return {node: orbit_index} over a list of orbits (lists of nodes)."""
    return {node: i for i, orbit in enumerate(sorted_orbits) for node in orbit}
def hash_sha256(s):
    """Hex digest of the SHA-256 hash of s encoded as UTF-8."""
    digest = hashlib.sha256()
    digest.update(s.encode('utf-8'))
    return digest.hexdigest()
# (g_hash, node_hashes) = dihash.hash_graph(g, hash_nodes=True, apply_quotient=False, string_hash_fun=hash_sha256)
#
# hash_graph has the following inputs:
# - g: A NetworkX digraph. Each node should have a 'label' entry in its node attribute dictionary. The value of this entry should be a string which determines the label of that node. g may optionally have a graph attribute named 'label', which is a label for the entire graph
# - hash_nodes: A boolean value. If true, hash_graph also returns a dictionary giving the hashes of all nodes in the graph
# - apply_quotient: A boolean value. If true, the input graph g is run through the quotient_fixpoint function, which computes (G/Orb)/Orb... prior to hashing the graph.
# - string_hash_fun: A function which maps strings to a string. The default value, hash_sha256 hashes by using hashlib.sha256 and converting to the result to a hex digest.
#
# hash_graph has the following outputs:
# - g_hash: A hex digest of the hash of the entire graph
# - node_hashes: If hash_nodes is False, this value is None. If hash_nodes is True, this value is a dictionary mapping nodes to their hash hex digests.
def hash_graph(g, hash_nodes=True, apply_quotient=False, string_hash_fun=hash_sha256):
    """Hash a labeled digraph (and optionally each node) up to isomorphism."""
    original_graph = g
    original_nodes = frozenset(original_graph.nodes())
    if apply_quotient:
        (sigma, quotient_multigraph) = quotient_fixpoint(original_graph)
        if max_num_multiedges(quotient_multigraph) == 1:
            # If there are no multiedges, simply convert back to an ordinary DiGraph
            g = nx.DiGraph(quotient_multigraph)
        else:
            # Otherwise we have to encode the multigraph into a digraph
            (quotient_digraph, edge_labels) = edge_labeled_digraph_to_digraph(multigraph_to_edge_labeled_digraph(quotient_multigraph))
            # Map n to nodes on the first layer, then compose with the mapping from (0,n) to the node in the quotient graph
            sigma = compose_dicts({n: (0, n) for n in original_nodes}, sigma)
            # The parallel edges in the multigraph were converted to edge labels, and then implicitly
            # encoded in the structure of quotient_digraph. We lost the value of the labels, so we
            # need to take that into account. Here we save the edge labels as a global
            # property of the graph
            # NOTE(review): this branch reads quotient_digraph.graph['label'] when the
            # *original* graph has a label, but quotient_graph builds a fresh graph and
            # does not visibly copy graph attributes — confirm this key exists here.
            if 'label' in original_graph.graph:
                quotient_digraph.graph['label'] = to_str((quotient_digraph.graph['label'], edge_labels))
            else:
                quotient_digraph.graph['label'] = to_str(edge_labels)
            g = quotient_digraph
    else:
        # sigma is the identity mapping
        sigma = {n: n for n in g.nodes()}
    (canonization, orbs) = analyze_graph(g, hash_nodes)
    canon_mapping = invert_list(canonization)
    # Adjacency expressed in canonical indices; sorted for a deterministic summary.
    canon_adj_list = sorted([(canon_mapping[s], canon_mapping[t]) for (s, t) in g.edges])
    canon_labels = [g.nodes[n]['label'] for n in canonization]
    if 'label' in g.graph:
        g_summary = (g.graph['label'], canon_labels, canon_adj_list)
    else:
        g_summary = (canon_labels, canon_adj_list)
    g_hash = string_hash_fun(to_str(g_summary))
    node_hashes = None
    if hash_nodes:
        ordered_orbits = sort_orbits(canon_mapping, orbs)
        # Note that this indexing scheme departs slightly from the paper. Instead of mapping from nodes to the minimum
        # node index in the same orbit, we map from nodes to the index of the orbit, where the index of the orbit
        # is computed based on its order of appearance in ordered_orbits.
        canon_orbits_mapping = canonical_orbits_mapping(ordered_orbits)
        node_hashes = {}
        for n in original_nodes:
            quotient_node = sigma[n]
            node_hashes[n] = string_hash_fun(to_str((canon_orbits_mapping[quotient_node], g_hash)))
    return (g_hash, node_hashes)
# Compute the hashes of nodes in a graph where we have pointers to all the nodes in the node_set
# This is in contrast to the node_hashes in the hash_graph function, where we are assuming
# that we only want the hashes of one pointer into the graph
def hash_graph_node_set(g, node_set, apply_quotient=False, string_hash_fun=hash_sha256):
    """Hash g while marking every node in node_set as a distinguished 'pointer'.

    Returns (g_hash, node_hashes) where node_hashes is restricted to node_set.
    """
    # Copy the graph because we're going to need to mutate it
    g = g.copy()
    # With fewer than two pointers the marking cannot change any distinction,
    # so labels are left untouched.
    if len(node_set) >= 2:
        for n in g.nodes():
            if n in node_set:
                # Add a pointer label for each node in the node_set
                g.nodes[n]['label'] = to_str(('ptr', g.nodes[n]['label']))
            else:
                g.nodes[n]['label'] = to_str(('nonptr', g.nodes[n]['label']))
    (g_hash, node_hashes) = hash_graph(g, hash_nodes=True, apply_quotient=apply_quotient, string_hash_fun=string_hash_fun)
    node_hashes = {n : node_hashes[n] for n in node_set}
    return (g_hash, node_hashes)
def hash_scc(g, cond, scc, scc_hashes, node_hashes, apply_quotient, string_hash_fun):
    """Recursively hash one strongly connected component of g.

    cond is g's condensation (with the 'mapping'/'members' attributes NetworkX
    provides). Results are accumulated into scc_hashes and node_hashes, which
    are mutated in place; already-hashed SCCs are skipped.
    """
    if scc in scc_hashes:
        return
    scc_members = frozenset(cond.nodes[scc]['members'])
    scc_graph = g.subgraph(scc_members).copy()
    for s in scc_members:
        non_scc_succs = frozenset(g.successors(s)) - scc_members
        # Recursively hash all nodes that are the target of an edge from within the scc to outside the scc
        for t in non_scc_succs:
            if t not in node_hashes:
                t_scc = cond.graph['mapping'][t]
                hash_scc(g, cond, t_scc, scc_hashes, node_hashes, apply_quotient, string_hash_fun)
        # Fold the (sorted, hence order-independent) hashes of outside
        # successors into this node's label before hashing the SCC itself.
        non_scc_succs_hashes = sorted([node_hashes[t] for t in non_scc_succs])
        scc_graph.nodes[s]['label'] = string_hash_fun(to_str((g.nodes[s]['label'], non_scc_succs_hashes)))
    (scc_hash, scc_node_hashes) = hash_graph(scc_graph, hash_nodes=True, apply_quotient=apply_quotient, string_hash_fun=string_hash_fun)
    scc_hashes[scc] = scc_hash
    node_hashes.update(scc_node_hashes)
def merkle_hash_graph(g, nodes_to_hash=None, apply_quotient=False, precomputed_hashes=None, string_hash_fun=hash_sha256):
    """Merkle-style hash of g: hash each SCC bottom-up over the condensation.

    nodes_to_hash limits work to the SCCs containing those nodes (None hashes
    from every condensation root). precomputed_hashes seeds node_hashes and is
    copied, not mutated. Returns (scc_hashes, cond, node_hashes).
    """
    if precomputed_hashes is None:
        node_hashes = {}
    else:
        # Copy so the caller's dictionary is not mutated by hash_scc.
        node_hashes = precomputed_hashes.copy()
    scc_hashes = {}
    cond = nx.algorithms.components.condensation(g)
    if nodes_to_hash is None:
        # Hash from every root of the condensation DAG; recursion reaches the rest.
        roots = {n for (n, d) in cond.in_degree() if d == 0}
    else:
        roots = {cond.graph['mapping'][n] for n in nodes_to_hash}
    for r in roots:
        hash_scc(g, cond, r, scc_hashes, node_hashes, apply_quotient, string_hash_fun)
    return (scc_hashes, cond, node_hashes)
|
[
"caleb.helbling@yahoo.com"
] |
caleb.helbling@yahoo.com
|
f59f062c20bb92420bb4ec172e9e3f763356ef80
|
a140fe192fd643ce556fa34bf2f84ddbdb97f091
|
/.history/quiz04_20200628163202.py
|
43cf11c468754194ccbd6ea39f998db2cd2226d8
|
[] |
no_license
|
sangha0719/py-practice
|
826f13cb422ef43992a69f822b9f04c2cb6d4815
|
6d71ce64bf91cc3bccee81378577d84ba9d9c121
|
refs/heads/master
| 2023-03-13T04:40:55.883279
| 2021-02-25T12:02:04
| 2021-02-25T12:02:04
| 342,230,484
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 826
|
py
|
# ๋น์ ์ ํ๊ต์์๋ ํ์ด์ฌ ์ฝ๋ฉ ๋ํ๋ฅผ ์ฃผ์ตํฉ๋๋ค.
# ์ฐธ์๋ฅ ์ ๋์ด๊ธฐ ์ํด ๋๊ธ ์ด๋ฒคํธ๋ฅผ ์งํํ๊ธฐ๋ก ํ์์ต๋๋ค.
# ๋๊ธ ์์ฑ์๋ค ์ค์ ์ถ์ฒจ์ ํตํด 1๋ช
์ ์นํจ, 3๋ช
์ ์ปคํผ ์ฟ ํฐ์ ๋ฐ๊ฒ ๋ฉ๋๋ค.
# ์ถ์ฒจ ํ๋ก๊ทธ๋จ์ ์์ฑํ์์ค.
# ์กฐ๊ฑด 1: ํธ์์ ๋๊ธ์ 20๋ช
์ด ์์ฑํ์๊ณ ์์ด๋๋ 1~20 ์ด๋ผ๊ณ ๊ฐ์
# ์กฐ๊ฑด 2: ๋๊ธ ๋ด์ฉ๊ณผ ์๊ด ์์ด ๋ฌด์์๋ก ์ถ์ฒจํ๋ ์ค๋ณต ๋ถ๊ฐ
# ์กฐ๊ฑด 3: random ๋ชจ๋์ shuffle๊ณผ sample์ ํ์ฉ
# (์ถ๋ ฅ ์์ )
# -- ๋น์ฒจ์ ๋ฐํ - -
# ์นํจ ๋น์ฒจ์: 1
# ์ปคํผ ๋น์ฒจ์: [2, 3, 4]
# -- ์ถํํฉ๋๋ค. --
# (ํ์ฉ ์์ )
from random import *

# lst = [1, 2, 3, 4, 5]
# print(lst)
# shuffle(lst)
# print(lst)
# print(sample(lst, 1))

# Bug fix: `random(20)` raises TypeError — random() takes no arguments.
# randrange(20) + 1 draws a winner id uniformly from 1..20, matching the
# original intent.
# TODO(review): the exercise text also asks for 3 distinct coffee winners,
# e.g. sample([i for i in range(1, 21) if i != winner], 3).
winner = randrange(20) + 1
print(winner)
|
[
"sangha0719@gmail.com"
] |
sangha0719@gmail.com
|
3c3353fd4690ad220cbbd644ee0bf66566894884
|
dbd65739cd4303679c4d81726982ba7b557c812c
|
/backend/objs/ReminderEntry.py
|
298ca6241a0a27a9ad5ebb744cf24719118b2728
|
[] |
no_license
|
hnjitlh/SITE_2
|
680f49c9288aff3e52485b6f7a9487370e82ba8e
|
02a654813bd118d78df29b2b78e37fe72c145772
|
refs/heads/master
| 2022-03-19T12:26:17.669016
| 2019-11-10T03:35:36
| 2019-11-10T03:35:36
| 220,728,996
| 0
| 0
| null | 2022-02-13T11:50:35
| 2019-11-10T02:02:54
|
HTML
|
UTF-8
|
Python
| false
| false
| 542
|
py
|
from app import db
from datetime import datetime
from .Messages import Message
class ReminderEntry(db.Model):
    """A scheduled reminder belonging to a user, optionally tied to a message."""

    # Bug fix: the SQLAlchemy column constructor is `db.Column` (capital C).
    # `db.column` returns a lightweight ColumnClause, so none of these fields
    # were actually mapped as model columns.
    entry_id = db.Column(db.Integer, primary_key=True)
    user_id = db.Column(db.Integer, db.ForeignKey('user.id'), nullable=False)
    # Stores the referenced message's msg_id (integer FK), not a Message object.
    message = db.Column(db.Integer, db.ForeignKey('message.msg_id'), nullable=True)
    time = db.Column(db.DateTime)

    def get_message(self):
        """Return the Message row referenced by this entry, or None."""
        return Message.query.filter_by(msg_id=self.message).first()

    def find_date_diff(self):
        """Whole days elapsed between `time` and now (naive UTC)."""
        return (datetime.utcnow() - self.time).days
|
[
"berry64@outlook.com"
] |
berry64@outlook.com
|
c82fba8ab01d1d077471b17f9aead11553d75109
|
f24edb38dd4f7de8a7683afbbc9ab2a4237a361e
|
/venv/lib/python3.6/site-packages/pip/_internal/network/auth.py
|
64c6fd42fd13ad6212d709ee2ae711d0ca0f6507
|
[] |
no_license
|
ngecu/automate_django_data_filling
|
882220f84a6b4af5484d4b136c740a803ccccfd2
|
d6b7095904878f06e4aae6beb2156113a6145c21
|
refs/heads/main
| 2023-02-26T02:36:26.582387
| 2021-01-31T15:50:22
| 2021-01-31T15:50:22
| 317,846,258
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,537
|
py
|
"""Network Authentication Helpers
Contains interface (MultiDomainBasicAuth) and associated glue code for
providing credentials in the context of network requests.
"""
import logging
from pip._vendor.requests.auth import AuthBase, HTTPBasicAuth
from pip._vendor.requests.utils import get_netrc_auth
from pip._vendor.six.moves.urllib import parse as urllib_parse
from pip._internal.utils.misc import (
ask,
ask_input,
ask_password,
remove_auth_from_url,
split_auth_netloc_from_url,
)
from pip._internal.utils.typing import MYPY_CHECK_RUNNING
if MYPY_CHECK_RUNNING:
from typing import Any, Dict, List, Optional, Tuple
from pip._vendor.requests.models import Request, Response
from pip._internal.vcs.versioncontrol import AuthInfo
Credentials = Tuple[str, str, str]
logger = logging.getLogger(__name__)
# keyring is an optional dependency: treat "not installed" and "broke on
# import" the same way, by disabling keyring support.
try:
    import keyring  # noqa
except ImportError:
    keyring = None
except Exception as exc:
    # Some keyring backends raise arbitrary errors at import time; degrade
    # gracefully instead of crashing, but tell the user why.
    logger.warning(
        "Keyring is skipped due to an exception: %s", str(exc),
    )
    keyring = None
def get_keyring_auth(url, username):
    # type: (str, str) -> Optional[AuthInfo]
    """Return the tuple auth for a given url from keyring."""
    # keyring is disabled module-wide (set to None) if it ever raises.
    global keyring
    if not url or not keyring:
        return None
    try:
        try:
            # Newer keyring versions expose get_credential; older ones don't.
            get_credential = keyring.get_credential
        except AttributeError:
            pass
        else:
            logger.debug("Getting credentials from keyring for %s", url)
            cred = get_credential(url, username)
            if cred is not None:
                return cred.username, cred.password
            return None
        # Fallback path for keyring versions without get_credential: a
        # username is required to look up a password.
        if username:
            logger.debug("Getting password from keyring for %s", url)
            password = keyring.get_password(url, username)
            if password:
                return username, password
    except Exception as exc:
        logger.warning(
            "Keyring is skipped due to an exception: %s", str(exc),
        )
        # Disable keyring for the rest of the process after any failure.
        keyring = None
    return None
class MultiDomainBasicAuth(AuthBase):
    """requests auth handler that tracks Basic-auth credentials per index/netloc.

    Bug fix: this copy of the class had lost every ``self.`` qualifier — the
    instance attributes (``prompting``, ``index_urls``, ``passwords``,
    ``_credentials_to_save``) and the helper methods were referenced as bare
    names, which raises NameError at runtime. They are restored below.
    """

    def __init__(self, prompting=True, index_urls=None):
        # type: (bool, Optional[List[str]]) -> None
        self.prompting = prompting
        self.index_urls = index_urls
        self.passwords = {}  # type: Dict[str, AuthInfo]
        # When the user is prompted to enter credentials and keyring is
        # available, we will offer to save them. If the user accepts,
        # this value is set to the credentials they entered. After the
        # request authenticates, the caller should call
        # ``save_credentials`` to save these.
        self._credentials_to_save = None  # type: Optional[Credentials]

    def _get_index_url(self, url):
        # type: (str) -> Optional[str]
        """Return the original index URL matching the requested URL.

        Cached or dynamically generated credentials may work against
        the original index URL rather than just the netloc.

        The provided url should have had its username and password
        removed already. If the original index url had credentials then
        they will be included in the return value.

        Returns None if no matching index was found, or if --no-index
        was specified by the user.
        """
        if not url or not self.index_urls:
            return None
        for u in self.index_urls:
            prefix = remove_auth_from_url(u).rstrip("/") + "/"
            if url.startswith(prefix):
                return u
        return None

    def _get_new_credentials(self, original_url, allow_netrc=True,
                             allow_keyring=True):
        # type: (str, bool, bool) -> AuthInfo
        """Find and return credentials for the specified URL."""
        # Split the credentials and netloc from the url.
        url, netloc, url_user_password = split_auth_netloc_from_url(
            original_url,
        )

        # Start with the credentials embedded in the url
        username, password = url_user_password
        if username is not None and password is not None:
            logger.debug("Found credentials in url for %s", netloc)
            return url_user_password

        # Find a matching index url for this request
        index_url = self._get_index_url(url)
        if index_url:
            # Split the credentials from the url.
            index_info = split_auth_netloc_from_url(index_url)
            if index_info:
                index_url, _, index_url_user_password = index_info
                logger.debug("Found index url %s", index_url)

        # If an index URL was found, try its embedded credentials
        if index_url and index_url_user_password[0] is not None:
            username, password = index_url_user_password
            if username is not None and password is not None:
                logger.debug("Found credentials in index url for %s", netloc)
                return index_url_user_password

        # Get creds from netrc if we still don't have them
        if allow_netrc:
            netrc_auth = get_netrc_auth(original_url)
            if netrc_auth:
                logger.debug("Found credentials in netrc for %s", netloc)
                return netrc_auth

        # If we don't have a password and keyring is available, use it.
        if allow_keyring:
            # The index url is more specific than the netloc, so try it first
            kr_auth = (
                get_keyring_auth(index_url, username) or
                get_keyring_auth(netloc, username)
            )
            if kr_auth:
                logger.debug("Found credentials in keyring for %s", netloc)
                return kr_auth

        return username, password

    def _get_url_and_credentials(self, original_url):
        # type: (str) -> Tuple[str, Optional[str], Optional[str]]
        """Return the credentials to use for the provided URL.

        If allowed, netrc and keyring may be used to obtain the
        correct credentials.

        Returns (url_without_credentials, username, password). Note
        that even if the original URL contains credentials, this
        function may return a different username and password.
        """
        url, netloc, _ = split_auth_netloc_from_url(original_url)

        # Use any stored credentials that we have for this netloc
        username, password = self.passwords.get(netloc, (None, None))

        if username is None and password is None:
            # No stored credentials. Acquire new credentials without prompting
            # the user. (e.g. from netrc, keyring, or the URL itself)
            username, password = self._get_new_credentials(original_url)

        if username is not None or password is not None:
            # Convert the username and password if they're None, so that
            # this netloc will show up as "cached" in the conditional above.
            # Further, HTTPBasicAuth doesn't accept None, so it makes sense to
            # cache the value that is going to be used.
            username = username or ""
            password = password or ""

            # Store any acquired credentials.
            self.passwords[netloc] = (username, password)

        assert (
            # Credentials were found
            (username is not None and password is not None) or
            # Credentials were not found
            (username is None and password is None)
        ), "Could not load credentials from url: {}".format(original_url)

        return url, username, password

    def __call__(self, req):
        # type: (Request) -> Request
        # Get credentials for this request
        url, username, password = self._get_url_and_credentials(req.url)

        # Set the url of the request to the url without any credentials
        req.url = url

        if username is not None and password is not None:
            # Send the basic auth with this request
            req = HTTPBasicAuth(username, password)(req)

        # Attach a hook to handle 401 responses
        req.register_hook("response", self.handle_401)

        return req

    # Factored out to allow for easy patching in tests
    def _prompt_for_password(self, netloc):
        # type: (str) -> Tuple[Optional[str], Optional[str], bool]
        username = ask_input("User for {}: ".format(netloc))
        if not username:
            return None, None, False
        auth = get_keyring_auth(netloc, username)
        if auth and auth[0] is not None and auth[1] is not None:
            return auth[0], auth[1], False
        password = ask_password("Password: ")
        return username, password, True

    # Factored out to allow for easy patching in tests
    def _should_save_password_to_keyring(self):
        # type: () -> bool
        if not keyring:
            return False
        return ask("Save credentials to keyring [y/N]: ", ["y", "n"]) == "y"

    def handle_401(self, resp, **kwargs):
        # type: (Response, **Any) -> Response
        # We only care about 401 responses, anything else we want to just
        # pass through the actual response
        if resp.status_code != 401:
            return resp

        # We are not able to prompt the user so simply return the response
        if not self.prompting:
            return resp

        parsed = urllib_parse.urlparse(resp.url)

        # Prompt the user for a new username and password
        username, password, save = self._prompt_for_password(parsed.netloc)

        # Store the new username and password to use for future requests
        self._credentials_to_save = None
        if username is not None and password is not None:
            self.passwords[parsed.netloc] = (username, password)

            # Prompt to save the password to keyring
            if save and self._should_save_password_to_keyring():
                self._credentials_to_save = (parsed.netloc, username, password)

        # Consume content and release the original connection to allow our new
        # request to reuse the same one.
        resp.content
        resp.raw.release_conn()

        # Add our new username and password to the request
        req = HTTPBasicAuth(username or "", password or "")(resp.request)
        req.register_hook("response", self.warn_on_401)

        # On successful request, save the credentials that were used to
        # keyring. (Note that if the user responded "no" above, this member
        # is not set and nothing will be saved.)
        if self._credentials_to_save:
            req.register_hook("response", self.save_credentials)

        # Send our new request
        new_resp = resp.connection.send(req, **kwargs)
        new_resp.history.append(resp)

        return new_resp

    def warn_on_401(self, resp, **kwargs):
        # type: (Response, **Any) -> None
        """Response callback to warn about incorrect credentials."""
        if resp.status_code == 401:
            logger.warning(
                '401 Error, Credentials not correct for %s', resp.request.url,
            )

    def save_credentials(self, resp, **kwargs):
        # type: (Response, **Any) -> None
        """Response callback to save credentials on success."""
        assert keyring is not None, "should never reach here without keyring"
        if not keyring:
            return
        creds = self._credentials_to_save
        self._credentials_to_save = None
        if creds and resp.status_code < 400:
            try:
                logger.info('Saving credentials to keyring')
                keyring.set_password(*creds)
            except Exception:
                logger.exception('Failed to save credentials')
|
[
"devngecu@gmail.com"
] |
devngecu@gmail.com
|
62fa5b544c8111890d1b4fd3779bb0e5afef0918
|
5e83d62064ea4fd954820960306fb06cc8f0f391
|
/ecommerce2/settings/__init__.py
|
53cfe2179c4f250cc57fedca627640353c668e53
|
[] |
no_license
|
bharatkumarrathod/cfe_ecommerce2_RESTapi
|
eff2fad0cbff7cb3def2c13de282b085aba7291d
|
a081cdbf10c1fbde58e128b9c9b287443c726071
|
refs/heads/master
| 2020-12-25T21:43:44.166109
| 2015-10-27T21:04:19
| 2015-10-27T21:04:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 123
|
py
|
"""Load Django settings: base settings, then optional local/production overrides."""
from .base import *

# Fix: the bare `except:` clauses swallowed *every* error — including real
# bugs (SyntaxError, NameError) inside local.py/production.py. Only a missing
# override module should be ignored.
try:
    from .local import *
except ImportError:
    pass

try:
    from .production import *
except ImportError:
    pass
|
[
"carlofusiello@gmail.com"
] |
carlofusiello@gmail.com
|
371ee8cb4b4f7e37636a6fbfe01b1f1ba8180744
|
f8b5aafac15f408a48fabf853a918015c927e6fe
|
/bk_tomo/venv/venv27/bin/openstack
|
ef4239b2369d1cd6ac9e4daa1bf696a84ace7ec5
|
[] |
no_license
|
to30/tmp
|
bda1ac0ca3fc61e96c2a1c491367b698d7e97937
|
ec809683970af6787728c2c41f161f416155982a
|
refs/heads/master
| 2021-01-01T04:25:52.040770
| 2016-05-13T16:34:59
| 2016-05-13T16:34:59
| 58,756,087
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 243
|
#!/home/tomo/venv/venv27/bin/python2.7
# -*- coding: utf-8 -*-
# Setuptools-style console entry point for the OpenStack CLI.
import re
import sys

from openstackclient.shell import main

if __name__ == '__main__':
    # Strip a trailing "-script.pyw" or ".exe" from argv[0] so the program
    # name reported by the CLI matches the installed command name.
    sys.argv[0] = re.sub(r'(-script\.pyw|\.exe)?$', '', sys.argv[0])
    sys.exit(main())
|
[
"tomonaga@mx2.mesh.ne.jp"
] |
tomonaga@mx2.mesh.ne.jp
|
|
99bb440e3d91a657af83b6b5699a5675b2c46f7c
|
ca7aa979e7059467e158830b76673f5b77a0f5a3
|
/Python_codes/p03136/s297842517.py
|
a2a6230496234027046d6691748a5f445af9dd64
|
[] |
no_license
|
Aasthaengg/IBMdataset
|
7abb6cbcc4fb03ef5ca68ac64ba460c4a64f8901
|
f33f1c5c3b16d0ea8d1f5a7d479ad288bb3f48d8
|
refs/heads/main
| 2023-04-22T10:22:44.763102
| 2021-05-13T17:27:22
| 2021-05-13T17:27:22
| 367,112,348
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 163
|
py
|
# Reads n and n stick lengths; prints "Yes" when the longest stick is
# strictly shorter than the sum of all the others (the polygon inequality),
# otherwise "No".
n = int(input())
a = list(map(int, input().split()))
# Fix: removed the dead `b = [0]*n` (it was immediately overwritten) and
# replaced the manual accumulation loop with sum().
b = sorted(a)
c = sum(b[:-1])
if c > b[n-1]:
    print("Yes")
else:
    print("No")
|
[
"66529651+Aastha2104@users.noreply.github.com"
] |
66529651+Aastha2104@users.noreply.github.com
|
140cc07b959bb90f2ddd870de64a3881a6e885bc
|
24cd8ef47e6afd2883836ee69f2de95da97ccebd
|
/system.py
|
33f89b8abbd47a3aaffd745808ced2321692974d
|
[] |
no_license
|
joaowinderfeldbussolotto/TrabalhoFinalAlgoritmos
|
8ea35e9cde02624e25bf75536d508de7827ca122
|
753d384cb57eb4e2ef10690cd5a0e5281b0c7529
|
refs/heads/main
| 2023-08-13T22:35:55.788565
| 2021-09-24T19:36:51
| 2021-09-24T19:36:51
| 409,734,695
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,315
|
py
|
from models.user import *
from utils.persistence import *
from utils.helper import *
from models.cartItem import *
from time import sleep
#create_table_products()
#create_table_users()
def main() -> None:
    """Program entry point: open the top-level menu with no user logged in."""
    # Fix: removed the unused local `loggedUser = None`; menu() already
    # defaults its parameter to None.
    menu()
def menu(loggedUser = None) -> None:
    """Print the main menu, read an option, dispatch, and recurse.

    The menu loops by recursion: most branches call menu(loggedUser) again
    after handling the option; only option 14 exits the process.
    """
    sleep(1)
    print('===================================')
    print('=========== Sistema Amazon ==========')
    print('===========Bem-vindo(a) ==========')
    print('===================================')
    print('Selecione uma opรงรฃo abaixo: ')
    print('1 - Cadastrar Usuรกrio')
    print('2 - Listar usuรกrios')
    print('3 - Realizar login de cliente')
    print('4 - Adicionar produto')
    print('5 - Editar produtos')
    print('6 - Deletar produtos')
    print('7 - Listar produtos')
    print('8 - Comprar produto')
    print('9 - Visualizar carrinho')
    print('10 - Remover item do carrinho')
    print('11 - Fechar pedido')
    print('12 - Adicionar saldo')
    print('13 - Logout')
    print('14 - Sair do sistema')
    op: int = int(input())
    if op == 1:
        register_user(loggedUser)
        menu(loggedUser)
    # NOTE(review): this is `if`, not `elif`, unlike every other branch —
    # harmless only because the branch above never falls through; confirm.
    if op == 2:
        show_users(loggedUser)
        menu(loggedUser)
    elif op == 3:
        loggedUser = login(loggedUser)
        menu(loggedUser)
    elif op == 4:
        add_product(loggedUser)
        menu(loggedUser)
    elif op == 5:
        edit_products(loggedUser)
        menu(loggedUser)
    elif op == 6:
        remove_products(loggedUser)
        menu(loggedUser)
    elif op == 7:
        list_products()
        menu(loggedUser)
    elif op == 8:
        # NOTE(review): unlike the other options this branch does not recurse
        # into menu() — presumably buy_products does so itself; verify.
        buy_products(loggedUser)
    elif op == 9:
        show_cart(loggedUser)
        menu(loggedUser)
    elif op == 10:
        remove_from_cart(loggedUser)
        menu(loggedUser)
    elif op == 11:
        close_order(loggedUser)
        menu(loggedUser)
    elif op == 12:
        payment(loggedUser)
        menu(loggedUser)
    elif op == 13:
        logout()
        menu()
    elif op == 14:
        logout()
        print('Volte sempre')
        exit(2)
    else:
        print('Opรงรฃo nรฃo vรกlida')
        menu(loggedUser)
def register_user(loggedUser):
    """Prompt for the data of a new user and persist it.

    When the caller is a logged-in admin (permission == 1) the account type
    can be chosen; otherwise a regular client account is created.
    """
    print("----Dados do cliente----")
    name = input("Informe nome do cliente: ")
    CPF = input("Informe CPF do cliente: ")
    email = input("Informe email do cliente: ")
    pwd = input("Informe a senha do cliente: ")
    if isUserLogged(loggedUser) and loggedUser.permission == 1:
        permission = input('Tipo de usuรกrio: 1 - Admin 2-Cliente :')
        user = User(name, email, CPF, pwd, None, [], permission)
    else:
        # BUG FIX: this line used to run unconditionally, discarding the
        # admin-created user (and its chosen permission) built above.
        user = User(name, email, CPF, pwd)
    add_user_db(user)
def show_users(loggedUser):
    """List every registered user; requires a logged-in user with permission."""
    if isUserLogged(loggedUser) and userHasPermission(loggedUser):
        list_users()
def login(loggedUser = None):
    """Authenticate a user with email/password read from stdin.

    Returns the logged-in User on success, or None when authentication
    fails and the user gives up retrying.
    """
    if isUserLogged(loggedUser):
        print('Usuรกrio jรก estรก logado')
        return loggedUser
    email = input("Digite o email: ")
    if '@' in email:
        pwd = input("Digite a senha: ")
        loggedUser = dao_login(email, pwd)
        if loggedUser == None:
            op1 = input("Deseja tentar novamente? Digite 1 para sim e qualquer tecla para nรฃo")
            if op1 == '1':
                # BUG FIX: the retry's result used to be discarded, so a
                # successful second attempt still returned None.
                loggedUser = login()
    else:
        print('Email invalido')
        # BUG FIX: likewise, keep the result of the recursive attempt.
        loggedUser = login()
    return loggedUser
def add_product(loggedUser:User):
    """Read name and price from stdin and persist a new product; admin-only."""
    if not (isUserLogged(loggedUser) and userHasPermission(loggedUser)):
        return
    name = input('Informe o nome do produto: ')
    price = float(input('Informe o preรงo: '))
    # The next free code is the current product count.
    add_product_db(Product(name, price, get_number_of_products()))
def edit_products(loggedUser):
    """Edit the name/price of a product chosen by its code; admin-only."""
    if isUserLogged(loggedUser):
        if userHasPermission(loggedUser):
            # CONSISTENCY FIX: only show the catalogue after the permission
            # check, mirroring remove_products (it used to print for everyone).
            list_products()
            productCode = int(input('Informe o codigo do produto que deseja editar: '))
            if productCode in get_list_of_codes():
                productName= input('Informe o nome do produto: ')
                productPrice = float(input('Informe o preรงo: '))
                p = Product(productName, productPrice, productCode)
                # False flags an update (True would delete) in the DAO helper.
                update_delete_product_in_db(p, False)
            else:
                print('Produto nรฃo encontrado')
def remove_products(loggedUser):
    """Delete a product chosen by its code; admin-only."""
    if isUserLogged(loggedUser):
        if userHasPermission(loggedUser):
            list_products()
            productCode = input('Informe o codigo do produto que deseja apagar: ')
            if int(productCode) in get_list_of_codes():
                p = get_product_by_code(productCode)
                print(p.name,' Removido ')
                # True flags a delete (False would mean update) in the DAO helper.
                update_delete_product_in_db(p, True)
            else:
                print('Produto nรฃo encontrado')
def buy_products(loggedUser):
    """Pick a product by code, check the user's limit and add it to the cart.

    Unlike the other actions, this one re-enters the menu itself at the end.
    """
    list_products()
    if isUserLogged(loggedUser):
        productCode = input('Informe o cรณdigo dos produtos que deseja comprar: ')
        p = get_product_by_code(productCode)
        if p == None:
            print('Produto nรฃo encontrado. Deseja tentar novamente? Digite 1 para sim')
            op = input()
            if op == '1':
                buy_products(loggedUser)
        else:
            print(p)
            quantity = int(input('Informe quantas unidades deseja adcionar ao carrinho: '))
            totalPrice = (p.price*quantity)
            # Only add to the cart when the user's limit covers the total.
            if loggedUser.hasLimit(totalPrice):
                print('Compra aprovada. O usuario possui limite')
                cartItem = CartItem(p, quantity)
                loggedUser.add_to_cart(cartItem)
                add_cart_item_db(loggedUser, cartItem)
            else:
                print('O usuario '+loggedUser.name+' nรฃo possui limite para tal compra')
    menu(loggedUser)
def show_cart(loggedUser):
    """Print the logged-in user's cart (no-op when nobody is logged in)."""
    if isUserLogged(loggedUser):
        loggedUser.show_user_cart()
def remove_from_cart(loggedUser:User):
    """Remove a number of units of a cart item chosen by its code."""
    if isUserLogged(loggedUser):
        show_cart(loggedUser)
        itemCode = int(input('Digite o cรณdigo do item que deseja remover:'))
        # BUG FIX: the quantity was passed as a raw string while the code was
        # int-converted; convert it too so quantity arithmetic in the model works.
        itemQuantity = int(input('Informe quantas unidades deseja remover:'))
        loggedUser.remove_item_from_cart(itemCode, itemQuantity)
        update_cart_item_in_db(loggedUser, itemCode)
def close_order(loggedUser: User):
    """Checkout: charge the cart total, persist the user and empty the cart."""
    if isUserLogged(loggedUser):
        if len(loggedUser.cart) > 0:
            show_cart(loggedUser)
            op = input('Deseja fechar pedido? 1 para sim: ')
            if op == '1':
                # Deduct the total from the user's balance/limit.
                loggedUser.purchase(loggedUser.cart_total())
                update_user_in_file(loggedUser)
                # Clear both the in-memory cart and its persisted copy.
                loggedUser.cart.clear()
                delete_cart_from_db(loggedUser)
                print('Compra realizada com sucesso!')
            else:
                return
        else:
            print('Carrinho estรก vazio')
    else:
        return
def payment(loggedUser):
    """Top up the logged-in user's balance (boleto deposit) and persist it."""
    if not isUserLogged(loggedUser):
        return
    print('Saldo atual: ', formata_float_str_moeda(loggedUser.limit))
    deposit = float(input("Informe quanto de saldo irรก adicionar via boleto:"))
    loggedUser.limit += deposit
    update_user_in_file(loggedUser)
    print('Saldo atual: ', formata_float_str_moeda(loggedUser.limit))
def logout():
    # NOTE(review): this only rebinds a local name, so it has no effect on
    # the caller; the menu actually "forgets" the user by calling menu()
    # with no argument after logout. Kept as-is for compatibility.
    loggedUser = None
# Run the interactive shop only when executed as a script, not on import.
if __name__ == '__main__':
    main()
|
[
"joaowinderfeldbussolotto@gmail.com"
] |
joaowinderfeldbussolotto@gmail.com
|
1c74451d6dd86c81bcc9238ce0d1267915d0facd
|
2b80bd308a6f768e499927761d64928b8cd9da15
|
/job/migrations/0009_auto_20200722_1609.py
|
d58a3cca04db92c33ef4b9afaa578eb45eacee8e
|
[] |
no_license
|
karimmerabtane/django_job_board
|
a0015cfffc4e8b4d68a8db8c9cc2ac1e0c8400f3
|
21238a12c1a6873cf0fb278038bac9b37d51fdea
|
refs/heads/master
| 2023-08-03T20:29:11.957695
| 2020-07-26T23:52:44
| 2020-07-26T23:52:44
| 279,559,386
| 0
| 0
| null | 2021-09-22T19:28:39
| 2020-07-14T10:53:41
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 355
|
py
|
# Generated by Django 3.0.8 on 2020-07-22 16:09
from django.db import migrations
class Migration(migrations.Migration):
    """Auto-generated: rename JobApply.job to JobApply.job_apply."""

    dependencies = [
        ('job', '0008_jobapply_job'),
    ]

    operations = [
        migrations.RenameField(
            model_name='jobapply',
            old_name='job',
            new_name='job_apply',
        ),
    ]
|
[
"karim.sniper88@gmail.com"
] |
karim.sniper88@gmail.com
|
4453fb58e33a80b6a1510a8e4e5c633e06b4cdc2
|
e36985669a2b068dfb3e43b7f5870dc114bb158b
|
/python_code/dataExtraction.py
|
7722d25b7d06ff6e71446c9ef08cf4b970e527d8
|
[] |
no_license
|
assassint2017/Data-extraction-UI
|
b3f0f43dc48e12c0da158bdb4a7c2c9dd5d92ab5
|
d7e1b97100ad97b334f03b0fbf09c2a506339b1c
|
refs/heads/master
| 2020-04-11T06:18:50.417214
| 2018-12-21T12:38:47
| 2018-12-21T12:38:47
| 161,577,841
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,732
|
py
|
"""
ๆฐๆฎๆๅไปฃ็
"""
import datetime
import pandas as pd
from numpy import nan
#-------------------------้่ฆ่ฎพ็ฝฎ็้จๅ-------------------------------
# ่ฎพๅฎcsvๆไปถ่ทฏๅพ ่ทฏๅพไธญไธ่ฆๅบ็ฐไธญๆ
# csvDir = 'C:\\Users\\14595\\Desktop\\2018HB example .csv.gz'
# ่ฎพๅฎๆๅcsvๆไปถ่ทฏๅพ ่ทฏๅพไธญไธ่ฆๅบ็ฐไธญๆ
# extDir = 'C:\\Users\\14595\\Desktop\\ext.csv'
# ๅๅจ็นๆฐ้ๆฑๆปๆไปถ่ทฏๅพ ่ทฏๅพไธญไธ่ฆๅบ็ฐไธญๆ
# summaryDir = 'C:\\Users\\14595\\Desktop\\summary.csv'
# ่ฎพๅฎๆถ้ดๅบ้ด
# start = pd.Timestamp(datetime.date(year=2018, month=1, day=1))
# end = pd.Timestamp(datetime.date(year=2018, month=5, day=30))
# ่ฎพๅฎ้ๅฎ็ๅฐๅบ
# locs = [42010200, 42050300, 42050600]
#---------------------------------------------------------------------
def dataExtraction(csvDir, extDir, summaryDir, start, end, locs):
    """Filter a gzip'd GBK csv by diagnosis-date range and/or area codes,
    save the extracted rows, then write a per-hospital case-count summary.

    NOTE(review): the multi-byte (Chinese) column-name literals below appear
    mojibake-corrupted in this copy of the file; they must match the real
    column headers of the source csv - restore from the original encoding.
    """
    # Load the gzip-compressed, GBK-encoded csv.
    csv = pd.read_csv(csvDir, compression='gzip', encoding='gbk')
    # Normalise the diagnosis-time column to datetimes.
    csv['่ฏๆญๆถ้ด'] = pd.to_datetime(csv['่ฏๆญๆถ้ด'], format='%Y/%m/%d')
    # Row filtering, depending on which criteria were supplied.
    if start is None and end is None:  # only area codes chosen
        csv = csv[csv['ๆฅๅๅไฝๅฐๅบ็ผ็ '].isin(locs)]
    elif locs is None:  # only a date range chosen
        csv = csv[(csv['่ฏๆญๆถ้ด'] >= start) & (csv['่ฏๆญๆถ้ด'] <= end)]
    else:  # both criteria chosen
        csv = csv[(csv['่ฏๆญๆถ้ด'] >= start) & (csv['่ฏๆญๆถ้ด'] <= end) & (csv['ๆฅๅๅไฝๅฐๅบ็ผ็ '].isin(locs))]
    # Persist the extracted rows.
    csv.to_csv(extDir, index=0, encoding='gbk')

    def removeSpace(item):
        """Strip spaces accidentally typed during data entry."""
        return item.strip()

    # NOTE(review): Series.apply returns a new Series; the result is not
    # assigned back, so this strip currently has no effect - confirm intent.
    csv['ๅฝๅก็จๆทๆๅฑๅไฝ'].apply(removeSpace)
    # Case counts per hospital, most frequent first.
    temp = pd.value_counts(csv['ๅฝๅก็จๆทๆๅฑๅไฝ'])
    codes = []
    for hospital in list(temp.index):
        # Take the area code of the first row belonging to this hospital.
        index = csv[csv['ๅฝๅก็จๆทๆๅฑๅไฝ'] == hospital].index.tolist()[0]
        codes.append(csv['ๆฅๅๅไฝๅฐๅบ็ผ็ '][index])
    summary = pd.DataFrame()
    summary['ๆฅๅๅไฝๅฐๅบ็ผ็ '] = codes
    summary['ๆฅๅๅไฝ'] = list(temp.index)
    summary['็ไพๆฐ'] = temp.values
    summary.sort_values(by=['ๆฅๅๅไฝๅฐๅบ็ผ็ '], inplace=True)
    summary.reset_index(drop=True, inplace=True)
    # Blank out repeated area codes so each code appears only once in the output.
    nanlist = []
    for i in range(1, len(summary['ๆฅๅๅไฝๅฐๅบ็ผ็ '])):
        if summary.loc[i, 'ๆฅๅๅไฝๅฐๅบ็ผ็ '] == summary.loc[i - 1, 'ๆฅๅๅไฝๅฐๅบ็ผ็ ']:
            nanlist.append(i)
    for i in nanlist:
        summary.loc[i, 'ๆฅๅๅไฝๅฐๅบ็ผ็ '] = nan
    summary.to_csv(summaryDir, index=False, encoding='gbk')
|
[
"noreply@github.com"
] |
assassint2017.noreply@github.com
|
d10e49aa112d5e0cc239ab1fe98b7aa0083f8b76
|
40d5799cc031d234a1b35b7f57cd9784d01ab090
|
/manage.py
|
f5f8b98b330815659267578a77ecc90c6e757a1d
|
[] |
no_license
|
ramsaicharan/movielisting
|
bc7641c3b3df4a79cefa7647f798466ce0a302e1
|
76c84acb1f84a4c0e09db74a2643ea31104c4cfb
|
refs/heads/master
| 2020-06-25T13:49:06.216843
| 2020-06-03T06:31:43
| 2020-06-03T06:31:43
| 199,326,884
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 551
|
py
|
#!/usr/bin/env python
import os
import sys
if __name__ == '__main__':
    # Point Django at this project's settings before importing framework code.
    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'imdb.settings')
    try:
        from django.core.management import execute_from_command_line
    except ImportError as exc:
        raise ImportError(
            "Couldn't import Django. Are you sure it's installed and "
            "available on your PYTHONPATH environment variable? Did you "
            "forget to activate a virtual environment?"
        ) from exc
    # Delegate to Django's CLI (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
|
[
"noreply@github.com"
] |
ramsaicharan.noreply@github.com
|
c630daa51e38e135e196be1383a6a6557486aa06
|
a68159363fb88fcd105f3a305d1ee5df445a832c
|
/letters_1000_numbers_problem-17.py
|
3304126eed5e9f6fab90ce65c2e180a52f130c1f
|
[] |
no_license
|
lokendra7512/Project-Euler-Solutions
|
8a6c8647cfdbfd71adddee9b86ee127fe1e69709
|
23a467af7807e154c9985bfebbe08f60b9418c4d
|
refs/heads/master
| 2022-12-24T07:18:18.025811
| 2020-09-30T19:05:11
| 2020-09-30T19:05:11
| 300,028,993
| 0
| 0
| null | 2020-09-30T19:03:47
| 2020-09-30T19:03:47
| null |
UTF-8
|
Python
| false
| false
| 815
|
py
|
"""
Project Euler 17: count the letters used when writing out 1..1000 in
British English words ("and" included; spaces and hyphens not counted).

BUG FIXES vs. the original:
* teens (10-19) now use their own irregular words (14 was counted as
  "ten"+"four" = 7 letters instead of "fourteen" = 8);
* "and" is only added when something follows the hundreds (the original
  added it for 100, 200, ... too);
* the i == 1000 branch now stops instead of also counting "ten hundred".
Also ported from Python 2 (xrange / print statement) to Python 3.
"""

# Letter counts for every word that can appear as a component.
letters = {1: 3, 2: 3, 3: 5, 4: 4, 5: 4, 6: 3, 7: 5, 8: 5, 9: 4, 10: 3, 11: 6, 12: 6, 13: 8, 14: 8, 15: 7, 16: 7, 17: 9, 18: 8, 19: 8, 20: 6, 30: 6, 40: 5, 50: 5, 60: 5, 70: 7, 80: 6, 90: 6, 100: 7, 1000: 8}
sums = 0
for i in range(1, 1001):
    if i == 1000:
        # "one thousand" -> 11 letters; nothing else for this number.
        sums += 11
        continue
    hundreds_value, rem = divmod(i, 100)
    if hundreds_value > 0:
        sums += letters[hundreds_value] + letters[100]
        if rem > 0:
            # "and" only when something follows ("one hundred and five").
            sums += 3
    if 10 <= rem <= 19:
        # "ten" and the teens have their own irregular words.
        sums += letters[rem]
    else:
        tens_value, unit_value = divmod(rem, 10)
        if tens_value > 0:
            sums += letters[tens_value * 10]
        if unit_value > 0:
            sums += letters[unit_value]
print(sums)
|
[
"abdulapopoola@gmail.com"
] |
abdulapopoola@gmail.com
|
3d0de491cc84f31ed3d96fa5ed6bc7a73bae9020
|
a5c14053b5a763dd9bbe28a0a9c34a88bc280584
|
/app/admin/views.py
|
1b1505c53465756ff2c71231a41296616627a067
|
[] |
no_license
|
smile0304/flask_movie_project
|
dbcbda194438d324fa89e682f3f8c5e1abf0ae53
|
c228821bf90468af8d20f9632ad4e2735c0c7239
|
refs/heads/master
| 2021-06-25T09:05:34.090540
| 2017-09-11T13:02:06
| 2017-09-11T13:02:06
| 103,136,236
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,397
|
py
|
#coding:utf8
from . import admin
from flask import render_template,redirect,url_for,flash,session,request,abort
from app.admin.forms import LoginForm,TagForm,MovieForm,PrivateForm,PwdForm,AuthForm,RoleForm,AdminForm
from app.models import Admin,Tag,Movie,Preview,User,Comment,Movicecol,Oplog,Adminlog,Userlog,Auth,Role
from functools import wraps
from app import db,app
from werkzeug.utils import secure_filename
import os
import uuid
import datetime
#ไธไธๆๅค็ๅจ
@admin.context_processor
def tpl_extra():
    """Inject the current server time into every admin template."""
    now = datetime.datetime.now().strftime("%Y-%m-%d %H:%M:%S")
    return dict(online_time=now)
#็ปๅฝ่ฃ
้ฅฐๅจ
def admin_login_req(f):
    """Decorator: require an authenticated admin session.

    Redirects to the login page when no "admin" key is present in the
    session; the originally requested URL is forwarded as ?next= so login
    can return to it afterwards.
    """
    @wraps(f)
    def decorated_function(*args,**kwargs):
        if "admin" not in session:
            # "next" records the page the visitor originally tried to open.
            return redirect(url_for("admin.login",next=request.url))
        return f(*args,**kwargs)
    return decorated_function
#ๆ้ๆงๅถ่ฃ
้ฅฐๅจ
def admin_auth(f):
    """Decorator: enforce per-role URL permissions.

    Superadmins (is_super == 0) bypass the check; for everyone else the
    current request's URL rule must appear among the Auth rows granted to
    the admin's role, otherwise the request is aborted with 404.
    """
    @wraps(f)
    def decorated_function(*args, **kwargs):
        admin = Admin.query.join(
            Role
        ).filter(
            Role.id == Admin.role_id,
            Admin.id == session["admin_id"]
        ).first()
        if admin.is_super != 0:
            # role.auths is stored as a comma-separated string of Auth ids.
            auths = admin.role.auths
            auths = list(map(lambda v: int(v), auths.split(",")))
            auth_list = Auth.query.all()
            urls = [v.url for v in auth_list for val in auths if val == v.id]
            rule = request.url_rule
            if str(rule) not in urls:
                abort(404)
        return f(*args, **kwargs)
    return decorated_function
#ไฟฎๆนๆไปถๅ็งฐ
def change_filename(filename):
    """Build a unique server-side name: timestamp + random hex, keeping the
    uploaded file's original extension."""
    _, ext = os.path.splitext(filename)
    stamp = datetime.datetime.now().strftime("%Y%m%d%H%M%S")
    return stamp + uuid.uuid4().hex + ext
@admin.route("/")
@admin_login_req
def index():
    """Admin dashboard home page."""
    return render_template("admin/index.html")
#็ปๅฝ
@admin.route("/login/",methods=["GET","POST"])
def login():
    """Admin sign-in: validate credentials, store the session, log the visit."""
    form = LoginForm()
    if form.validate_on_submit():
        data = form.data
        # Renamed local (was `admin`) to avoid shadowing the blueprint.
        admin_user = Admin.query.filter_by(name=data["account"]).first()
        # BUG FIX: guard against an unknown account name; first() returns
        # None then, and the original crashed with AttributeError.
        if admin_user is None or not admin_user.check_pwd(data["pwd"]):
            flash("ๅฏ็ ้่ฏฏ!","err")
            return redirect(url_for("admin.login"))
        session["admin"] = data["account"]
        session["admin_id"] = admin_user.id
        adminlogin = Adminlog(
            admin_id = admin_user.id,
            IP = request.remote_addr
        )
        db.session.add(adminlogin)
        db.session.commit()
        # "next" carries the URL the visitor originally tried to open.
        return redirect(request.args.get("next") or url_for("admin.index"))
    return render_template("admin/login.html",form=form)
@admin.route("/logout/")
@admin_login_req
def logout():
    """Drop the admin session keys and return to the login page."""
    for key in ("admin", "admin_id"):
        session.pop(key, None)
    return redirect(url_for('admin.login'))
#ไฟฎๆนๅฏ็
@admin.route("/pwd/",methods=["GET","POST"])
@admin_login_req
def pwd():
    """Change the logged-in admin's password, then force a re-login."""
    form = PwdForm()
    if form.validate_on_submit():
        data = form.data
        admin = Admin.query.filter_by(name=session["admin"]).first()
        # Local import: hashing is only needed on successful submission.
        from werkzeug.security import generate_password_hash
        admin.pwd = generate_password_hash(data["new_pwd"])
        db.session.add(admin)
        db.session.commit()
        flash("ไฟฎๆนๅฏ็ ๆๅ,่ฏท้ๆฐ็ปๅฝ!", "ok")
        return redirect(url_for('admin.logout'))
    return render_template("admin/pwd.html",form=form)
#ๆทปๅ ๆ ็ญพ
@admin.route("/tag/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def tag_add():
    """Create a movie tag (unique by name) and record the action in Oplog."""
    form = TagForm()
    if form.validate_on_submit():
        data = form.data
        tag = Tag.query.filter_by(name=data["name"]).count()
        if tag == 1:
            # Duplicate name: refuse and re-show the form.
            flash("่ฏฅๆ ็ญพๅๅทฒๅญๅจ","err")
            return redirect(url_for('admin.tag_add'))
        tag = Tag(
            name=data["name"]
        )
        db.session.add(tag)
        db.session.commit()
        flash("ๆทปๅ ๆ ็ญพๆๅ!","ok")
        # Audit trail: who added which tag, from which IP.
        oplog = Oplog(
            admin_id = session["admin_id"],
            IP = request.remote_addr,
            reason = "ๆทปๅ ๆ ็ญพ%s" % data["name"]
        )
        db.session.add(oplog)
        db.session.commit()
        return redirect(url_for('admin.tag_add'))
    return render_template("admin/tag_add.html",form=form)
#ๆ ็ญพๅ่กจ
@admin.route("/tag/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def tag_list(page=None):
    """Paginated tag listing, newest first (10 per page)."""
    page = 1 if page is None else page
    query = Tag.query.order_by(Tag.addtime.desc())
    page_data = query.paginate(page=page, per_page=10)
    return render_template("admin/tag_list.html", page_data=page_data)
#็ผ่พๆ ็ญพ
@admin.route("/tag/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def tag_edit(id=None):
    """Rename an existing tag, rejecting names already in use."""
    form = TagForm()
    tag = Tag.query.get_or_404(id)
    if form.validate_on_submit():
        data = form.data
        tag_count = Tag.query.filter_by(name=data["name"]).count()
        # Only reject when the new name belongs to a *different* tag.
        if tag.name !=data["name"] and tag_count == 1:
            flash("่ฏฅๆ ็ญพๅๅทฒๅญๅจ","err")
            return redirect(url_for('admin.tag_edit',id=id))
        tag.name = data["name"]
        db.session.add(tag)
        db.session.commit()
        flash("ไฟฎๆนๆ ็ญพๆๅ!","ok")
        return redirect(url_for('admin.tag_edit',id=id))
    return render_template("admin/tag_edit.html",form=form,tag=tag)
#ๆ ็ญพๅ ้ค
@admin.route("/tag/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def tag_del(id=None):
    """Delete a tag by id (404 when absent), then return to page 1."""
    doomed = Tag.query.filter_by(id=id).first_or_404()
    db.session.delete(doomed)
    db.session.commit()
    flash("ๅ ้คๆ ็ญพๆๅ!", "ok")
    return redirect(url_for('admin.tag_list',page=1))
#ๆทปๅ ็ตๅฝฑ
@admin.route("/movie/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def movie_add():
    """Upload a new movie (video file + poster) and store its metadata."""
    form = MovieForm()
    if form.validate_on_submit():
        data = form.data
        file_url = secure_filename(form.url.data.filename)
        file_logo = secure_filename(form.logo.data.filename)
        if not os.path.exists(app.config["UP_DIR"]):
            os.makedirs(app.config["UP_DIR"])
            # BUG FIX: os.chmod requires a numeric mode; the former string
            # argument "rw" raised TypeError on the first upload.
            os.chmod(app.config["UP_DIR"], 0o755)
        # Unique server-side names, keeping the original extensions.
        url = change_filename(file_url)
        logo = change_filename(file_logo)
        form.url.data.save(app.config["UP_DIR"] + url)
        form.logo.data.save(app.config["UP_DIR"] + logo)
        movie = Movie(
            title=data["title"],
            url=url,
            info=data["info"],
            logo=logo,
            star=int(data["star"]),
            playnum=0,
            commentnum=0,
            tag_id=int(data["tag_id"]),
            area=data["area"],
            release_time=data["release_time"],
            length=data["length"]
        )
        db.session.add(movie)
        db.session.commit()
        flash("ๆทปๅ ็ตๅฝฑๆๅ๏ผ", "ok")
        return redirect(url_for('admin.movie_add'))
    return render_template("admin/movie_add.html", form=form)
#็ตๅฝฑๅ่กจ
@admin.route("/movie/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def movie_list(page=None):
    """Paginated movie listing joined with its tag, newest first."""
    if page is None:
        page = 1
    page_data = Movie.query.join(Tag).filter(
        Tag.id == Movie.tag_id
    ).order_by(
        Movie.addtime.desc()
    ).paginate(page=page, per_page=10)
    return render_template("admin/movie_list.html",page_data=page_data)
#ๅ ้ค็ตๅฝฑ
@admin.route("/movie/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def movie_del(id=None):
    """Delete a movie by id (404 when missing), then return to page 1."""
    doomed = Movie.query.get_or_404(int(id))
    db.session.delete(doomed)
    db.session.commit()
    flash("ๅ ้ค็ตๅฝฑๆๅ!", "ok")
    return redirect(url_for('admin.movie_list',page=1))
#็ผ่พ็ตๅฝฑ
@admin.route("/movie/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def movie_edit(id=None):
    """Edit a movie's metadata; file fields are optional (keep old on empty)."""
    form = MovieForm()
    # Uploads are optional here: keep the stored files when the fields are empty.
    form.url.validators = []
    form.logo.validators = []
    movie = Movie.query.get_or_404(int(id))
    if request.method == 'GET':
        form.info.data = movie.info
        form.tag_id.data = movie.tag_id
        form.star.data = movie.star
    if form.validate_on_submit():
        data = form.data
        # (was `movie_conut` - typo fixed in the local name)
        movie_count = Movie.query.filter_by(title=data["title"]).count()
        if movie_count == 1 and movie.title != data["title"]:
            flash("็ๅๅทฒ็ปๅญๅจ๏ผ", "err")
            return redirect(url_for('admin.movie_edit', id=movie.id))
        if not os.path.exists(app.config["UP_DIR"]):
            os.makedirs(app.config["UP_DIR"])
            # BUG FIX: os.chmod requires a numeric mode, not the string "rw".
            os.chmod(app.config["UP_DIR"], 0o755)
        if form.url.data.filename != "":
            file_url = secure_filename(form.url.data.filename)
            movie.url = change_filename(file_url)
            form.url.data.save(app.config["UP_DIR"] + movie.url)
        if form.logo.data.filename != "":
            file_logo = secure_filename(form.logo.data.filename)
            movie.logo = change_filename(file_logo)
            form.logo.data.save(app.config["UP_DIR"] + movie.logo)
        movie.star = data["star"]
        movie.tag_id = data["tag_id"]
        movie.info = data["info"]
        movie.title = data["title"]
        movie.area = data["area"]
        movie.length = data["length"]
        movie.release_time = data["release_time"]
        db.session.add(movie)
        db.session.commit()
        flash("ไฟฎๆน็ตๅฝฑๆๅ๏ผ", "ok")
        return redirect(url_for('admin.movie_edit',id=movie.id))
    return render_template("admin/movie_edit.html", form=form,movie=movie)
#ๆทปๅ ้ขๅ
@admin.route("/preview/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def preview_add():
    """Upload a new coming-soon preview (title + poster image)."""
    form = PrivateForm()
    if form.validate_on_submit():
        data = form.data
        file_logo = secure_filename(form.logo.data.filename)
        if not os.path.exists(app.config["UP_DIR"]):
            os.makedirs(app.config["UP_DIR"])
            # BUG FIX: os.chmod requires a numeric mode, not the string "rw".
            os.chmod(app.config["UP_DIR"], 0o755)
        logo = change_filename(file_logo)
        form.logo.data.save(app.config["UP_DIR"] + logo)
        preview = Preview(
            title = data["title"],
            logo= logo
        )
        db.session.add(preview)
        db.session.commit()
        # BUG FIX: the success message said "edit" although this is the add view.
        flash("ๆทปๅ ้ขๅๆๅ๏ผ", "ok")
        return redirect(url_for('admin.preview_add'))
    return render_template("admin/preview_add.html",form=form)
#้ขๅๅ่กจ
@admin.route("/preview/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def preview_list(page=None):
if page is None:
page = 1
page_data = Preview.query.order_by(
Preview.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/preview_list.html",page_data=page_data)
#้ขๅๅ ้ค
@admin.route("/preview/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def preview_del(id=None):
preview = Preview.query.get_or_404(int(id))
db.session.delete(preview)
db.session.commit()
flash("ๅ ้ค้ขๅๆๅ!", "ok")
return redirect(url_for('admin.preview_list', page=1))
#ไฟฎๆน้ขๅ
@admin.route("/preview/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def preview_edit(id=None):
form = PrivateForm()
form.logo.validators= []
preview = Preview.query.get_or_404(int(id))
if request.method == "GET":
form.title.data = preview.title
if form.validate_on_submit():
data = form.data
if form.logo.data.filename != "":
file_logo = secure_filename(form.logo.data.filename)
preview.logo = change_filename(file_logo)
form.logo.data.save(app.config["UP_DIR"] + preview.logo)
preview.title = data["title"]
db.session.add(preview)
db.session.commit()
flash("ไฟฎๆน้ขๅๆๅ๏ผ", "ok")
return redirect(url_for('admin.preview_add',id=id))
return render_template("admin/preview_edit.html",form=form,preview=preview)
#ไผๅๅ่กจ
@admin.route("/user/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def user_list(page=None):
if page is None:
page = 1
page_data = User.query.order_by(
User.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/user_list.html",page_data=page_data)
#ไผๅ่ฏฆๆ
้กต้ข
@admin.route("/user/view/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def user_view(id=None):
user = User.query.get_or_404(int(id))
return render_template("admin/user_view.html",user=user)
#ไผๅๅ ้ค
@admin.route("/user/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def user_del(id=None):
user = User.query.get_or_404(int(id))
db.session.delete(user)
db.session.commit()
flash("ๅ ้คไผๅๆๅ!", "ok")
return redirect(url_for('admin.user_list', page=1))
#่ฏ่ฎบๅ่กจ
@admin.route("/comment/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def comment_list(page=None):
if page is None:
page = 1
page_data = Comment.query.join(
Movie
).join(
User
).filter(
Movie.id == Comment.movie_id,
User.id == Comment.user_id
).order_by(
Comment.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/comment_list.html",page_data=page_data)
#ๅ ้ค่ฏ่ฎบ
@admin.route("/comment/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def comment_del(id=None):
comment = Comment.query.get_or_404(int(id))
db.session.delete(comment)
db.session.commit()
flash("ๅ ้ค่ฏ่ฎบๆๅ!", "ok")
return redirect(url_for('admin.comment_list', page=1))
#ๆถ่ๅ่กจ
@admin.route("/moviecol/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def moviecol_list(page=None):
if page is None:
page = 1
page_data = Movicecol.query.join(
Movie
).join(
User
).filter(
Movie.id == Movicecol.movie_id,
User.id == Movicecol.user_id
).order_by(
Movicecol.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/moviecol_list.html",page_data=page_data)
#ๅ ้คๆถ่
@admin.route("/moviecol/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def moviecol_del(id=None):
moviecol = Movicecol.query.get_or_404(int(id))
db.session.delete(moviecol)
db.session.commit()
flash("ๅ ้ค่ฏ่ฎบๆๅ!", "ok")
return redirect(url_for('admin.moviecol_list', page=1))
#ๆไฝๆฅๅฟ
@admin.route("/oplog/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def oplog_list(page=None):
if page is None:
page = 1
page_data = Oplog.query.join(
Admin
).filter(
Admin.id == Oplog.admin_id
).order_by(
Oplog.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/oplog_list.html",page_data=page_data)
#็ฎก็ๅ็ปๅฝๆฅๅฟ
@admin.route("/adminloginlog/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def adminloginlog_list(page=None):
if page is None:
page = 1
page_data = Adminlog.query.join(
Admin
).filter(
Admin.id == Adminlog.admin_id
).order_by(
Adminlog.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/adminloginlog_list.html",page_data=page_data)
#็จๆท็ปๅฝๆฅๅฟ
@admin.route("/userloginlog/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def userloginlog_list(page=None):
if page is None:
page = 1
page_data = Userlog.query.join(
User
).filter(
User.id == Userlog.user_id
).order_by(
Userlog.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/userloginlog_list.html",page_data=page_data)
#ๆทปๅ ่ง่ฒ
@admin.route("/role/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def role_add():
form = RoleForm()
if form.validate_on_submit():
data = form.data
role = Role(
name = data["name"],
auths =",".join(map(lambda v:str(v),data["auths"]))
)
db.session.add(role)
db.session.commit()
flash("ๆทปๅ ่ง่ฒๆๅ", "ok")
return render_template("admin/role_add.html",form=form)
#่ง่ฒๅ่กจ
@admin.route("/role/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def role_list(page=None):
if page is None:
page = 1
page_data = Role.query.order_by(
Role.addtime.desc()
).paginate(page=page, per_page=10)
return render_template("admin/role_list.html",page_data=page_data)
#่ง่ฒๅ ้ค
@admin.route("/role/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def role_del(id=None):
role = Role.query.filter_by(id=id).first_or_404()
db.session.delete(role)
db.session.commit()
flash("ๅ ้ค่ง่ฒๆๅ!", "ok")
return redirect(url_for('admin.role_list',page=1))
#็ผ่พ่ง่ฒ
@admin.route("/role/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def role_edit(id=None):
    """Edit a role's name and its comma-separated list of granted auth ids."""
    form = RoleForm()
    role = Role.query.get_or_404(id)
    if request.method == "GET":
        # Pre-select the auth checkboxes from the stored "1,2,3" string.
        auths = role.auths
        form.auths.data = list(map(lambda v:int(v), auths.split(',')))
    if form.validate_on_submit():
        data = form.data
        role.name = data["name"]
        # BUG FIX: the joined id string was computed but never assigned,
        # so editing a role silently kept its old permissions.
        role.auths = ",".join(map(lambda v: str(v), data["auths"]))
        db.session.add(role)
        db.session.commit()
        flash("็ผ่พ่ง่ฒๆๅ!", "ok")
    return render_template("admin/role_edit.html",form=form,role=role)
#ๆ้ๆทปๅ
@admin.route("/auth/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def auth_add():
form = AuthForm()
if form.validate_on_submit():
data = form.data
auth = Auth(
name=data["name"],
url=data["url"]
)
db.session.add(auth)
db.session.commit()
flash("ๆทปๅ ๆ้ๆๅ", "ok")
return render_template("admin/auth_add.html", form=form)
#ๆ้ๅ่กจ
@admin.route("/auth/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def auth_list(page=None):
    """Paginated list of Auth entries, newest first (10 per page).

    BUG FIX: @admin_auth used to sit *above* @admin.route, so the route was
    registered without the permission check (decorators apply bottom-up and
    the route decorator registers whatever it receives); it now matches the
    decorator order used by every other protected view.
    """
    if page is None:
        page = 1
    page_data = Auth.query.order_by(
        Auth.addtime.desc()
    ).paginate(page=page,per_page=10)
    return render_template("admin/auth_list.html",page_data=page_data)
#ๆ้ๅ ้ค
@admin.route("/auth/del/<int:id>/",methods=["GET"])
@admin_login_req
@admin_auth
def auth_del(id=None):
auth = Auth.query.filter_by(id=id).first_or_404()
db.session.delete(auth)
db.session.commit()
flash("ๅ ้คๆ้ๆๅ!", "ok")
return redirect(url_for('admin.auth_list',page=1))
# Edit an Auth entry (name/url).
@admin.route("/auth/edit/<int:id>/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def auth_edit(id=None):
    """Edit the name/url of an Auth entry."""
    form = AuthForm()
    auth = Auth.query.get_or_404(id)
    if form.validate_on_submit():
        data = form.data
        # Removed an unused Tag.query count left over from tag_edit copy/paste.
        auth.url = data["url"]
        auth.name = data["name"]
        db.session.add(auth)
        db.session.commit()
        flash("ไฟฎๆนๆ้ๆๅ!","ok")
        return redirect(url_for('admin.auth_edit',id=id))
    return render_template("admin/auth_edit.html",form=form,auth=auth)
#ๆทปๅ ็ฎก็ๅ
@admin.route("/admin/add/",methods=["GET","POST"])
@admin_login_req
@admin_auth
def admin_add():
form = AdminForm()
from werkzeug.security import generate_password_hash
if form.validate_on_submit():
data = form.data
admin = Admin(
name=data["name"],
pwd=generate_password_hash(data["pwd"]),
role_id = data['role_id'],
is_super=1,
)
db.session.add(admin)
db.session.commit()
flash("ๆทปๅ ็ฎก็ๅๆๅ", "ok")
return render_template("admin/admin_add.html",form=form)
#็ฎก็ๅๅ่กจ
@admin.route("/admin/list/<int:page>/",methods=["GET"])
@admin_login_req
@admin_auth
def admin_list(page=None):
if page is None:
page = 1
page_data = Admin.query.join(
Role
).filter(
Role.id == Admin.role_id
).order_by(
Admin.addtime.desc()
).paginate(page=page,per_page=10)
return render_template("admin/admin_list.html",page_data=page_data)
|
[
"smile@smilehacker.net"
] |
smile@smilehacker.net
|
dd4e74a77ce8690082c0f3839317d11c0ef60a3b
|
b73071a23f22e025f2c723654160e1cc5722cb4b
|
/cuadruplos.py
|
d68c9f384365369ae4c68b53e8ee8b094fdb4a41
|
[] |
no_license
|
omarcarreon/drawmycode
|
d6c381910906381d5a56209b23b4a22965fd6945
|
aa62bf0f3caec411c876ff4a041afd328d025f2b
|
refs/heads/master
| 2021-05-03T06:01:05.084952
| 2016-05-01T23:38:39
| 2016-05-01T23:38:39
| 53,284,924
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 20,538
|
py
|
from structs import *
from cubosemantico import *
from tablas import *
from dmcparser import *
import sys
pilaO = Stack()
pOper = Stack()
pTipos = Stack()
pSaltos = Stack()
pEjecucion = Stack()
pDimensionada = Stack()
# Inicia con el index 0
cuadruplos = []
# Inicia con el index 0
contSaltos = 0
actualAccessDIM = 1
actualAccessMatrix = {}
actualIDDim = None
actualDirBaseMatrix = None
class Cuadruplo:
    """One quadruple of intermediate code: operator, two operands, result."""

    def __init__(self, operador, operandoIzq, operandoDer, temp):
        # Classic (op, left, right, result) layout of a compiler quadruple.
        self.op, self.opdoIzq, self.opdoDer, self.res = (
            operador, operandoIzq, operandoDer, temp,
        )
'''
===================================================
Inserta estructura Cuadruplo en lista de cuadruplos
===================================================
'''
def push_cuadruplo(cuadruplo):
    """Append one Cuadruplo to the program and advance the quad counter."""
    global cuadruplos
    global contSaltos
    cuadruplos.append(cuadruplo)
    # contSaltos always equals len(cuadruplos): the index of the next quad.
    contSaltos+=1
def goto_main_quad():
    """Emit the initial GOTO (target filled in later) as quadruple 0."""
    global pSaltos
    genera_cuadruplo = Cuadruplo("GOTO","","","")
    push_cuadruplo(genera_cuadruplo)
    # Remember index 0 so iniciaMain can backpatch this jump's target.
    pSaltos.push(0);
def iniciaMain():
    """Backpatch the initial GOTO so it jumps to the first quad of main."""
    global pSaltos
    inicioMain = pSaltos.pop()
    # contSaltos/cuadruplos are only read or mutated in place, so no
    # `global` declaration is required for them here.
    cuadruplos[inicioMain].res = contSaltos
'''
============================================
1. Meter direccion y tipo del ID en pilaO
============================================
'''
def exp_1(dirvar,tipo):
    """Semantic action 1: push an operand's address and type onto the stacks."""
    global pilaO
    global pTipos
    pilaO.push(dirvar)
    pTipos.push(tipo)
'''
============================================
2. Meter * o / en pOper
============================================
'''
def exp_2(product_division):
global pOper
pOper.push(product_division)
'''
============================================
3. Meter + o - en pOper
============================================
'''
def exp_3(plus_minus):
global pOper
pOper.push(plus_minus)
'''
============================================
4. Si top(pOper) == '*' o '/'
============================================
'''
def exp_4():
    """Semantic action 4: if the top of pOper is * or /, emit its quadruple.

    Pops both operands and their types, consults the semantic cube, stores
    the result in a fresh temporary and pushes it back onto the stacks.
    Aborts the whole run on incompatible operand types. (exp_5/9/11 follow
    the same pattern for other operator groups.)
    """
    global pOper
    global pTipos
    global pilaO
    if not pOper.isEmpty():
        if pOper.peek() == '*' or pOper.peek() == '/':
            op = pOper.pop()
            opdoDer = pilaO.pop()
            tipoDer = pTipos.pop()
            opdoIzq = pilaO.pop()
            tipoIzq = pTipos.pop()
            tipoRes = cuboSemantico[tipoIzq][tipoDer][op];
            if tipoRes != "Error":
                temp = set_dir_temp(tipoRes)
                genera_cuadruplo = Cuadruplo(op,opdoIzq,opdoDer,temp)
                push_cuadruplo(genera_cuadruplo)
                pilaO.push(temp)
                pTipos.push(tipoRes)
            else:
                sys.exit("Error. Tipos Incompatibles.")
'''
============================================
5. Si top(pOper) == '+' o '-'
============================================
'''
def exp_5():
global pOper
global pTipos
global pilaO
if not pOper.isEmpty():
if pOper.peek() == '+' or pOper.peek() == '-':
op = pOper.pop()
opdoDer = pilaO.pop()
tipoDer = pTipos.pop()
opdoIzq = pilaO.pop()
tipoIzq = pTipos.pop()
tipoRes = cuboSemantico[tipoIzq][tipoDer][op];
if tipoRes != "Error":
temp = set_dir_temp(tipoRes)
genera_cuadruplo = Cuadruplo(op,opdoIzq,opdoDer,temp)
push_cuadruplo(genera_cuadruplo)
pilaO.push(temp)
pTipos.push(tipoRes)
else:
sys.exit("Error. Tipos Incompatibles.")
'''
============================================
6. Meter Fondo Falso en pOper
============================================
'''
def exp_6():
global pOper
pOper.push('[')
'''
============================================
7. Sacar Fondo Falso
============================================
'''
def exp_7():
global pOper
pOper.pop()
'''
============================================
8. Meter AND/OR en pOper
============================================
'''
def exp_8(and_or):
global pOper
pOper.push(and_or)
# ------------------------------------------------------------------
# 9./10. Reduce a pending 'and'/'or'; push relational operators.
# ------------------------------------------------------------------
def exp_9():
    """If top(pOper) is 'and' or 'or', emit its quadruple.

    Same reduce scheme as exp_4: pop both operands and types, check
    cuboSemantico, store the boolean result in a fresh temporary and
    push it back. Exits on incompatible types.
    """
    global pOper
    global pTipos
    global pilaO
    if pOper.isEmpty() or pOper.peek() not in ('and', 'or'):
        return
    operator = pOper.pop()
    right_operand = pilaO.pop()
    right_type = pTipos.pop()
    left_operand = pilaO.pop()
    left_type = pTipos.pop()
    result_type = cuboSemantico[left_type][right_type][operator]
    if result_type == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    result = set_dir_temp(result_type)
    push_cuadruplo(Cuadruplo(operator, left_operand, right_operand, result))
    pTipos.push(result_type)
    pilaO.push(result)

def exp_10(oper_logic):
    """Push a pending relational operator (< <= > >= <> ==) onto pOper."""
    global pOper
    pOper.push(oper_logic)
# ------------------------------------------------------------------
# 11./12. Reduce a pending relational operator; push assignment '='.
# ------------------------------------------------------------------
def exp_11():
    """If top(pOper) is a relational operator, emit its quadruple.

    Pops both operands/types, validates in cuboSemantico, stores the
    comparison result in a new temporary and pushes it back. Exits on
    incompatible types.
    """
    global pOper
    global pTipos
    global pilaO
    relational_ops = ('<', '<=', '>', '>=', '<>', '==')
    if pOper.isEmpty() or pOper.peek() not in relational_ops:
        return
    operator = pOper.pop()
    right_operand = pilaO.pop()
    right_type = pTipos.pop()
    left_operand = pilaO.pop()
    left_type = pTipos.pop()
    result_type = cuboSemantico[left_type][right_type][operator]
    if result_type == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    result = set_dir_temp(result_type)
    push_cuadruplo(Cuadruplo(operator, left_operand, right_operand, result))
    pTipos.push(result_type)
    pilaO.push(result)

def exp_12(asignOper):
    """Push the pending assignment operator '=' onto pOper."""
    global pOper
    pOper.push(asignOper)
# ------------------------------------------------------------------
# 13. Reduce a pending assignment: emit ('=', value, None, target).
# ------------------------------------------------------------------
def exp_13():
    """If top(pOper) is '=', emit the assignment quadruple.

    Pops the assigned value and the assignment target (with types) and
    validates the pair in cuboSemantico. Nothing is pushed back: an
    assignment is a statement, not a sub-expression. Exits on a type
    mismatch.
    """
    global pOper
    global pTipos
    global pilaO
    if pOper.isEmpty() or pOper.peek() != '=':
        return
    operator = pOper.pop()
    value = pilaO.pop()
    value_type = pTipos.pop()
    target = pilaO.pop()
    target_type = pTipos.pop()
    if cuboSemantico[target_type][value_type][operator] == "Error":
        sys.exit("Error. Tipos Incompatibles.")
    push_cuadruplo(Cuadruplo(operator, value, None, target))
# ------------------------------------------------------------------
# PRINT statement: emit ('PRINT', '', '', value).
# ------------------------------------------------------------------
def estatuto_print():
    """Emit a PRINT quadruple for the expression result on top of
    pilaO; its type is discarded (any type may be printed)."""
    global pilaO
    global pTipos
    value = pilaO.pop()
    pTipos.pop()
    push_cuadruplo(Cuadruplo("PRINT", "", "", value))
# ------------------------------------------------------------------
# IF / ELSE / ENDIF: conditional jumps with backpatching via pSaltos.
# ------------------------------------------------------------------
def estatuto_if_1():
    """After the condition of an IF: emit a pending GOTOF.

    The condition must be boolean; its GOTOF target is left blank and
    the quadruple index is remembered on pSaltos for backpatching.
    """
    global pilaO
    global pTipos
    global pSaltos
    if pTipos.pop() != "bool":
        sys.exit("Error Semantico.")
    condition = pilaO.pop()
    push_cuadruplo(Cuadruplo("GOTOF", condition, "", ""))
    # push_cuadruplo advances contSaltos, so the GOTOF sits at contSaltos-1
    pSaltos.push(contSaltos - 1)

def estatuto_else():
    """At ELSE: emit a pending GOTO over the else-branch, backpatch the
    IF's GOTOF to the first else quadruple, and remember the GOTO."""
    global pilaO
    global pTipos
    global cuadruplos
    global pSaltos
    push_cuadruplo(Cuadruplo("GOTO", "", "", ""))
    pending_false = pSaltos.pop()
    cuadruplos[pending_false].res = contSaltos
    pSaltos.push(contSaltos - 1)

def estatuto_endif():
    """At the end of an IF: backpatch the pending jump (GOTOF or the
    ELSE's GOTO) to the quadruple that follows the statement."""
    global cuadruplos
    global pSaltos
    pending = pSaltos.pop()
    cuadruplos[pending].res = contSaltos
# ------------------------------------------------------------------
# WHILE: remember the loop head, emit GOTOF after the condition and
# close the loop with a GOTO back to the head.
# ------------------------------------------------------------------
def estatuto_while_1():
    """Before the condition: remember the loop-head quadruple index."""
    global pSaltos
    pSaltos.push(contSaltos)

def estatuto_while_2():
    """After the condition: emit a pending GOTOF (target backpatched in
    estatuto_while_3). The condition must be boolean."""
    global pilaO
    global pTipos
    global pSaltos
    if pTipos.pop() != "bool":
        sys.exit("Error Semantico.")
    condition = pilaO.pop()
    push_cuadruplo(Cuadruplo("GOTOF", condition, "", ""))
    pSaltos.push(contSaltos - 1)

def estatuto_while_3():
    """At the end of the body: jump back to the loop head and backpatch
    the GOTOF to the quadruple after the loop."""
    global pSaltos
    pending_false = pSaltos.pop()
    loop_head = pSaltos.pop()
    push_cuadruplo(Cuadruplo("GOTO", "", "", loop_head))
    cuadruplos[pending_false].res = contSaltos
# ------------------------------------------------------------------
# Function prologue/epilogue helpers.
# ------------------------------------------------------------------
def altaInicioFunc():
    """Return the quadruple index where the current function starts
    (recorded in its directory entry for GOSUB)."""
    global contSaltos
    return contSaltos

def generaAccionRetorno(funcActual):
    """Close a function body: report its temporary usage and, for every
    function except 'main', emit a RET quadruple.

    Returns a dict with the temporary counts per type, used to size the
    function's activation record.
    """
    totals = {'totalTempInts': get_Total_Temp_Int(),
              'totalTempFloats': get_Total_Temp_Float(),
              'totalTempBools': get_Total_Temp_Bool(),
              'totalTempStrings': get_Total_Temp_String()}
    if funcActual != 'main':
        push_cuadruplo(Cuadruplo("RET", "", "", ""))
    return totals

def generaAccionEndMain():
    """Emit the END quadruple that terminates the program (main only)."""
    push_cuadruplo(Cuadruplo("END", "", "", ""))
# ------------------------------------------------------------------
# Function call: ERA (activation record), PARAM passing and GOSUB.
# ------------------------------------------------------------------
def estatuto_llamadafunc_2(funcLlamada, tamMemoriaLocalLlamadaFunc):
    """Emit ERA to allocate the callee's activation record."""
    push_cuadruplo(Cuadruplo("ERA", tamMemoriaLocalLlamadaFunc, funcLlamada, ""))

def estatuto_llamadafunc_3(dirParamActual, tipoParamActual):
    """Emit PARAM for one argument after checking that its type matches
    the declared parameter type. Exits the compiler on a mismatch."""
    global pilaO
    argument = pilaO.pop()
    argument_type = pTipos.pop()
    if argument_type != tipoParamActual:
        sys.exit('Error. Tipo de argumento y parametro no coinciden.')
    push_cuadruplo(Cuadruplo("PARAM", argument, "", dirParamActual))

def estatuto_llamadafunc_6(funcLlamada, quadInicioFuncLlamada, tipoFuncLlamada, dirFuncLlamada):
    """Emit GOSUB to the callee's first quadruple.

    The return quadruple index is saved on pEjecucion first. For a
    non-void callee, the value left at its global address is copied
    into a fresh temporary, which is pushed as the call's result.
    """
    global contSaltos
    global pEjecucion
    global pilaO
    global pTipos
    pEjecucion.push(contSaltos)
    push_cuadruplo(Cuadruplo("GOSUB", funcLlamada, "", quadInicioFuncLlamada))
    if tipoFuncLlamada == 'void':
        return
    result = set_dir_temp(tipoFuncLlamada)
    push_cuadruplo(Cuadruplo("=", dirFuncLlamada, "", result))
    pilaO.push(result)
    pTipos.push(tipoFuncLlamada)
# ------------------------------------------------------------------
# RETURN statement: emit ('RETURN', func, '', value).
# ------------------------------------------------------------------
def estatuto_return(funcActual, tipoFuncActual):
    """Emit a RETURN quadruple for the value on top of pilaO.

    Valid only inside a non-void function whose declared type matches
    the returned expression's type; otherwise the compiler exits.
    (The original elif condition was the exact negation of the if
    condition, so a plain else is equivalent.)
    """
    global pilaO
    global pTipos
    returned_type = pTipos.pop()
    if tipoFuncActual != 'void' and returned_type == tipoFuncActual:
        returned_value = pilaO.pop()
        push_cuadruplo(Cuadruplo("RETURN", funcActual, "", returned_value))
    else:
        sys.exit("Error. Tipo de variable retorno no coincide con tipo de la funcion.")
'''
============================================
Estatuto Variable Dimensionada 2
============================================
'''
def acceso_dimvar_2(accessingMatrix):
    """Start an access to a dimensioned variable (array/matrix).

    Records the variable being indexed and which dimension is being
    processed in module globals (shared with acceso_dimvar_3..5), and
    pushes a fake bottom '[' so index expressions reduce independently.
    ``accessingMatrix`` is the variable's directory entry; it is expected
    to carry 'Dim' (per-dimension Li/Ls/m) and 'Dir' keys -- see
    acceso_dimvar_3. TODO confirm against the symbol-table builder.
    """
    global pilaO
    global pDimensionada
    global pOper
    global actualAccessDIM
    global actualIDDim
    global actualAccessMatrix
    actualAccessMatrix = accessingMatrix;
    idDim = pilaO.pop()
    actualIDDim = idDim
    actualAccessDIM = 1
    pDimensionada.push([idDim,actualAccessDIM])
    pOper.push('[')
'''
============================================
Estatuto Variable Dimensionada 3
============================================
'''
def acceso_dimvar_3():
    """Process one index expression of the dimensioned access.

    Emits VERIFICA to bounds-check the index against [Li, Ls], then
    accumulates the classic row-major address computation:
    dimension 1 multiplies the index by the stride m, dimension 2 adds
    the second index to that partial product. The partial result stays
    on pilaO for acceso_dimvar_5.
    """
    global pilaO
    global pTipos
    global actualDirBaseMatrix
    Li_DIM = actualAccessMatrix['Dim'][actualAccessDIM]['Li']
    Ls_DIM = actualAccessMatrix['Dim'][actualAccessDIM]['Ls']
    m_DIM = actualAccessMatrix['Dim'][actualAccessDIM]['m']
    actualDirBaseMatrix = actualAccessMatrix['Dir']
    genera_cuadruplo = Cuadruplo("VERIFICA",pilaO.peek(),Li_DIM,Ls_DIM)
    push_cuadruplo(genera_cuadruplo)
    if actualAccessDIM == 1: # first dimension: scale the index by the stride m
        aux = pilaO.pop()
        pTipos.pop()
        temp = set_dir_temp('int')
        genera_cuadruplo = Cuadruplo("*",aux,m_DIM,temp)
        push_cuadruplo(genera_cuadruplo)
        pilaO.push(temp)
    if actualAccessDIM == 2: # second dimension: add it to the scaled first index
        aux2 = pilaO.pop()
        aux1 = pilaO.pop()
        pTipos.pop()
        temp = set_dir_temp('int')
        genera_cuadruplo = Cuadruplo("+",aux1,aux2,temp)
        push_cuadruplo(genera_cuadruplo)
        pilaO.push(temp)
'''
============================================
Estatuto Variable Dimensionada 4
============================================
'''
def acceso_dimvar_4():
    """Advance to the next dimension of the current access (called at
    the comma between index expressions)."""
    global actualAccessDIM
    global pDimensionada
    actualAccessDIM = actualAccessDIM + 1
    pDimensionada.push([actualIDDim,actualAccessDIM])
'''
============================================
Estatuto Variable Dimensionada 5
============================================
'''
def acceso_dimvar_5():
    """Finish the dimensioned access: add the accumulated offset to the
    variable's base address, leave the resulting (pointer) temporary on
    pilaO, and pop the fake bottom and the pDimensionada entry."""
    global pilaO
    global pTipos
    global pOper
    global pDimensionada
    aux1 = pilaO.pop()
    temp = set_dir_temp('int')
    genera_cuadruplo = Cuadruplo("+",aux1,actualDirBaseMatrix,temp)
    push_cuadruplo(genera_cuadruplo)
    pilaO.push(temp)
    pOper.pop()
    pDimensionada.pop()
# ------------------------------------------------------------------
# Built-in RANDOM(lower, upper) function.
# ------------------------------------------------------------------
def opfunc_random():
    """Emit a RANDOM quadruple whose int result temporary receives a
    value between the two operands on pilaO.

    NOTE(review): the operand types are popped but never validated
    (unlike the dibujafunc_* helpers) -- presumably they should be int;
    confirm before tightening.
    """
    global pilaO
    global pTipos
    upper_bound = pilaO.pop()
    pTipos.pop()
    lower_bound = pilaO.pop()
    pTipos.pop()
    result = set_dir_temp('int')
    pilaO.push(result)
    pTipos.push('int')
    push_cuadruplo(Cuadruplo("RANDOM", lower_bound, upper_bound, result))
# ------------------------------------------------------------------
# Drawing primitive: pen line width.
# ------------------------------------------------------------------
def dibujafunc_linewidth():
    """Emit a LINEWIDTH quadruple for the pen width on top of pilaO.

    Fix: validate that the operand is an int, matching every other
    dibujafunc_* helper (lineColor, line, square, ...). Previously the
    type was popped but never checked, so a float/string width reached
    the virtual machine unvalidated.
    """
    global pilaO
    global pTipos
    width = pilaO.pop()
    tipoWidth = pTipos.pop()
    if tipoWidth == 'int':
        genera_cuadruplo = Cuadruplo("LINEWIDTH",width,"","")
        push_cuadruplo(genera_cuadruplo)
    else:
        sys.exit("Argumentos de funcion lineWidth deben ser de tipo entero.")
# ------------------------------------------------------------------
# Drawing primitives: pen color and straight line.
# ------------------------------------------------------------------
def dibujafunc_linecolor():
    """Emit LINECOLOR from three int RGB operands on pilaO.

    Operands were pushed red, green, blue, so they pop in reverse.
    Exits the compiler if any component is not an int.
    """
    global pilaO
    global pTipos
    blue_val = pilaO.pop()
    blue_type = pTipos.pop()
    green_val = pilaO.pop()
    green_type = pTipos.pop()
    red_val = pilaO.pop()
    red_type = pTipos.pop()
    if not (red_type == 'int' and green_type == 'int' and blue_type == 'int'):
        sys.exit("Argumentos de funcion lineColor deben ser de tipo entero.")
    push_cuadruplo(Cuadruplo("LINECOLOR", [red_val, green_val, blue_val], "", ""))

def dibujafunc_line():
    """Emit LINE from two int (x, y) endpoints on pilaO.

    Operands were pushed x1, y1, x2, y2 and pop in reverse. Exits the
    compiler if any coordinate is not an int.
    """
    global pilaO
    global pTipos
    y2 = pilaO.pop()
    y2_type = pTipos.pop()
    x2 = pilaO.pop()
    x2_type = pTipos.pop()
    y1 = pilaO.pop()
    y1_type = pTipos.pop()
    x1 = pilaO.pop()
    x1_type = pTipos.pop()
    if not (x1_type == 'int' and y1_type == 'int' and x2_type == 'int' and y2_type == 'int'):
        sys.exit("Argumentos de funcion line deben ser de tipo entero.")
    push_cuadruplo(Cuadruplo("LINE", [x1, y1, x2, y2], "", ""))
# ------------------------------------------------------------------
# Drawing primitives taking (x, y, size): square, circle, star,
# triangle. All four share the same pop/validate/emit body.
# ------------------------------------------------------------------
def _dibujafunc_xy_size(op_name, nombre_funcion):
    """Shared body: pop size, y, x (with types) from pilaO/pTipos, emit
    (op_name, [x, y, size], '', '') if all three are int, otherwise exit
    with the per-function error message."""
    global pilaO
    global pTipos
    size_val = pilaO.pop()
    size_type = pTipos.pop()
    y_val = pilaO.pop()
    y_type = pTipos.pop()
    x_val = pilaO.pop()
    x_type = pTipos.pop()
    if size_type == 'int' and y_type == 'int' and x_type == 'int':
        push_cuadruplo(Cuadruplo(op_name, [x_val, y_val, size_val], "", ""))
    else:
        sys.exit("Argumentos de funcion %s deben ser de tipo entero." % nombre_funcion)

def dibujafunc_square():
    """Emit SQUARE at (x, y) with the given side length."""
    _dibujafunc_xy_size("SQUARE", "square")

def dibujafunc_circle():
    """Emit CIRCLE at (x, y) with the given radius."""
    _dibujafunc_xy_size("CIRCLE", "circle")

def dibujafunc_star():
    """Emit STAR at (x, y) with the given size."""
    _dibujafunc_xy_size("STAR", "star")

def dibujafunc_triangle():
    """Emit TRIANGLE at (x, y) with the given size."""
    _dibujafunc_xy_size("TRIANGLE", "triangle")
# ------------------------------------------------------------------
# Drawing primitives: arc and fill control.
# ------------------------------------------------------------------
def dibujafunc_arc():
    """Emit ARC from two int (x, y) endpoints and an int sweep in
    degrees. Operands pop in reverse push order; exits the compiler if
    any of the five operands is not an int."""
    global pilaO
    global pTipos
    degrees = pilaO.pop()
    degrees_type = pTipos.pop()
    y2 = pilaO.pop()
    y2_type = pTipos.pop()
    x2 = pilaO.pop()
    x2_type = pTipos.pop()
    y1 = pilaO.pop()
    y1_type = pTipos.pop()
    x1 = pilaO.pop()
    x1_type = pTipos.pop()
    all_int = (x1_type == 'int' and y1_type == 'int' and x2_type == 'int'
               and y2_type == 'int' and degrees_type == 'int')
    if not all_int:
        sys.exit("Argumentos de funcion arc deben ser de tipo entero.")
    push_cuadruplo(Cuadruplo("ARC", [x1, y1, x2, y2, degrees], "", ""))

def dibujafunc_startfill():
    """Emit STARTFILL with an int RGB fill color (popped blue, green,
    red). Exits the compiler if any component is not an int."""
    global pilaO
    global pTipos
    blue_val = pilaO.pop()
    blue_type = pTipos.pop()
    green_val = pilaO.pop()
    green_type = pTipos.pop()
    red_val = pilaO.pop()
    red_type = pTipos.pop()
    if not (red_type == 'int' and green_type == 'int' and blue_type == 'int'):
        sys.exit("Argumentos de funcion startFill deben ser de tipo entero.")
    push_cuadruplo(Cuadruplo("STARTFILL", [red_val, green_val, blue_val], "", ""))

def dibujafunc_stopfill():
    """Emit STOPFILL to close the current fill region (the 255 operands
    mirror the original emission; presumably ignored by the VM -- TODO
    confirm)."""
    push_cuadruplo(Cuadruplo("STOPFILL", 255, 255, 255))
'''
============================================
Imprime las pilas
============================================
'''
def printPilas():
    """Debug helper: dump every compiler stack and the quadruple table
    to stdout (Python 2 print statements)."""
    print "pilaO ", pilaO.getElements()
    print "pTipos ", pTipos.getElements()
    print "pOper ", pOper.getElements()
    print "pSaltos ", pSaltos.getElements()
    print "pDimensionada" , pDimensionada.getElements()
    print_cuadruplos(cuadruplos)
'''
============================================
Imprime los cuadruplos
============================================
'''
def print_cuadruplos(currentCuadList):
print "Tabla Cuadruplos"
index = 0
for currentCuad in currentCuadList:
if currentCuad:
print index, " " ,currentCuad.op, " , ", currentCuad.opdoIzq, " , ", currentCuad.opdoDer," , ",currentCuad.res
else:
print "List is empty"
index += 1
pass
|
[
"oomarcarreon@gmail.com"
] |
oomarcarreon@gmail.com
|
d6f17420ce83772b4d31a9811f043fc6f9240f6b
|
8e30082f46128316d78504c5efa13319726a52fd
|
/django/salon/polls/models.py
|
6927af7977f957aa28db99a135854209e9a702bf
|
[] |
no_license
|
vbitjp/Python
|
8d08bcaa8fac6348798bbc199b6cc965b163c81c
|
abfe098ab2152ded87d121c42c1df5ca37c7e837
|
refs/heads/master
| 2021-09-12T17:56:28.100117
| 2018-04-19T14:28:41
| 2018-04-19T14:28:41
| 106,935,158
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 832
|
py
|
import datetime
from django.db import models
from django.utils import timezone
class Question(models.Model):
    """A poll question together with its publication timestamp."""
    question_text = models.CharField(max_length=200)  # text shown to voters
    pub_date = models.DateTimeField('date published')  # when the question went live
    def __str__(self):
        """Readable label used by the admin and the shell."""
        return self.question_text
    def was_published_recently(self):
        """Return True if pub_date lies within the last 24 hours.

        Future-dated questions return False (pub_date must not exceed now).
        """
        now = timezone.now()
        return now - datetime.timedelta(days=1) <= self.pub_date <= now
    # Admin list-display metadata for was_published_recently.
    was_published_recently.admin_order_field = 'pub_date'
    was_published_recently.boolean = True
    was_published_recently.short_description = 'Published recently?'
class Choice(models.Model):
    """One selectable answer for a Question, with its vote tally."""
    question = models.ForeignKey(Question, on_delete=models.CASCADE)  # deleting a Question deletes its Choices
    choice_text = models.CharField(max_length=200)  # answer text shown to voters
    votes = models.IntegerField(default=0)  # running vote count
    def __str__(self):
        """Readable label used by the admin and the shell."""
        return self.choice_text
|
[
"7ywrxsxrw7z571g@vbit.jp"
] |
7ywrxsxrw7z571g@vbit.jp
|
d141035695650deff0fc75c74efaa0d6e66cd50c
|
b1c3529a652041dfb4a5b9d7684695f1e3cb8148
|
/settings/settings_50cm_open_4K.py
|
0f4dc7c1b6d4092502901684794f75535adce7d6
|
[] |
no_license
|
ksyoung/load_and_sensitivity
|
bc51ef7c146f652fa4b567414ed215d30af2d3cc
|
aa291c8d25a7c23ba79327718c53a34d15b350ab
|
refs/heads/master
| 2021-09-22T08:31:30.527291
| 2021-09-15T20:51:15
| 2021-09-15T20:51:15
| 99,721,664
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 15,934
|
py
|
# -*- coding: utf-8 -*-
import os
import sys
from pylab import sqrt
from pywtl.core.wtl_ConvertUtils import convert_squid
import pywtl.common.analysis.noise.analysis.NoisePred as NP
import pywtl.common.analysis.noise.analysis.ParameterLib as PL
import numpy as np
class Class(object):
    """Lightweight attribute bag: every keyword argument passed to the
    constructor becomes an instance attribute."""
    def __init__(self, **kwargs):
        for name, value in kwargs.items():
            setattr(self, name, value)
    def __repr__(self):
        # Same output as the original: ClassName({'attr': value, ...})
        return "{0}({1})".format(self.__class__.__name__, self.__dict__)
settings = Class()
# Run time parameters
settings.freq = 'All_GHz'
settings.version = '50cm_open'
settings.name = '50cm_open'
settings.verbose = True	# print extra progress information while running
# Telescope/ Receiver Optical Parameters
settings.mult_bands = True  # process every band from bands.csv rather than a single band
#settings.band = [133.,167.] # GHz, lower, upper.
#settings.band = np.array(settings.band)*1e9 # Hz
settings.aperture_radius = 0.25 # aperture radius in meters (0.5 m primary = 0.25 m radius)
settings.f_number = 1.5 # focal ratio of the telescope
settings.edge_db = 10 # edge taper on primary mirror in dB. May later be calculated from pixel sizes.
settings.dB_scan = False # True: scan over the edge tapers in dB_array instead of the single edge_db
settings.dB_array = np.linspace(.1,30,20) # [10.,15.,20] # array to scan over.
# NOTE: the fixed (log-spaced) list below overrides the linspace assignment above.
settings.dB_array = [ 0.1000, 0.13501524, 0.18228761, 0.24611127,
       0.33228126, 0.44862162, 0.60569578, 0.81776571,
       1.10408687, 1.49065655, 2.01257438, 2.7172293 ,
       3.66860233, 4.95307593, 6.68727732, 9.02866797,
       12.18984071, 16.45782268, 22.22013673, 30.0]
settings.mission_length = 4 # years
settings.sky_area = 41253 # deg^2 (the full sky)
# pixel size calculation parameters
settings.MCP = True # assumes MCPs and finds pixel diameters, edge tapers, counts, etc.
# if this is False then all bands have same edge taper, which is defined above.
settings.diameter_to_waist_ratio = 2.95 # from Toki's thesis.
settings.use_edge_dB = True ## this means calculate pixel sizes from edge taper given earlier.
settings.use_D_px = False ## use D_px from bands.csv import
# use_edge_dB and use_D_px are mutually exclusive ways to fix the pixel size.
if settings.use_edge_dB and settings.use_D_px:
    print '\n use_edge_dB and use_D_px can\'t both be True!!\n\n'
    sys.exit()
settings.calc_N_px_by_area_csv = False # if true uses FP sizes by band from csv.
settings.calc_N_px_rough = False # if true, estimates how many detectors would fit per band.
# if False, uses N_px from bands.csv input file
settings.x_fp_diameter = 1.20 # focal-plane x diameter -- presumably meters; TODO confirm units
settings.y_fp_diameter = 1.0 # focal-plane y diameter -- presumably meters; TODO confirm units
# the two pixel-count estimation modes are also mutually exclusive.
if settings.calc_N_px_by_area_csv and settings.calc_N_px_rough:
    print '\n calc_N_px_by_area and calc_N_px_rough can\'t both be True!!\n\n'
    sys.exit()
settings.calc_correlated_noise = False # if true then full focal plane calculations
# include correlated bunching photon white noise.
# Bolo parameters
settings.t_bath = 0.100 # Kelvin
settings.safety_factor = 2. # Unitless, ratio of p_sat to p_opt
settings.n = 2.0 # thermal power law exponent (EBEX was ~2)
settings.bolo_Rn = 1.33 # Ohms. TES resistance warm.
settings.bias_point = 0.75 # depth in transition assumed
settings.bolo_resistance = settings.bolo_Rn*settings.bias_point # Ohms, operating resistance in transition
## old readout noise method
settings.readout_noise_amps = 7e-12 # Amps*rt(sec), number from Franky for scaling readout noise.
# More bolo parameters for franky's noise code.
settings.conv = convert_squid('Normalized_16ch')
settings.prediction_type = 'theoretical'
dfmux_settings = {}  # populated below, then attached as settings.dfmux_settings
squid_settings = {}  # populated below, then attached as settings.squid_settings
bolo_char = {}       # populated below, then attached as settings.bolo_char
settings.noise_type = "transition"
# boost factors for noise.
settings.johnson_and_readout_factor = None # use un-modified noise theory.
# Warm/cold readout circuit description (resistor values in Ohms unless noted).
settings.system = {"T_warm_electronics": 300, # temperature of the warm electronics for Johnson noise
                   "T_squid_board": 4.2, # temperature of the SQUID board
                   "R_sq_x28": 20., # gain select from 1st stage SQUID ctrl
                   "R_sq_x25": 69800., # feedback resistor for 1st stage SQUID ctrl
                   "R_sq_x23": 10., # voltage divider after 1st stage SQUID ctrl (10 for EBEX, 20 for ETC[nominal])
                   "R_sq_x24": 121., # voltage divider after 1st stage SQUID ctrl
                   "R_sq_x21": 500., # feedback resistor for 2nd stage SQUID ctrl
                   "R_sq_x14": 100., # gain select #1 from 2nd stage SQUID ctrl
                   "R_sq_x15": 82.5, # gain select #2 from 2nd stage SQUID ctrl
                   "R_sq_x17": 50., # impedance matching to mezz
                   "R_sq_x000": 200., # voltage divider for bias resistor
                   "R_sq_x001": 100., # voltage divider for nuller
                   "R_sq_x44": 820., # current converters for nullers (4 of them)
                   "R_sq_11": 50., # current converting resistors for bias
                   "RA_me_13": 50., # impedance matching to SQUID ctrl
                   "RA_me_18": 200., # feedback resistor for demod 1st stage amplifier
                   "RA_me_Y": 10000., # Variable resistor for demod gain (2nd stage)
                   "RA_me_33": 50., # 50 ohm resistors in series with cariable resistor for demod gain (2nd stage)
                   "RA_me_32": 10000., # feedback resistor for demod 2nd stage amplifier
                   "RA_me_40": 50., # series R for votage divider before ADC (one per line)
                   "RA_me_41": 100., # middle R for votage divider before ADC
                   "RD_me_10": 50., # voltage divider by the carrier/nuller DAC (2 of these)
                   "RD_me_14": 100., # voltage divider by the carrier/nuller DAC (1 of these)
                   "RD_me_4": 200., # feedback resistor for 1st stage carrier/nuller
                   "RD_me_12": 200., # "shorting" resistor for 1st stage carrier
                   "RD_me_2": 50., # between 1st and 2nd stage of carrier/nuller (2 of these)
                   "RD_me_CX": 200., # gain selecting resistor for 2nd stage carrier
                   "RD_me_NX": 200., # gain selecting resistor for 2nd stage nuller
                   "RD_me_8": 1000., # feedback resistor for 2nd stage carrier/nuller
                   "RD_me_13": 50., # impedance matching to SQUID ctrl carrier/nuller
                   "R_bolo_termination": 50., # termination resistor in parallel to the bolos
                   "R_fl_bias": 50000., # resistor converting the flux bias
                   "R_bias": 0.03, # bias resistor
                   "R_sq": 100., # SQUID impedance
                   "C_ll": 1e-9, # C of the lead-lag filter
                   "Rw": 10., # R from thre wires, contributing to the lead-lag filter
                   "Zt": 500., # SQUID transimpedance
                   "G_digital": 2., # digital gain
                   "N_channel": 16, # Mux factor
                   }
### changes by Franky to lower readout noise, 9/22/17
### (each overrides the value assigned just above it)
#settings.optimize_gains = False
settings.optimize_gains = True
#settings.system["R_sq_x44"] = 820. # current converters for nullers (4 of them)
settings.system["R_sq_x44"] = 1640. # current converters for nullers (4 of them)
#settings.system["R_bias"] = 0.03 # bias resistor
settings.system["R_bias"] = 0.015 # bias resistor
settings.system["Zt"] = 750. # SQUID transimpedance
# DfMUX general setup
dfmux_settings['DAN_firmware'] = True
dfmux_settings['DAN_parser'] = False
dfmux_settings['bitshift'] = 8 # can be 8 for 24 bit.
dfmux_settings['fir'] = 6
dfmux_settings['fsamp'] = 25e6/2**(11+dfmux_settings['fir']) # sample rate set by the FIR stage
# DAC/ADC settings (fc/fn/fd = carrier/nuller/demod frequency, G* = gains)
dfmux_settings['fc'] = 500000.
dfmux_settings['Gc'] = 1
dfmux_settings['fn'] = dfmux_settings['fc']
dfmux_settings['Gn'] = 0
dfmux_settings['fd'] = dfmux_settings['fc']
dfmux_settings['Gd'] = 0
# SQUID/cryostat settings
squid_settings['R_FB'] = 5000.
##calced in code # bolo_char['nu'] = 150e9
##calced in code # bolo_char['dnu'] = 34e9
bolo_char['Zt'] = settings.system["Zt"]
bolo_char['L_fll'] = PL.LoopGain(bolo_char['Zt'])
settings.R_wire = 10. # is warm wire, squid board to squid controller.
bolo_char['Tbath'] = settings.t_bath
# bolometer characteristics
##calced in code # bolo_char['Tc'] = 0.42625
##calced in code # bolo_char['Tbolo'] = bolo_char['Tc']
bolo_char['Rn'] = settings.bolo_Rn
bolo_char['R'] = settings.bolo_resistance
bolo_char['tau_etf'] = 0.010
##calced in code # bolo_char['Popt'] = .124127e-12
bolo_char['L'] = 25. ##### something reasonable, but could be any number.
bolo_char['xi'] = 1 ## assume all correlation noise.
# other bolometer characteristics
##calced in code # bolo_char['Psat'] = 2.5 * bolo_char['Popt']
bolo_char['n'] = settings.n
settings.cryo = 'EBEX'
# derived values from Psat, Tc, Tbath and n
##calced in code bolo_char['Gbar'] = bolo_char['Psat']/(bolo_char['Tc']-bolo_char['Tbath'])
##calced in code bolo_char['G'] = PL.G_dyn(bolo_char['Gbar'], bolo_char['Tbath'], bolo_char['Tc'], bolo_char['n'])
##calced in code bolo_char['gamma'] = round(PL.CalcGamma(bolo_char['Tc'], bolo_char['Tbath'], bolo_char['n']), 3)
# derived DfMUX settings
##calced in code dfmux_settings['Vb'] = bolo_char['R'] * (bolo_char['Gbar']*(bolo_char['Tbolo']-bolo_char['Tbath']) - bolo_char['Popt'])
##calced in code dfmux_settings['Vb'] = sqrt(dfmux_settings['Vb'])
##calced in code dfmux_settings['Ac'] = dfmux_settings['Vb'] / conv.DDStoVbias(Carrier_amplitude=1,
#                                      Carrier_gain=dfmux_settings['Gc'],
#                                      firmware_version='16ch')
##calced in code R_gain = [2000., 820., 200., 0.]
##calced in code dfmux_settings['An'] = dfmux_settings['Ac']/3. * \
#                  (R_gain[dfmux_settings['Gn']] + 100.) / (R_gain[dfmux_settings['Gc']] + 100.)
##calced in code bolo_char['Si'] = NP.Si(dfmux_settings['Vb']) # assumes deep in transition
# set a value to have noise in counts or Kcmb (from franky's code)
settings.A_per_count = None
settings.Kcmb_per_cnt = None
# copying those into settings
settings.dfmux_settings = dfmux_settings
settings.squid_settings = squid_settings
settings.bolo_char = bolo_char
# Paths
settings.base_path = '/home/astro/kyoung/Documents/load_and_sensitivity/'
settings.elements_path = os.path.join(settings.base_path,
            'inputs/50cm_open_dragone.csv') # all telescope surfaces, lenses, etc.
# now being defined in code.
#settings.elements_out_path = os.path.join(settings.base_path,
#            'outputs/%s_%s_elements_out.csv ' %(settings.freq, settings.version)) # data that gets saved.
settings.bands_path = os.path.join(settings.base_path,
            'inputs/CMBP_bands.csv') # csv of bands.
settings.FP_areas_path = os.path.join(settings.base_path,
            'inputs/FP_areas_%s.csv' %settings.version) # csv of FP areas.
# unneeded stuff below this line.
'''
# Run time paramters
settings.name = '150'
settings.version = 'greg_f3'
settings.design = '1m_EBEX10K_f3_2016'
settings.do_point_source_analysis = False
settings.footer = True
# Telescope/ Receiver Optical Parameters
settings.frequency = 150.0 # Ghz
settings.frequency_range = (133, 167.0, 150.)
settings.bandwidth = settings.frequency_range[1] - settings.frequency_range[0] # Ghz
# Optics Parameters
settings.diameter_to_waist_ratio = 2.95 #
# Pixel Parameters
settings.pixel_diameter_step = 0.1
settings.pixel_diameter_range = [settings.pixel_diameter_step, 10 + settings.pixel_diameter_step] # in mm 0 to 20 mm
settings.default_pixel_diameter_m = 0.0042 # In meters
settings.default_pixel_diameter_mm = settings.default_pixel_diameter_m * 1000 # In millimeters
settings.lens_loss_tangent = 9e-5
settings.lens_index = 3.2
settings.lens_thickness = 0.050 # In meters
#Fmux parameters, Readout Noise Contribution
settings.readout_contribution = 0.10 # readout noise should increasee noise by 10%
settings.tes_accuracy = 1.15 # TES resistance value across wafer +/- 15%
settings.do_fmux = True # 1 to calculate fMUx params such as L value, cross talk etc
settings.fmux_max_freq = 1.0e6 # maximum frequency for fMux readout
settings.fmux_max_mux = 36 # mux factor
settings.capacitor_tan_d = 0.0002 # expected tand for interdigitated capacitor
settings.cross_talk_level = 0.01 # allowed cross-talk level
settings.capacitor_accuracy = 0.005 # fractional accuracy of capacitor
settings.f_delta_factor = 1.25 # factor to increase frequency spacing look at crosstalk_off to decide
settings.esr_contribution = 0.10 # allowed fractional ESR contribution to total R (10% = 0.01)
settings.readout_noise = 7e-12 # readout noise in A*sqrt(s)
settings.v_bias = 1e-6 #4 microVolt voltage bias
settings.nep_readout_fixed = 9.3e-18 # in Watts/root(Hz) this number from Franky July 7th, noise email to Shaul.
# Computatonal Parameters
settings.spatial_integration_accuracy = 10000 # numerical integration accuracy. ex: 100 splits integration space in 100 rectangles.
settings.frequency_bin_size = 0.384 # GHz
settings.integration_time = 0.5 # seconds
#Bolometer Parameters
settings.num_bolo_legs = 4
settings.bath_temp = 0.275 # Kelvin
settings.bolo_R_normal = 1.333333333 # Ohms. TES resistance warm.
settings.bias_fraction_rnormal = 0.75
settings.bolo_resistance = settings.bolo_R_normal*settings.bias_fraction_rnormal # Ohms
settings.thermal_carrier_exponent = 3.0
settings.a_over_l_bolo_leg = 149.2*1e-3*1e-12 # From Toki's script
settings.alpha = 250 # Measure for AlTi R v T curve d(logR)/dT
settings.tau_safety_factor = 5.8 # How much slower than readout bolo should be
#Design parameters/Goals
settings.psat_to_optical_power = 2.5 # Unitless
settings.bias_fraction_rnormal = 0.6
settings.target_num_pixels = 924
settings.num_sub_arrays = 6
settings.max_focal_plane_diameter_x = 0.25 # in m
settings.max_focal_plane_diameter_y = 0.25 # in m
settings.max_num_pixels = 10000
#settings.max_focal_plane_diameter = FOV_deg*(np.pi/180)*2.0*settings.aperture_radius*settings.f_number
settings.hex_outer_radius = 0.096 # in m
#Observing Parameters
settings.fract_sky = 0.5
settings.obs_time = 10 #flight time in days.
settings.obs_efficiency =.8 #percent of flight time when the telescope is acutally observing.
# Input Paths
settings.base_path = '/home/astro/kyoung/Documents/35cm-xdragone/Mapping_speed_code_py'
settings.atmospheric_windows_data_file = os.path.join(settings.base_path, 'input_data_products', 'LDB_34km_30deg_el.out') # Atmospheric Windows
settings.receiver_throughput_data_path = os.path.join(settings.base_path, 'input_data_products',
'1m_EBEX10K_2016_Throughput_%sGHz.csv' % str(int(settings.frequency))) # Aperture Illumination
#Aperture Output paths
settings.effective_aperture_data_path = os.path.join(settings.base_path,
'output/effective_aperture',
'1m_EBEX10K_f3_2016_Effective_Aperture_Output_%s_GHz_%s.dat' %(str(int(settings.frequency)),
settings.version)) # Aperture Illumination
settings.effective_aperture_png_path = os.path.join(settings.base_path,
'output/effective_aperture',
'1m_EBEX10K_f3_2016_Effective_Aperture_Output_%s_GHz_%s.png' %(str(int(settings.frequency)),
settings.version)) # Aperture Illumination
#Output Paths
settings.base_path = '/home/astro/kyoung/Documents/35cm-xdragone/Mapping_speed_code_py/output/mapping_speed/EBEX10K_f3_2016_gregorian'
settings.mapping_speed_output_png_path = os.path.join(settings.base_path, '1m_EBEX10K_f3_2016_mapping_speed_%sGHz_%s' % (str(int(settings.frequency)), settings.version))
settings.mapping_speed_output_data_path = os.path.join(settings.base_path, 'mapping_speed_%sGHz_%s.csv' % (str(int(settings.frequency)), settings.version))
settings.num_pixels_output_png_path = os.path.join(settings.base_path, 'num_pixels_%sGHz_%s.png' % (str(int(settings.frequency)), settings.version))
settings.throughput_path_out = os.path.join(settings.base_path, 'throughput_%sGHz_%s.csv' % (str(int(settings.frequency)), settings.version))
'''
|
[
"kyoung@astro.umn.edu"
] |
kyoung@astro.umn.edu
|
5187524c1cb90f930855028a542fe8b09194d3b4
|
a5898ba24399d8a05cb8a730e10400126fb04c3a
|
/ToDoApp/urls.py
|
b023ed47ea1b03001f8a266d49de0a9564a0531a
|
[] |
no_license
|
Adnan232/Django-WebApp
|
ae50507082be98c7ede02cb9fbad2ac93524bb05
|
e0ffcfef21396f96751efb9b39d917895f38ae52
|
refs/heads/master
| 2023-09-04T01:30:07.148630
| 2021-10-10T17:22:15
| 2021-10-10T17:22:15
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 797
|
py
|
"""TodoProject URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.2/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.contrib import admin
from django.urls import path,include
urlpatterns = [
path('admin/', admin.site.urls),
path('App/',include('App.urls'))
]
|
[
"adnanhabib120@gmail.com"
] |
adnanhabib120@gmail.com
|
79c9ac4f57d7c75785c6e238248c49297bcd93e6
|
a40d5c5cd0fcc2410e3200f40f6a79f7201b0193
|
/kubernetes/client/models/v1beta1_custom_resource_column_definition.py
|
f844f995b1a28c3f912663019e6dab34922ac0e7
|
[
"Apache-2.0"
] |
permissive
|
Unacademy/kubernetes-client
|
662cdc2b9fe6df43301e32427e48b1b2715773ca
|
b7f9c740a82b4585478d052c8032495cdeb3b331
|
refs/heads/master
| 2023-06-24T05:57:27.226613
| 2022-12-14T16:29:58
| 2022-12-14T16:29:58
| 181,669,794
| 0
| 0
|
Apache-2.0
| 2023-06-20T13:05:37
| 2019-04-16T10:43:37
|
Python
|
UTF-8
|
Python
| false
| false
| 8,689
|
py
|
# coding: utf-8
"""
Kubernetes
No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
OpenAPI spec version: v1.14.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1beta1CustomResourceColumnDefinition(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'json_path': 'str',
'description': 'str',
'format': 'str',
'name': 'str',
'priority': 'int',
'type': 'str'
}
attribute_map = {
'json_path': 'JSONPath',
'description': 'description',
'format': 'format',
'name': 'name',
'priority': 'priority',
'type': 'type'
}
def __init__(self, json_path=None, description=None, format=None, name=None, priority=None, type=None):
"""
V1beta1CustomResourceColumnDefinition - a model defined in Swagger
"""
self._json_path = None
self._description = None
self._format = None
self._name = None
self._priority = None
self._type = None
self.discriminator = None
self.json_path = json_path
if description is not None:
self.description = description
if format is not None:
self.format = format
self.name = name
if priority is not None:
self.priority = priority
self.type = type
@property
def json_path(self):
"""
Gets the json_path of this V1beta1CustomResourceColumnDefinition.
JSONPath is a simple JSON path, i.e. with array notation.
:return: The json_path of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._json_path
@json_path.setter
def json_path(self, json_path):
"""
Sets the json_path of this V1beta1CustomResourceColumnDefinition.
JSONPath is a simple JSON path, i.e. with array notation.
:param json_path: The json_path of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if json_path is None:
raise ValueError("Invalid value for `json_path`, must not be `None`")
self._json_path = json_path
@property
def description(self):
"""
Gets the description of this V1beta1CustomResourceColumnDefinition.
description is a human readable description of this column.
:return: The description of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._description
@description.setter
def description(self, description):
"""
Sets the description of this V1beta1CustomResourceColumnDefinition.
description is a human readable description of this column.
:param description: The description of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
self._description = description
@property
def format(self):
"""
Gets the format of this V1beta1CustomResourceColumnDefinition.
format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:return: The format of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._format
@format.setter
def format(self, format):
"""
Sets the format of this V1beta1CustomResourceColumnDefinition.
format is an optional OpenAPI type definition for this column. The 'name' format is applied to the primary identifier column to assist in clients identifying column is the resource name. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:param format: The format of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
self._format = format
@property
def name(self):
"""
Gets the name of this V1beta1CustomResourceColumnDefinition.
name is a human readable name for the column.
:return: The name of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""
Sets the name of this V1beta1CustomResourceColumnDefinition.
name is a human readable name for the column.
:param name: The name of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if name is None:
raise ValueError("Invalid value for `name`, must not be `None`")
self._name = name
@property
def priority(self):
"""
Gets the priority of this V1beta1CustomResourceColumnDefinition.
priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a higher priority.
:return: The priority of this V1beta1CustomResourceColumnDefinition.
:rtype: int
"""
return self._priority
@priority.setter
def priority(self, priority):
"""
Sets the priority of this V1beta1CustomResourceColumnDefinition.
priority is an integer defining the relative importance of this column compared to others. Lower numbers are considered higher priority. Columns that may be omitted in limited space scenarios should be given a higher priority.
:param priority: The priority of this V1beta1CustomResourceColumnDefinition.
:type: int
"""
self._priority = priority
@property
def type(self):
"""
Gets the type of this V1beta1CustomResourceColumnDefinition.
type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:return: The type of this V1beta1CustomResourceColumnDefinition.
:rtype: str
"""
return self._type
@type.setter
def type(self, type):
"""
Sets the type of this V1beta1CustomResourceColumnDefinition.
type is an OpenAPI type definition for this column. See https://github.com/OAI/OpenAPI-Specification/blob/master/versions/2.0.md#data-types for more.
:param type: The type of this V1beta1CustomResourceColumnDefinition.
:type: str
"""
if type is None:
raise ValueError("Invalid value for `type`, must not be `None`")
self._type = type
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1beta1CustomResourceColumnDefinition):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
|
[
"haoweic@google.com"
] |
haoweic@google.com
|
d5c4cac739d6c9ad1a641938dda9973c912c84c5
|
e944d288093c9234c3a6a2a76ffe4e3c9b236cf1
|
/annotation_utils/coco/structs/__init__.py
|
66e257d06be64002c0bce0580e1d58fd6c768ce7
|
[
"MIT"
] |
permissive
|
darwinharianto/annotation_utils
|
598b043345790580e99f34f159b9612b9b1bcd52
|
1cbdadaa28ff945e705dd7b806dda395e32ab23c
|
refs/heads/master
| 2022-04-27T01:20:10.738778
| 2020-04-27T09:23:57
| 2020-04-27T09:23:57
| 255,525,300
| 0
| 0
|
MIT
| 2020-04-27T09:23:59
| 2020-04-14T06:10:57
|
Python
|
UTF-8
|
Python
| false
| false
| 245
|
py
|
from .objects import COCO_Info, COCO_License, COCO_Image, \
COCO_Annotation, COCO_Category
from .handlers import COCO_License_Handler, COCO_Image_Handler, \
COCO_Annotation_Handler, COCO_Category_Handler
from .dataset import COCO_Dataset
|
[
"mork.clayton3@gmail.com"
] |
mork.clayton3@gmail.com
|
3d842f277956e7f09b7d38e83fd8d63cafd6e1cc
|
cc954deeb8178398a99be10c72d0d6b150801a4e
|
/log_bolt/bolt_kafka_registe.py
|
b9e47a8b219759261c816d36ed7f89ae835da4ba
|
[] |
no_license
|
Wstc2013/log_bolt
|
486b3d645b9f37bdf7010178169dcd35e29b51b8
|
f48473c0559072f6e5673631d31c2ba47c9c3450
|
refs/heads/master
| 2021-01-25T13:24:07.933630
| 2018-03-02T10:26:48
| 2018-03-02T10:26:48
| 123,565,326
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,289
|
py
|
#-*-coding:utf8-*-
#!/usr/bin/env python
from __future__ import absolute_import
from pyleus.storm import SimpleBolt
from module.registehandlelogic import RegisteHandleLogic
import logging
import time
import configparser
config = configparser.ConfigParser()
config.read("config/config.ini",encoding='utf-8')
logdir = config.get("log", "dir")
log = logging.getLogger('test_kafka')
class RegisteBolt(SimpleBolt):
def process_tuple(self,tup):
value = tup.values
if value != [''] and value != ['\x03']:
log.debug("kafka่ทๅๅฐ็ๆฐๆฎไธบ:%s" % (value))
registe_handle_logic_obj = RegisteHandleLogic(value)
log.debug(u"ๅผๅงๆณจๅๆฌกๆฐๅค็!!!!")
registe_handle_logic_obj.count('REGISTECOUNT')
log.debug(u"ๅผๅงๆณจๅๆๅ
ฅๆฐๆฎๅบๅค็!!!!")
registe_handle_logic_obj.insertMysqlRegister()
if __name__ == '__main__':
log_time = time.strftime('%Y%m%d', time.localtime(time.time()))
log_filename = '%s/test_registe_%s.log' % (logdir,log_time)
logging.basicConfig(
level=logging.DEBUG,
filename=log_filename,
format="%(asctime)s[%(levelname)s][%(lineno)d]%(message)s",
filemode='a',
)
RegisteBolt().run()
|
[
"visen@enjoybcg.com"
] |
visen@enjoybcg.com
|
a182c56ebe7dd48855684d7cf5c5f4884bd9b717
|
9e4290b8e96c260852fd9b0e624835e7e24e422d
|
/0007.py
|
ed7dc336e0ca9463dabb9bc3d6f2a9af8c511509
|
[] |
no_license
|
emojipeach/euler_problems_python
|
3d7b2bbb70b3f8b3e155751ca1f343417a2172fe
|
39d0e1969ea9ac9f466845cab4e255efa2f41bd9
|
refs/heads/master
| 2020-03-21T23:19:46.185326
| 2018-07-05T21:45:39
| 2018-07-05T21:45:39
| 139,180,546
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 520
|
py
|
def is_prime(test_number):
if test_number == 1:
return False
elif test_number < 4:
return True
elif test_number % 2 == 0:
return False
else:
x = int(test_number ** (1/2)) + 1
for i in range(3, x, 2):
if test_number % i == 0:
return False
return True
def next_prime(primes):
x = max(primes)
for i in range(x + 1, x + 100000):
if is_prime(i):
primes.append(i)
return
def nth_prime(n):
while n > len(primes):
next_prime(primes)
print(max(primes))
primes = [2, 3, 5]
nth_prime(10001)
|
[
"37852121+emojipeach@users.noreply.github.com"
] |
37852121+emojipeach@users.noreply.github.com
|
fc9b0c269aecdb44c4736fe6b9da03555f7de8e3
|
31622dd16963b459ac6eec71fcf54e4d243ac773
|
/edu_sharing_client/models/license.py
|
073b8ab7d8a99b38a95e9902e2a8e4a23e2cd02e
|
[] |
no_license
|
torsten-simon/oeh-search-etl
|
95e6e92698a97c98ef9d5b02076edcf993736d6f
|
eacdadcd8af169cb54629db0d2d46a5616f854a6
|
refs/heads/master
| 2023-04-16T05:08:41.194239
| 2020-11-16T09:51:59
| 2020-11-16T09:51:59
| 318,169,232
| 0
| 0
| null | 2023-04-03T23:04:46
| 2020-12-03T11:20:44
| null |
UTF-8
|
Python
| false
| false
| 3,484
|
py
|
# coding: utf-8
"""
edu-sharing Repository REST API
The public restful API of the edu-sharing repository. # noqa: E501
OpenAPI spec version: 1.1
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class License(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'icon': 'str',
'url': 'str'
}
attribute_map = {
'icon': 'icon',
'url': 'url'
}
def __init__(self, icon=None, url=None): # noqa: E501
"""License - a model defined in Swagger""" # noqa: E501
self._icon = None
self._url = None
self.discriminator = None
if icon is not None:
self.icon = icon
if url is not None:
self.url = url
@property
def icon(self):
"""Gets the icon of this License. # noqa: E501
:return: The icon of this License. # noqa: E501
:rtype: str
"""
return self._icon
@icon.setter
def icon(self, icon):
"""Sets the icon of this License.
:param icon: The icon of this License. # noqa: E501
:type: str
"""
self._icon = icon
@property
def url(self):
"""Gets the url of this License. # noqa: E501
:return: The url of this License. # noqa: E501
:rtype: str
"""
return self._url
@url.setter
def url(self, url):
"""Sets the url of this License.
:param url: The url of this License. # noqa: E501
:type: str
"""
self._url = url
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(License, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, License):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
|
[
"simon@edu-sharing.net"
] |
simon@edu-sharing.net
|
91b20ea47434b099c8ae47c90abfab4af64dad34
|
89cd8b77ad5171c336cc60b2133fe6468a6cb53f
|
/Module01_CZ/day1_basics/04-ไปฃ็ /day1/20_้่ฆๆผ็คบ__________ๅ้็ๅฎไน.py
|
f4f737892f50ed4f9b166828e1067e18cd29954f
|
[
"MIT"
] |
permissive
|
fenglihanxiao/Python
|
75178f6b6b0c53345e1ed54226ea645216572d6c
|
872baf3a3a5ee42740161152605ca2b1ddf4cd30
|
refs/heads/master
| 2021-05-23T18:49:20.656433
| 2020-04-29T01:06:21
| 2020-04-29T01:06:21
| 253,199,073
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 110
|
py
|
"""
ไฝฟ็จๅ้ไฟๅญไธชไบบไฟกๆฏ๏ผ
ๅงๅ๏ผๅผ ไผ ๆบ
ๆงๅซ๏ผ็ท
ๅนด้พ๏ผ21
็ต่ฏ๏ผ18800008888
"""
|
[
"fenglihanxiao@qq.com"
] |
fenglihanxiao@qq.com
|
a1f02577c0adfa04d1396283c0f946dca6808285
|
77ee1f677ab2ececb821a11be128b76bcf0e8d6f
|
/electrum_mona/gui/qt/lightning_dialog.py
|
1d709aed9935b2c01bce4e473c6c8bdd4f25e9d9
|
[
"MIT"
] |
permissive
|
zcore-dev/electrum-mona
|
c74e6142a0f34721be70dba68d524ae9ce03179c
|
2beb0c9c7794e8b03d1725bae41ee8b792c57275
|
refs/heads/master
| 2020-08-22T15:32:55.604727
| 2019-10-21T22:56:29
| 2019-10-21T22:56:29
| 216,427,159
| 0
| 0
|
MIT
| 2019-10-20T21:03:48
| 2019-10-20T21:03:48
| null |
UTF-8
|
Python
| false
| false
| 3,658
|
py
|
#!/usr/bin/env python
#
# Electrum - lightweight Bitcoin client
# Copyright (C) 2012 thomasv@gitorious
#
# Permission is hereby granted, free of charge, to any person
# obtaining a copy of this software and associated documentation files
# (the "Software"), to deal in the Software without restriction,
# including without limitation the rights to use, copy, modify, merge,
# publish, distribute, sublicense, and/or sell copies of the Software,
# and to permit persons to whom the Software is furnished to do so,
# subject to the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS
# BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN
# ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
# CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
from PyQt5.QtGui import QStandardItemModel, QStandardItem
from PyQt5.QtCore import Qt
from PyQt5.QtWidgets import (QDialog, QWidget, QLabel, QVBoxLayout, QCheckBox,
QGridLayout, QPushButton, QLineEdit, QTabWidget)
from electrum_mona.i18n import _
from .util import HelpLabel, MyTreeView, Buttons
class LightningDialog(QDialog):
def __init__(self, gui_object):
QDialog.__init__(self)
self.gui_object = gui_object
self.config = gui_object.config
self.network = gui_object.daemon.network
self.setWindowTitle(_('Lightning Network'))
self.setMinimumSize(600, 20)
vbox = QVBoxLayout(self)
self.num_peers = QLabel('')
vbox.addWidget(self.num_peers)
self.num_nodes = QLabel('')
vbox.addWidget(self.num_nodes)
self.num_channels = QLabel('')
vbox.addWidget(self.num_channels)
self.status = QLabel('')
vbox.addWidget(self.status)
vbox.addStretch(1)
b = QPushButton(_('Close'))
b.clicked.connect(self.close)
vbox.addLayout(Buttons(b))
self.network.register_callback(self.on_channel_db, ['channel_db'])
self.network.register_callback(self.set_num_peers, ['gossip_peers'])
self.network.register_callback(self.set_unknown_channels, ['unknown_channels'])
self.network.channel_db.update_counts() # trigger callback
self.set_num_peers('', self.network.lngossip.num_peers())
self.set_unknown_channels('', len(self.network.lngossip.unknown_ids))
def on_channel_db(self, event, num_nodes, num_channels, num_policies):
self.num_nodes.setText(_(f'{num_nodes} nodes'))
self.num_channels.setText(_(f'{num_channels} channels'))
def set_num_peers(self, event, num_peers):
self.num_peers.setText(_(f'Connected to {num_peers} peers'))
def set_unknown_channels(self, event, unknown):
self.status.setText(_(f'Requesting {unknown} channels...') if unknown else '')
def is_hidden(self):
return self.isMinimized() or self.isHidden()
def show_or_hide(self):
if self.is_hidden():
self.bring_to_top()
else:
self.hide()
def bring_to_top(self):
self.show()
self.raise_()
def closeEvent(self, event):
self.gui_object.lightning_dialog = None
event.accept()
|
[
"root@DESKTOP-97LL1PI.localdomain"
] |
root@DESKTOP-97LL1PI.localdomain
|
b7bf9bf3508f0c9b91f0a398d44aa26c34edd8f1
|
fdd78a985cde644ac362caaaa8e270f7177a6e16
|
/python/spreadsheet_month.py
|
0a292a31eeeef4cdb15e5044c586c89d8c613a69
|
[] |
no_license
|
andwxu/hackgt
|
d2980a4c67ab9c50e14783e6b8c22d1bbd584668
|
a147fc546c1080b42df26caebbb530268d12c632
|
refs/heads/master
| 2022-12-31T00:29:56.239822
| 2020-10-18T11:41:30
| 2020-10-18T11:41:30
| 304,765,023
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,433
|
py
|
import gspread
from oauth2client.service_account import ServiceAccountCredentials
from matplotlib import pyplot as plt
from matplotlib import ticker as tick
from datetime import datetime, timezone, timedelta, date
import numpy as np
import random
import re
import mpld3
# use creds to create a client to interact with the Google Drive API
scope = ["https://spreadsheets.google.com/feeds",'https://www.googleapis.com/auth/spreadsheets',"https://www.googleapis.com/auth/drive.file","https://www.googleapis.com/auth/drive"]
creds = ServiceAccountCredentials.from_json_keyfile_name('sheets_data.json', scope)
client = gspread.authorize(creds)
sheet = client.open("Orders").sheet1
list_of_orders = sheet.get_all_values()
#initialize dictionary for each hour in a day
hours_month = dict()
i = 0
while i < 24:
hours_month[i] = dict()
i = i + 1
#iterate through rows in sheet, pulling time and item of order
menu = []
open_hour = 12
close_hour = 22
for order in list_of_orders:
time = datetime.fromtimestamp(int(order[1]) - 14400, timezone.utc)
elapsed = datetime.now() - time.replace(tzinfo=None)
elapsed_day = elapsed.days
hour = time.hour
order = order[2]
foods = re.split(",", order)
for food in foods:
if hour in range(open_hour, close_hour):
if food not in menu:
menu.append(food)
if elapsed_day <= 31:
if food in hours_month[hour]:
hours_month[hour][food] = hours_month[hour][food] + 1
else:
hours_month[hour][food] = 1
for hour in hours_month:
for food in menu:
if food in hours_month[hour]:
hours_month[hour][food] = hours_month[hour][food] / 31
else:
hours_month[hour][food] = .05
##generate x axis labels based on opening/closing hours
x_labels = []
for i in range(open_hour, close_hour + 1):
label = ""
time = i
if time % 12 != 0:
time = time % 12
else:
time = 12
label += str(time) + ":00"
if i <= 12:
label += " AM"
else:
label += " PM"
x_labels.append(label)
##format graph
barWidth = .5/len(menu)
position_base = np.arange(0,24,1)
i = -.25
fig, ax = plt.subplots(figsize=(20, 6))
x = range(len(menu))
plt.xlabel('Hour')
ax.xaxis.set_major_locator(tick.MultipleLocator(1))
ax.yaxis.set_major_locator(tick.MultipleLocator(1))
ax.set_xticklabels(x_labels)
ax.margins(2,0)
plt.xlim(12-.3, 21+.3)
plt.ylabel('Number of Orders')
ax.set_title('Average Orders/Hour per Month')
ax.yaxis.set_major_locator(tick.MultipleLocator(1))
max_val = 0
for menu_item in menu: #construct each bar
bar = []
for order_hour in hours_month:
if menu_item in hours_month[order_hour]:
if hours_month[order_hour][menu_item] > max_val:
max_val = hours_month[order_hour][menu_item]
if hours_month[order_hour][menu_item] < .05:
bar.append(.05)
else:
bar.append(hours_month[order_hour][menu_item])
else:
bar.append(.05)
r = random.random()
g = random.random()
b = random.random()
rgb = (r, g, b)
plt.bar((position_base + i), bar, color=rgb, width = barWidth, edgecolor = 'white', label = menu_item, align='edge')
i = i + .5/len(menu)
plt.ylim(0, max(round(max_val) + .5, 1))
plt.legend()
plt.savefig('../public/avg_month.png')
|
[
"46511636+aidandonelan@users.noreply.github.com"
] |
46511636+aidandonelan@users.noreply.github.com
|
aad525f6d2bb1919a03e49948c4f34fe56ee5e66
|
8e6e3f7fc065548cb25825632c49d83964bf9f30
|
/Network/icmp_arp_nmap.py
|
d0ae2b9a0e1de85c750dab6d970b376a9474716c
|
[] |
no_license
|
raviwithu/Scripts
|
1d13c9f368ed9ab966fda434d022acd9f71d3f1d
|
cba34cdceee121ce696bc1b30faf19a1fc126eda
|
refs/heads/master
| 2021-01-22T02:13:18.252626
| 2018-05-03T01:53:16
| 2018-05-03T01:53:16
| 92,339,767
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 637
|
py
|
#!/usr/bin/env python
import logging
import subprocess
logging.getLogger("scapy.runtime").setLevel(logging.ERROR)
logging.getLogger("scapy.interactive").setLevel(logging.ERROR)
logging.getLogger("scapy.loading").setLevel(logging.ERROR)
from scapy.all import *
ans, unans = sr(IP(dst = "192.168.0.2-25") / ICMP(), timeout = 2, iface = "eth0", verbose = 0)
reachable = []
for reply in ans:
reachable.append(reply[1][IP].src)
for host in reachable:
send(ARP(hwsrc = get_if_hwaddr("eth0"), psrc = "192.168.0.1", hwdst = "ff:ff:ff:ff:ff:ff", pdst = host), iface = "eth0", verbose = 0)
print "\nDone!\n"
|
[
"miravishankar@yahoo.co.in"
] |
miravishankar@yahoo.co.in
|
181dff928bcc29942a15804141960b142b348397
|
0183da495028d9d341641791355e62ba36d3f698
|
/PersonDetection/main.py
|
9b885d355bd53b10b03072db8adf438bb1ef2c69
|
[] |
no_license
|
Joshua1225/PersonSearchEngine
|
11bde400a39d555e00a363ca25b4b8721165e3fb
|
fdda6bcd6bfaf6cea736d19bb559ba94e359e41b
|
refs/heads/main
| 2023-03-07T04:58:18.604258
| 2020-12-12T18:03:15
| 2020-12-12T18:03:15
| 317,192,690
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 382
|
py
|
from Detector import PedestrianDetector, PedestrianDetectionResultDTO
import cv2
if __name__ == '__main__':
detector = PedestrianDetector('./weights/epoches_112.pth', cuda=True, cpu=False)
img = cv2.imread('./images/1.jpg')
pdrDTO = detector.detect(img)
img_list = pdrDTO.get_img_list()
for i, img in enumerate(img_list):
cv2.imwrite(f"{i}_.jpg", img)
|
[
"noreply@github.com"
] |
Joshua1225.noreply@github.com
|
e44d807c8b5f173e32fde9820ee02ff6100abd9b
|
259b81f2a6d02947cd95c1fbdac4ea182592d331
|
/API wrappers/Python/examples/mySE python.py
|
b0aff4b8e7f0dd249fd2f9c17246f26a7d8d3f7b
|
[
"MIT"
] |
permissive
|
Arthurdw/mySE
|
58ee59c982266b1d4ff0a53e01b882820125a71f
|
0100c2a4e8fd5006e170ca8a2890080ed96d0153
|
refs/heads/master
| 2020-12-08T20:48:54.380742
| 2020-05-03T11:56:29
| 2020-05-03T11:56:29
| 233,091,191
| 0
| 0
|
MIT
| 2020-01-16T20:06:44
| 2020-01-10T16:52:39
|
Python
|
UTF-8
|
Python
| false
| false
| 966
|
py
|
from mySE import mySE
from time import sleep
local_url, server_secret, mail = "http://127.0.0.1:5000/", "mySecureServerPassword", "mail@mail.mail"
# Generate a token:
# try:
mySE.gen_token(local_url, server_secret, mail)
# except mySE.error.UnauthorizedError:
# pass
# token = mySe.gen_token(local_url, mail)
# Fetch our token:
token = mySE.get_token(local_url, mail)
# Create our client object.
client = mySE.Client(local_url, token)
print(f"Client ID: {client.id}")
print(f"This secret token: {token}")
# Create 2 logs:
print("\r", "Creating 2 logs...", end=' ')
client.add_log(False)
sleep(1.2)
client.add_log(True)
print("\b\b\b\b: Done.")
# Display our logs:
print(f"Log information: ({len(client.logs)})")
count = 0
for log in client.logs:
count += 1
print(f"Log {count} | ID: {log.id};")
print(f"Log {count} | Time: {log.time.strftime('%d/%m/%Y | %H:%M:%S')}")
print(f"Log {count} | The light was {'on' if log.light else 'off'}!")
|
[
"arthur.dewitte@gmail.com"
] |
arthur.dewitte@gmail.com
|
8174036a334ba82c11c5dbd9fa16642607155e86
|
06ad8f591b7b68d98d6f422e12c447c8453e1476
|
/data/cifar.py
|
6d323fbf99da68c37831849f7b128d2ca2b91676
|
[
"MIT"
] |
permissive
|
stormraiser/disunknown
|
4713a177bfa187ea918cd8dbe2f8d325b9a0db5d
|
194cc01851fe26bc2f0ed87cdcc238c801f4a333
|
refs/heads/main
| 2023-08-29T08:42:45.019039
| 2021-10-13T06:33:23
| 2021-10-13T06:33:23
| 406,071,664
| 20
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 640
|
py
|
import torch, torchvision
class CIFAR10(torchvision.datasets.CIFAR10):
def __init__(self, root, part, labeled_factors, transform):
super().__init__(root, part == 'train', transform = transform, download = True)
if len(labeled_factors) == 0:
self.has_label = False
self.nclass = []
self.class_freq = []
else:
self.has_label = True
self.nclass = [10]
class_count = torch.tensor(self.targets).bincount(minlength = 10)
self.class_freq = [class_count.float() / self.data.shape[0]]
def __getitem__(self, k):
img, target = super().__getitem__(k)
return (img, torch.tensor([target])) if self.has_label else img
|
[
"stormraiser2012@gmail.com"
] |
stormraiser2012@gmail.com
|
0289b4bcf761b49c33907f4f98a3ded9f257d4fa
|
429a8441bb9730dcf0e33fedcb5f3672a731b3e7
|
/xero_python/accounting/models/tax_rate.py
|
3105e7e85477aa221bd8d79e66b609249374e58e
|
[
"MIT"
] |
permissive
|
gregsteelxinja/xero-python
|
1a26ec3b05ea156dd6848f2ec313c72e9f39b0e2
|
d0473ba91099de3464b3dffa377df5a11ad95afc
|
refs/heads/master
| 2022-12-16T10:54:11.424971
| 2020-09-01T01:00:23
| 2020-09-01T01:00:23
| 291,526,551
| 0
| 0
| null | 2020-08-30T18:16:48
| 2020-08-30T18:16:48
| null |
UTF-8
|
Python
| false
| false
| 14,802
|
py
|
# coding: utf-8
"""
Accounting API
No description provided (generated by Openapi Generator https://github.com/openapitools/openapi-generator) # noqa: E501
OpenAPI spec version: 2.2.14
Contact: api@xero.com
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
from xero_python.models import BaseModel
class TaxRate(BaseModel):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
"""
Attributes:
openapi_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
openapi_types = {
"name": "str",
"tax_type": "str",
"tax_components": "list[TaxComponent]",
"status": "str",
"report_tax_type": "str",
"can_apply_to_assets": "bool",
"can_apply_to_equity": "bool",
"can_apply_to_expenses": "bool",
"can_apply_to_liabilities": "bool",
"can_apply_to_revenue": "bool",
"display_tax_rate": "float",
"effective_rate": "float",
}
attribute_map = {
"name": "Name",
"tax_type": "TaxType",
"tax_components": "TaxComponents",
"status": "Status",
"report_tax_type": "ReportTaxType",
"can_apply_to_assets": "CanApplyToAssets",
"can_apply_to_equity": "CanApplyToEquity",
"can_apply_to_expenses": "CanApplyToExpenses",
"can_apply_to_liabilities": "CanApplyToLiabilities",
"can_apply_to_revenue": "CanApplyToRevenue",
"display_tax_rate": "DisplayTaxRate",
"effective_rate": "EffectiveRate",
}
def __init__(
self,
name=None,
tax_type=None,
tax_components=None,
status=None,
report_tax_type=None,
can_apply_to_assets=None,
can_apply_to_equity=None,
can_apply_to_expenses=None,
can_apply_to_liabilities=None,
can_apply_to_revenue=None,
display_tax_rate=None,
effective_rate=None,
): # noqa: E501
"""TaxRate - a model defined in OpenAPI""" # noqa: E501
self._name = None
self._tax_type = None
self._tax_components = None
self._status = None
self._report_tax_type = None
self._can_apply_to_assets = None
self._can_apply_to_equity = None
self._can_apply_to_expenses = None
self._can_apply_to_liabilities = None
self._can_apply_to_revenue = None
self._display_tax_rate = None
self._effective_rate = None
self.discriminator = None
if name is not None:
self.name = name
if tax_type is not None:
self.tax_type = tax_type
if tax_components is not None:
self.tax_components = tax_components
if status is not None:
self.status = status
if report_tax_type is not None:
self.report_tax_type = report_tax_type
if can_apply_to_assets is not None:
self.can_apply_to_assets = can_apply_to_assets
if can_apply_to_equity is not None:
self.can_apply_to_equity = can_apply_to_equity
if can_apply_to_expenses is not None:
self.can_apply_to_expenses = can_apply_to_expenses
if can_apply_to_liabilities is not None:
self.can_apply_to_liabilities = can_apply_to_liabilities
if can_apply_to_revenue is not None:
self.can_apply_to_revenue = can_apply_to_revenue
if display_tax_rate is not None:
self.display_tax_rate = display_tax_rate
if effective_rate is not None:
self.effective_rate = effective_rate
@property
def name(self):
"""Gets the name of this TaxRate. # noqa: E501
Name of tax rate # noqa: E501
:return: The name of this TaxRate. # noqa: E501
:rtype: str
"""
return self._name
@name.setter
def name(self, name):
"""Sets the name of this TaxRate.
Name of tax rate # noqa: E501
:param name: The name of this TaxRate. # noqa: E501
:type: str
"""
self._name = name
@property
def tax_type(self):
"""Gets the tax_type of this TaxRate. # noqa: E501
The tax type # noqa: E501
:return: The tax_type of this TaxRate. # noqa: E501
:rtype: str
"""
return self._tax_type
@tax_type.setter
def tax_type(self, tax_type):
"""Sets the tax_type of this TaxRate.
The tax type # noqa: E501
:param tax_type: The tax_type of this TaxRate. # noqa: E501
:type: str
"""
self._tax_type = tax_type
@property
def tax_components(self):
"""Gets the tax_components of this TaxRate. # noqa: E501
See TaxComponents # noqa: E501
:return: The tax_components of this TaxRate. # noqa: E501
:rtype: list[TaxComponent]
"""
return self._tax_components
@tax_components.setter
def tax_components(self, tax_components):
"""Sets the tax_components of this TaxRate.
See TaxComponents # noqa: E501
:param tax_components: The tax_components of this TaxRate. # noqa: E501
:type: list[TaxComponent]
"""
self._tax_components = tax_components
@property
def status(self):
"""Gets the status of this TaxRate. # noqa: E501
See Status Codes # noqa: E501
:return: The status of this TaxRate. # noqa: E501
:rtype: str
"""
return self._status
@status.setter
def status(self, status):
"""Sets the status of this TaxRate.
See Status Codes # noqa: E501
:param status: The status of this TaxRate. # noqa: E501
:type: str
"""
allowed_values = [
"ACTIVE",
"DELETED",
"ARCHIVED",
"PENDING",
"None",
] # noqa: E501
if status not in allowed_values:
raise ValueError(
"Invalid value for `status` ({0}), must be one of {1}".format( # noqa: E501
status, allowed_values
)
)
self._status = status
@property
def report_tax_type(self):
"""Gets the report_tax_type of this TaxRate. # noqa: E501
See ReportTaxTypes # noqa: E501
:return: The report_tax_type of this TaxRate. # noqa: E501
:rtype: str
"""
return self._report_tax_type
@report_tax_type.setter
def report_tax_type(self, report_tax_type):
"""Sets the report_tax_type of this TaxRate.
See ReportTaxTypes # noqa: E501
:param report_tax_type: The report_tax_type of this TaxRate. # noqa: E501
:type: str
"""
allowed_values = [
"AVALARA",
"BASEXCLUDED",
"CAPITALSALESOUTPUT",
"CAPITALEXPENSESINPUT",
"ECOUTPUT",
"ECOUTPUTSERVICES",
"ECINPUT",
"ECACQUISITIONS",
"EXEMPTEXPENSES",
"EXEMPTINPUT",
"EXEMPTOUTPUT",
"GSTONIMPORTS",
"INPUT",
"INPUTTAXED",
"MOSSSALES",
"NONE",
"NONEOUTPUT",
"OUTPUT",
"PURCHASESINPUT",
"SALESOUTPUT",
"EXEMPTCAPITAL",
"EXEMPTEXPORT",
"CAPITALEXINPUT",
"GSTONCAPIMPORTS",
"GSTONCAPITALIMPORTS",
"REVERSECHARGES",
"PAYMENTS",
"INVOICE",
"CASH",
"ACCRUAL",
"FLATRATECASH",
"FLATRATEACCRUAL",
"ACCRUALS",
"TXCA",
"SRCAS",
"DSOUTPUT",
"BLINPUT2",
"EPINPUT",
"IMINPUT2",
"MEINPUT",
"IGDSINPUT2",
"ESN33OUTPUT",
"OPINPUT",
"OSOUTPUT",
"TXN33INPUT",
"TXESSINPUT",
"TXREINPUT",
"TXPETINPUT",
"NRINPUT",
"ES33OUTPUT",
"ZERORATEDINPUT",
"ZERORATEDOUTPUT",
"DRCHARGESUPPLY",
"DRCHARGE",
"CAPINPUT",
"CAPIMPORTS",
"IMINPUT",
"INPUT2",
"CIUINPUT",
"SRINPUT",
"OUTPUT2",
"SROUTPUT",
"CAPOUTPUT",
"SROUTPUT2",
"CIUOUTPUT",
"ZROUTPUT",
"ZREXPORT",
"ACC28PLUS",
"ACCUPTO28",
"OTHEROUTPUT",
"SHOUTPUT",
"ZRINPUT",
"BADDEBT",
"OTHERINPUT",
"None",
] # noqa: E501
if report_tax_type not in allowed_values:
raise ValueError(
"Invalid value for `report_tax_type` ({0}), must be one of {1}".format( # noqa: E501
report_tax_type, allowed_values
)
)
self._report_tax_type = report_tax_type
@property
def can_apply_to_assets(self):
"""Gets the can_apply_to_assets of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for asset accounts i.e. true,false # noqa: E501
:return: The can_apply_to_assets of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_assets
@can_apply_to_assets.setter
def can_apply_to_assets(self, can_apply_to_assets):
"""Sets the can_apply_to_assets of this TaxRate.
Boolean to describe if tax rate can be used for asset accounts i.e. true,false # noqa: E501
:param can_apply_to_assets: The can_apply_to_assets of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_assets = can_apply_to_assets
@property
def can_apply_to_equity(self):
"""Gets the can_apply_to_equity of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for equity accounts i.e true,false # noqa: E501
:return: The can_apply_to_equity of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_equity
@can_apply_to_equity.setter
def can_apply_to_equity(self, can_apply_to_equity):
"""Sets the can_apply_to_equity of this TaxRate.
Boolean to describe if tax rate can be used for equity accounts i.e true,false # noqa: E501
:param can_apply_to_equity: The can_apply_to_equity of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_equity = can_apply_to_equity
@property
def can_apply_to_expenses(self):
"""Gets the can_apply_to_expenses of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for expense accounts i.e. true,false # noqa: E501
:return: The can_apply_to_expenses of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_expenses
@can_apply_to_expenses.setter
def can_apply_to_expenses(self, can_apply_to_expenses):
"""Sets the can_apply_to_expenses of this TaxRate.
Boolean to describe if tax rate can be used for expense accounts i.e. true,false # noqa: E501
:param can_apply_to_expenses: The can_apply_to_expenses of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_expenses = can_apply_to_expenses
@property
def can_apply_to_liabilities(self):
"""Gets the can_apply_to_liabilities of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for liability accounts i.e. true,false # noqa: E501
:return: The can_apply_to_liabilities of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_liabilities
@can_apply_to_liabilities.setter
def can_apply_to_liabilities(self, can_apply_to_liabilities):
"""Sets the can_apply_to_liabilities of this TaxRate.
Boolean to describe if tax rate can be used for liability accounts i.e. true,false # noqa: E501
:param can_apply_to_liabilities: The can_apply_to_liabilities of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_liabilities = can_apply_to_liabilities
@property
def can_apply_to_revenue(self):
"""Gets the can_apply_to_revenue of this TaxRate. # noqa: E501
Boolean to describe if tax rate can be used for revenue accounts i.e. true,false # noqa: E501
:return: The can_apply_to_revenue of this TaxRate. # noqa: E501
:rtype: bool
"""
return self._can_apply_to_revenue
@can_apply_to_revenue.setter
def can_apply_to_revenue(self, can_apply_to_revenue):
"""Sets the can_apply_to_revenue of this TaxRate.
Boolean to describe if tax rate can be used for revenue accounts i.e. true,false # noqa: E501
:param can_apply_to_revenue: The can_apply_to_revenue of this TaxRate. # noqa: E501
:type: bool
"""
self._can_apply_to_revenue = can_apply_to_revenue
@property
def display_tax_rate(self):
"""Gets the display_tax_rate of this TaxRate. # noqa: E501
Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:return: The display_tax_rate of this TaxRate. # noqa: E501
:rtype: float
"""
return self._display_tax_rate
@display_tax_rate.setter
def display_tax_rate(self, display_tax_rate):
"""Sets the display_tax_rate of this TaxRate.
Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:param display_tax_rate: The display_tax_rate of this TaxRate. # noqa: E501
:type: float
"""
self._display_tax_rate = display_tax_rate
@property
def effective_rate(self):
"""Gets the effective_rate of this TaxRate. # noqa: E501
Effective Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:return: The effective_rate of this TaxRate. # noqa: E501
:rtype: float
"""
return self._effective_rate
@effective_rate.setter
def effective_rate(self, effective_rate):
"""Sets the effective_rate of this TaxRate.
Effective Tax Rate (decimal to 4dp) e.g 12.5000 # noqa: E501
:param effective_rate: The effective_rate of this TaxRate. # noqa: E501
:type: float
"""
self._effective_rate = effective_rate
|
[
"sid.maestre@gmail.com"
] |
sid.maestre@gmail.com
|
d2edaeec8fdcd119849df0305b0cb817b3235ebe
|
8d9318a33afc2c3b5ca8ac99fce0d8544478c94a
|
/Books/Casandra DB/opscenter-5.1.0/lib/py/orbited/proxy.py
|
4c0e80c1f97cce4bb513bffb9be5583f06edd599
|
[] |
no_license
|
tushar239/git-large-repo
|
e30aa7b1894454bf00546312a3fb595f6dad0ed6
|
9ee51112596e5fc3a7ab2ea97a86ec6adc677162
|
refs/heads/master
| 2021-01-12T13:48:43.280111
| 2016-11-01T22:14:51
| 2016-11-01T22:14:51
| 69,609,373
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 129
|
py
|
version https://git-lfs.github.com/spec/v1
oid sha256:ba1b92cda51cc0fbe137994a7b857befa45aa64d45009e4fb34ed1df78d0f3fc
size 5501
|
[
"tushar239@gmail.com"
] |
tushar239@gmail.com
|
6346c55ed5ae3d16b8b3bcf214eb2f9ab0feed02
|
beb5c6eda82d6ed5d1999f74c115c30f9440feb0
|
/setup.py
|
269b77f96dc7dc8b0749146096b1b6eb644bf458
|
[
"Apache-2.0"
] |
permissive
|
fpgaco/hyperscalar
|
5aadcb4fd5e47439b00169f65cfdfce3d02563ed
|
44edb9005ed0117ecf14bfd161741ecf23c41279
|
refs/heads/master
| 2020-07-30T11:14:28.785400
| 2019-09-22T21:08:43
| 2019-09-22T21:08:43
| 210,209,838
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 909
|
py
|
#!/usr/bin/env python3
# Packaging script for the `hyperscalar` distribution (setuptools-based).
from setuptools import setup, find_packages

# Long description rendered on PyPI comes straight from the README.
with open('README.md') as f:
    readme = f.read()

# NOTE(review): `license` shadows the builtin, and setuptools expects a short
# identifier (e.g. "Apache-2.0") in the `license=` field rather than the full
# text -- the text normally ships via the LICENSE file / classifier. Confirm.
with open('LICENSE.txt') as f:
    license = f.read()

setup(
    name='hyperscalar',
    version='0.0.1',
    description='Next generation type',
    keywords=['hyperscalar'],
    url='https://github.com/fpgaco/hyperscalar',
    license=license,
    long_description=readme,
    author='Kenso Trabing',
    author_email='ktrabing@acm.org',
    maintainer='Kenso Trabing',
    maintainer_email='ktrabing@acm.org',
    # Everything importable except tests, docs and the local virtualenv.
    packages=find_packages(exclude=('tests', 'docs', 'venv')),
    classifiers=[
        'Programming Language :: Python :: 3.5',
        'License :: OSI Approved :: Apache Software License',
        'Operating System :: OS Independent',
        'Development Status :: 3 - Alpha',
        'Topic :: Scientific/Engineering :: Electronic Design Automation (EDA)'
    ]
)
|
[
"ktrabing@acm.org"
] |
ktrabing@acm.org
|
7c1091c1be6fe52aee062e9522b51fcd54fa5f0e
|
526019f16e76a4afd84d25ea90fc39ffda285e34
|
/hw/HW03/code/HW03_utils.py
|
acdc2809d4a0fad12464afff2a0c335605d22bed
|
[] |
no_license
|
mitchnegus/CS289_mnegus
|
ac6a3cdd633bf5fda0fefbdf75c014eeaddb0f48
|
6111a029818762ca3fac6e6097d2a2c6a610aaf3
|
refs/heads/master
| 2021-09-05T00:31:52.637848
| 2018-01-23T04:55:38
| 2018-01-23T04:55:38
| 80,948,313
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 418
|
py
|
#HW03_utils.py
#-----------------------------------------
# Python module for CS289A HW03
#-----------------------------------------
#-----------------------------------------
import math
import numpy as np
from scipy import io as spio
def loaddata(shortpath, _DATA_DIR, dictkey):
    """Load one variable from a MATLAB .mat file.

    :param shortpath: file name relative to `_DATA_DIR`.
    :param _DATA_DIR: directory containing the data files.
    :param dictkey: name of the variable to extract from the file.
    :return: the variable as a numpy array.
    """
    mat_contents = spio.loadmat(_DATA_DIR + "/" + shortpath)
    return np.array(mat_contents[dictkey])
|
[
"mitchell.negus.57@gmail.com"
] |
mitchell.negus.57@gmail.com
|
27be4e80e15ee4738b6c1266e451404d6934047c
|
0445b05dc0c1a6b92cd4dde04fafd8acddc2365c
|
/libs/nrgreader.py
|
7a2baf3346816a09ee00824d20953e2a6634a4ae
|
[] |
no_license
|
ananchev/energymeter
|
a7842f7c31173aa332460c51b972ecf9321f0bb5
|
133e4ba75a14adb8d45ae5d5500850ee9c8fefb5
|
refs/heads/main
| 2023-08-21T10:59:06.721270
| 2021-10-17T10:31:52
| 2021-10-17T10:31:52
| 305,511,561
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 7,976
|
py
|
# Generic imports
import json # used when storing nrg readings into local json
from datetime import datetime # used to capture time when meter readings are stored
import os # used for file operations
# Establish logging
from libs.logger import logger, LOG_FILE
logger = logger.getChild('nrgreader')
# Pymodbus object
from pymodbus.client.sync import ModbusTcpClient
from pymodbus.payload import BinaryPayloadDecoder
from pymodbus.constants import Endian
# Client to work with influx database
from influxdb import InfluxDBClient
# IP and port of the Modbus TCP gateway
MODBUS_GW = dict(host='192.168.2.222', port=502)
# Energy meters
METERS = [
dict(meter_id=10,
influx_measure_base_name='line0'),
dict(meter_id=11,
influx_measure_base_name='line1'),
dict(meter_id=2,
influx_measure_base_name='line2'),
dict(meter_id=3,
influx_measure_base_name='line3'),
dict(meter_id=4,
influx_measure_base_name='line4'),
dict(meter_id=5,
influx_measure_base_name='line5')
]
# filename of the json where the readings from previous run are stored
READINGS_CACHE = 'readings_cache.json'
# details of the influx database to store timeseries data into
INLUX_DB = dict(host='192.168.2.8', port=8086, username='ananchev', password='1Race96R', database='openhab')
class Reader():
    """Read energy meters over Modbus TCP and publish the totals plus the
    per-interval consumption to InfluxDB.

    Readings are cached in a local JSON file (READINGS_CACHE) keyed by the
    trigger interval, so consumption can be computed as the difference
    between two consecutive runs with the same interval.
    """

    def __init__(self, interval="Manual"):
        """Prepare one acquisition run.

        :param interval: label of the trigger period; used as the key into
            the readings cache.
        """
        logger.info(f"Initialising energy reader with interval '{interval}'...")
        self.interval = interval
        self.prev_readings = {}
        self.readings_cache = self.init_readings_cache()
        self.modbus_client = ModbusTcpClient(**MODBUS_GW)
        self.publish_to_influx_lst = []

    def init_readings_cache(self) -> dict:
        """Load (or create) the readings cache and stage a fresh, empty
        entry for the current interval.

        Side effect: an existing entry for this interval is copied into
        ``self.prev_readings`` so consumption can be derived later.

        :return: the cache dict that will receive this run's readings.
        """
        readtime = datetime.now()
        readtime_epoch = readtime.timestamp()
        # NOTE(review): utcnow() is a classmethod, so this ignores `readtime`
        # and captures a second, independent UTC "now" for the string form.
        readtime_str = readtime.utcnow().strftime('%Y-%m-%dT%H:%M:%SZ')
        fresh_entry = dict(readtime_epoch=readtime_epoch,
                           readtime_str=readtime_str,
                           readings=list())
        # No cache file yet: start a brand new cache for this interval.
        if not os.path.exists(READINGS_CACHE):
            logger.info("Cached results from earlier readings not found. Creating a new file now...")
            return {self.interval: fresh_entry}
        with open(READINGS_CACHE, 'r', encoding='utf-8') as f:
            existing_dict = json.load(f)
        if self.interval in existing_dict:
            # Keep the previous run's readings for the consumption delta.
            logger.info(f"Cached results from earlier readings found. Copying interval '{self.interval}' from it.")
            # Bug fix: the original filtered with `key in self.interval`
            # (a substring test), which could copy unrelated intervals whose
            # names are substrings of this one; use exact equality.
            self.prev_readings = {key: value for key, value in existing_dict.items() if key == self.interval}
            existing_dict[self.interval] = fresh_entry
        else:
            logger.info(f"Cached results from earlier readings found, but interval '{self.interval}' does not exist and will be added.")
            existing_dict[self.interval] = fresh_entry
        return existing_dict

    def execute(self):
        """Run one full cycle: read meters, then, when a previous run for
        this interval exists, compute consumption and publish to InfluxDB."""
        self.read_current()
        if self.calculate_consumed_energy():
            self.write_to_influx()

    def read_current(self):
        """Read every meter's current total-energy counter, stage the values
        for publishing, and persist the cache file."""
        self.connect_modbus()
        for m in METERS:
            # store the current reading of the lifetime energy counter
            current_energy_reading = self.total_energy_now(m)
            self.readings_cache[self.interval]['readings'].append(current_energy_reading)
            self.publish_to_influx_lst.append(
                {"measurement": current_energy_reading["measurement_total"],
                 "time": self.readings_cache[self.interval]["readtime_str"],
                 "fields": dict(item=current_energy_reading["measurement_total"],
                                value=current_energy_reading["value_total"])})
        self.modbus_client.close()
        with open(READINGS_CACHE, 'w', encoding='utf-8') as f:
            json.dump(self.readings_cache, f, ensure_ascii=False, indent=4)

    def connect_modbus(self, retries=0):
        """Connect to the Modbus TCP gateway, retrying up to three times
        with a one-second pause between attempts.

        :raises Exception: when the gateway stays unreachable.
        """
        import time  # local import: `time` was never imported at module level

        connection = self.modbus_client.connect()
        if not connection:
            if retries < 3:
                time.sleep(1)
                # Bug fix: the original called the non-existent
                # `self._connect(self, retries+1)`; retry this method instead.
                self.connect_modbus(retries + 1)
            else:
                raise Exception('cannot establish connection to gateway')
        logger.info('connected to Modbus gateway')

    def total_energy_now(self, meter):
        """Read one meter's lifetime energy counter.

        :param meter: entry from METERS (meter_id plus Influx base name).
        :return: dict with the meter id, the Influx measurement name and
            the total energy in kWh.
        """
        meter_id = meter.get('meter_id')
        result = self.read_modbus_registers(meter_id)
        decoder = BinaryPayloadDecoder.fromRegisters(result.registers, byteorder=Endian.Big)
        # Counter is a 32-bit unsigned value in units of 0.01 kWh.
        energy_kwh = decoder.decode_32bit_uint() / 100
        influx_measure = meter.get('influx_measure_base_name')
        logger.info(f"{influx_measure}total = {energy_kwh} kWh")
        return dict(meter_id=meter_id, measurement_total=influx_measure + "Total", value_total=energy_kwh)

    def calculate_consumed_energy(self):
        """Derive per-meter consumption for this interval from the cached
        previous readings and stage it for publishing.

        :return: True when consumption was computed, False when there is no
            previous run for this interval yet.
        """
        if not self.prev_readings:  # empty dict evaluates to False
            logger.info(f"No previous readings exist for trigger interval '{self.interval}'. Consumed energy will be calculated on next trigger.")
            return False
        for m in METERS:
            meter_id = m.get('meter_id')
            meter_prev_reading = next(i for i in self.prev_readings[self.interval]['readings'] if i['meter_id'] == meter_id)
            meter_current_reading = next(j for j in self.readings_cache[self.interval]['readings'] if j['meter_id'] == meter_id)
            consumed = round(meter_current_reading['value_total'] - meter_prev_reading['value_total'], 2)
            logger.info(f"Consumed energy on meter '{meter_id}' for the last '{self.interval}' period is '{consumed}' kWh")
            measure_base_name = m.get('influx_measure_base_name')
            self.publish_to_influx_lst.append(
                {"measurement": measure_base_name + "Last" + self.interval,
                 "time": self.readings_cache[self.interval]["readtime_str"],
                 "fields": dict(item=measure_base_name + "Last" + self.interval,
                                value=consumed)})
        return True

    def write_to_influx(self):
        """Publish every staged point (totals plus interval consumption) to
        the InfluxDB instance described by INLUX_DB."""
        logger.info("Publishing total and interval results into influx db...")
        client = InfluxDBClient(**INLUX_DB)
        client.write_points(self.publish_to_influx_lst)
        logger.info("Done!")

    def read_modbus_registers(self, meter_id):
        """Read the two holding registers that make up the energy counter,
        retrying on transient Modbus I/O errors.

        NOTE(review): this retries forever on a persistently failing meter --
        consider bounding the recursion like connect_modbus does.
        """
        result = self.modbus_client.read_holding_registers(address=0, count=2, unit=meter_id)
        if result.isError():  # retry in case of ModbusIOException due to connection issues
            logger.warning("invalid result, retrying the read operation...")
            result = self.read_modbus_registers(meter_id)
        return result
|
[
"ananchev@gmail.com"
] |
ananchev@gmail.com
|
4a4d2c2a023f48bedef21da3b002bc22be2cf986
|
42b16c3a421e59949441edc7c2153fef8ac690eb
|
/Admin.py
|
073ca76fa3a8ef4d75a360602cd9692bddb8af52
|
[] |
no_license
|
mastermobin/DatabaseProject
|
9a7179921e2878adeba820eb662a7609084a3667
|
bc80290b7d1c132763554dd5b910c9b9075d99f2
|
refs/heads/master
| 2020-12-24T00:13:10.429887
| 2020-01-30T22:46:06
| 2020-01-30T22:46:06
| 237,320,302
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,426
|
py
|
import mysql.connector
from tabulate import tabulate
def showQuery(mydb, query, h):
    """Execute `query` on connection `mydb` and pretty-print the rows.

    :param mydb: open mysql.connector connection.
    :param query: SQL text to execute.
    :param h: column headers, passed through to tabulate.
    """
    cursor = mydb.cursor()
    cursor.execute(query)
    rows = cursor.fetchall()
    if cursor.rowcount == 0:
        print("No Result")
    else:
        print(tabulate(rows, headers=h, tablefmt='psql'))
def run(ID, MyDB):
    """Interactive admin console.

    Repeatedly shows a menu of predefined reports and runs the chosen SQL
    query against the open connection, until the user declines to continue.

    :param ID: admin identifier, only used in the greeting.
    :param MyDB: open mysql.connector connection.
    """
    print('-------------------------------')
    # NOTE(review): "Wellcome" typo is user-visible text; left unchanged here.
    print('Wellcome ' + str(ID) + ' To Admin Panel')
    while(True):
        print('-------------------------------')
        print("Choose One Of Below Options To Continue: ")
        print("""
        1.Show Average Charity's Population Per Region
        2.Show Drivers With Average Score More Than 4
        3.Show Min And Max Score For Each Driver
        4.Show Restaurants Ordered By Donated Food Amount
        5.Show Driver With Maximum State Changes
        6.Show Most Needful Charities
        7.Show Today's Deliveries With Driver Score More Than Avg
        """)
        ans = input("Your Answer: ")
        if ans == '1':
            # Report 1: average charity population, grouped by "City, Region".
            showQuery(MyDB,
                      """
                      SELECT CONCAT(City, ', ', Region) as Region, AVG(Population) as AvgPopulation
                      FROM charity
                      GROUP BY CONCAT(City, ', ', Region);
                      """,
                      ['Region', 'AvgPopulation'])
        elif ans == '2':
            # Report 2: drivers whose mean delivery rating is at least 4.
            showQuery(MyDB,
                      """
                      SELECT CONCAT(FirstName, ' ', LastName) as Name, AVG(Rate) as RateAvg
                      FROM delivery
                      INNER JOIN driver d on delivery.DriverID = d.ID
                      GROUP BY DriverID
                      HAVING (AVG(Rate) >= 4);
                      """,
                      ['Name', 'RateAvg'])
        elif ans == '3':
            # Report 3: per-driver minimum and maximum delivery ratings.
            showQuery(MyDB,
                      """
                      SELECT CONCAT(FirstName, ' ', LastName) as Name, MIN(Rate) as MinRate, MAX(Rate) as MaxRate
                      FROM delivery
                      INNER JOIN driver d on delivery.DriverID = d.ID
                      GROUP BY DriverID;
                      """,
                      ['Name', 'MinRate', 'MaxRate'])
        elif ans == '4':
            # Report 4: restaurants ranked by total food donated.
            showQuery(MyDB,
                      """
                      SELECT Name, SUM(Count) as SentFood
                      FROM delivery
                      INNER JOIN restaurant r on delivery.RestaurantID = r.ID
                      GROUP BY RestaurantID
                      ORDER BY SentFood DESC;
                      """,
                      ['Name', 'DonatedAmount'])
        elif ans == '5':
            # Report 5: the driver with the most state changes in the last day.
            showQuery(MyDB,
                      """
                      SELECT DriverID, CONCAT(FirstName, ' ', LastName) as Name, COUNT(driver_log.ID) as ChangeCount
                      FROM driver_log
                      INNER JOIN driver d on driver_log.DriverID = d.ID
                      WHERE Date >= SUBDATE(CURRENT_TIMESTAMP, INTERVAL 1 DAY)
                      GROUP BY DriverID
                      ORDER BY ChangeCount DESC
                      LIMIT 1;
                      """,
                      ['DriverID', 'Name', 'Change Count'])
        elif ans == '6':
            # Report 6: charities ranked by a need score. The UNION branches
            # add zero rows for charities with no deliveries / no contracts so
            # every charity appears in the ranking.
            showQuery(MyDB,
                      """
                      SELECT t1.Name, TakenFood, Population, ContractedRestaurants, (TakenFood * ContractedRestaurants / Population) as Score
                      FROM ((SELECT c.Name, SUM(Count) as TakenFood
                             FROM delivery
                             INNER JOIN charity c on CharityID = c.ID
                             GROUP BY CharityID)
                            UNION
                            (SELECT Name, 0 as TakenFood
                             FROM charity
                             WHERE ID NOT IN (SELECT DISTINCT CharityID FROM delivery d))
                           ) as t1
                      INNER JOIN (SELECT c7.Name, Population, ContractedRestaurants
                                  FROM charity as c7
                                  INNER JOIN ((SELECT Name, COUNT(RestaurantID) as ContractedRestaurants
                                               FROM contract
                                               INNER JOIN charity c on contract.CharityID = c.ID
                                               GROUP BY CharityID)
                                              UNION
                                              (SELECT Name, 0 as ContractedRestaurants
                                               FROM charity
                                               WHERE ID NOT IN (SELECT DISTINCT CharityID FROM contract))) as t3
                                  ON t3.Name = c7.Name
                                 ) c2 ON c2.Name = t1.Name
                      ORDER BY Score DESC;
                      """,
                      ['Name', 'TakenFood', 'Population', 'ContractedRestaurants', 'Score'])
        elif ans == '7':
            # Report 7: today's deliveries whose driver rating beats the average.
            showQuery(MyDB,
                      """
                      SELECT CONCAT(d.FirstName, ' ', d.LastName) as DriverName, c.Name as CharityName, r.Name as RestaurantName, Rate as DriverRate
                      FROM delivery
                      INNER JOIN charity c on delivery.CharityID = c.ID
                      INNER JOIN restaurant r on delivery.RestaurantID = r.ID
                      INNER JOIN driver d on delivery.DriverID = d.ID
                      WHERE Date = CURRENT_DATE() AND Rate > (SELECT AVG(Rate) FROM delivery);
                      """,
                      ['Driver Name', 'Charity Name', 'Restaurant Name', 'Driver Rate'])
        else:
            print("Wrong Choice!")
        # Anything other than y/Y exits the console.
        ans = input("Do You Want To Continue? (y/N): ")
        if ans == "Y" or ans == "y":
            continue
        break
|
[
"mvtikm@gmail.com"
] |
mvtikm@gmail.com
|
8f55ee77bb2e6f0f501c6aae41fe353d5946e7ed
|
48f092fd8191b0218df8605dc7125e526764e59e
|
/NestedLoops/venv/Scripts/pip-script.py
|
1f860a6a2d99a98a14ef6f35a31d2812b31131f3
|
[] |
no_license
|
LalityaSawant/Python-Projects
|
2edb430c094fe3d6b4e706cc61f885aa07e24dff
|
b142708256e26867f09b3063f5f3fffa305ec496
|
refs/heads/master
| 2020-05-01T03:00:26.012301
| 2019-03-23T22:09:33
| 2019-03-23T22:09:33
| 177,235,109
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 443
|
py
|
#!C:\Users\lsawant\Documents\Learning\Python\PycharmProjects\NestedLoops\venv\Scripts\python.exe
# EASY-INSTALL-ENTRY-SCRIPT: 'pip==10.0.1','console_scripts','pip'
# Auto-generated setuptools console-script wrapper: resolves and invokes the
# `pip` entry point declared by the pip==10.0.1 distribution.
__requires__ = 'pip==10.0.1'
import re
import sys

from pkg_resources import load_entry_point

if __name__ == '__main__':
    # Strip the "-script.py"/".exe" suffix so pip sees a clean program name
    # in argv[0] (e.g. for usage messages).
    sys.argv[0] = re.sub(r'(-script\.pyw?|\.exe)?$', '', sys.argv[0])
    sys.exit(
        load_entry_point('pip==10.0.1', 'console_scripts', 'pip')()
    )
|
[
"lalitya.sawant@gmail.com"
] |
lalitya.sawant@gmail.com
|
9c1ce6e5363f9d645e9a14dc86c5acf6f135c7dc
|
1dc0f0d007c060c74de33bddd4cdcd26ecdf05e0
|
/stancode_Projects/boggle_game_solver/boggle.py
|
505e63ff243f47684b3699ff04cd8ee5cc7b9451
|
[
"MIT"
] |
permissive
|
hoholarry/sc-projects
|
178e0f93f6dc6b6cda4be17403fa74c84d0d8e36
|
4bd45e52b8c13a35d38d30e6583d242480dabff4
|
refs/heads/main
| 2023-03-06T06:58:29.741353
| 2021-02-22T15:16:03
| 2021-02-22T15:16:03
| 341,227,759
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,724
|
py
|
import copy
class Graph(object):
    """Adjacency model of a 4x4 Boggle board.

    `letters` maps each letter to the list of board positions holding it;
    `adj_list` maps a (letter, row, col) node to its up-to-8 neighbouring
    nodes (horizontal, vertical and diagonal).
    """

    def __init__(self, board):
        """Build the lookup tables for `board`, a 4x4 grid of letters."""
        self.board = board
        self.letters = {}
        self.adj_list = {}
        directions = [(-1, -1), (-1, 0), (-1, 1), (0, -1), (0, 1), (1, -1), (1, 0), (1, 1)]
        for row in range(4):
            for col in range(4):
                letter = board[row][col]
                # Record every position at which this letter occurs.
                self.letters.setdefault(letter, []).append((row, col))
                # Collect the in-bounds neighbours of this cell.
                neighbours = []
                for d_row, d_col in directions:
                    r, c = row + d_row, col + d_col
                    if 0 <= r < 4 and 0 <= c < 4:
                        neighbours.append((board[r][c], r, c))
                self.adj_list[(letter, row, col)] = neighbours
        # Bug fix: removed the leftover debug print() of both tables.

    def dfs(self, word):
        """Return True iff `word` (length >= 4) can be traced on the board
        along adjacent cells without reusing a cell."""
        if len(word) < 4:
            return False
        if word[0] not in self.letters:
            return False
        # Each stack frame: (matched prefix, current node, used positions).
        stack = [(word[0], (word[0], r, c), frozenset([(r, c)]))
                 for r, c in self.letters[word[0]]]
        while stack:
            prefix, node, used = stack.pop()
            if prefix == word:
                return True
            next_letter = word[len(prefix)]
            for letter, r, c in self.adj_list[node]:
                if letter == next_letter and (r, c) not in used:
                    # Frozenset union replaces the original copy.deepcopy,
                    # which was needlessly expensive for a set of tuples.
                    stack.append((prefix + next_letter, (letter, r, c), used | {(r, c)}))
        return False
d = []  # global word list shared with find_words()


def load_dictionary():
    """Populate `d` with candidate words from dictionary.txt.

    Keeps words of length >= 4 that do not start with an uppercase letter
    (filters proper nouns), uppercased to match the board letters.
    """
    with open('dictionary.txt') as f:
        for line in f:
            # Bug fix: the original used line[:-1], which chops the last
            # character of a final line without a trailing newline, and
            # tested len(line) >= 4 *including* the newline, letting
            # 3-letter words through the filter.
            word = line.strip()
            if len(word) >= 4 and not word[0].isupper():
                d.append(word.upper())
def find_words(board):
    """Return every word from the global dictionary list `d` that can be
    traced on `board`."""
    solver = Graph(board)
    return [word for word in d if solver.dfs(word)]
def boggle_input():
    """Read a 4x4 board from stdin (four rows of four space-separated
    single letters), then print every dictionary word found on it.

    Returns False on malformed input, None otherwise.
    """
    board = []
    for _ in range(4):
        tokens = input().upper().split()
        # A row must be exactly four one-character tokens.
        if len(tokens) != 4 or any(len(tok) != 1 for tok in tokens):
            print('illegal input')
            return False
        board.append(tokens)
    for row_number in range(4):
        print('%d row of letters:' % (row_number + 1), board[row_number])
    print(board)
    words = find_words(board)
    for word in words:
        print('Found:', word)
    print('There are', len(words), 'anagrams in total.')
if __name__ == '__main__':
    # Load the word list once, then run the interactive solver.
    load_dictionary()
    boggle_input()
|
[
"noreply@github.com"
] |
hoholarry.noreply@github.com
|
fd98027336d2b8b9faa6def3fed6e1c34db9988d
|
5e3d34e2025831896851d9b11dfac81869252de0
|
/p9/pyth_triplet.py
|
0df4b399e858734011ec8428e144b53927286c6c
|
[] |
no_license
|
sanjkm/ProjectEuler
|
b522fc51907821aeeffbb64c016502aa89f6db22
|
6f1300232846e4303e6c4516e11c506e64f10fd9
|
refs/heads/master
| 2020-04-12T06:30:10.928298
| 2017-01-12T22:43:45
| 2017-01-12T22:43:45
| 65,332,463
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 651
|
py
|
# pyth_triplet.py
from operator import mul
def check_combo(a, b, c):
    """Return 1 when a^2 + b^2 == c^2 (Pythagorean triple), else 0."""
    return 1 if a ** 2 + b ** 2 == c ** 2 else 0
def gen_possible_ab_combos(c, total_sum):
    """Scan pairs (a, b) with a + b = total_sum - c for a Pythagorean
    triple with hypotenuse c.

    Starts from the balanced split and walks a down while b walks up;
    returns [a, b, c] on success, [] when no pair works.

    NOTE(review): this file targets Python 2 (print statements below), where
    the `/ 2` is integer division; under Python 3 it yields a float.
    """
    a = (total_sum - c) / 2
    b = total_sum - c - a
    while a > 0 and b < c:
        if check_combo(a, b, c) == 1:
            return [a, b, c]
        a -= 1
        b += 1
    return []
total_sum = 1000  # Project Euler 9: find the triple with a + b + c = 1000

# The hypotenuse c must be more than a third of the total and below half.
for c in range(334, 500):
    final_combo = gen_possible_ab_combos (c, total_sum)
    if final_combo != []:
        # Python 2 print statement; `reduce` is a builtin in Python 2.
        print final_combo, reduce (mul, final_combo)
        break
|
[
"sanjay.menon@gmail.com"
] |
sanjay.menon@gmail.com
|
816e4e22dde6bfeb7a7f9ef1ae675c0be4bd67a6
|
ed291071decb3514b7f9f321e68fd57fb3c11ebc
|
/Python/168_excel-sheet-column-title.py
|
573dae55e7016bdb1c10939d8dbf0ccfb6ce97db
|
[] |
no_license
|
antonylu/leetcode2
|
d7b1681cc9477bb01619be26461634edbb85a4e5
|
a57282895fb213b68e5d81db301903721a92d80f
|
refs/heads/master
| 2021-11-25T01:30:56.358849
| 2021-11-19T08:32:12
| 2021-11-19T08:32:12
| 130,139,831
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,549
|
py
|
"""
https://leetcode.com/problems/excel-sheet-column-title/description/
Given a positive integer, return its corresponding column title as appear in an Excel sheet.
For example:
1 -> A
2 -> B
3 -> C
...
26 -> Z
27 -> AA
28 -> AB
...
Example 1:
Input: 1
Output: "A"
Example 2:
Input: 28
Output: "AB"
Example 3:
Input: 701
Output: "ZY"
"""
class Solution(object):
    """LeetCode 168: convert a positive integer to its Excel column title."""

    def convertToTitle(self, n):
        """
        :type n: int
        :rtype: str

        Excel columns form a bijective base-26 system (A=1 ... Z=26, no
        zero digit), hence the n-1 shift before each divmod.
        """
        alphabet = "ABCDEFGHIJKLMNOPQRSTUVWXYZ"
        letters = []
        while n > 0:
            n, remainder = divmod(n - 1, 26)
            letters.append(alphabet[remainder])
        return "".join(reversed(letters))
if __name__ == "__main__":
    s=Solution()
    # Smoke test: expected output is A, AB, ZY, ZZ (one per line).
    tc = [1,28,701,702]
    #tc = [1]
    for t in tc:
        print(s.convertToTitle(t))
        #s.convertToTitle(t)
|
[
"antony_lu@compal.com"
] |
antony_lu@compal.com
|
204bf46a6fc6a8243ffaadcf8dc901323591f909
|
ad3e2940d52d6a213d4fc20a7a68a984e6a420d5
|
/diffusion_relaxation.py
|
93a3c27514ff8c16c1cec02b23a8eaea73e93e3b
|
[] |
no_license
|
alex21347/Temperature-Diffusion
|
3639aac6ef52b9ffa1fff2b563bd4b9d2470f2dd
|
e93b8c25e3f4ed2cc52bdb00ce3a3645bfed48ff
|
refs/heads/main
| 2023-01-06T00:25:09.294736
| 2020-10-31T16:16:37
| 2020-10-31T16:16:37
| 304,435,224
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,565
|
py
|
#Simulating and Analysing the Method of Relaxation for 2D Dirichlet Problem
import numpy as np
from tqdm import tqdm
from matplotlib import pyplot as plt
from matplotlib import cm
import time
from matplotlib.ticker import LinearLocator, FormatStrFormatter
from mpl_toolkits.mplot3d import Axes3D
#setting up for-loop to find average time to reach accurate solution
dum = 10  # number of repeated runs to average the elapsed wall-clock time over

tic = time.time()
for i in tqdm(range(dum)):

    a = 17 #length of grid
    b = 17 #width of grid
    its = 200 #number of iterations

    p_x = np.zeros((b,a,its))
    x_coords = np.linspace(0,a-1,a)
    y_coords = np.linspace(0,b-1,b)
    graph = np.zeros((a*b,2))

    #generating 2D graph
    # NOTE(review): the loop index `i` below shadows the outer timing loop's
    # `i`; harmless here because the outer index is never used inside.
    for j in range(0,a):
        for i in range(0,b):
            graph[a*i+j,0] = x_coords[j]
            graph[a*i+j,1] = y_coords[i]

    #Building the set of edges via finding closest neighbours of each vertex
    neighbours = []
    g = graph
    for k in range(a*b):
        # Manhattan distance < 1.1 selects the node itself plus its four
        # axis-aligned neighbours; the self-reference is then deleted.
        neighbours.append(np.array(np.where(abs(g[k,1]-g[:,1])+abs(g[k,0]-g[:,0])<1.1))[0])
        neighbours[k] = np.delete(neighbours[k], np.where(neighbours[k] == k), axis=0)

    vals = np.zeros((a*b,1))
    scenario = 16 #for scenario 1 set equal to 2, for scenario 2 set equal to 16
    vals[:scenario] = 1  # Dirichlet data: the first `scenario` nodes held at 1

    #finding interior of graph i.e. where the walker may walk
    # Interior nodes are exactly those with all four neighbours present.
    interior = []
    for i in range(a*b):
        if len(neighbours[i]) == 4:
            interior.append(i)

    #approximating p(x,y) over many iterations
    # Relaxation: replace each interior value by the mean of its neighbours,
    # recording the whole grid after every sweep in p_x[:, :, i].
    for i in range(its):
        for k in range(len(vals)):
            if k in interior:
                vals[k] = np.mean(vals[neighbours[k]])
            p_x[int((k-np.mod(k,a))/(a)),np.mod(k,a),i] = vals[k]
        if i > 0:
            # Mean absolute change between successive sweeps.
            # NOTE(review): errorest (and p_x1 below) are computed but never
            # used afterwards -- candidates for removal or for an early-stop
            # criterion. Loop extents reconstructed; confirm against repo.
            errorest = (np.abs(p_x[:,:,i]-p_x[:,:,i-1]).sum())/15**2
    p_x1 = p_x[1:-1,1:-1]

    #neatening solution (drop the fixed boundary rows/columns)
    p_x = p_x[1:-1,1:-1]

toc = time.time()
print(f'Time Elapsed : {(toc-tic)/dum}')

#plotting solution of 2D Dirichlet Problem via Method of Relaxation
fig = plt.figure(figsize = (8,6))
ax = fig.gca(projection='3d')

Y = np.arange(0, b-2, 1)
Y = -1* Y
X = np.arange(0, a-2, 1)
X,Y = np.meshgrid(X, Y)
# Surface of sweep index 99; assumed converged by then -- TODO confirm.
Z = np.transpose(p_x[:,:,99])

surf = ax.plot_surface(X,Y,Z, cmap=cm.coolwarm,
                       linewidth=0, antialiased=False)
fig.colorbar(surf, shrink=0.5, aspect=5)
plt.show()
|
[
"noreply@github.com"
] |
alex21347.noreply@github.com
|
f6d477f59ee4c2505b13d5f5b674da4640d5ea4f
|
eb9f05bee11d64f299b3d7c47a2812958a718e6e
|
/3_neural-network/3_5/2.py
|
368db922d7f6ea1cccc22e5e6c07b102926219e9
|
[] |
no_license
|
araki-ka/DeepLearning
|
c85730f5094bd8c5e9184aeb39074fc4dbbdbfc3
|
8b359e89f5f266f80e82b571cdd9f6491cbaf331
|
refs/heads/main
| 2022-08-21T17:27:42.861542
| 2017-10-11T05:09:59
| 2017-10-11T05:09:59
| 96,615,686
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 264
|
py
|
#!/usr/bin/python
# -*- coding: utf-8 -*-
# 3.5.2 Implementing the softmax function -- note on numerical overflow.
import numpy as np

a = np.array([1010, 1000, 990])
# Naive softmax: np.exp(1010) overflows to inf, so this prints nan values
# (and numpy emits overflow RuntimeWarnings).
naive_exp = np.exp(a)
print(naive_exp / np.sum(naive_exp))
# Subtracting the maximum leaves softmax mathematically unchanged but
# keeps every exponent argument <= 0, avoiding the overflow.
c = np.max(a)
print(a - c)
shifted_exp = np.exp(a - c)
print(shifted_exp / np.sum(shifted_exp))
|
[
"araki-ka@legendapl.com"
] |
araki-ka@legendapl.com
|
10cefb112ffc8a72f2ddcd285ff5b6f871ecf497
|
41523dd4871e8ed1043d2b3ddf73417fcbdde209
|
/day16/mapๅฝๆฐ.py
|
7a19700236dcf557aafb01afb59951babcaa5d8d
|
[] |
no_license
|
WayneChen1994/Python1805
|
2aa1c611f8902b8373b8c9a4e06354c25f8826d6
|
a168cd3b7749afc326ec4326db413378fd3677d5
|
refs/heads/master
| 2020-03-30T23:19:00.773288
| 2018-11-02T10:47:40
| 2018-11-02T10:47:40
| 151,697,105
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 943
|
py
|
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# author๏ผWayne
'''
mapๅฝๆฐ็ๅ่ฝ๏ผ
ๅฐไผ ๅ
ฅ็ๅฝๆฐไพๆฌกไฝ็จไบๅบๅไธญ็ๆฏไธไธชๅฏน่ฑก๏ผ
็ถๅๅฐๅ
ถไฝ็จ็็ปๆไฝไธบไธไธช่ฟญไปฃๅจ่ฟๅ
'''
'''
้ๆฑ๏ผๅฐๅ่กจไธญ็["1", "2", "3", "4", "5"]
่ฝฌไธบ[1, 2, 3, 4, 5]๏ผๅๆไธไธชๅฝๆฐใ
'''
def func(alist):
    """Return a new list with every element of *alist* converted to int."""
    return list(map(int, alist))
list1 = ["1", "2", "3", "4", "5"]
print(list1)
print(func(list1))
res = map(int, list1)
print(list(res))
'''
map(func,lsd)
ๅๆฐไธ๏ผ่ฆไฝ็จๅฝๆฐ๏ผใๆญคๅฝๆฐๆไธๅชๆไธไธชๅๆฐใ
ๅๆฐไบ๏ผ่ฆไฝ็จ็ๅบๅ
'''
'''
ไฝฟ็จmapๅฝๆฐ๏ผๆฑn็ๅบๅ[1, 4, 9, ..., n^2]๏ผ ไธ่กไปฃ็ ๅฎ็ฐไธ่ฟฐ็่ฆๆฑ๏ผnไปๆงๅถๅฐ่พๅ
ฅใ
'''
def func2(n):
    """Return the list of squares [1, 4, 9, ..., n**2] for 1..n."""
    return [x * x for x in range(1, n + 1)]
num = int(input("่ฏท่พๅ
ฅn็ๅผ๏ผ"))
print(func2(num))
print(list(map(lambda n:n*n, range(1, int(input("่ฏท่พๅ
ฅไธไธชๆดๆฐ๏ผ"))+1))))
|
[
"waynechen1994@163.com"
] |
waynechen1994@163.com
|
7b21a9e794befbf4b2268bb9e211d4e4aff762d6
|
53d03e48ca88e05fb134be8a2d3dda9b6079216e
|
/fabfile.py
|
ca82fcc42642e2e6fffd7cd3c79a84a7357a5b66
|
[] |
no_license
|
myungseokang/djangogirls
|
3b9e18acb7b7747ca04c32dbebb070fc15b88966
|
dd7f82131de66e0732553af2b1e3c170a6f44b5f
|
refs/heads/master
| 2021-06-18T09:59:14.143057
| 2017-07-03T00:16:00
| 2017-07-03T00:16:00
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| true
| false
| 6,054
|
py
|
from fabric.contrib.files import append, exists, sed, put
from fabric.api import env, local, run, sudo
import random
import os
import json
PROJECT_DIR = os.path.dirname(os.path.abspath(__file__))
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# deploy.jsonํ์ผ์ ๋ถ๋ฌ์ envs๋ณ์์ ์ ์ฅํฉ๋๋ค.
with open(os.path.join(PROJECT_DIR, "deploy.json")) as f:
envs = json.loads(f.read())
REPO_URL = envs['REPO_URL']
PROJECT_NAME = envs['PROJECT_NAME']
REMOTE_HOST = envs['REMOTE_HOST']
REMOTE_USER = envs['REMOTE_USER']
STATIC_ROOT_NAME = envs['STATIC_ROOT']
STATIC_URL_NAME = envs['STATIC_URL']
MEDIA_ROOT = envs['MEDIA_ROOT']
env.user = REMOTE_USER
username = env.user
env.hosts = [
REMOTE_HOST,
]
project_folder = '/home/{}/{}'.format(env.user, PROJECT_NAME)
apt_requirements = [
'ufw',
'curl',
'git',
'python3-dev',
'python3-pip',
'build-essential',
'python3-setuptools',
'apache2',
'libapache2-mod-wsgi-py3',
'libssl-dev',
'libxml2-dev',
'libjpeg8-dev',
'zlib1g-dev',
]
def new_server():
setup()
deploy()
def setup():
_register_ssh_key()
_get_latest_apt()
_install_apt_requirements(apt_requirements)
_make_virtualenv()
def deploy():
_get_latest_source()
_update_settings()
_update_virtualenv()
_update_static_files()
_update_database()
_make_virtualhost()
_grant_apache2()
_grant_sqlite3()
_restart_apache2()
def create_superuser():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py createsuperuser' % (
project_folder, virtualenv_folder
))
def _register_ssh_key():
local("ssh-keyscan -H {} >> {}".format(REMOTE_HOST, os.path.expanduser('~/.ssh/known_hosts')))
def _get_latest_apt():
update_or_not = input('Would U install Apache2/Python3 ?\n'
'[y/n, default: y]: ')
if update_or_not != 'n':
sudo('sudo apt-get update && sudo apt-get -y upgrade')
def _install_apt_requirements(apt_requirements):
reqs = ''
for req in apt_requirements:
reqs += (' ' + req)
sudo('sudo apt-get -y install {}'.format(reqs))
def _make_virtualenv():
if not exists('~/.virtualenvs'):
script = '''"# python virtualenv settings
export WORKON_HOME=~/.virtualenvs
export VIRTUALENVWRAPPER_PYTHON="$(command \which python3)" # location of python3
source /usr/local/bin/virtualenvwrapper.sh"'''
run('mkdir ~/.virtualenvs')
sudo('sudo pip3 install virtualenv virtualenvwrapper')
run('echo {} >> ~/.bashrc'.format(script))
def _get_latest_source():
    """Sync the remote checkout to the commit currently checked out locally.

    Clones the repo on first deploy, otherwise fetches; then hard-resets the
    remote working tree to the local HEAD commit.  Uses Fabric's ``run``
    (remote) and ``local`` (this machine) helpers.
    """
    if exists(project_folder + '/.git'):
        # Repo already present on the server: just update its refs.
        run('cd %s && git fetch' % (project_folder,))
    else:
        run('git clone %s %s' % (REPO_URL, project_folder))
    # Deploy exactly what is checked out locally, not the branch tip.
    current_commit = local("git log -n 1 --format=%H", capture=True)
    # Hard reset discards any manual edits made on the server.
    run('cd %s && git reset --hard %s' % (project_folder, current_commit))
def _update_settings():
    """Harden the remote Django settings for production.

    Edits settings.py in place on the server (via Fabric's ``sed``/``append``):
    turns DEBUG off, pins ALLOWED_HOSTS to the deploy host, and generates a
    persistent random SECRET_KEY in a separate secret_key.py on first run.
    """
    settings_path = project_folder + '/{}/settings.py'.format(PROJECT_NAME)
    sed(settings_path, "DEBUG = True", "DEBUG = False")
    # The second argument is a regex: replace whatever ALLOWED_HOSTS was set to.
    sed(settings_path,
        'ALLOWED_HOSTS = .+$',
        'ALLOWED_HOSTS = ["%s"]' % (REMOTE_HOST,)
    )
    secret_key_file = project_folder + '/{}/secret_key.py'.format(PROJECT_NAME)
    if not exists(secret_key_file):
        # SystemRandom draws from the OS CSPRNG, suitable for a secret key.
        chars = 'abcdefghijklmnopqrstuvwxyz0123456789!@#$%^&*(-_=+)'
        key = ''.join(random.SystemRandom().choice(chars) for _ in range(50))
        append(secret_key_file, "SECRET_KEY = '%s'" % (key,))
    # NOTE(review): this append runs on every deploy; Fabric's append skips
    # lines already present, which is what keeps the import from duplicating.
    append(settings_path, '\nfrom .secret_key import SECRET_KEY')
def _update_virtualenv():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
if not exists(virtualenv_folder + '/bin/pip'):
run('cd /home/%s/.virtualenvs && virtualenv %s' % (env.user, PROJECT_NAME))
run('%s/bin/pip install "django<2"' % (
virtualenv_folder
))
def _update_static_files():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py collectstatic --noinput' % (
project_folder, virtualenv_folder
))
def _update_database():
virtualenv_folder = project_folder + '/../.virtualenvs/{}'.format(PROJECT_NAME)
run('cd %s && %s/bin/python3 manage.py migrate --noinput' % (
project_folder, virtualenv_folder
))
def _make_virtualhost():
script = """'<VirtualHost *:80>
ServerName {servername}
Alias /{static_url} /home/{username}/{project_name}/{static_root}
Alias /{media_url} /home/{username}/{project_name}/{media_url}
<Directory /home/{username}/{project_name}/{media_url}>
Require all granted
</Directory>
<Directory /home/{username}/{project_name}/{static_root}>
Require all granted
</Directory>
<Directory /home/{username}/{project_name}/{project_name}>
<Files wsgi.py>
Require all granted
</Files>
</Directory>
WSGIDaemonProcess {project_name} python-home=/home/{username}/.virtualenvs/{project_name} python-path=/home/{username}/{project_name}
WSGIProcessGroup {project_name}
WSGIScriptAlias / /home/{username}/{project_name}/{project_name}/wsgi.py
ErrorLog ${{APACHE_LOG_DIR}}/error.log
CustomLog ${{APACHE_LOG_DIR}}/access.log combined
</VirtualHost>'""".format(
static_root=STATIC_ROOT_NAME,
username=env.user,
project_name=PROJECT_NAME,
static_url=STATIC_URL_NAME,
servername=REMOTE_HOST,
media_url=MEDIA_ROOT
)
sudo('echo {} > /etc/apache2/sites-available/{}.conf'.format(script, PROJECT_NAME))
sudo('a2ensite {}.conf'.format(PROJECT_NAME))
def _grant_apache2():
sudo('sudo chown -R :www-data ~/{}'.format(PROJECT_NAME))
def _grant_sqlite3():
sudo('sudo chmod 775 ~/{}/db.sqlite3'.format(PROJECT_NAME))
def _restart_apache2():
sudo('sudo service apache2 restart')
|
[
"l3opold7@gmail.com"
] |
l3opold7@gmail.com
|
a06d0667553909c1cab686643fbcb795d207c11e
|
648ed90628cd26d8a3d3941793100a8d5c309be8
|
/my_blog/settings.py
|
09929503a0adf0075496e708e183ea88b73a3ca4
|
[
"MIT"
] |
permissive
|
Chancj/my_blog
|
03200f62cf826558e2a074cb0f0eb578bbe2644c
|
988831904d2bd800afa332a1b4113143e1aaeccd
|
refs/heads/master
| 2022-12-29T11:23:24.973262
| 2020-04-29T16:19:24
| 2020-04-29T16:19:24
| 251,649,792
| 0
| 0
| null | 2022-12-08T03:56:34
| 2020-03-31T15:39:55
|
JavaScript
|
UTF-8
|
Python
| false
| false
| 7,717
|
py
|
"""
Django settings for my_blog project.
Generated by 'django-admin startproject' using Django 2.1.
For more information on this file, see
https://docs.djangoproject.com/en/2.1/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.1/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.1/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '!z027!4o-s@jutnc=+iyvl$=u*9gooigul4(t+k373wv&1r169'
# SECURITY WARNING: don't run with debug turned on in production!
# ้จ็ฝฒๅฐ็บฟไธๆถไธบ False; ่ฏป่
ๅจๆฌๅฐ่ฐ่ฏๆถ่ฏทไฟฎๆนไธบ True
DEBUG = True
ALLOWED_HOSTS = ['*']
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'django.contrib.sites',
'allauth',
'allauth.account',
'allauth.socialaccount',
# ๅฏๆทปๅ ้่ฆ็็ฌฌไธๆน็ปๅฝ
'allauth.socialaccount.providers.github',
'allauth.socialaccount.providers.weibo',
'password_reset',
'taggit',
'ckeditor',
'mptt',
'notifications',
# ่ชๅฎไน็app
'article',
'userprofile',
'comment',
'notice',
'article.templatetags',
# mlๆบๅจๅญฆไน ็app
'titanic',
'iris',
'cat_dog',
'face',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'my_blog.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
# ๅฎไนๆจกๆฟไฝ็ฝฎ
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'my_blog.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.1/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': 'djblog',
'USER': 'root',
'PASSWORD': '123456',
'HOST': 'localhost',
'PORT': 3306,
}
}
# django่ฎค่ฏ็ณป็ปไฝฟ็จ็ๆจกๅ็ฑป
# AUTH_USER_MODEL='article.User'
# Password validation
# https://docs.djangoproject.com/en/2.1/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.1/topics/i18n/
LANGUAGE_CODE = 'zh-hans'
TIME_ZONE = 'Asia/Shanghai'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.1/howto/static-files/
# ้ๆๆไปถๅฐๅ
STATIC_URL = '/static/'
STATICFILES_DIRS = [os.path.join(BASE_DIR, 'static')]
# ้ๆๆไปถๆถ้็ฎๅฝ
STATIC_ROOT = os.path.join(BASE_DIR, 'collected_static')
# ๅชไฝๆไปถๅฐๅ
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media/')
# SMTPๆๅกๅจ๏ผๆนไธบไฝ ็้ฎ็ฎฑ็smtp!
EMAIL_HOST = 'smtp.qq.com'
# ๆนไธบไฝ ่ชๅทฑ็้ฎ็ฎฑๅ๏ผ
EMAIL_HOST_USER = 'your_email_account@xxx.com'
# ไฝ ็้ฎ็ฎฑๅฏ็
EMAIL_HOST_PASSWORD = 'your_password'
# ๅ้้ฎไปถ็็ซฏๅฃ
EMAIL_PORT = 25 # ไธ้๏ผ465ๆ587็ซฏๅฃ
# ๆฏๅฆไฝฟ็จ TLS
EMAIL_USE_TLS = True
# ้ป่ฎค็ๅไปถไบบ
DEFAULT_FROM_EMAIL = 'xxx็ๅๅฎข <your_email_account@xxx.com>'
CKEDITOR_CONFIGS = {
# django-ckeditor้ป่ฎคไฝฟ็จdefault้
็ฝฎ
'default': {
# ็ผ่พๅจๅฎฝๅบฆ่ช้ๅบ
'width':'auto',
'height':'250px',
# tab้ฎ่ฝฌๆข็ฉบๆ ผๆฐ
'tabSpaces': 4,
# ๅทฅๅ
ทๆ ้ฃๆ ผ
'toolbar': 'Custom',
# ๅทฅๅ
ทๆ ๆ้ฎ
'toolbar_Custom': [
# ่กจๆ
ไปฃ็ ๅ
['Smiley', 'CodeSnippet'],
# ๅญไฝ้ฃๆ ผ
['Bold', 'Italic', 'Underline', 'RemoveFormat', 'Blockquote'],
# ๅญไฝ้ข่ฒ
['TextColor', 'BGColor'],
# ้พๆฅ
['Link', 'Unlink'],
# ๅ่กจ
['NumberedList', 'BulletedList'],
# ๆๅคงๅ
['Maximize']
],
# ๆไปถ
'extraPlugins': ','.join(['codesnippet', 'prism', 'widget', 'lineutils']),
}
}
AUTHENTICATION_BACKENDS = (
# ๆญค้กนไฝฟ Django ๅๅฐๅฏ็ฌ็ซไบ allauth ็ปๅฝ
'django.contrib.auth.backends.ModelBackend',
# ้
็ฝฎ allauth ็ฌๆ็่ฎค่ฏๆนๆณ๏ผๅฆ email ็ปๅฝ
'allauth.account.auth_backends.AuthenticationBackend',
)
# ่ฎพ็ฝฎ็ซ็น
SITE_ID = 1
# ็ปๅฝๆๅๅ้ๅฎๅๅฐๅ
# ้ๅฎๅ url
LOGIN_REDIRECT_URL = '/'
# LOGGING = {
# 'version': 1,
# 'handlers': {
# 'file': {
# 'level': 'INFO',
# 'class': 'logging.FileHandler',
# 'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
# },
# },
# 'loggers': {
# 'django': {
# 'handlers': ['file'],
# 'level': 'INFO',
# },
# },
# }
# LOGGING = {
# 'version': 1,
# 'disable_existing_loggers': False,
# 'formatters': {
# 'verbose': {
# 'format': '{levelname} {asctime} {module} {process:d} {thread:d} {message}',
# 'style': '{',
# },
# 'simple': {
# 'format': '{levelname} {message}',
# 'style': '{',
# },
# },
# 'filters': {
# 'require_debug_true': {
# '()': 'django.utils.log.RequireDebugTrue',
# },
# },
# 'handlers': {
# 'console': {
# 'level': 'INFO',
# 'filters': ['require_debug_true'],
# 'class': 'logging.StreamHandler',
# 'formatter': 'simple'
# },
# 'mail_admins': {
# 'level': 'ERROR',
# 'class': 'django.utils.log.AdminEmailHandler',
# 'formatter': 'verbose',
# },
# 'file': {
# 'level': 'WARNING',
# # 'class': 'logging.FileHandler',
# 'class': 'logging.handlers.TimedRotatingFileHandler',
# 'when': 'midnight',
# 'backupCount': 30,
# 'filename': os.path.join(BASE_DIR, 'logs/debug.log'),
# 'formatter': 'verbose',
# },
# },
# 'loggers': {
# 'django': {
# 'handlers': ['console'],
# 'propagate': True,
# },
# 'django.request': {
# 'handlers': ['file', 'mail_admins'],
# 'level': 'WARNING',
# 'propagate': False,
# },
# }
# }
|
[
"1049669825@qq.com"
] |
1049669825@qq.com
|
db10d84562d00aeab527245c8df21ab7c305aea4
|
8bd977f6f59799bfc8aad9e1605ba1afe637e275
|
/myComputerVisonLib.py
|
f4a3a4d702252bbf1d35cd7eba47ed9480accf69
|
[] |
no_license
|
wuethral/Automated_masking
|
0453634cf64f83e977a23163e06dfa2c1cf27eb9
|
f2852ca2848b2f60660db674d718f5c6b3ba391d
|
refs/heads/master
| 2023-08-15T00:09:57.479507
| 2021-10-01T11:56:45
| 2021-10-01T11:56:45
| 412,448,340
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 8,905
|
py
|
import cv2 as cv
import numpy as np
from PIL import Image
from numpy import unique
from numpy import where
from sklearn.datasets import make_classification
from sklearn.cluster import DBSCAN
from matplotlib import pyplot
def canny_edge_det(img_nr, img):
    """Run Canny edge detection on *img* and save the edge mask to disk.

    Args:
        img_nr: frame index used to build the output file name.
        img: BGR image as loaded by cv.imread.

    Side effects:
        Writes the edge image to the hard-coded D:/masks/... path.
    """
    # Convert to grayscale (Canny operates on a single channel).
    img_gray = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
    # Blur the image for better edge detection (suppresses pixel noise).
    img_blur = cv.GaussianBlur(img_gray, (3, 3), 0)
    # Sobel Edge Detection (kept for reference, not used)
    # sobelx = cv.Sobel(src=img_blur, ddepth=cv.CV_64F, dx=1, dy=0, ksize=5) # Sobel Edge Detection on the X axis
    # sobely = cv.Sobel(src=img_blur, ddepth=cv.CV_64F, dx=0, dy=1, ksize=5) # Sobel Edge Detection on the Y axis
    # sobelxy = cv.Sobel(src=img_blur, ddepth=cv.CV_64F, dx=1, dy=1, ksize=5) # Combined X and Y Sobel Edge Detection
    # Display Sobel Edge Detection Images
    # cv.imshow('Sobel X', sobelx)
    # cv.waitKey(0)
    # cv.imshow('Sobel Y', sobely)
    # cv.waitKey(0)
    # cv.imshow('Sobel X Y using Sobel() function', sobelxy)
    # cv.waitKey(0)
    # Canny Edge Detection with fixed hysteresis thresholds 100/200.
    edges = cv.Canny(image=img_blur, threshold1=100, threshold2=200) # Canny Edge Detection
    path = 'D:/masks/pliers(video34)_3/canny_edge_images/canny_edge_mask_' + str(img_nr) +'.png'
    cv.imwrite(path, edges)
    # Display Canny Edge Detection Image
    # cv.imshow('Canny Edge Detection', edges)
    # cv.waitKey(0)
    # cv.destroyAllWindows()
# img = cv.imread('C:/Users/wuethral/Desktop/colorfilter_2/14.9.21_try_2/Example_4/pliers.png')
# canny_edge_det(img)
def switch_pixel_row(row_array_hsv_filter, width):
    """Invert one mask row: pixels equal to 0 become 255, everything else 0.

    Args:
        row_array_hsv_filter: 1-D array-like of pixel values (expected 0/255).
        width: number of pixels to process from the start of the row.

    Returns:
        A numpy integer array of length ``width`` with 255 where the input
        was 0 and 0 elsewhere.
    """
    row = np.asarray(row_array_hsv_filter)[:width]
    # Vectorized replacement of the original per-pixel Python loop:
    # one C-level pass instead of O(width) indexed accesses.
    return np.where(row == 0, 255, 0)
class SwitchingBlackWhite():
    """Invert an HSV-filter mask (black<->white) and save it as a PNG.

    The whole job runs from the constructor: it immediately calls
    switch_pixel() as a side effect of instantiation.
    """

    def __init__(self, img_nr, mask_hsv_filter, height, width):
        # img_nr: frame index used in the output file name.
        self.img_nr = img_nr
        # mask_hsv_filter: 2-D mask (array-like) with values 0/255.
        self.mask_hsv_filter = mask_hsv_filter
        self.height = height
        self.width = width
        # Constructor does the work: inverts and writes the mask.
        self.switch_pixel()

    def switch_pixel(self):
        """Invert every row via switch_pixel_row and write the result to disk."""
        array_hsv_filter = np.array(self.mask_hsv_filter)
        mask_matrix = np.zeros((self.height, self.width))
        for i in range(self.height):
            # switch_pixel_row maps 0 -> 255 and nonzero -> 0 for one row.
            new_row = switch_pixel_row(array_hsv_filter[i], self.width)
            mask_matrix[i, :] = new_row
        matrix_to_array = np.squeeze(np.asarray(mask_matrix))
        # uint8 is required by PIL's Image.fromarray for a grayscale image.
        matrix_to_array = np.reshape(matrix_to_array, (self.height, self.width)).astype(np.uint8)
        switch_pixel_mask = Image.fromarray(matrix_to_array)
        path = "D:/masks/pliers(video34)_3/hsv_switch_bw/hsv_switch_bw_" + str(self.img_nr) + '.png'
        switch_pixel_mask.save(path)
def hsv_filter(img_nr, img):
    """Threshold *img* in HSV space and save the binary mask to disk.

    Pixels whose HSV value falls inside the fixed bounds become white (255)
    in the mask, everything else black.  The bounds were tuned for the
    green/black scene of this dataset.
    """
    # convert the BGR image to HSV colour space
    hsv = cv.cvtColor(img, cv.COLOR_BGR2HSV)
    # set the lower and upper bounds for the green hue (H, S, V order)
    green_black_lower_hsv = np.array([49, 66, 80])
    green_black_higher_hsv = np.array([179, 255, 255])
    # inRange yields 255 inside the bounds, 0 outside.
    mask_green_black_hsv = cv.inRange(hsv, green_black_lower_hsv, green_black_higher_hsv)
    path = 'D:/masks/pliers(video34)_3/hsv_filter_images/hsv_mask_' + str(img_nr) +'.png'
    cv.imwrite(path, mask_green_black_hsv)
def adding_pixel_values(row_canny_edge_det, row_array_hsv_filter, width):
    """Merge one row of the Canny edge mask with the inverted HSV mask.

    A pixel in the output row is white (255) when the HSV-filter row is 0
    there (inverted HSV mask) OR the Canny row marks an edge (255);
    otherwise it is 0.

    Args:
        row_canny_edge_det: 1-D array-like of Canny values (0/255).
        row_array_hsv_filter: 1-D array-like of HSV-mask values (0/255).
        width: number of pixels to process from the start of each row.

    Returns:
        A numpy integer array of length ``width`` containing 0/255.
    """
    canny = np.asarray(row_canny_edge_det)[:width]
    hsv = np.asarray(row_array_hsv_filter)[:width]
    # Vectorized replacement of the original per-pixel Python loop.
    return np.where((hsv == 0) | (canny == 255), 255, 0)
class MergingMasks():
    """Combine a Canny edge mask with an (inverted) HSV mask and save a PNG.

    The merge runs eagerly: constructing the object immediately calls
    merging_masks() and writes the result to disk.
    """

    def __init__(self, img_nr, mask_canny_edge_detection, mask_hsv_filter, height, width):
        # img_nr: frame index used in the output file name.
        self.img_nr = img_nr
        # 2-D masks (array-like, values 0/255) of identical height x width.
        self.mask_canny_edge_detection = mask_canny_edge_detection
        self.mask_hsv_filter = mask_hsv_filter
        self.height = height
        self.width = width
        # Constructor does the work: merges the masks and writes the file.
        self.merging_masks()

    def merging_masks(self):
        """Merge the two masks row by row and write the combined mask to disk."""
        array_canny_edge_det = np.array(self.mask_canny_edge_detection)
        array_hsv_filter = np.array(self.mask_hsv_filter)
        mask_matrix = np.zeros((self.height, self.width))
        for i in range(self.height):
            # adding_pixel_values ORs the edge row with the inverted HSV row.
            new_row = adding_pixel_values(array_canny_edge_det[i], array_hsv_filter[i], self.width)
            mask_matrix[i, :] = new_row
        matrix_to_array = np.squeeze(np.asarray(mask_matrix))
        # uint8 is required by PIL's Image.fromarray for a grayscale image.
        matrix_to_array = np.reshape(matrix_to_array, (self.height, self.width)).astype(np.uint8)
        final_mask_no_morph = Image.fromarray(matrix_to_array)
        path = "D:/masks/pliers(video34)_3/final_mask_no_morph/final_mask_no_morph_" + str(self.img_nr) +'.png'
        final_mask_no_morph.save(path)
        # cv.waitKey(0)
def morphological_operation(img_nr, source_path, destination_path):
    """Dilate the mask stored at *source_path* and save it to *destination_path*.

    Two dilation passes with a 5x5 kernel close small gaps in the mask.
    NOTE(review): *img_nr* is accepted but unused here -- presumably kept for
    signature symmetry with the other pipeline steps; confirm before removing.
    """
    mask_to_morph = cv.imread(source_path)
    # Taking a matrix of size 5 as the kernel
    kernel = np.ones((5, 5), np.uint8)
    # The first parameter is the original image,
    # kernel is the matrix with which image is
    # convolved and third parameter is the number
    # of iterations, which will determine how much
    # you want to erode/dilate a given image.
    # img_erosion = cv.erode(img, kernel, iterations=1)
    img_dilation = cv.dilate(mask_to_morph, kernel, iterations=2)
    cv.imwrite(destination_path,
               img_dilation)
# cv.imshow('Input', img)
# cv.imshow('Erosion', img_erosion)
class DbScan():
def __init__(self, img_nr, path, input_img_with_canny):
self.img_nr = img_nr
self.path_to_image = path
self.input_img_with_canny = input_img_with_canny
self.dbscan()
# define dataset
# X, _ = make_classification(n_samples=1000, n_features=2, n_informative=2, n_redundant=0, n_clusters_per_class=1, random_state=4)
def dbscan(self):
img = cv.imread(self.path_to_image)
img = cv.cvtColor(img, cv.COLOR_BGR2GRAY)
coordinates_of_white_pixels = []
rows, cols = img.shape[:2]
for i in range(rows):
for j in range(cols):
if img[i, j] == 255:
coordinates_of_white_pixels.append([i, j])
X = np.asarray(coordinates_of_white_pixels)
# print(coordinates_of_white_pixels)
# define the model
# print(X)
model = DBSCAN(eps=2, min_samples=9)
# fit model and predict clusters
yhat = model.fit_predict(X)
# retrieve unique clusters
clusters = unique(yhat)
# create scatter plot for samples from each cluster
for cluster in clusters:
# get row indexes for samples with this cluster
row_ix = where(yhat == cluster)
# create scatter of these samples
pyplot.scatter(X[row_ix, 1], X[row_ix, 0])
# show the plot
path_cluster = 'D:/masks/pliers(video34)_3/cluster_plots/clusterplot_' + str(self.img_nr)
pyplot.savefig(path_cluster)
pyplot.clf()
size_of_biggest_cluster = 0
index_of_biggest_cluster = 0
for cluster in clusters:
row_ix = where(yhat == cluster)
if row_ix[0].size > size_of_biggest_cluster:
if max(X[row_ix, 0][0]) == 1079 or max(X[row_ix, 1][0]) == 1919 or min(X[row_ix, 0][0]) == 0 or min(
X[row_ix, 1][0]) == 0:
continue
else:
size_of_biggest_cluster = row_ix[0].size
index_of_biggest_cluster = cluster
for cluster in clusters:
if cluster == index_of_biggest_cluster:
continue
else:
row_ix = where(yhat == cluster)
x_coord_to_delete_mask = X[row_ix, 0]
y_coord_to_delete_mask = X[row_ix, 1]
for i in range(len(x_coord_to_delete_mask[0])):
img[x_coord_to_delete_mask[0][i], y_coord_to_delete_mask[0][i]] = 0
if self.input_img_with_canny:
path = "D:/masks/pliers(video34)_3/hsv_canny_dbscan/mask_hsv_canny_dbscan_" + str(self.img_nr) +'.png'
cv.imwrite(path, img)
else:
path = "D:/masks/pliers(video34)_3/hsv_dbscan(no_canny)/mask_hsv_dbscan_" + str(
self.img_nr) + '.png'
cv.imwrite(path, img)
def fill_hole(mask):
    """Fill the interior of every contour found in a binary *mask*.

    Each contour is drawn filled (-1 thickness) onto its own black canvas,
    then all canvases are summed into one image.

    NOTE(review): the canvases are uint8, so where filled contours overlap
    the elementwise sum wraps modulo 256 -- confirm overlaps cannot occur
    for the masks this is used on.
    """
    contours, hierarchy = cv.findContours(mask, cv.RETR_TREE, cv.CHAIN_APPROX_SIMPLE)
    len_contour = len(contours)
    contour_list = []
    for i in range(len_contour):
        drawing = np.zeros_like(mask, np.uint8)  # create a black image
        # thickness=-1 fills the contour interior instead of outlining it.
        img_contour = cv.drawContours(drawing, contours, i, (255,255,255), -1)
        contour_list.append(img_contour)
    # Elementwise sum of all single-contour images (numpy broadcasting).
    out = sum(contour_list)
    return out
|
[
"wuethral@ethz.ch"
] |
wuethral@ethz.ch
|
cafb55372742d6d435053197b309733edbd35a30
|
aa257d7c9c99d66ec3723a145dbc672c688ca84a
|
/example
|
a90ef91fa95bd2484fbda6fd43f70b4be7614399
|
[
"MIT"
] |
permissive
|
unix-example-command/example
|
ae286828475193d7e8df57b4d76062fbfc53ccc7
|
def114584c09a3119793ea3efdf5491c80a4e858
|
refs/heads/master
| 2020-06-04T04:00:13.992711
| 2015-07-12T14:09:06
| 2015-07-12T14:09:06
| 33,952,491
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 6,849
|
#!/usr/bin/env python2.7
# Copyright (c) 2015 Sampo Raudaskoski, Samu Kallio
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
import os, sys, platform
import fcntl, termios, struct
import textwrap
import difflib
COPYRIGHT = "Copyright (c) 2015 Sampo Raudaskoski, Samu Kallio"
COLORS = { 'reset': "\033[0m",
'error': "\033[31m",
'matches': "\033[32m",
'related': "\033[33m",
'description': "\033[36m", }
MINCMDCOLS = 10 # minimum width for command line
MINDSCCOLS = 10 # minimum width for description
MINPADCOLS = 2 # minimum padding between command and description
g_colors = sys.stdout.isatty()
g_dbdirs = ["/usr/local/share/example", "/usr/share/example"]
def colorize(type, text):
    """Wrap *text* in the ANSI escape codes for *type* when colors are enabled.

    *type* is a key of the module-level COLORS table ('error', 'matches',
    'related', 'description'); g_colors is False when stdout is not a TTY,
    in which case the text is returned unchanged.
    NOTE: the parameter name shadows the builtin ``type``.
    """
    if g_colors:
        return "%s%s%s" % (COLORS[type], text, COLORS['reset'])
    return text
def fdttycols(fd):
"""Find out terminal window width for an fd with a TTY."""
try:
data = fcntl.ioctl(fd, termios.TIOCGWINSZ, '1234')
return struct.unpack('hh', data)[1]
except:
return None
def ttycols():
"""Try to figure out our terminal width."""
cols = fdttycols(0) or fdttycols(1) or fdttycols(2)
if not cols:
try:
fd = os.open(os.ctermid(), os.O_RDONLY)
cols = fdttycols(fd)
os.close(fd)
except:
pass
if not cols:
cols = os.environ.get('COLUMNS', 80)
return int(cols)
def dbsuggest(name):
    """Suggest close-match command names from the example database.

    Scans every database directory for ``*.txt`` entries, strips the
    extension, and returns difflib's closest matches to *name*.
    NOTE: the lambda parameter shadows the outer *name* argument (harmless
    here, since the lambda only tests the suffix).
    """
    names = []
    for dbdir in g_dbdirs:
        if not os.path.exists(dbdir):
            continue
        fnames = filter(lambda name: name.endswith(".txt"), os.listdir(dbdir))
        # "cmd.txt" -> "cmd"
        names.extend([fname[:-4] for fname in fnames])
    return difflib.get_close_matches(name, names)
def dbexists(name):
"""Check if a command exists in the database."""
for dbdir in g_dbdirs:
if os.path.exists("%s/%s.txt" % (dbdir, name)):
return True
return False
def dbload(name):
    """Load a command's example database file.

    The file format is line-oriented, parsed with a small state machine:
    a "# tips" or "# related" header switches section; any other non-blank
    line starts a command/description pair (first line = command, following
    lines = description); a blank line terminates the current entry.

    Returns:
        (examples, tips, related) where examples is a list of
        (command, description) tuples and tips/related are lists of strings.

    NOTE(review): if the command exists in no database directory, ``dbfile``
    is unbound and the loop raises NameError -- callers are expected to have
    checked dbexists() first.
    """
    examples = []
    tips = []
    related = []
    # First directory containing the file wins (local overrides system).
    for dbdir in g_dbdirs:
        filename = "%s/%s.txt" % (dbdir, name)
        if os.path.exists(filename):
            dbfile = open(filename, 'r')
            break
    state = 'start'
    cmddesc = []
    for line in dbfile:
        line = line.strip()
        if not line:
            # Blank line: flush any in-progress command entry, reset state.
            if state == 'cmddesc':
                examples.append((cmd, "\n".join(cmddesc)))
                cmddesc = []
            state = 'start'
            continue
        if state == 'start':
            if line in ("# tips", "#tips"):
                state = 'tips'
            elif line in ("# related", "#related"):
                state = 'related'
            else:
                # First non-blank line of an entry is the command itself.
                state = 'cmddesc'
                cmd = line
        elif state == 'cmddesc':
            cmddesc.append(line)
        elif state == 'tips':
            # Strip an optional "- " bullet prefix.
            if line.startswith("- "):
                line = line[2:]
            tips.append(line)
        elif state == 'related':
            if line.startswith("- "):
                line = line[2:]
            related.append(line)
    # Flush a trailing entry not followed by a blank line.
    if state == 'cmddesc':
        examples.append((cmd, "\n".join(cmddesc)))
    return examples, tips, related
def error(msg, *args):
    """Write "<program>: <msg % args>" to stderr (no exit, no raise)."""
    me = os.path.basename(sys.argv[0])
    sys.stderr.write("%s: %s\n" % (me, msg % args))
# -----------------------------------------------------------------------------
if __name__ == '__main__':
# only for linux for now
if platform.system() != "Linux":
print ("Example is currently only available for Linux. If you "
"wish to contribute examples for other systems, please see "
"https://github.com/unix-example-command")
sys.exit(254)
#
cmd = sys.argv[1] if len(sys.argv) >= 2 else "example"
grep = sys.argv[2:]
# check that the command exists in the database
if not dbexists(cmd):
error("unknown command: %s", colorize('error', cmd))
cmds = dbsuggest(cmd)
if cmds:
cmdlist = " ".join(cmds)
error("close matches: %s", colorize('matches', cmdlist))
sys.exit(1)
# load command entry
examples, tips, related = dbload(cmd)
if not examples:
error("dbfile for command %s is empty", colorize('error', cmd))
sys.exit(2)
# compute unconstrained column counts
cmdcols = max(map(lambda (cmd,_): len(cmd), examples))
padcols = 11
dsccols = max(map(lambda (_,dsc): len(dsc), examples))
# tune columns to fit terminal width
maxcols = ttycols()
while cmdcols + padcols + dsccols > maxcols:
if padcols > MINPADCOLS:
padcols = max(maxcols - cmdcols - dsccols, MINPADCOLS)
elif dsccols > MINDSCCOLS:
dsccols = max(maxcols - cmdcols - padcols, MINDSCCOLS)
elif cmdcols > MINCMDCOLS:
cmdcols = max(maxcols - dsccols - padcols, MINCMDCOLS)
else:
break
# print examples
fmt = "%%-%ds%%s" % (cmdcols + padcols)
for cmdline, dscline in examples:
if not all(map(lambda kw: kw in cmdline, grep)) and \
not all(map(lambda kw: kw in dscline, grep)):
continue
cmdlines = textwrap.wrap(cmdline, cmdcols)
dsclines = textwrap.wrap(dscline, dsccols)
for _cmdline, _dscline in map(None, cmdlines, dsclines):
_cmdline = _cmdline or ""
_dscline = colorize('description', _dscline or "")
print fmt % (_cmdline, _dscline)
# print related
if related:
text = " ".join(related)
print "\nsee also: %s" % colorize('related', text)
# print tips
if tips:
print "\n" + "\n".join(tips)
if cmd == "example":
print "\n%s" % COPYRIGHT
|
[
"samu@samukallio.net"
] |
samu@samukallio.net
|
|
df2614ce95235a5ab406bf91e83a7e1c6f3a910e
|
0dbeaea56fc3e178894db1ac993bb2f2f7b44829
|
/docs/exercises/fwi.py
|
ad205314073398ebced78c7922ab22a541bf0156
|
[] |
no_license
|
simonlegrand/pysit
|
1bcc4fc4df225bc09f26a77751f447e3635774c8
|
1fb1a80839ceebef12a8d71aa9c295b65b08bac4
|
refs/heads/master
| 2021-06-30T11:43:54.909080
| 2021-03-15T13:06:17
| 2021-03-15T13:06:17
| 218,262,289
| 1
| 0
| null | 2019-10-29T10:34:57
| 2019-10-29T10:34:56
| null |
UTF-8
|
Python
| false
| false
| 5,387
|
py
|
import numpy as np
import matplotlib.pyplot as plt
from models import basic_model
config = dict()
##############################################################################
# Problem 1.1
def ricker(t, config):
    """Evaluate the Ricker source wavelet at time(s) *t* (exercise stub).

    config['nu0'] is the wavelet's peak frequency in Hz.
    NOTE: this is a course-exercise placeholder -- ``w`` is never assigned,
    so calling it raises NameError until the implementation is filled in.
    """
    nu0 = config['nu0']
    # implementation goes here
    return w
# Configure source wavelet
config['nu0'] = 10 # Hz
# Evaluate wavelet and plot it
ts = np.linspace(0, 0.5, 1000)
ws = ricker(ts, config)
plt.figure()
plt.plot(ts, ws,
color='green',
label=r'$\nu_0 =\,{0}$Hz'.format(config['nu0']),
linewidth=2)
plt.xlabel(r'$t$', fontsize=18)
plt.ylabel(r'$w(t)$', fontsize=18)
plt.title('Ricker Wavelet', fontsize=22)
plt.legend()
##############################################################################
# Problem 1.2
def point_source(value, position, config):
# implementation goes here
return f
# Domain parameters
config['x_limits'] = [0.0, 1.0]
config['nx'] = 201
config['dx'] = (config['x_limits'][1] - config['x_limits'][0]) / (config['nx']-1)
# Source parameter
config['x_s'] = 0.1
##############################################################################
# Problem 1.3
def construct_matrices(C, config):
# implementation goes here
return M, A, K
# Load the model
# (basic_model / construct_matrices / config are defined earlier in this file)
C, C0 = basic_model(config)
# Build an example set of matrices
M, A, K = construct_matrices(C, config)
##############################################################################
# Problem 1.4
def leap_frog(C, sources, config):
    """Stub: march the wavefield forward in time (leap-frog scheme)."""
    # implementation goes here
    return us # list of wavefields
# Set CFL safety constant
config['alpha'] = 1.0/6.0
# Define time step parameters
config['T'] = 3 # seconds
config['dt'] = config['alpha'] * config['dx'] / C.max()
config['nt'] = int(config['T']/config['dt'])
# Generate the sources
# NOTE(review): xrange is Python 2 only -- use range() under Python 3.
sources = list()
for i in xrange(config['nt']):
    t = i*config['dt']
    f = point_source(ricker(t, config), config['x_s'], config)
    sources.append(f)
# Generate wavefields
us = leap_frog(C, sources, config)
##############################################################################
# Problem 1.5
def plot_space_time(us, config, title=None):
    """Stub: plot the list of wavefields as a space-time image."""
    # implementation goes here
    pass
# Call your function
plot_space_time(us, config, title=r'u(x,t)')
##############################################################################
# Problem 1.6
def record_data(u, config):
    """Stub: sample the wavefield u at the receiver position config['x_r']."""
    # implementation goes here
    return d
# Receiver position
config['x_r'] = 0.15
##############################################################################
# Problem 1.7
def forward_operator(C, config):
    """Stub: forward model -- returns the wavefields and the receiver trace."""
    # implementation goes here
    return us, trace
us, d = forward_operator(C, config)
# The last argument False excludes the end point from the list
ts = np.linspace(0, config['T'], config['nt'], False)
plt.figure()
plt.plot(ts, d, label=r'$x_r =\,{0}$'.format(config['x_r']), linewidth=2)
plt.xlabel(r'$t$', fontsize=18)
plt.ylabel(r'$d(t)$', fontsize=18)
plt.title('Trace at $x_r={0}$'.format(config['x_r']), fontsize=22)
plt.legend()
##############################################################################
# Problem 2.1
##############################################################################
# Problem 2.2
##############################################################################
# Problem 2.3
def imaging_condition(qs, u0s, config):
    """Stub: combine adjoint fields qs and background fields u0s into an image."""
    # implementation goes here
    return image
# Compute the image
I_rtm = imaging_condition(qs, u0s, config)
# Plot the comparison
xs = np.arange(config['nx'])*config['dx']
dC = C-C0
plt.figure()
plt.subplot(2, 1, 1)
plt.plot(xs, dC, label=r'$\delta C$')
plt.legend()
plt.subplot(2, 1, 2)
plt.plot(xs, I_rtm, label=r'$I_\text{RTM}$')
plt.legend()
##############################################################################
# Problem 2.4
##############################################################################
# Problem 2.5
def adjoint_operator(C0, d, config):
    """Stub: migrate the data d through the background model C0 to an image."""
    # implementation goes here
    return image
##############################################################################
# Problem 3.1
def linear_sources(dm, u0s, config):
    """Stub: virtual sources for linearized (Born) modelling."""
    # implementation goes here
    return sources
##############################################################################
# Problem 3.2
def linear_forward_operator(C0, dm, config):
    """Stub: linearized forward modelling of perturbation dm over C0."""
    # implementation goes here
    return u1s
##############################################################################
# Problem 3.3
def adjoint_condition(C0, config):
    """Stub: check the adjoint relationship of the linearized operators."""
    # implementation goes here
    pass
##############################################################################
# Problem 4.1
def gradient_descent(C0, d, k, config):
    """Placeholder for Problem 4.1: gradient-descent model update.

    Parameters
    ----------
    C0 : background velocity model
    d : recorded data trace
    k : number of descent iterations
    config : problem configuration dictionary

    BUGFIX: the original stub ended with ``return sources`` (copy-pasted
    from ``linear_sources``), which would raise NameError if called; the
    duplicate stub later in the file correctly uses ``pass``, so this
    definition now matches it.
    """
    # implementation goes here
    pass
##############################################################################
# Problem 3.2
# NOTE(review): Problems 3.2, 3.3 and 4.1 are defined twice in this file;
# these later definitions shadow the earlier ones at import time.
def linear_forward_operator(C0, dm, config):
    """Stub (duplicate definition): linearized forward modelling."""
    # implementation goes here
    return u1s
##############################################################################
# Problem 3.3
def adjoint_condition(C0, config):
    """Stub (duplicate definition): adjoint test for the linearized operators."""
    # implementation goes here
    pass
##############################################################################
# Problem 4.1
def gradient_descent(C0, d, k, config):
    """Stub (duplicate definition): gradient-descent inversion loop."""
    # implementation goes here
    pass
##############################################################################
# Problem 4.2
|
[
"rhewett@vt.edu"
] |
rhewett@vt.edu
|
a611fe6a9f40e4b853cdbfceb4e2125ec01b9c3d
|
550cfce0e688ffbba8290ece0baf5751d20016ea
|
/exts.py
|
7b042d99f614f1a09c08a6b6987c0f83177c9d1a
|
[] |
no_license
|
thunderwin/firstweb
|
0c32169ea1b2081cbe6b5019ba9cdb3993c93bd2
|
53f509a7044bd3174980e6f4f74a813e5b2d9f44
|
refs/heads/master
| 2021-07-21T02:22:00.243247
| 2017-10-31T11:28:42
| 2017-10-31T11:28:42
| 108,497,735
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 76
|
py
|
#encoding: utf-8
# Shared Flask-SQLAlchemy extension instance, created unbound here so other
# modules can import `db` without circular imports.
# NOTE(review): presumably bound to the app elsewhere via db.init_app(app)
# -- confirm against the application factory.
from flask_sqlalchemy import SQLAlchemy
db = SQLAlchemy()
|
[
"nathan@MacBook-Pro.local"
] |
nathan@MacBook-Pro.local
|
c484b176ad74bbf3c3d2c6945058b3f6fa039104
|
1978a9455159b7c2f3286e0ad602652bc5277ffa
|
/exercises/05_basic_scripts/task_5_2b.py
|
942e752a8c38f07e0e2a188e036ef30e8781ecff
|
[] |
no_license
|
fortredux/py_net_eng
|
338fd7a80debbeda55b5915dbfba4f5577279ef0
|
61cf0b2a355d519c58bc9f2b59d7e5d224922890
|
refs/heads/master
| 2020-12-03T17:32:53.598813
| 2020-04-08T20:55:45
| 2020-04-08T20:55:45
| 231,409,656
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,047
|
py
|
# -*- coding: utf-8 -*-
'''
Task 5.2b

Rework the script from task 5.2a so that the network/mask are not
prompted from the user interactively, but are passed to the script as
command-line arguments.

Restriction: all tasks must be solved using only the topics covered so far.
'''
from sys import argv
# Positional arguments: <ip-address> <prefix-length>
ip = str(argv[1])
mask = int(argv[2])
# Network address: zero the host octet.
# NOTE(review): only the 4th octet is zeroed, which is correct only for
# prefixes of /24 and longer -- confirm the exercise's input constraints.
host_net_lst = ip.split('.')
host_net_lst[3] = '0'
# Build the 32-character binary mask string without bin()/shifts:
# '1' * mask is parsed as a decimal integer, then left-justified and
# zero-filled to width 32 (format spec '<032'), yielding e.g.
# '11111111111111111111111100000000' for /24.
mask_32 = int('1' * mask)
mask_32 = '{:<032}'.format(mask_32)
# Output template: network octets in decimal and binary, then the mask.
template = '''
Network:
{0:<8} {1:<8} {2:<8} {3:<8}
{0:08b} {1:08b} {2:08b} {3:08b}
Mask:
/{4:}
{5:<8} {6:<8} {7:<8} {8:<8}
{9:8} {10:8} {11:8} {12:8}
'''
# Mask octets are parsed back from the binary string (base 2) for the
# decimal row, and sliced directly for the binary row.
print(template.format(int(host_net_lst[0]), int(host_net_lst[1]), int(host_net_lst[2]), int(host_net_lst[3]), mask,
                      int(mask_32[0:8], 2), int(mask_32[8:16], 2), int(mask_32[16:24], 2), int(mask_32[24:32], 2),
                      mask_32[0:8], mask_32[8:16], mask_32[16:24], mask_32[24:32]))
|
[
"fortunaredux@protonmail.com"
] |
fortunaredux@protonmail.com
|
fe91480c51ec9d9e11d8cbf4c07c3dbad667f8a4
|
f2f21c643d1f5459253989e7cdba85c064cca8ce
|
/adding_bootstarp/adding_bootstarp/wsgi.py
|
b02fcd063eb36aa3dc1d03dc3104e13e690ebccf
|
[] |
no_license
|
NiteshTyagi/django_tutorial
|
342decea7532f1efb200b9f45e4123c581aad43f
|
3353f0d2907a00f43e1faee2b97abd9af66ca08f
|
refs/heads/master
| 2022-03-05T19:46:50.642154
| 2022-03-01T04:53:14
| 2022-03-01T04:53:14
| 205,629,609
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 409
|
py
|
"""
WSGI config for adding_bootstarp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/2.2/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'adding_bootstarp.settings')
application = get_wsgi_application()
|
[
"nitesh.tyagi.cs.2015@miet.ac.in"
] |
nitesh.tyagi.cs.2015@miet.ac.in
|
38acb8c211006d953999bf2dfc3090c9f9313ea5
|
ee27325f6a3e6a2d1f5e004aa60f5974ad864ae9
|
/contrib/python/plotly/py3/plotly/validators/contourcarpet/__init__.py
|
09c50961c6d1e808ad2e54b12da590314f6b6cc2
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
alvinahmadov/catboost
|
f32d2b16be9db7439e429c88feb5676de842fc89
|
a6e0caa4779b31199f535cf43b09879d7c653abe
|
refs/heads/master
| 2023-06-12T19:29:52.028508
| 2023-05-11T18:33:03
| 2023-05-11T18:33:03
| 202,584,937
| 0
| 0
|
Apache-2.0
| 2019-08-15T17:35:23
| 2019-08-15T17:35:23
| null |
UTF-8
|
Python
| false
| false
| 4,621
|
py
|
# Auto-generated plotly package initializer for the contourcarpet validators.
# On Python < 3.7 (or under a static type checker) every validator module is
# imported eagerly; otherwise relative_import installs a lazy module
# __getattr__ (PEP 562) so each submodule loads on first attribute access.
import sys
from typing import TYPE_CHECKING

if sys.version_info < (3, 7) or TYPE_CHECKING:
    from ._zsrc import ZsrcValidator
    from ._zmin import ZminValidator
    from ._zmid import ZmidValidator
    from ._zmax import ZmaxValidator
    from ._zauto import ZautoValidator
    from ._z import ZValidator
    from ._yaxis import YaxisValidator
    from ._xaxis import XaxisValidator
    from ._visible import VisibleValidator
    from ._uirevision import UirevisionValidator
    from ._uid import UidValidator
    from ._transpose import TransposeValidator
    from ._textsrc import TextsrcValidator
    from ._text import TextValidator
    from ._stream import StreamValidator
    from ._showscale import ShowscaleValidator
    from ._showlegend import ShowlegendValidator
    from ._reversescale import ReversescaleValidator
    from ._opacity import OpacityValidator
    from ._ncontours import NcontoursValidator
    from ._name import NameValidator
    from ._metasrc import MetasrcValidator
    from ._meta import MetaValidator
    from ._line import LineValidator
    from ._legendwidth import LegendwidthValidator
    from ._legendrank import LegendrankValidator
    from ._legendgrouptitle import LegendgrouptitleValidator
    from ._legendgroup import LegendgroupValidator
    from ._idssrc import IdssrcValidator
    from ._ids import IdsValidator
    from ._hovertextsrc import HovertextsrcValidator
    from ._hovertext import HovertextValidator
    from ._fillcolor import FillcolorValidator
    from ._db import DbValidator
    from ._da import DaValidator
    from ._customdatasrc import CustomdatasrcValidator
    from ._customdata import CustomdataValidator
    from ._contours import ContoursValidator
    from ._colorscale import ColorscaleValidator
    from ._colorbar import ColorbarValidator
    from ._coloraxis import ColoraxisValidator
    from ._carpet import CarpetValidator
    from ._btype import BtypeValidator
    from ._bsrc import BsrcValidator
    from ._b0 import B0Validator
    from ._b import BValidator
    from ._autocontour import AutocontourValidator
    from ._autocolorscale import AutocolorscaleValidator
    from ._atype import AtypeValidator
    from ._asrc import AsrcValidator
    from ._a0 import A0Validator
    from ._a import AValidator
else:
    from _plotly_utils.importers import relative_import

    # The string list below mirrors the eager imports above exactly.
    __all__, __getattr__, __dir__ = relative_import(
        __name__,
        [],
        [
            "._zsrc.ZsrcValidator",
            "._zmin.ZminValidator",
            "._zmid.ZmidValidator",
            "._zmax.ZmaxValidator",
            "._zauto.ZautoValidator",
            "._z.ZValidator",
            "._yaxis.YaxisValidator",
            "._xaxis.XaxisValidator",
            "._visible.VisibleValidator",
            "._uirevision.UirevisionValidator",
            "._uid.UidValidator",
            "._transpose.TransposeValidator",
            "._textsrc.TextsrcValidator",
            "._text.TextValidator",
            "._stream.StreamValidator",
            "._showscale.ShowscaleValidator",
            "._showlegend.ShowlegendValidator",
            "._reversescale.ReversescaleValidator",
            "._opacity.OpacityValidator",
            "._ncontours.NcontoursValidator",
            "._name.NameValidator",
            "._metasrc.MetasrcValidator",
            "._meta.MetaValidator",
            "._line.LineValidator",
            "._legendwidth.LegendwidthValidator",
            "._legendrank.LegendrankValidator",
            "._legendgrouptitle.LegendgrouptitleValidator",
            "._legendgroup.LegendgroupValidator",
            "._idssrc.IdssrcValidator",
            "._ids.IdsValidator",
            "._hovertextsrc.HovertextsrcValidator",
            "._hovertext.HovertextValidator",
            "._fillcolor.FillcolorValidator",
            "._db.DbValidator",
            "._da.DaValidator",
            "._customdatasrc.CustomdatasrcValidator",
            "._customdata.CustomdataValidator",
            "._contours.ContoursValidator",
            "._colorscale.ColorscaleValidator",
            "._colorbar.ColorbarValidator",
            "._coloraxis.ColoraxisValidator",
            "._carpet.CarpetValidator",
            "._btype.BtypeValidator",
            "._bsrc.BsrcValidator",
            "._b0.B0Validator",
            "._b.BValidator",
            "._autocontour.AutocontourValidator",
            "._autocolorscale.AutocolorscaleValidator",
            "._atype.AtypeValidator",
            "._asrc.AsrcValidator",
            "._a0.A0Validator",
            "._a.AValidator",
        ],
    )
|
[
"akhropov@yandex-team.com"
] |
akhropov@yandex-team.com
|
1b3085c6b0e6c23be8cc76bd570aa2586dae7368
|
7e7b0a0c32998c13ed9ecb5e4f57ea3b8f6f2644
|
/histo.py
|
eda41b3531ed474468be080ad59e8d11fea32b46
|
[
"MIT"
] |
permissive
|
Pacific01/openPDF
|
f37a8485060ec4f46ca69436befa269f53b62ff4
|
a32f9aeab540fb6cfb7b268a905c74f33211d5ca
|
refs/heads/master
| 2020-12-24T07:11:22.490281
| 2016-12-13T10:48:37
| 2016-12-13T10:48:37
| 73,378,180
| 0
| 0
| null | 2016-12-12T23:51:39
| 2016-11-10T12:02:49
|
Python
|
UTF-8
|
Python
| false
| false
| 1,593
|
py
|
# -*- coding: utf-8 -*-
"""Generate one gnuplot histogram PNG per survey question (Python 2).

Reads questions.json and answers.json, writes a temporary TSV of answer
counts for each question, and shells out to gnuplot to render
./Plots/plot<id>.png.
"""
import json
from os import system, remove

with open('questions.json') as data_file:
    questions = json.load(data_file)
with open('answers.json') as data_file:
    answers = json.load(data_file)

questions = questions['questions']
answers = answers['answers']

for question in questions:
    # Locate the aggregated counts for this question.
    # NOTE(review): if no matching record exists, `respuestas`/`yrange`
    # keep their previous values (or raise NameError on the first
    # question) -- same behaviour as the original, kept on purpose.
    for answer in answers:
        if answer['questionId'] == question['id']:
            respuestas = answer['answers']
            yrange = max(answer['answers'])
    # Build the TSV: one "<count> <label>" row per possible answer.
    # BUGFIX: rows were previously concatenated without any newline
    # separator, collapsing the whole data file onto one line, which
    # gnuplot cannot plot as a histogram.
    rows = []
    for res in question['answers']:
        rows.append(str(respuestas[res['id'] - 1]) + ' ' + res['text'].encode('utf-8'))
    tsv_file = open('respuestas.tsv', 'w+')  # renamed: `file` shadowed the builtin
    tsv_file.write('\n'.join(rows))
    tsv_file.close()
    # gnuplot script; continuation lines stay at column 0 so no
    # indentation leaks into the generated script text.
    gnuplot = "\
# ______________________________________________________________________\n\
#Setting output\n\
set term png\n\
set output \"./Plots/plot" + str(question['id']) + ".png\"\n\
# For the next graph, we want a histogram.\n\
set style data boxes\n\
# set xrange [0:" + str(question['numAnswers']) + "]\n\
# set yrange [0:" + str(yrange) + "]\n\
# set xtics rotate by -45\n\
\n\
# We want a small gap between solid (filled-in) bars.\n\
set boxwidth 0.8 relative\n\
set style fill solid 1.0\n\
\n\
# Plot the histogram (one curve).\n\
plot 'respuestas.tsv' using 1:xtic(2) with boxes title '" + question['text'].strip('\n').encode('utf-8') + "'\n\
"
    gp_file = open('tmp.gp', 'w+')
    gp_file.write(gnuplot)
    gp_file.close()
    system('gnuplot tmp.gp')
    remove('respuestas.tsv')
    remove('tmp.gp')
|
[
"cod.al.pro@gmail.com"
] |
cod.al.pro@gmail.com
|
dc188b288e92680cb9d0fa42245dc94996074db1
|
d3178cc89ce03eb68e6b5dd7b4d99cb36d68a6a4
|
/nets/vgg/__init__.py
|
24afea7e7bb181f07e703fcbdb3fb4cbda1ef8c7
|
[
"Unlicense"
] |
permissive
|
jumpsnack/SOA_DORN_TF
|
829debc37584d6140396fcfa58ced1612fb3c0d7
|
33814467e9135036abf28f2da19c5984c8744089
|
refs/heads/master
| 2021-11-27T14:10:44.364498
| 2019-01-29T07:15:15
| 2019-01-29T07:15:15
| 161,754,223
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 19
|
py
|
from ._vgg import *
|
[
"kir0302@gmail.com"
] |
kir0302@gmail.com
|
9b4fcb23bad9aefa541e7185a3d4e8f29c97b4b5
|
d0e2ef0fb0d9a0c3c9d5656273e8eae6787faee7
|
/app.py
|
0dc0cdd8295a03c72a0c122a7be8a0403b5210f0
|
[] |
no_license
|
HemendraTripathi/demo2
|
fe1ba499c914f5077f05f92b1b2aff2dd4e330a4
|
f5ec3be456ecad37017052795c2fe72b78eab4cf
|
refs/heads/master
| 2022-12-21T13:18:04.327276
| 2020-09-23T04:59:31
| 2020-09-23T04:59:31
| 297,856,545
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,838
|
py
|
#Shree Ganesh#
"""Telegram delivery bot: registers a webhook and logs updates to Excel."""
from bottle import (run, post, response, request as bottle_request)
import os,sqlite3,requests,json,sys,datetime
from xlsxwriter.workbook import Workbook
from ver import verify_input
from time import sleep
from get_user import get_user
from get_message import get_message
from send_message import send_message
from save_to_excel import save_to_excel
# Validate the command-line credentials; abort if malformed.
flag, TOKEN, API_TOKEN = verify_input(sys.argv)
if not flag:
    sys.exit(3)
# Excel workbook named after the current month/year, e.g. "September_2020.xlsx".
current_month_text = datetime.datetime.now().strftime('%B')
current_year_text = datetime.datetime.now().strftime('%Y')
sheet_name = current_month_text +"_"+current_year_text+".xlsx"
url = f'https://api.telegram.org/bot{TOKEN}/'
webhook_url = input("Enter Webhook URL : ")
# Replace any previously registered webhook with the new one.
try:
    web = requests.get(url+'deletewebhook')
    print("\nPrevious WebHook Deleted.")
except requests.exceptions.ConnectionError:
    print("Please Check Your Internet Connection! WEBHOOK")
    exit(6)
try:
    web = requests.get(url+'setWebHook?url='+webhook_url)
    print("\nWebhook Setted....\n")
except requests.exceptions.ConnectionError:
    print("Please Check Your Internet Connection! WEBHOOK2")
    exit(5)
# Ensure the ./delivery_data output directory exists.
cwd = os.getcwd()
path = os.path.join(cwd,'delivery_data')
print(path)
try:
    os.mkdir(path)
    print(path)
except FileExistsError:
    pass
sheet_name = os.path.join(path,sheet_name)
print(sheet_name)
@post('/')
def main():
    """Handle one Telegram webhook POST.

    Parses the update payload, refreshes user/message state via the
    helper modules, sends the bot reply, appends the record to this
    month's Excel sheet, then clears the console between requests.
    """
    data = bottle_request.json
    get_user(TOKEN)
    get_message(TOKEN,API_TOKEN,data)
    send_message(TOKEN)
    save_to_excel(sheet_name)
    # Clear the terminal between updates (Windows vs Linux).
    if sys.platform == 'win32':
        os.system('cls')
    elif sys.platform == 'linux':
        os.system('clear')
    print("""
Refreshing ............
) (
( ) )
) ( (
_______)_
.-'---------|
( C|/\/\/\/\/|
'-./\/\/\/\/|
'_________'
'-------'
""")
    # NOTE(review): returns bottle's global `response` object as the body;
    # confirm this is intentional (an empty string is more conventional).
    return response
if __name__ == '__main__':
    run(host = 'localhost', port = 8080, debug = True)
|
[
"noreply@github.com"
] |
HemendraTripathi.noreply@github.com
|
e965fc7f54e9b3311e96c8e2934c07c82278a4d5
|
34d075680d450ef1ef74dd0c284f917f7b655c33
|
/lyremd/main.py
|
7bba0a4b39b56c992e5252b622642adff3db4ba5
|
[
"MIT"
] |
permissive
|
nao159/PycharmProjects
|
b252e837b44d79f630a964374c6dec48ec795966
|
c3abf404c9281ce89fdcd5d032220a1d5886d8bc
|
refs/heads/main
| 2023-07-27T14:05:26.711901
| 2021-09-09T07:09:39
| 2021-09-09T07:09:39
| 398,906,786
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,916
|
py
|
from os import path
from threading import Thread
from time import sleep
from mido import MidiFile
import argparse
import keyboard as kbd
from utils import find_best_shift, midi_play_filter
# Pitch constants: MIDI note numbers; play() clamps to the C3..B5 range.
octave_interval = 12
c3_pitch = 48
c4_pitch = 60
c5_pitch = 72
b5_pitch = 83

# Keyboard rows for the three playable octaves; '?' marks semitone slots
# that have no dedicated key.
keytable = "z?x?cv?b?n?m" + "a?s?df?g?h?j" + "q?w?er?t?y?u"
notetable = "C?D?EF?G?A?B"

# Player state machine: 'idle' -> 'running' -> 'stopping' -> 'idle'.
play_state = 'idle'


def help():
    """Print the interactive key bindings."""
    usage = 'Press "+" to start/stop playing, press "backspace" to exit.\n'
    print(usage)


def note_name(note):
    """Return a readable name such as 'C4' or 'C#4' for a MIDI pitch number."""
    idx = note % octave_interval
    if idx < 0:
        return '-'
    octave = note // octave_interval - 1
    name = notetable[idx]
    if name == '?':
        # Semitone slot: spell it as the previous natural note plus a sharp.
        name = notetable[idx - 1] + '#'
    return name + str(octave)
def print_note(ch, orig, play, key):
    """Log one note event: channel, original pitch, played pitch, key pressed.

    `play`/`key` may be falsy to indicate the note was skipped; a '-' is
    printed in their place.
    """
    print("ch {:<2} orig: {:<3}{:<5} play: {:<3}{:<5} {}\n"
          .format(ch, note_name(orig),
                  '(' + str(orig) + ')',
                  note_name(play) if play else '-',
                  '(' + str(play) + ')' if play else '-',
                  key if key else '-'))
def play(midi, msg_filter, shift, no_semi, out_range):
    """Play a parsed MIDI file by sending key presses.

    Iterates the MIDI messages in real time (sleeping msg.time between
    them), transposes each note by `shift`, optionally folds notes that
    fall outside C3..B5 back into range (`out_range`), maps the pitch to
    a keyboard key via `keytable`, and sends the key press.  Stops early
    when the global `play_state` leaves 'running' (set by control()).
    """
    global play_state
    play_state = 'running'
    print('Start playing')
    for msg in midi:
        if play_state != 'running':
            break
        sleep(msg.time)
        if not msg_filter(msg):
            continue
        note = msg.note + shift
        orig_note = note
        # Fold out-of-range notes into the nearest playable octave, if asked.
        if note < c3_pitch:
            print(
                'note {:<3} lower than C3 : {:+}'.format(note, c3_pitch - note))
            if out_range:
                note = note % octave_interval + c3_pitch
        elif note > b5_pitch:
            print(
                'note {:<3} higher than B5: {:+}'.format(note, b5_pitch - note))
            if out_range:
                note = note % octave_interval + c5_pitch
        if note < c3_pitch or note > b5_pitch:
            # Still unplayable: log it as skipped.
            print_note(msg.channel, orig_note, None, None)
            continue
        # '?' marks a semitone with no key; flatten it unless no_semi is set.
        if keytable[note - c3_pitch] == '?' and not no_semi:
            note -= 1
        key = keytable[note - c3_pitch]
        print_note(msg.channel, orig_note, note, key.upper())
        kbd.send(key)
    print('Stop playing')
    help()
    play_state = 'idle'
def control(*args):
    """Hotkey callback: toggle playback.

    If a song is running, request a stop; if idle, schedule play(*args)
    one second later (delay gives the hotkey time to be released).
    """
    global play_state
    if play_state == 'running':
        play_state = 'stopping'
    elif play_state == 'idle':
        kbd.call_later(
            play,
            args=args,
            delay=1)
if __name__ == '__main__':
    # Command-line interface: midi path plus channel/shift/semitone options.
    parser = argparse.ArgumentParser(
        description='Play midi file with Windsong Lyre in Genshin Impact')
    parser.add_argument('midi', nargs="?", type=str, help='path to midi file')
    parser.add_argument('-c', '--channels', nargs="*", type=int,
                        help="enabled midi channels, available values:0, 1, 2,...,N")
    parser.add_argument('-s', '--shift', type=int, default=None,
                        help="shift note pitch, auto calculated by default")
    parser.add_argument('-n', '--no-semi', action='store_true',
                        help="don't shift black key to white key")
    parser.add_argument('-r', '--shift-out-of-range', dest="out_range",
                        action='store_true', help="shift notes which out of range")
    args = parser.parse_args()
    midi = args.midi
    if not midi:
        # Fall back to the bundled demo file next to this script.
        midi = path.join(path.dirname(
            path.realpath(__file__)), 'files/canon.mid')
    midi = MidiFile(midi)
    # Bind the selected channels into the filter once, at startup.
    msg_filter = lambda msg, ch=args.channels: midi_play_filter(msg, ch)
    shift = args.shift
    if shift == None:
        shift = find_best_shift(midi, msg_filter)
        print('Auto calculated pitch shift: {:+} semitone(s)\n'.format(shift))
    # '+' toggles playback (see control); trigger_on_release avoids the
    # hotkey itself being captured as a played note.
    kbd.add_hotkey('+',
                   lambda: control(midi, msg_filter, shift, args.no_semi, args.out_range),
                   suppress=True,
                   trigger_on_release=True)
    help()
    # Block until the user presses backspace.
    kbd.wait('backspace', suppress=True)
|
[
"79338207+nao159@users.noreply.github.com"
] |
79338207+nao159@users.noreply.github.com
|
60e00547cea5608007f9512ca8f4505dd0388455
|
52ffc8be8e69745864f1a45992d27c94ca51cffe
|
/Admin/views.py
|
b460fb713762b1d8b21cbf2665ae329d00885cd2
|
[] |
no_license
|
Kaviya-M12/College-Enquiry-Chatbot
|
137c6222798c32a6c34e2dcd67e4120d4c57ad9c
|
4c0700ae6bd9b52fe80cd7ac238be239bf0b9de5
|
refs/heads/master
| 2023-04-09T00:32:45.470227
| 2021-04-16T11:11:55
| 2021-04-16T11:11:55
| 350,371,938
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,930
|
py
|
from django.shortcuts import render
# `redirect` is called by every view below but was only present in the
# commented-out import; re-added so the views don't raise NameError.
from django.shortcuts import redirect
from django.contrib import messages
from django.contrib.auth.models import User, auth
# Create your views here.
# from django.shortcuts import render, redirect
#from .models import Users
# Create your views here.
def Adminlogin(request):
    """Authenticate an admin and start a session.

    GET renders the login form; POST checks the submitted credentials
    and redirects to '/' on success or back to the form on failure.
    """
    if request.method== 'POST':
        MailId = request.POST['MailId']
        password = request.POST['password']
        # NOTE(review): Django's default ModelBackend authenticates by
        # `username`, not `email` -- confirm a custom backend accepts the
        # `email` keyword, otherwise this always returns None.
        user = auth.authenticate(email=MailId,password=password)
        if user is not None:
            auth.login(request, user)
            return redirect("/")
        else:
            messages.info(request,'invalid credentials')
            return redirect('Adminlogin')
    else:
        return render(request,'Adminlogin.html')
def Adminregister(request):
    """Register a new admin account.

    GET renders the registration form; POST verifies the two passwords
    match and that neither the e-mail nor the username is taken before
    creating the User and redirecting to the login page.
    """
    if request.method == 'POST':
        email = request.POST['MailId']
        password= request.POST['createpassword']
        secpassword = request.POST['confirmpassword']
        username = request.POST['username']
        if password==secpassword:
            if User.objects.filter(email=email).exists():
                messages.info(request,'MailId Taken')
                return redirect('Adminregister')
            elif User.objects.filter(username=username).exists():
                messages.info(request,'username taken')
                return redirect('Adminregister')
            else:
                user = User.objects.create_user(username=username, password=password, email=email)
                #dummyusers=Users()
                # #dummyusers.name=username
                # print('user created')
                return redirect('Adminlogin')
        else:
            messages.info(request,'password not matching..')
            return redirect('Adminregister')
        #return redirect('/')
    else:
        return render(request,'Adminregister.html')
def Adminlogout(request):
    """End the current admin session and return to the site root.

    BUGFIX: the original called ``auth.Adminlogout(request)``;
    ``django.contrib.auth`` has no such attribute, so every logout
    raised AttributeError.  ``auth.logout`` is the correct call.
    """
    auth.logout(request)
    return redirect('/')
|
[
"kaviyamadesh4@gmail.com"
] |
kaviyamadesh4@gmail.com
|
e066f05d3207fd56a8422bdeb6707bf4ebecfb1b
|
76f3b6dd64acdf60ff464f5c0fe9b4f4151358e0
|
/pymnn/pip_package/MNN/tools/mnn_fb/Transpose.py
|
557f49e65400b320cf2ced36f4306e9ee1dad48e
|
[
"Apache-2.0"
] |
permissive
|
qipengwang/Melon
|
33ce5d4b683af70215f73b82a7b9c15ffd7706d9
|
10c9d71cdc609a290bfdd09296db6af3913bb461
|
refs/heads/main
| 2023-08-31T17:44:28.797075
| 2023-08-17T14:24:59
| 2023-08-17T14:24:59
| 486,278,764
| 22
| 2
| null | 2022-04-27T16:54:48
| 2022-04-27T16:54:47
| null |
UTF-8
|
Python
| false
| false
| 899
|
py
|
# automatically generated by the FlatBuffers compiler, do not modify
# namespace: MNN
import flatbuffers
class Transpose(object):
    """Read-only FlatBuffers accessor for the MNN `Transpose` op table."""
    __slots__ = ['_tab']
    @classmethod
    def GetRootAsTranspose(cls, buf, offset):
        # Resolve the root table offset in `buf` and wrap it in an accessor.
        n = flatbuffers.encode.Get(flatbuffers.packer.uoffset, buf, offset)
        x = Transpose()
        x.Init(buf, n + offset)
        return x
    # Transpose
    def Init(self, buf, pos):
        self._tab = flatbuffers.table.Table(buf, pos)
    # Transpose
    def Tperm(self):
        # Field at vtable slot 4 (int32); returns the schema default 0 when
        # absent.  Presumably the DataType of the permutation input --
        # confirm against the MNN schema.
        o = flatbuffers.number_types.UOffsetTFlags.py_type(self._tab.Offset(4))
        if o != 0:
            return self._tab.Get(flatbuffers.number_types.Int32Flags, o + self._tab.Pos)
        return 0
# Builder helpers for writing a Transpose table.
def TransposeStart(builder): builder.StartObject(1)
def TransposeAddTperm(builder, Tperm): builder.PrependInt32Slot(0, Tperm, 0)
def TransposeEnd(builder): return builder.EndObject()
|
[
"861026685@qq.com"
] |
861026685@qq.com
|
acb80ba6822833f25d966e362da91eaa8b931efc
|
1df048bc8092f333c53f4e345c729c40ba2612c5
|
/count_list_4.py
|
1b5da491e36d1c5ef606f85dc241e68e374beef1
|
[] |
no_license
|
saipreeti1999/python_prog
|
58e3e5b5dab356fb3755b3abf36529aa475923be
|
8bdb773c29e0d8b2beb3a16dd4692485b4726b79
|
refs/heads/master
| 2020-06-28T09:15:46.787982
| 2019-08-06T07:03:18
| 2019-08-06T07:03:18
| 200,196,486
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 173
|
py
|
def count_list4(num, target=4):
    """Count how many elements of *num* equal *target*.

    Parameters
    ----------
    num : iterable
        Values to scan.
    target : object, optional
        Value to count; defaults to 4 for backward compatibility with the
        original hard-coded behaviour.

    Returns
    -------
    int
        Number of elements equal to *target* (0 for an empty input).
    """
    # Generator form works for any iterable, not just lists.
    return sum(1 for item in num if item == target)
print(count_list4([1,4,6,7,4]))
print(count_list4([1,4,6,4,7,4]))
|
[
"noreply@github.com"
] |
saipreeti1999.noreply@github.com
|
b7852190aea462adb905c42a666ee25b1de79558
|
ecaffc075b41e36457423b810903e74280d7af75
|
/manage.py
|
abc9e3310be544a334973b156ca82af408f28585
|
[] |
no_license
|
MounishKesavan/Myproject2
|
c4d601869bc98ef78556b36c0de1412525c09d0d
|
b5f6dda8e4f46a2805e0bf73ef157255188cbe5f
|
refs/heads/main
| 2023-05-26T17:39:21.955239
| 2021-06-05T13:55:19
| 2021-06-05T13:55:19
| 374,117,990
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 832
|
py
|
#!/usr/bin/env python
"""Django's command-line utility for the resumeparser project."""
import os
import sys
if __name__ == "__main__":
    # Fall back to the project settings unless the caller already set one.
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "resumeparser.settings")
    try:
        from django.core.management import execute_from_command_line
    except ImportError:
        # The above import may fail for some other reason. Ensure that the
        # issue is really that Django is missing to avoid masking other
        # exceptions on Python 2.
        try:
            import django
        except ImportError:
            raise ImportError(
                "Couldn't import Django. Are you sure it's installed and "
                "available on your PYTHONPATH environment variable? Did you "
                "forget to activate a virtual environment?"
            )
        raise
    # Dispatch to the requested management command (runserver, migrate, ...).
    execute_from_command_line(sys.argv)
|
[
"noreply@github.com"
] |
MounishKesavan.noreply@github.com
|
54e34c038c98d02ef48734b8a1b6e55a21b24693
|
32128ef01b5ca9d3c634bdffd5a870be12f23a01
|
/main.py
|
446a2fe5b0a39de49c9331cd568b2a0ac7a477e9
|
[] |
no_license
|
geminiwayne/cloud_assignment
|
4efa2ed28800853ac28c92ed546aad6d94fcd361
|
f187e86d3bf4227b270db155ea192b02a3e31737
|
refs/heads/master
| 2021-01-20T01:52:35.861799
| 2017-05-14T16:04:27
| 2017-05-14T16:04:27
| 89,339,596
| 2
| 3
| null | 2018-02-03T04:01:27
| 2017-04-25T09:00:07
|
Python
|
UTF-8
|
Python
| false
| false
| 1,474
|
py
|
#################################
#Team 4                         #
#Melbourne,Sydney,Brisbane,Perth#
#Dong Wang   773504             #
#Danping Zeng 777691            #
#Jia Zhen  732355               #
#Jinghan Liang 732329           #
#Sixue Yang 722804              #
#################################
max_volum=0
import tweepy
import config
import history_twitter
import new_twitter
import threading
import time
import sys
if __name__=="__main__":
    # First CLI argument: path to the harvester configuration file.
    file = sys.argv[1]
    config.get_config(file)
    max_volum=(int)(config.max_size)
    # Reserve two thirds of the quota for the historical (search) crawler.
    max_history_tweet= max_volum*2/3
    # twitter_stream.filter(track=config.streaming_topic,locations=config.bound,async=True)
    # # to use thread to control two crawling fucntion
    while(1):
        myStreamListener =new_twitter.MyStreamListener()
        new_twitter.get_max(max_volum)
        twitter_stream = tweepy.Stream(auth = new_twitter.get_connection(), listener=myStreamListener)
        try:
            # NOTE(review): Thread(target=f(arg)) CALLS f immediately and
            # passes its return value as target -- the crawl runs inline,
            # not in the thread.  Intended form: Thread(target=f, args=(arg,)).
            t1 = threading.Thread(target=history_twitter.tweeet_crawl(max_history_tweet))
            t1.start()
            t1.join()
        except Exception as e:
            time.sleep(5)
            print ("Error: t1 thread stop",e)
        try:
            # NOTE(review): same Thread(target=call) issue as above; also
            # `async` is a reserved keyword on Python 3.7+ (SyntaxError) --
            # this script targets Python 2 / old tweepy.
            t2 = threading.Thread(target=twitter_stream.filter(locations=config.bound,async=True))
            t2.start()
            t2.join()
        except Exception as e:
            time.sleep(5)
            print ("Error: t2 thread stop",e)
    print ("congratulation! Data harvest finished!")
|
[
"wayne@127.0.0.1 my_computer.local"
] |
wayne@127.0.0.1 my_computer.local
|
30515edd35063c0b0ce56bb0b4cca5653ebf3076
|
ee0d795e2874b30ba1edd568f9f2f9483854f0a7
|
/Run/wsgi.py
|
6c10531c680a9fc6ce3a27c9d0eba2d6ba5a2cc0
|
[
"Apache-2.0"
] |
permissive
|
CodeMaxx/Run-Backend
|
91e65e5633263cf9913c814ac0985e8063679b33
|
880610ea473d0a0cde60a89064e987d92842dde3
|
refs/heads/master
| 2021-01-19T16:59:34.281976
| 2017-04-27T19:53:48
| 2017-04-27T19:53:48
| 86,144,226
| 2
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 384
|
py
|
"""
WSGI config for Run project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.10/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "Run.settings")
application = get_wsgi_application()
|
[
"akash.trehan123@gmail.com"
] |
akash.trehan123@gmail.com
|
dcf9b0a27f7c7e49b3ad9e8d7887b134117d129a
|
76a6d509f366cc31febd05020a42da3024586407
|
/app/recipe/serializers.py
|
74e46eb63137f3678b98be3637ba359129a3648c
|
[
"MIT"
] |
permissive
|
dipodaimary/recipe-app-api
|
52e5a6d378d177bcf50e4db54e9eacc22f57ab84
|
fbf2a452b2fed41570bb5444a202284ab1d1a03f
|
refs/heads/main
| 2023-01-28T21:44:21.319328
| 2020-11-30T15:12:54
| 2020-11-30T15:12:54
| 316,896,884
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,411
|
py
|
from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
    """Serializer for tag objects"""
    class Meta:
        model = Tag
        fields = ('id', 'name')
        # `id` is database-assigned and must not be client-writable.
        read_only_fields = ('id',)
class IngredientSerializer(serializers.ModelSerializer):
    """Serializer for Ingredient objects"""
    class Meta:
        model = Ingredient
        fields = ('id', 'name')
        # `id` is database-assigned and must not be client-writable.
        read_only_fields = ('id', )
class RecipeSerializer(serializers.ModelSerializer):
    """Serialize a recipe"""
    # Related objects are represented as primary keys only here; the
    # detail serializer nests the full objects instead.
    ingredients = serializers.PrimaryKeyRelatedField(
        many=True,
        queryset=Ingredient.objects.all()
    )
    tags = serializers.PrimaryKeyRelatedField(
        many=True,
        queryset=Tag.objects.all()
    )
    class Meta:
        model = Recipe
        fields = ('id', 'title', 'ingredients', 'tags', 'time_minutes',
                  'price', 'link')
        read_only_fields = ('id',)
class RecipeDetailSerializer(RecipeSerializer):
    """Serialize a recipe detail"""
    # Override the PK-only fields with nested, read-only representations.
    ingredients = IngredientSerializer(many=True, read_only=True)
    tags = TagSerializer(many=True, read_only=True)
class RecipeImageSerializer(serializers.ModelSerializer):
    """Serializer for uploading image to recipes"""
    class Meta:
        model = Recipe
        fields = ('id', 'image')
        read_only_fields = ('id',)
|
[
"dipodaimary@gmail.com"
] |
dipodaimary@gmail.com
|
5b11a4092b62457332b80ede0b087786272aa1cf
|
66580ac2680bd9060f6a4e852da52f45177e9721
|
/test/functional/p2p_leak.py
|
45620addb94fc3d51265c9c9c10c386dade7bddb
|
[
"MIT"
] |
permissive
|
bitcoinpaythrough-project/bitcoinpaythrough
|
459cae4b1805e3f966a870e820a9aab933b6a7a0
|
2aabe233abf16ec4e41be4c0528204b7a479dbd3
|
refs/heads/master
| 2022-11-18T17:04:30.689139
| 2020-07-16T23:42:34
| 2020-07-16T23:42:34
| 279,911,155
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 5,310
|
py
|
#!/usr/bin/env python3
# Copyright (c) 2017-2019 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test message sending before handshake completion.
A node should never send anything other than VERSION/VERACK/REJECT until it's
received a VERACK.
This test connects to a node and sends it a few messages, trying to entice it
into sending us something it shouldn't."""
import time
from test_framework.messages import msg_getaddr, msg_ping, msg_verack
from test_framework.mininode import mininode_lock, P2PInterface
from test_framework.test_framework import BitcoinpaythroughTestFramework
from test_framework.util import wait_until
banscore = 10
class CLazyNode(P2PInterface):
    """P2P peer that expects to receive nothing before the handshake.

    Every protocol message handler routes to bad_message(), which records
    the leak in `unexpected_msg` for the test to assert on afterwards.
    """
    def __init__(self):
        super().__init__()
        # True if the node under test leaked any message to us.
        self.unexpected_msg = False
        # True once the connection has been opened.
        self.ever_connected = False
    def bad_message(self, message):
        # Record the violation and log which command leaked.
        self.unexpected_msg = True
        self.log.info("should not have received message: %s" % message.command)
    def on_open(self):
        self.ever_connected = True
    # Receiving any of these before VERACK is a protocol leak.
    def on_version(self, message): self.bad_message(message)
    def on_verack(self, message): self.bad_message(message)
    def on_reject(self, message): self.bad_message(message)
    def on_inv(self, message): self.bad_message(message)
    def on_addr(self, message): self.bad_message(message)
    def on_getdata(self, message): self.bad_message(message)
    def on_getblocks(self, message): self.bad_message(message)
    def on_tx(self, message): self.bad_message(message)
    def on_block(self, message): self.bad_message(message)
    def on_getaddr(self, message): self.bad_message(message)
    def on_headers(self, message): self.bad_message(message)
    def on_getheaders(self, message): self.bad_message(message)
    def on_ping(self, message): self.bad_message(message)
    def on_mempool(self, message): self.bad_message(message)
    def on_pong(self, message): self.bad_message(message)
    def on_feefilter(self, message): self.bad_message(message)
    def on_sendheaders(self, message): self.bad_message(message)
    def on_sendcmpct(self, message): self.bad_message(message)
    def on_cmpctblock(self, message): self.bad_message(message)
    def on_getblocktxn(self, message): self.bad_message(message)
    def on_blocktxn(self, message): self.bad_message(message)
# Node that never sends a version. We'll use this to send a bunch of messages
# anyway, and eventually get disconnected.
class CNodeNoVersionBan(CLazyNode):
    """Never sends VERSION; spams VERACKs to trip the node's ban score."""
    # send a bunch of veracks without sending a message. This should get us disconnected.
    # NOTE: implementation-specific check here. Remove if bitcoinpaythroughd ban behavior changes
    def on_open(self):
        super().on_open()
        for i in range(banscore):
            self.send_message(msg_verack())
    # Rejects are the expected response here, so they are not leaks.
    def on_reject(self, message): pass
# Node that never sends a version. This one just sits idle and hopes to receive
# any message (it shouldn't!)
class CNodeNoVersionIdle(CLazyNode):
    """Never sends VERSION and stays silent, waiting for any leaked message."""
    def __init__(self):
        super().__init__()
# Node that sends a version but not a verack.
class CNodeNoVerackIdle(CLazyNode):
    """Sends VERSION but never VERACK, then probes with PING/GETADDR."""
    def __init__(self):
        # Set before super().__init__() so it exists for any early callbacks.
        self.version_received = False
        super().__init__()
    def on_reject(self, message): pass
    def on_verack(self, message): pass
    # When version is received, don't reply with a verack. Instead, see if the
    # node will give us a message that it shouldn't. This is not an exhaustive
    # list!
    def on_version(self, message):
        self.version_received = True
        self.send_message(msg_ping())
        self.send_message(msg_getaddr())
class P2PLeakTest(BitcoinpaythroughTestFramework):
    """Functional test: the node must not leak messages pre-handshake."""
    def set_test_params(self):
        self.num_nodes = 1
        # Low ban score so CNodeNoVersionBan can trip it quickly.
        self.extra_args = [['-banscore=' + str(banscore)]]
    def run_test(self):
        # Three misbehaving peers covering the handshake states.
        no_version_bannode = self.nodes[0].add_p2p_connection(CNodeNoVersionBan(), send_version=False, wait_for_verack=False)
        no_version_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVersionIdle(), send_version=False, wait_for_verack=False)
        no_verack_idlenode = self.nodes[0].add_p2p_connection(CNodeNoVerackIdle())
        wait_until(lambda: no_version_bannode.ever_connected, timeout=10, lock=mininode_lock)
        wait_until(lambda: no_version_idlenode.ever_connected, timeout=10, lock=mininode_lock)
        wait_until(lambda: no_verack_idlenode.version_received, timeout=10, lock=mininode_lock)
        # Mine a block and make sure that it's not sent to the connected nodes
        self.nodes[0].generatetoaddress(1, self.nodes[0].get_deterministic_priv_key().address)
        #Give the node enough time to possibly leak out a message
        time.sleep(5)
        #This node should have been banned
        assert not no_version_bannode.is_connected
        self.nodes[0].disconnect_p2ps()
        # Wait until all connections are closed
        wait_until(lambda: len(self.nodes[0].getpeerinfo()) == 0)
        # Make sure no unexpected messages came in
        assert no_version_bannode.unexpected_msg == False
        assert no_version_idlenode.unexpected_msg == False
        assert no_verack_idlenode.unexpected_msg == False
if __name__ == '__main__':
P2PLeakTest().main()
|
[
"bitcoinpaythrough@gmail.com"
] |
bitcoinpaythrough@gmail.com
|
056bd5bc3d264e0caaf44ba1301141d9a63a1857
|
4786222ae4c6302c26e63d0281c12bdf6b1c9420
|
/permute.py
|
2e3a66ff4bb1de27aaadbc0c7ed1549a13fa0136
|
[] |
no_license
|
Kaviprakash156/hunterset2
|
423da05270209e089150a9165ac9d89e7a31e46f
|
182790c3edc66123fab398e79778e37fce5ad0c3
|
refs/heads/master
| 2021-01-24T22:21:42.944448
| 2018-02-28T12:25:06
| 2018-02-28T12:25:06
| 123,279,510
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 320
|
py
|
def toString(List):
    """Join a list of characters into a single string.

    NOTE(review): the parameter name shadows the `List` convention/builtin-like
    name; kept because it is part of the public signature.
    """
    joined = ''.join(List)
    return joined
def permute(a, l, r):
    """Print every permutation of list *a*, permuting positions l..r in place.

    Uses the classic swap-and-recurse backtracking scheme: fix position *l*
    to each candidate element in turn, recurse on the remainder, then undo
    the swap so *a* is restored for the next candidate.
    """
    if l == r:
        print(''.join(a))
        return
    for i in range(l, r + 1):
        a[l], a[i] = a[i], a[l]      # choose a[i] for position l
        permute(a, l + 1, r)         # permute the tail
        a[l], a[i] = a[i], a[l]      # undo the choice (backtrack)
string = "kavi"
n = len(string)
a = list(string)
permute(a, 0, n-1)
|
[
"noreply@github.com"
] |
Kaviprakash156.noreply@github.com
|
355ba71678f6bb9ec2076002fa247bff0631c87c
|
7fe92cf2077e83e13d6a496ec84788bd71713d21
|
/exp3/test_submodule_package/add.py
|
6b07b462f69e1b50f226a867c8b5a983d644a4c3
|
[] |
no_license
|
loveu3000s/learnPython
|
91546b3712deebdade1b87c0fb7691c70ad77397
|
80f7cd70708e96015e552311d843a2960654ce77
|
refs/heads/main
| 2023-06-12T23:08:44.040645
| 2021-05-30T04:38:18
| 2021-05-30T04:38:18
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 185
|
py
|
'''
Description:
Version: 2.0
Author: xuchaoxin
Date: 2021-04-13 11:36:54
LastEditors: xuchaoxin
LastEditTime: 2021-04-13 11:41:48
'''
def add_func(a, b):
    """Return the sum (via the + operator) of *a* and *b*."""
    total = a + b
    return total
|
[
"838808930@qq.com"
] |
838808930@qq.com
|
42cc5b26ee8c1863e26fc185667574ef48c9737f
|
4208b2260aac638af6169872c87e923195f0dd11
|
/piopencvsandbox/motion_detector.py
|
50de871a827c73202fafbdd551baa1d0e6d188ae
|
[] |
no_license
|
cjore/pibox
|
be85b9e996b78392c20f267016fdd8349fe68583
|
62acdd791b1fb0bbcf209ad825e69f3d3ec1a64b
|
refs/heads/master
| 2020-12-29T02:32:33.165913
| 2017-04-06T20:22:28
| 2017-04-06T20:22:28
| 49,876,246
| 0
| 0
| null | 2016-01-19T20:45:57
| 2016-01-18T12:44:59
| null |
UTF-8
|
Python
| false
| false
| 2,897
|
py
|
#!/usr/bin/python3
# -*- coding: utf-8 -*-

# Simple motion detector: compares each video frame against the first
# (background) frame and marks the room "Occupied" when a large-enough
# region has changed.

# Import the necessary packages
import argparse
import datetime
import imutils
from imutils.video import FPS
import time
import cv2

# Construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-v", "--video", help="path to the video file")
ap.add_argument("-a", "--min-area", type=int, default=500, help="minimum area size")
args = vars(ap.parse_args())

# If the video argument is None, then we are reading from webcam
if args.get("video", None) is None:
    camera = cv2.VideoCapture(0)
    time.sleep(0.25)  # give the camera sensor a moment to warm up
# Otherwise, we are reading from a video file
else:
    camera = cv2.VideoCapture(args["video"])

# The first frame of the stream serves as the static background reference
firstFrame = None
fps = FPS().start()

# Loop over the frames of the video
while True:
    # Grab the current frame and initialize the occupied/unoccupied text
    (grabbed, frame) = camera.read()
    text = "Unoccupied"  # fixed typo: was "Unocoppied"

    # If the frame could not be grabbed, then we have reached the end of the video
    if not grabbed:
        break

    # Resize the frame, convert it to grayscale, and blur it
    frame = imutils.resize(frame, width=500)
    gray = cv2.cvtColor(frame, cv2.COLOR_BGR2GRAY)
    gray = cv2.GaussianBlur(gray, (21, 21), 0)

    # If the first frame is None, initialize it
    if firstFrame is None:
        firstFrame = gray
        continue

    # Compute the absolute difference between the current frame and first frame
    frameDelta = cv2.absdiff(firstFrame, gray)
    thresh = cv2.threshold(frameDelta, 25, 255, cv2.THRESH_BINARY)[1]

    # Dilate the thresholded image to fill in holes, then find contours on it
    thresh = cv2.dilate(thresh, None, iterations=2)
    # NOTE: 3-tuple return is the OpenCV 3.x findContours API
    (_, cnts, _) = cv2.findContours(thresh.copy(), cv2.RETR_EXTERNAL, cv2.CHAIN_APPROX_SIMPLE)

    # Loop over the contours
    for c in cnts:
        # If the contour is too small, ignore it
        if cv2.contourArea(c) < args["min_area"]:
            continue
        # Compute the bounding box for the contour, draw it on the frame, and update the text
        (x, y, w, h) = cv2.boundingRect(c)
        cv2.rectangle(frame, (x, y), (x + w, y + h), (0, 255, 0), 2)
        text = "Occupied"

    # Draw the text and timestamp on the frame
    cv2.putText(frame, "Room Status: {}".format(text), (10, 20), cv2.FONT_HERSHEY_SIMPLEX, 0.5, (0, 0, 255), 2)
    cv2.putText(frame, datetime.datetime.now().strftime("%A %d %B %Y %I:%M:%S%p"), (10, frame.shape[0] - 10), cv2.FONT_HERSHEY_SIMPLEX, 0.35, (0, 0, 255), 1)
    fps.update()

    # Show the frames and record if the user presses a key
    cv2.imshow("Security Feed", frame)
    cv2.imshow("Thresh", thresh)
    cv2.imshow("Frame Delta", frameDelta)
    key = cv2.waitKey(1) & 0xFF

    # If the 'q' key is pressed, break from the loop
    if key == ord("q"):
        break

fps.stop()
print("[INFO] approx. FPS: {:.2f}".format(fps.fps()))

# Clean up the camera and close any open windows
camera.release()
cv2.destroyAllWindows()
|
[
"chris.jore@gmail.com"
] |
chris.jore@gmail.com
|
079662848033b228ee09c8bb812f1c80e52e4cb0
|
1f68b6f9f55afaa7cb32df262f4fe0864472da05
|
/leetcode(ๅค็บฟ็จ,DP,่ดชๅฟ,SQL)/ไบๅทDPไธ่ดชๅฟLeetCode/ๅๆบฏ/51. N็ๅ/solution.py
|
761c902fdb433e6e6f0765ec8b75578496b26cb9
|
[] |
no_license
|
faker-hong/testOne
|
7c4496362cb5495c25c640076102fe0704f8552f
|
768edc4a5526c8972fec66c6a71a38c0b24a1451
|
refs/heads/master
| 2022-12-04T14:47:53.614685
| 2020-10-30T03:17:50
| 2020-10-30T03:17:50
| 196,514,862
| 1
| 0
| null | 2022-11-22T02:43:32
| 2019-07-12T05:35:09
|
Python
|
UTF-8
|
Python
| false
| false
| 1,100
|
py
|
class Solution(object):
    def solveNQueens(self, n):
        """
        :type n: int
        :rtype: List[List[str]]
        """
        solutions = []

        def safe(row, col, cols):
            # Because each recursion level owns one row, rows never clash;
            # only columns and the two diagonals need checking.
            if col in cols:
                return False
            for prev_row, prev_col in enumerate(cols):
                # Anti-diagonal (/) shares row+col; diagonal (\) shares row-col.
                if row + col == prev_row + prev_col or row - col == prev_row - prev_col:
                    return False
            return True

        def place(row, cols):
            # cols[i] is the column of the queen already placed on row i.
            if row == n:
                solutions.append(cols)
                return
            for col in range(n):
                if safe(row, col, cols):
                    place(row + 1, cols + [col])

        place(0, [])
        # Render each column index as a board row string like "..Q."
        return [['.' * c + 'Q' + '.' * (n - c - 1) for c in sol]
                for sol in solutions]
if __name__ == '__main__':
s = Solution()
res = s.solveNQueens(4)
print(res)
|
[
"42666723+hongcheng97@users.noreply.github.com"
] |
42666723+hongcheng97@users.noreply.github.com
|
bc3e0aae1bddeb212273308b0e470cd458c735c0
|
ccabc13a33099b751cbf02459237ff806fdf3037
|
/week3/BiDAF_tf2/layers/attention.py
|
606212184a2c37720d71ba002efb8827df0b2717
|
[] |
no_license
|
SoloPro-Git/MRC_learning
|
9007f4f1cac814925be063a2d7857114a856a212
|
2a22a96789e21c6bb5e9d6fd301a57a69a01ce93
|
refs/heads/master
| 2023-02-07T18:48:56.898289
| 2020-12-17T10:01:04
| 2020-12-17T10:01:04
| 313,486,346
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,915
|
py
|
import tensorflow as tf
class C2QAttention(tf.keras.layers.Layer):
    """Context-to-query attention layer (BiDAF-style)."""
    def call(self, similarity, qencode):
        """Combine query encodings using the context/query similarity matrix.

        similarity: similarity scores between context and query positions.
        qencode: encoded query sequence.
        """
        # 1. Add a broadcast axis to the query encoding (tf.expand_dims).
        qencode_exp = tf.expand_dims(qencode, axis=1)
        # 2. Softmax-normalize the similarity matrix (tf.keras.activations.softmax).
        similarity_softmax = tf.keras.activations.softmax(similarity, axis=1)
        # 3. Add a trailing axis so the weights broadcast over the hidden dim.
        similarity_softmax_exp = tf.expand_dims(similarity_softmax, axis=-1)
        # 4. NOTE(review): the original comment says "weighted sum"
        #    (tf.math.reduce_sum), but the code takes reduce_max over axis 2 —
        #    confirm which reduction is intended before changing anything.
        c2q_att = tf.reduce_max(tf.multiply(qencode_exp, similarity_softmax_exp), axis=2)
        return c2q_att
class Q2CAttention(tf.keras.layers.Layer):
    """Query-to-context attention layer (BiDAF-style): produces one attended
    context vector and tiles it across every context position."""
    def call(self, similarity, cencode):
        """Combine context encodings using the context/query similarity matrix.

        similarity: similarity scores between context and query positions.
        cencode: encoded context sequence.
        """
        # 1. For each context position, the maximum similarity over the query.
        simi_max = tf.reduce_max(similarity, axis=2)
        # 2. Softmax-normalize over context positions.
        simi_sfmax = tf.keras.activations.softmax(simi_max, axis=1)
        # 3. Trailing axis so the weights broadcast over the hidden dimension.
        simi_sfmax_exp = tf.expand_dims(simi_sfmax, axis=-1)
        # 4. Weighted sum of the context encodings.
        #    (The original computed this identical expression twice; once is enough.)
        simi_sum = tf.math.reduce_sum(tf.multiply(simi_sfmax_exp, cencode), axis=2)
        # 5. Re-expand so the single attended vector gains a sequence axis.
        simi_sum_exp = tf.expand_dims(simi_sum, axis=-2)
        # 6./7. Repeat the vector once per context position (tf.tile).
        q2c_att = tf.tile(simi_sum_exp, (1, cencode.shape[1], 1))
        return q2c_att
if __name__ == '__main__':
# T=5,J=8 ,2d=10
g1 = tf.random_uniform_initializer(minval=0)
simi = g1(shape=[2, 5, 8])
q = tf.ones(shape=(2, 8, 10))
att_layer = C2QAttention()
att_layer.call(simi, q)
|
[
"137033760@qq.com"
] |
137033760@qq.com
|
7b7f8627897a44c4bb4219ff4d136d59fc8e6391
|
7b218983611d96c653f99c3e2c7b2bb74091ac9e
|
/splitNSP.py
|
b5d2cdb1b990a4bdf5b9473e40cda1c86e5dcbb9
|
[
"Unlicense"
] |
permissive
|
doctorpangloss/splitNSP
|
02de827b20b6c949967f38e090e75808674fc577
|
15941e8204b73a4261034e39d9d5939c97394261
|
refs/heads/master
| 2020-04-25T07:12:43.167583
| 2019-02-26T00:24:15
| 2019-02-26T00:24:15
| 172,607,074
| 0
| 0
|
Unlicense
| 2019-02-26T00:23:41
| 2019-02-26T00:22:27
|
Python
|
UTF-8
|
Python
| false
| false
| 5,634
|
py
|
#!/usr/bin/env python3
# Author: AnalogMan
# Modified Date: 2018-10-08
# Purpose: Splits Nintendo Switch files into parts for installation on FAT32
import os
import argparse
import shutil
import os.path
import subprocess
from datetime import datetime
startTime = datetime.now()
splitSize = 0xFFFF0000 # 4,294,901,760 bytes
chunkSize = 0x8000 # 32,768 bytes
from os.path import splitext
def splitext_(path):
    """Split *path* into (stem, extension), keeping multi-part extensions whole.

    For names containing more than one dot (e.g. "game.nsp.bak") the last two
    dot-separated pieces are joined (without a leading dot) and treated as one
    extension; otherwise this defers to plain os.path.splitext.
    """
    pieces = path.split('.')
    if len(pieces) > 2:
        return pieces[0], '.'.join(pieces[-2:])
    return splitext(path)
def splitQuick(filepath):
    """Split *filepath* in place into <4GiB parts (FAT32-compatible).

    Only needs ~4GiB of free space: the source file is moved into the output
    directory as part '00', then each subsequent part is copied off its tail
    and the file is truncated, working backwards. Destructive: the original
    file no longer exists under its old name afterwards.
    """
    fileSize = os.path.getsize(filepath)
    info = shutil.disk_usage(os.path.dirname(os.path.abspath(filepath)))
    if info.free < splitSize:
        print('Not enough temporary space. Needs 4GiB of free space\n')
        return
    print('Calculating number of splits...\n')
    splitNum = int(fileSize/splitSize)
    if splitNum == 0:
        print('This file is under 4GiB and does not need to be split.\n')
        return
    print('Splitting file into {0} parts...\n'.format(splitNum + 1))
    # Create directory, delete if already exists
    # NOTE(review): file_name is unused; filepath[:-4] assumes a 4-character
    # extension (e.g. ".nsp") — confirm for other inputs.
    file_name,extension = splitext_(filepath)
    dir = filepath[:-4] + '_split' + extension
    if os.path.exists(dir):
        shutil.rmtree(dir)
    os.makedirs(dir)
    if os.path.exists(dir):
        # Windows-only: mark the split directory with the archive attribute.
        subprocess.call(['attrib', '+a', dir])
    # Move input file to directory and rename it to first part
    filename = os.path.basename(filepath)
    shutil.move(filepath, os.path.join(dir, '00'))
    filepath = os.path.join(dir, '00')
    # Calculate size of final part to copy first
    finalSplitSize = fileSize - (splitSize * splitNum)
    # Copy final part and trim from main file
    with open(filepath, 'r+b') as nspFile:
        nspFile.seek(finalSplitSize * -1, os.SEEK_END)
        outFile = os.path.join(dir, '{:02}'.format(splitNum))
        partSize = 0
        print('Starting part {:02}'.format(splitNum))
        with open(outFile, 'wb') as splitFile:
            while partSize < finalSplitSize:
                splitFile.write(nspFile.read(chunkSize))
                partSize += chunkSize
        nspFile.seek(finalSplitSize * -1, os.SEEK_END)
        nspFile.truncate()
        print('Part {:02} complete'.format(splitNum))
    # Loop through additional parts and trim, highest part number first
    with open(filepath, 'r+b') as nspFile:
        for i in range(splitNum - 1):
            nspFile.seek(splitSize * -1, os.SEEK_END)
            outFile = os.path.join(dir, '{:02}'.format(splitNum - (i + 1)))
            partSize = 0
            print('Starting part {:02}'.format(splitNum - (i + 1)))
            with open(outFile, 'wb') as splitFile:
                while partSize < splitSize:
                    splitFile.write(nspFile.read(chunkSize))
                    partSize += chunkSize
            nspFile.seek(splitSize * -1, os.SEEK_END)
            nspFile.truncate()
            print('Part {:02} complete'.format(splitNum - (i + 1)))
    # Print assurance statement for user (part 00 is the moved original itself)
    print('Starting part 00\nPart 00 complete')
    print('\nFile successfully split!\n')
def splitCopy(filepath):
    """Split *filepath* into <4GiB parts by copying (original left intact).

    Requires roughly twice the file's size in free space, since every part
    is a fresh copy written into the output directory.
    """
    fileSize = os.path.getsize(filepath)
    info = shutil.disk_usage(os.path.dirname(os.path.abspath(filepath)))
    if info.free < fileSize*2:
        print('Not enough free space to run. Will require twice the space as the file\n')
        return
    print('Calculating number of splits...\n')
    splitNum = int(fileSize/splitSize)
    if splitNum == 0:
        print('This file is under 4GiB and does not need to be split.\n')
        return
    print('Splitting file into {0} parts...\n'.format(splitNum + 1))
    # Create directory, delete if already exists
    # NOTE(review): file_name is unused; filepath[:-4] assumes a 4-character
    # extension (e.g. ".nsp") — confirm for other inputs.
    file_name,extension = splitext_(filepath)
    dir = filepath[:-4] + '_split' + extension
    if os.path.exists(dir):
        shutil.rmtree(dir)
    os.makedirs(dir)
    if os.path.exists(dir):
        # Windows-only: mark the split directory with the archive attribute.
        subprocess.call(['attrib', '+a', dir])
    remainingSize = fileSize
    # Open source file and begin writing to output files, stopping at splitSize
    with open(filepath, 'rb') as nspFile:
        for i in range(splitNum + 1):
            partSize = 0
            print('Starting part {:02}'.format(i))
            outFile = os.path.join(dir, '{:02}'.format(i))
            with open(outFile, 'wb') as splitFile:
                if remainingSize > splitSize:
                    while partSize < splitSize:
                        splitFile.write(nspFile.read(chunkSize))
                        partSize += chunkSize
                    remainingSize -= splitSize
                else:
                    # Last part: only the remaining bytes.
                    while partSize < remainingSize:
                        splitFile.write(nspFile.read(chunkSize))
                        partSize += chunkSize
            print('Part {:02} complete'.format(i))
    print('\nFile successfully split!\n')
def main():
    """CLI entry point: parse arguments and dispatch to the chosen split mode.

    Returns 1 when the input file does not exist; otherwise None.
    """
    print('\n========== File Splitter ==========\n')
    # Arg parser for program options
    parser = argparse.ArgumentParser(description='Split files into FAT32 compatible sizes')
    parser.add_argument('filepath', help='Path to file')
    parser.add_argument('-q', '--quick', action='store_true', help='Splits file in-place without creating a copy. Only requires 4GiB free space to run')
    # Check passed arguments
    args = parser.parse_args()
    filepath = args.filepath
    # Check if required file exists (idiom fix: was `== False`)
    if not os.path.isfile(filepath):
        print('File cannot be found\n')
        return 1
    # Split file: --quick trims in place, default copies parts out
    if args.quick:
        splitQuick(filepath)
    else:
        splitCopy(filepath)
if __name__ == "__main__":
main()
|
[
"noreply@github.com"
] |
doctorpangloss.noreply@github.com
|
b5f9ff772b6e5342461769fd2d62a492d7654f0d
|
d1b9fc59b69ffbfe988ea308f9005df12b50d4cd
|
/downloader.py
|
93d08ab725bf40a751a2c400bc9e0325988b1be2
|
[] |
no_license
|
jakemuncada/xkcd-crawler
|
98b9287fe213dbcbf17c64d9433bcf641369db8e
|
7f0788585d41dfb13321290c654856de16123113
|
refs/heads/master
| 2023-03-28T02:55:03.221505
| 2021-04-01T00:32:17
| 2021-04-01T00:32:17
| 337,334,063
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,376
|
py
|
"""
Provides high-level functions for fetching stuff from the internet.
"""
import shutil
import requests
class DownloaderError(Exception):
    """
    Raised for Downloader-specific failures (e.g. a non-200 image response).
    """
class Downloader:
    """
    Provides high-level functions for fetching stuff from the internet.

    All methods are static and return errors as values rather than raising,
    so callers can decide how to react.
    """
    ##################################################
    # GET REQUEST
    ##################################################
    @staticmethod
    def get(url):
        """
        Send a GET request to the given URL.
        Parameters:
            url: The URL.
        Returns:
            response: The requests library response.
            error: The error that was generated. None if the request was successful.
        """
        response = None
        error = None
        try:
            # Fetch the data.
            response = requests.get(url)
            # Raise exception if any.
            response.raise_for_status()
            # If there were no exceptions, the download was successful.
        except Exception as err:  # pylint: disable=broad-except
            # Deliberately broad: every failure mode is reported via the
            # returned error value instead of propagating.
            error = err
        return response, error
    ##################################################
    # DOWNLOAD IMAGE
    ##################################################
    @staticmethod
    def downloadImage(url, outputPath):
        """
        Download an image to the given path.
        Parameters:
            url (str): The image URL.
            outputPath (str): The full path (including filename) of the image.
        Returns:
            An exception if the download failed. None if the download was a success.
        """
        error = None
        try:
            # Stream so large images are copied to disk without full buffering.
            response = requests.get(url, stream=True)
            if response.status_code != 200:
                raise DownloaderError(f'Error: Status code {response.status_code}')
            with open(outputPath, 'wb') as outputFile:
                shutil.copyfileobj(response.raw, outputFile)
            # Drop the reference to the streamed response promptly.
            del response
        except Exception as err:  # pylint: disable=broad-except
            error = err
        return error
    ##################################################
    # GET ERROR STRING
    ##################################################
    @staticmethod
    def getErrorString(err):
        """
        Get the description of the error.
        Parameters:
            err: The error.
        Returns:
            The description of the error.
        """
        desc = None
        # Dispatch trick: re-raise the error so Python's except machinery
        # picks the matching description. Subclasses (ProxyError, SSLError,
        # ConnectTimeout...) are listed before their parents so they win.
        # NOTE(review): each `as err` rebinds the parameter; harmless here
        # since only `desc` is used afterwards.
        try:
            raise err
        except requests.exceptions.HTTPError as err:
            desc = 'An HTTP error occurred.'
        except requests.exceptions.ProxyError as err:
            desc = 'A proxy error occurred.'
        except requests.exceptions.SSLError as err:
            desc = 'An SSL error occurred.'
        except requests.exceptions.ConnectTimeout as err:
            desc = 'The request timed out while trying to connect to the remote server.'
        except requests.exceptions.ReadTimeout as err:
            desc = 'The server did not send any data in the allotted amount of time.'
        except requests.exceptions.Timeout as err:
            desc = 'The request timed out.'
        except requests.exceptions.ConnectionError as err:
            desc = 'A Connection error occurred.'
        except requests.exceptions.URLRequired as err:
            desc = 'A valid URL is required to make a request.'
        except requests.exceptions.TooManyRedirects as err:
            desc = 'Too many redirects.'
        except requests.exceptions.MissingSchema as err:
            desc = 'The URL schema (e.g. http or https) is missing.'
        except requests.exceptions.InvalidSchema as err:
            desc = 'The URL schema is invalid.'
        except requests.exceptions.InvalidHeader as err:
            desc = 'The header value provided was somehow invalid.'
        except requests.exceptions.InvalidProxyURL as err:
            desc = 'The proxy URL provided is invalid.'
        except requests.exceptions.InvalidURL as err:
            desc = 'The URL provided was somehow invalid.'
        except Exception as err:  # pylint: disable=broad-except
            desc = 'An unexpected error occurred.'
        return desc
|
[
"jake@tkb.mss.co.jp"
] |
jake@tkb.mss.co.jp
|
a78cee9826237d0c0568190586b4b45200cc39e1
|
a49bab6a5a7df245c0eea181cd041ae344cc018e
|
/server/settings/dev.py
|
3c192fb5410716bedffcfd9d31060f4382539528
|
[] |
no_license
|
ego/pyblog
|
0a52d2cd2835cd9a3607b55075ad985b960c8b40
|
28b6a9066a60b71bbec9f7b37ef40794e8b5e2aa
|
refs/heads/master
| 2023-04-02T16:37:00.922916
| 2020-11-07T23:58:55
| 2020-11-07T23:58:55
| 123,491,870
| 0
| 0
| null | 2021-04-16T20:36:49
| 2018-03-01T21:01:04
|
Python
|
UTF-8
|
Python
| false
| false
| 456
|
py
|
# Dev settings
from .base import *
# Local PostgreSQL instance for development.
# NOTE(review): hard-coded credentials are acceptable only because this is
# the dev settings module; never reuse them in production settings.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.postgresql_psycopg2',
        'NAME': 'blog_db',
        'USER': 'blog_user',
        'PASSWORD': 'blog_passwd',
        'HOST': 'localhost',
        'PORT': '5432',
    }
}
# Dummy cache backend: nothing is actually stored, so dev always sees fresh data.
CACHES = {
    'default': {
        'BACKEND': 'django.core.cache.backends.dummy.DummyCache',
    }
}
# sorl-thumbnail key-value store backed by the database (with caching).
THUMBNAIL_KVSTORE = 'sorl.thumbnail.kvstores.cached_db_kvstore.KVStore'
|
[
"0x7c48@gmail.com"
] |
0x7c48@gmail.com
|
b60a557139a95f213d84afedb6641a8bf0a4412d
|
ccfd551d677652ba1cf716fd514fac454faefd51
|
/minify.py
|
9b534e07816dee71eaff7a652906e9e8bc30f3f4
|
[] |
no_license
|
miguel-acevedo/CssMinify
|
3b7ce385773eb15ec2167a1300945c5e20dfc5ec
|
10e1ce9f059277544c48fd4955923a270e67fd51
|
refs/heads/master
| 2021-01-15T11:03:27.273605
| 2017-08-07T19:18:05
| 2017-08-07T19:18:05
| 99,608,332
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,127
|
py
|
import os
import requests
css = 'https://cssminifier.com/raw' #Calls the minifier url
js = 'https://javascript-minifier.com/raw'
def minifyCode(url, path):
    """POST the file at *path* to the minifier service at *url* and overwrite it in place."""
    print(path)
    # Context managers close both handles (the original leaked the read and
    # write file objects).
    with open(path, 'rb') as src:
        data = {'input': src.read()}  # Pack the file contents for the minifier.
    response = requests.post(url, data=data)  # Send a POST request, then retrieve the data.
    with open(path, 'w') as dst:
        dst.write(response.text)  # Overwrite the local file with the minified text.
def find_files(directory):
    """Recursively minify every .css and .js file under *directory*."""
    for filename in os.listdir(directory):
        path = os.path.join(directory, filename)  # Full path of this entry.
        if os.path.isdir(path):
            find_files(path)
            # Bug fix: without this continue, a DIRECTORY named e.g. "x.css"
            # fell through and was passed to minifyCode, which then crashed
            # trying to open it as a file.
            continue
        if filename.endswith(".css"):
            minifyCode(css, path)
        elif filename.endswith(".js"):
            minifyCode(js, path)
find_files(os.getcwd()) #Calls the function with the current directory
|
[
"acevedomiguel@outlook.com"
] |
acevedomiguel@outlook.com
|
bcaf8c92849c381fc6a341c20a4a37be90d0e991
|
6b948d8110b910aed1989e940f194dcee5e3ba51
|
/538_HW1_113166835/model.py
|
5d4531881ef1d484979c3a06b653b5e6779f15a5
|
[] |
no_license
|
SriramVithala/NLP
|
3589e07b7e5348c5418723cb74f780d607cb8932
|
ca61b9a9923f816312070e9a5d7b4a98ba66ffd5
|
refs/heads/main
| 2023-08-18T10:48:37.557680
| 2021-09-27T03:08:06
| 2021-09-27T03:08:06
| 409,264,778
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 4,081
|
py
|
"""
author-gh: @adithya8
editor-gh: ykl7
"""
import math
import numpy as np
from numpy.core.fromnumeric import size
from numpy.core.records import array
import torch
import torch.nn as nn
sigmoid = lambda x: 1/(1 + torch.exp(-x))
class WordVec(nn.Module):
    """Skip-gram word2vec model with either full-softmax NLL or negative-sampling loss."""
    def __init__(self, V, embedding_dim, loss_func, counts):
        # V: vocabulary size; loss_func: "nll" or "neg";
        # counts: per-word frequencies used for the negative-sampling
        # distribution (presumably a numpy array — division below requires
        # elementwise semantics; TODO confirm against the caller).
        super(WordVec, self).__init__()
        # Center-word embeddings, init N(0, 1/sqrt(dim)) then clamped to [-1, 1].
        self.center_embeddings = nn.Embedding(num_embeddings=V, embedding_dim=embedding_dim)
        self.center_embeddings.weight.data.normal_(mean=0, std=1/math.sqrt(embedding_dim))
        self.center_embeddings.weight.data[self.center_embeddings.weight.data<-1] = -1
        self.center_embeddings.weight.data[self.center_embeddings.weight.data>1] = 1
        # Context-word embeddings, clamped just inside (-1, 1).
        self.context_embeddings = nn.Embedding(num_embeddings=V, embedding_dim=embedding_dim)
        self.context_embeddings.weight.data.normal_(mean=0, std=1/math.sqrt(embedding_dim))
        self.context_embeddings.weight.data[self.context_embeddings.weight.data<-1] = -1 + 1e-10
        self.context_embeddings.weight.data[self.context_embeddings.weight.data>1] = 1 - 1e-10
        self.loss_func = loss_func
        self.counts = counts
    def forward(self, center_word, context_word):
        # Dispatch to the loss selected at construction time.
        if self.loss_func == "nll":
            return self.negative_log_likelihood_loss(center_word, context_word)
        elif self.loss_func == "neg":
            return self.negative_sampling(center_word, context_word)
        else:
            raise Exception("No implementation found for %s"%(self.loss_func))
    def negative_log_likelihood_loss(self, center_word, context_word):
        """Full-softmax skip-gram loss: mean over the batch of
        log(sum_j exp(c·u_j)) - c·u_context."""
        ### TODO(students): start
        center_embeds = self.center_embeddings(center_word)
        context_embeds = self.context_embeddings(context_word)
        # Scores of every center word against every context word in the batch.
        MatrixMultiplication = torch.matmul(center_embeds , context_embeds.T)
        # log of the softmax denominator (log-sum-exp, computed naively).
        totalSum=torch.sum(torch.exp(MatrixMultiplication), dim=1)
        logofsum=torch.log(totalSum)
        # Dot product of each (center, context) pair: the numerator term.
        MatrixMultiplication1=(torch.multiply(center_embeds, context_embeds))
        totalsum1=torch.sum(MatrixMultiplication1, dim=1)
        loss=torch.mean(logofsum-totalsum1)
        ### TODO(students): end
        return loss
    def negative_sampling(self, center_word, context_word):
        """Negative-sampling loss with k=5 negatives drawn from the unigram counts."""
        ### TODO(students): start
        center_embeds = self.center_embeddings(center_word)
        context_embeds = self.context_embeddings(context_word)
        batch_size=center_word.size()[0]
        probability=(self.counts)/sum(self.counts)
        k=5
        # NOTE(review): replace=False over a (batch, k) draw requires
        # batch*k <= vocabulary size — confirm this holds for the batch size.
        neg= np.random.choice(len(self.counts),(batch_size,k), replace=False, p=probability)
        Negative_embeds=self.context_embeddings.weight[neg]
        # Positive term: log sigmoid of the (center, context) dot products.
        sum1 = torch.log(torch.sigmoid(torch.sum(torch.multiply(center_embeds , context_embeds),dim=1)))
        # Reshape so each batch of negatives can be batch-matmul'd with its center.
        center_embeds = center_embeds.reshape((center_embeds.shape[0], center_embeds.shape[1], 1))
        # Negative term: log sigmoid of the negated dot products, summed over k samples.
        sum2 = torch.sum(torch.log(torch.sigmoid(torch.sum(-torch.matmul(Negative_embeds, center_embeds),dim=2))),dim=1)
        loss=torch.mean(-sum2-sum1)
        ### TODO(students): end
        return loss
    def print_closest(self, validation_words, reverse_dictionary, top_k=8):
        """Print the top_k nearest neighbors (cosine similarity) of each validation word id."""
        print('Printing closest words')
        # Detached copy of the center embeddings, moved to CPU numpy.
        embeddings = torch.zeros(self.center_embeddings.weight.shape).copy_(self.center_embeddings.weight)
        embeddings = embeddings.data.cpu().numpy()
        validation_ids = validation_words
        # L2-normalize so dot products are cosine similarities.
        norm = np.sqrt(np.sum(np.square(embeddings),axis=1,keepdims=True))
        normalized_embeddings = embeddings/norm
        validation_embeddings = normalized_embeddings[validation_ids]
        similarity = np.matmul(validation_embeddings, normalized_embeddings.T)
        for i in range(len(validation_ids)):
            word = reverse_dictionary[validation_words[i]]
            # Skip index 0 of the sort: that's the word itself.
            nearest = (-similarity[i, :]).argsort()[1:top_k+1]
            print(word, [reverse_dictionary[nearest[k]] for k in range(top_k)])
|
[
"noreply@github.com"
] |
SriramVithala.noreply@github.com
|
1821540a5a6f992cc99d9431e91eda6bcd4b6a05
|
c8ef42b8fa355d62bb6c06c61c54baf00e4c4c90
|
/KDD99/kddMultiClass.py
|
a8c5c7192bf7ad6d92d28446d80cef682697cb86
|
[] |
no_license
|
Gci04/LightweightAutoencoderApproachForAnomalyDetection
|
0d90517bcb34765a686be6e30aea3f5a4a529d73
|
a474218b786e45448bde36ef5cb549cae82a326a
|
refs/heads/master
| 2022-04-27T04:12:20.376461
| 2020-04-07T12:24:46
| 2020-04-07T12:24:46
| 187,182,098
| 3
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,877
|
py
|
import numpy as np
import pandas as pd
np.random.seed(43)
import os, sys, keras, pickle, warnings
from scipy import stats
from time import time
import tensorflow as tf
from keras.layers import Input, Dense, Dropout
from keras.models import Model
from keras.callbacks import TensorBoard
from keras import optimizers, regularizers, backend as K
warnings.filterwarnings('ignore')
from sklearn.preprocessing import StandardScaler
from sklearn.metrics import classification_report
import seaborn as sn
from matplotlib import pyplot as plt
from preprocessing import get_kdd_data
import Utils
train ,test ,indx = get_kdd_data("multiclass")
train_label = train.label
train = train.drop(["label"],axis=1)
Scaler = StandardScaler()
train = Scaler.fit_transform(train.values)[np.where(train_label == 1)]
xtest , ytest = Scaler.transform(test.drop(["label"],axis=1)), test.label.values
def fit_kdd_AE(X):
    """Build and train a symmetric tanh autoencoder on X; return the fitted model.

    X: 2-D feature matrix (rows = samples); the network reconstructs its input,
    bottlenecking through a 12-dimensional latent space.
    """
    input_dim = X.shape[1]
    latent_space_size = 12
    K.clear_session()
    # Encoder: input_dim -> 100 -> 50 -> 25 -> 12 (linear bottleneck).
    input_ = Input(shape = (input_dim, ))
    layer_1 = Dense(100, activation="tanh")(input_)
    layer_2 = Dense(50, activation="tanh")(layer_1)
    layer_3 = Dense(25, activation="tanh")(layer_2)
    encoding = Dense(latent_space_size,activation=None)(layer_3)
    # Decoder mirrors the encoder back to input_dim (linear output).
    layer_5 = Dense(25, activation="tanh")(encoding)
    layer_6 = Dense(50, activation="tanh")(layer_5)
    layer_7 = Dense(100, activation='tanh')(layer_6)
    decoded = Dense(input_dim,activation=None)(layer_7)
    autoencoder = Model(inputs=input_ , outputs=decoded)
    # opt = optimizers.Adam(lr=0.001, beta_1=0.9, beta_2=0.999, epsilon=1e-08, decay=0.0)
    autoencoder.compile(metrics=['accuracy'],loss='mean_squared_error',optimizer="adam")
    # autoencoder.summary()
    # Create TensorBoard callback with a per-run timestamped log directory.
    tb = TensorBoard(log_dir="kdd99logs/{}".format(time()),histogram_freq=0,write_graph=True,write_images=False)
    # Fit autoencoder: target equals input (reconstruction objective).
    autoencoder.fit(X, X,epochs=100,validation_split=0.1 ,batch_size=100,shuffle=False,verbose=0,callbacks=[tb])
    return autoencoder
model = fit_kdd_AE(train)
losses = Utils.get_losses(model, train)
thresholds = Utils.confidence_intervals(losses,0.95)
threshold = thresholds[1]
pred = Utils.predictAnomaly(model,xtest,threshold)
true = np.where(ytest == "normal", 1,0)
Utils.performance(pred,true)
#1 : normal , 0 : Anomal
for key in indx.keys():
if(key != "normal"):
print('-'*35)
print(' '*18 + key)
print('-'*35)
temp = np.ones(len(pred))
mask = indx[key]
np.put(temp,mask,0)
temp_pred = np.ones(len(pred))
np.put(temp_pred,mask,pred[mask])
res = classification_report(temp,temp_pred,output_dict=True)["0.0"]
print("{:<12s}{:<12s}{:<12s}".format("precision", "recall" ,"f1-score"))
print("{:<12.2f} {:<12.2f} {:<12.2f}".format(res["precision"],res["recall"],res["f1-score"]))
print()
|
[
"gcinzoe04@gmail.com"
] |
gcinzoe04@gmail.com
|
5c9012668e6fd64b0cc6875fd32e3144b136c72f
|
1c538a3c3a0c218bab4137bcefe650fdfc8be252
|
/api_server/assign_resources.py
|
ae6b08bbdbf8fe0adec08e7da5694a0c4e525383
|
[] |
no_license
|
Team-LZZZ/CarPooling-Server
|
b0070a41c6eea51232179d25ba7552d57b41d7bc
|
658b9f82a81d2f6a0cae9d563c93a57f75b2809e
|
refs/heads/master
| 2021-08-29T11:48:39.094390
| 2017-12-13T21:42:47
| 2017-12-13T21:42:47
| 108,786,782
| 0
| 1
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 667
|
py
|
from . import api
from .api_resources.UserLogin import UserLogin
from .api_resources.UserSettings import UserSettings
from .api_resources.UserRegister import UserRegister
from .api_resources.CarPools import CarPools
from .api_resources.GetToken import GetToken
from .api_resources.Reservations import Reservations
from .api_resources.Offers import Offers
# Route registration: bind each Flask-RESTful resource class to its endpoint.
api.add_resource(UserLogin, "/api/login")
api.add_resource(UserRegister, "/api/reg")
api.add_resource(UserSettings, "/api/settings")
api.add_resource(GetToken, "/api/token")
api.add_resource(CarPools, "/api/carPools")
api.add_resource(Reservations, "/api/reservations")
api.add_resource(Offers, "/api/offers")
|
[
"zhouyou66666@gmail.com"
] |
zhouyou66666@gmail.com
|
6de3f25bbada06daec113eef9872b1c48f03e8b2
|
ac46ba236c54af834ef1d37870fd6147b9b606e3
|
/lesson_6/insert.py
|
63defec9dcb5a5177ee284cd5c3690357e20fb56
|
[] |
no_license
|
Loosper/algorithms
|
55c929022501a32a65ef94a3b98e3bbb0c152eaa
|
7c435a5c2ef50f59b292d907854e86c07698b4c3
|
refs/heads/master
| 2021-05-15T09:30:27.632163
| 2018-02-10T15:59:49
| 2018-02-10T15:59:49
| 108,136,059
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 625
|
py
|
"""
Insert Node at a specific position in a linked list
head input could be None as well for empty list
Node is defined as
class Node(object):
def __init__(self, data=None, next_node=None):
self.data = data
self.next = next_node
return back the head of the linked list in the below method.
"""
class Node:
    """Singly-linked-list node: a payload plus a reference to the next node.

    The original `pass` placeholder could not satisfy the Node(data, next_node)
    calls made by InsertNth (TypeError); this implements the constructor the
    module docstring above describes. Defaults keep bare `Node()` working.
    """
    def __init__(self, data=None, next_node=None):
        self.data = data
        self.next = next_node
def InsertNth(head, data, position):
    """Insert a new Node carrying *data* at 0-based *position*; return the list head.

    Assumes position is a valid insertion index (0..length); there is no
    bounds checking, so walking past the tail raises AttributeError.
    """
    legit_head = head
    # Walk to the node just before the insertion point (no-op for position 0).
    for i in range(position - 1):
        head = head.next
    if position == 0:
        # New node becomes the head.
        legit_head = Node(data, legit_head)
    else:
        # Splice the new node between `head` and its successor.
        # NOTE(review): `next` shadows the builtin; left as-is in this doc-only pass.
        next = head.next
        head.next = Node(data, next)
    return legit_head
|
[
"boian4o1@gmail.com"
] |
boian4o1@gmail.com
|
22667e36536935585748b0c24fcc4a732a2b8384
|
d5e0347bc2f492afb969149113b494c20a030244
|
/basic/list.py
|
a380fe9b6d965c5b32418c285739e60e76c79a10
|
[] |
no_license
|
qingmingsang/python-demo
|
2eff3cd7452f690cf4e058a28e83099b2dc3a0c7
|
3c96659cd5d5de537e3eefdc42086cf36851f14a
|
refs/heads/master
| 2021-04-06T14:11:16.248901
| 2019-10-15T16:31:43
| 2019-10-15T16:31:43
| 125,257,307
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 357
|
py
|
# Basic list / range demo script; expected output is noted inline.
classmates = ["Michael", "Bob", "Tracy"]
print(classmates)
print(len(classmates))
print(classmates[-1])  # negative index counts from the end
# print(classmates[6])
# IndexError: list index out of range
print(range(5))
# range(0, 5)
print(list(range(5)))
# [0, 1, 2, 3, 4]
# NOTE(review): `sum` shadows the builtin; left unchanged in this doc-only pass.
sum = 0
for x in range(101):
    sum = sum + x
print(sum)
# 5050
L = ["Bart", "Lisa", "Adam"]
for x in L:
    print(x)
|
[
"358242939@qq.com"
] |
358242939@qq.com
|
b4cef5c07bfaf8de55ea028da11403d750c273ac
|
90e2412b0216f27285a98f4ee713e8f819910a6f
|
/manage.py
|
b9cc74196e0c0be51010c293aecec323f3a66509
|
[] |
no_license
|
rsikri/LetsCarpool
|
504edbf1125c736a51896865b7e137ac8b9d3d82
|
60c6aa010d2308844fb168612201e6659f04fc37
|
refs/heads/master
| 2021-04-09T11:42:40.011302
| 2018-03-16T21:30:00
| 2018-03-16T21:30:00
| 125,552,630
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 256
|
py
|
#!/usr/bin/env python
"""Command-line entry point for the ShareYourRide Django project."""
import os
import sys


def main():
    """Point Django at the project settings, then dispatch the CLI args."""
    os.environ.setdefault("DJANGO_SETTINGS_MODULE", "ShareYourRide.settings")
    from django.core.management import execute_from_command_line
    execute_from_command_line(sys.argv)


if __name__ == "__main__":
    main()
|
[
"richasikri@Richas-MacBook-Pro.local"
] |
richasikri@Richas-MacBook-Pro.local
|
f3c78a164ef56f6a0ec9276cfd66d8ebe7bc2607
|
8dbc386a5ec3943ac64b35a4da34b70fbb988152
|
/src/handlers/me/articles/comments/reply/handler.py
|
af9a8487352380b5ead2b639f4a498b23f2a10df
|
[] |
no_license
|
AlisProject/serverless-application
|
60cab427d3088f1fa7b653ad7ee78674d64dd70c
|
8a3f9ed146f71281036986ec5baa481718768866
|
refs/heads/master
| 2023-03-30T06:48:47.426067
| 2023-03-28T13:09:24
| 2023-03-28T13:09:24
| 123,153,011
| 62
| 20
| null | 2023-03-28T13:09:18
| 2018-02-27T16:00:10
|
Python
|
UTF-8
|
Python
| false
| false
| 328
|
py
|
# -*- coding: utf-8 -*-
"""AWS Lambda entry point for posting a reply to an article comment."""
import boto3

from me_articles_comments_reply import MeArticlesCommentsReply

# DynamoDB handle created once at module load so warm Lambda invocations
# reuse the same connection.
dynamodb = boto3.resource('dynamodb')


def lambda_handler(event, context):
    """Delegate the invocation to the MeArticlesCommentsReply use case."""
    use_case = MeArticlesCommentsReply(event=event, context=context,
                                       dynamodb=dynamodb)
    return use_case.main()
|
[
"matsumatsu20@gmail.com"
] |
matsumatsu20@gmail.com
|
c8bf5f9685eb4c0e1013dff3da0cfca040acec35
|
4c0dd004f54979c87db0bcaff3f74490d82591a7
|
/plotBaseline.py
|
c27724ae1f21ccdfc3efb25eff309f8f8a56610d
|
[] |
no_license
|
albertpuente/newInterpDetect
|
3decf2a5459e82ba69ca4d754c869af3f1b3a94a
|
174b7ecd521f2918f742dafb7e1939357c55aa35
|
refs/heads/master
| 2021-01-17T21:39:25.174906
| 2016-07-21T11:17:31
| 2016-07-21T11:17:31
| 62,659,146
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,229
|
py
|
import matplotlib.pyplot as plt


def _read_columns(path, n_cols):
    """Read *n_cols* whitespace-separated numeric columns from *path*.

    Returns a list of column lists.  Values are converted to float so
    matplotlib plots them on a numeric axis — the original appended the
    raw strings from ``split(' ')``, which makes matplotlib treat every
    distinct value as a category.
    """
    columns = [[] for _ in range(n_cols)]
    with open(path, 'r') as f:
        for line in f:
            parts = line.split()
            if len(parts) < n_cols:
                continue  # skip blank / short lines instead of raising
            for i in range(n_cols):
                columns[i].append(float(parts[i]))
    return columns


# --- Interpolated voltage and detection boundaries -----------------------
V, theta, theta_b = _read_columns('DEBUG_FIND_OUTPUT.txt', 3)

plt.plot(V, 'r-')
plt.plot(theta, 'b--')
plt.plot(theta_b, 'r--')
plt.title('Interpolated voltage + boundaries')
plt.show()

# --- Baseline-tracking diagnostics (column order fixed by the C output) --
(vMovingAvg, vGlobalMovingAvg, baseline, variability,
 Qdiff, vGlobal, Qmin) = _read_columns('DEBUG_OUTPUT.txt', 7)

plt.plot(vGlobalMovingAvg, 'r-')
plt.plot(vGlobal, 'b-')
plt.title('vGlobalMovingAvg and vGlobal')
plt.show()

plt.plot(baseline, 'r-')
plt.plot(vMovingAvg, 'b-')
plt.title('Baseline and vMovingAvg')
plt.show()

plt.plot(variability, 'r-')
plt.title('variability')
plt.show()

plt.plot(Qdiff, 'r-')
plt.title('Qdiff')
plt.show()

plt.plot(Qmin, 'r-')
plt.title('Qmin')
plt.show()
|
[
"albertpuente93@gmail.com"
] |
albertpuente93@gmail.com
|
3123f9b6c63d2f4d24498b526b5adf3d5f85a175
|
99247d0562f3bdc1952ef3f8b2b7465247bde9eb
|
/users/views.py
|
61f199e43fd6b308f4bff2b029179ada5808d86f
|
[] |
no_license
|
Goryunova/yamdb_final
|
c2977ef6ec359adcac98538b3cf2d010ddbb0630
|
0950dc35ca454dfe44d92b90f40a26f66ccfa452
|
refs/heads/master
| 2023-07-26T19:04:09.091856
| 2021-09-11T13:19:19
| 2021-09-11T13:19:19
| 401,342,047
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,283
|
py
|
from django_filters.rest_framework import DjangoFilterBackend
from rest_framework import status
from rest_framework.decorators import action
from rest_framework.response import Response
from rest_framework.viewsets import ModelViewSet
from yamdb.models import User
from yamdb_auth.permissions import IsAdmin, IsAuthenticated
from .serializers import UsersSerializer
class UsersViewSet(ModelViewSet):
    """Admin-only CRUD over users, plus a ``/me`` endpoint for the caller.

    Lookup is by ``username`` and list results can be filtered on it.
    """

    queryset = User.objects.all()
    serializer_class = UsersSerializer
    filter_backends = [DjangoFilterBackend]
    filterset_fields = ['username']
    lookup_field = 'username'
    permission_classes = [IsAdmin]

    @action(detail=False,
            methods=['get', 'patch'],
            permission_classes=[IsAuthenticated])
    def me(self, request):
        """Return (GET) or partially update (PATCH) the requesting user."""
        user = request.user
        if request.method == 'GET':
            serializer = UsersSerializer(user)
            return Response(serializer.data, status=status.HTTP_200_OK)
        serializer = self.get_serializer(user,
                                         data=request.data,
                                         partial=True)
        serializer.is_valid(raise_exception=True)
        # Pin the role so a PATCH cannot be used for privilege escalation.
        # ``partial=True`` must NOT be passed to save(): save(**kwargs)
        # merges kwargs into validated_data, which would set a stray
        # ``partial`` attribute on the model instance — partial mode is
        # already configured on the serializer constructor above.
        serializer.save(role=user.role)
        return Response(serializer.data, status=status.HTTP_200_OK)
|
[
"bilka77@mail.ru"
] |
bilka77@mail.ru
|
b7a247ccaea8874f9bdfa9cd247def7e342ba18d
|
c970c3800ab7a33989d7fffaee3da63f72dac6ea
|
/api_trial2.py
|
c1fcd40999a337e958197b9b473f2f67fcd90672
|
[] |
no_license
|
smustala/DSCI551-Project
|
8053d16f2c7f53e8533277c34a1e1ab0444f353b
|
fe335785e0cb529656f064f059435fcd8f0753c0
|
refs/heads/master
| 2023-01-08T19:24:44.866301
| 2020-11-06T11:30:01
| 2020-11-06T11:30:01
| 310,578,443
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,321
|
py
|
#!/usr/bin/env python3
# -*- coding: utf-8 -*-
"""
Created on Fri Nov 6 02:10:06 2020

@author: shalinimustala
"""
import flask
from flask import request, jsonify


def tempbymonth1(month, year, countryname):
    """Return (AverageTemperature, city) rows for ``countryname`` in the
    given year-month.

    The query is parameterized: the original concatenated the raw request
    parameters into the SQL string, which is a SQL-injection hole.
    """
    from mysql import connector
    # NOTE(review): credentials are hard-coded; move to config/env vars.
    cnx = connector.connect(user='shalini1', password='hello',
                            host='18.188.12.200', database='551project')
    try:
        cursor = cnx.cursor()
        query = ("select AverageTemperature, city from tempbycity "
                 "where dt like %s and country = %s")
        # e.g. '2012-12-%' matches every day of December 2012.
        date_pattern = '{}-{}-%'.format(year, month)
        cursor.execute(query, (date_pattern, countryname))
        return list(cursor)
    finally:
        cnx.close()  # original leaked the connection on every request


app = flask.Flask(__name__)
app.config["DEBUG"] = True


@app.route('/api/temp', methods=['GET'])
def api_filter():
    """GET /api/temp?month=..&year=..&countryname=..  ->  JSON row list."""
    query_parameters = request.args
    countryname = query_parameters.get('countryname')
    month = query_parameters.get('month')
    year = query_parameters.get('year')
    temp_all = tempbymonth1(month, year, countryname)
    return jsonify(temp_all)


app.run()
# USE API : http://127.0.0.1:5000/api/temp?month=12&year=2012&countryname=India
|
[
"shalinimustala@Shalinis-MBP.attlocal.net"
] |
shalinimustala@Shalinis-MBP.attlocal.net
|
a1e35aa2875921cf394a2c897190977063e15a94
|
849c3c4946c116e7a799d7555e70c310f5236435
|
/playground.py
|
fe9e014182df7020da2e83af988766b493d61416
|
[] |
no_license
|
gwendahartsoe/Graduation-Project
|
faa7e19540b4d381a6ef2e4fd5ae72b456f6091d
|
0b6db6c6913ac5ae2453d7d038192222a4eed629
|
refs/heads/master
| 2023-04-20T11:38:01.860316
| 2021-04-28T11:37:45
| 2021-04-28T11:37:45
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,488
|
py
|
# ๅฏผๅ
ฅๆจกๅ
import numpy as np
import random
import torch
import torch.nn as nn
import torch.nn.functional as F
from torch.utils.data import DataLoader, Dataset, TensorDataset
def get_kfold_data(k, i, X, y):
    """Return ``(X_train, y_train, X_valid, y_valid)`` for fold ``i`` of ``k``.

    The data is cut into ``k`` consecutive folds of ``len(X) // k`` samples;
    fold ``i`` (0-based) is the validation split and the rest is training.
    When ``len(X)`` is not divisible by ``k``, the leftover samples are
    absorbed into the validation split of the last fold.
    """
    fold_size = X.shape[0] // k  # samples per fold (integer division)
    val_start = i * fold_size
    if i == k - 1:
        # Last fold: validation runs to the end, catching any remainder,
        # so training is simply the prefix before it.
        X_valid, y_valid = X[val_start:], y[val_start:]
        X_train, y_train = X[0:val_start], y[0:val_start]
    else:
        val_end = val_start + fold_size
        X_valid, y_valid = X[val_start:val_end], y[val_start:val_end]
        # Training data = everything before and after the validation slice.
        X_train = torch.cat((X[0:val_start], X[val_end:]), dim=0)
        y_train = torch.cat((y[0:val_start], y[val_end:]), dim=0)
    return X_train, y_train, X_valid, y_valid
# ๅๅปบไธไธชๆฐๆฎ้
# X = torch.rand(500, 100, 10)
# Y = torch.rand(500, 1)
# # X = X.view(X.size(0),X.size(1), X.size(2),1)
# m = nn.Conv1d(15,100,3)
# out = m(X)
# print(out)
# X.view()
x = [1, 0, 1, 0, 0]
y = [1, 0, 1, 1, 1]
# Lists compare element-wise, so this prints False: the sequences
# differ at indices 3 and 4.
print(x == y)
|
[
"819156618@qq.com"
] |
819156618@qq.com
|
5df342ca1b06fc20c69baf25424afb299aa86f44
|
201a281d8539ad015d8d1e3d2a0e351a0fb51640
|
/Charity.py
|
8d488c421fc80c18907b5a29ff72c7541e06b1d1
|
[] |
no_license
|
Once61/charitywall
|
6219ec6db0742ba4556ff45efa52af3ce021186d
|
b84b60d4f2799c8e473b839612adb995220800bf
|
refs/heads/master
| 2022-11-21T11:47:22.576127
| 2020-07-13T17:14:54
| 2020-07-13T17:14:54
| 281,611,305
| 0
| 0
| null | 2020-07-22T07:48:22
| 2020-07-22T07:48:21
| null |
UTF-8
|
Python
| false
| false
| 71,003
|
py
|
#!/usr/bin/env python3
#-*- coding: utf-8 -*-
#github.com/adilkhan/Cam-Hackers
import requests,re,os
import time
import sys
print("""
\033[1;31m\033[1;37m โโโโโโโ โโโโโโ โโโโ โโโโ โโโ โโโ โโโโโโ โโโโโโโโโโ โโโโโโโโโโโโโโโโโโ โโโโโโโโ
โโโโโโโโโโโโโโโโโโโโโ โโโโโ โโโ โโโโโโโโโโโโโโโโโโโโโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโ
โโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโ โโโโโโ โโโโโโโโโโโโโโโโ
โโโ โโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโโ โโโโโโโ โโโโโโ โโโโโโโโโโโโโโโโ
โโโโโโโโโโโ โโโโโโ โโโ โโโ โโโ โโโโโโ โโโโโโโโโโโโโโ โโโโโโโโโโโโโโ โโโโโโโโโโโ
\033[1;31m โโโโโโโโโโ โโโโโโ โโโ โโโ โโโโโโ โโโ โโโโโโโโโโ โโโโโโโโโโโโโโ โโโโโโโโโโโ
\033[1;31m Instagram Adilkhan11143
\033[1;31m1)\033[1;37mUnited States \033[1;31m31)\033[1;37mMexico \033[1;31m61)\033[1;37mMoldova
\033[1;31m2)\033[1;37mJapan \033[1;31m32)\033[1;37mbolavia charitywall \033[1;31m62)\033[1;37mNicaragua
\033[1;31m3)\033[1;37mItaly \033[1;31m33)\033[1;37mChina \033[1;31m63)\033[1;37mMalta
\033[1;31m4)\033[1;37mKorea \033[1;31m34)\033[1;37mChile \033[1;31m64)\033[1;37mTrinidad And Tobago
\033[1;31m5)\033[1;37mFrance \033[1;31m35)\033[1;37mSouth Africa \033[1;31m65)\033[1;37mSoudi Arabia
\033[1;31m6)\033[1;37mGermany \033[1;31m36)\033[1;37mSlovakia \033[1;31m66)\033[1;37mCroatia
\033[1;31m7)\033[1;37mTaiwan \033[1;31m37)\033[1;37mHungary \033[1;31m67)\033[1;37mCyprus
\033[1;31m8)\033[1;37mRussian Federation \033[1;31m38)\033[1;37mIreland \033[1;31m68)\033[1;37mPakistan
\033[1;31m9)\033[1;37mUnited Kingdom \033[1;31m39)\033[1;37mEgypt \033[1;31m69)\033[1;37mUnited Arab Emirates
\033[1;31m10)\033[1;37mNetherlands \033[1;31m40)\033[1;37mThailand \033[1;31m70)\033[1;37mKazakhstan
\033[1;31m11)\033[1;37mCzech Republic \033[1;31m41)\033[1;37mUkraine \033[1;31m71)\033[1;37mKuwait
\033[1;31m12)\033[1;37mTurkey \033[1;31m42)\033[1;37mSerbia \033[1;31m72)\033[1;37mVenezuela
\033[1;31m13)\033[1;37mAustria \033[1;31m43)\033[1;37mHong Kong \033[1;31m73)\033[1;37mGeorgia
\033[1;31m14)\033[1;37mSwitzerland \033[1;31m44)\033[1;37mGreece \033[1;31m74)\033[1;37mMontenegro
\033[1;31m15)\033[1;37mSpain \033[1;31m45)\033[1;37mPortugal \033[1;31m75)\033[1;37mEl Salvador
\033[1;31m16)\033[1;37mCanada \033[1;31m46)\033[1;37mLatvia \033[1;31m76)\033[1;37mLuxembourg
\033[1;31m17)\033[1;37mSweden \033[1;31m47)\033[1;37mSingapore \033[1;31m77)\033[1;37mCuracao
\033[1;31m18)\033[1;37mIsrael \033[1;31m48)\033[1;37mIceland \033[1;31m78)\033[1;37mPuerto Rico
\033[1;31m19)\033[1;37mIran \033[1;31m49)\033[1;37mMalaysia \033[1;31m79)\033[1;37mCosta Rica
\033[1;31m20)\033[1;37mPoland \033[1;31m50)\033[1;37mColombia \033[1;31m80)\033[1;37mBelarus
\033[1;31m21)\033[1;37mIndia \033[1;31m51)\033[1;37mTunisia \033[1;31m81)\033[1;37mAlbania
\033[1;31m22)\033[1;37mNorway \033[1;31m52)\033[1;37mEstonia \033[1;31m82)\033[1;37mLiechtenstein
\033[1;31m23)\033[1;37mRomania \033[1;31m53)\033[1;37mDominican Republic \033[1;31m83)\033[1;37mBosnia And Herzegovia
\033[1;31m24)\033[1;37mViet Nam \033[1;31m54)\033[1;37mSloveania \033[1;31m84)\033[1;37mParaguay
\033[1;31m25)\033[1;37mBelgium \033[1;31m55)\033[1;37mEcuador \033[1;31m85)\033[1;37mPhilippines
\033[1;31m26)\033[1;37mBrazil \033[1;31m56)\033[1;37mLithuania \033[1;31m86)\033[1;37mFaroe Islands
\033[1;31m27)\033[1;37mBulgaria \033[1;31m57)\033[1;37mPalestinian \033[1;31m87)\033[1;37mGuatemala
\033[1;31m28)\033[1;37mIndonesia \033[1;31m58)\033[1;37mNew Zealand \033[1;31m88)\033[1;37mNepal
\033[1;31m29)\033[1;37mDenmark \033[1;31m59)\033[1;37mBangladeh \033[1;31m89)\033[1;37mPeru
\033[1;31m30)\033[1;37mArgentina \033[1;31m60)\033[1;37mPanama \033[1;31m90)\033[1;37mUruguay
\033[1;31m91)\033[1;37mExtra
""")
try:
num = int(input("OPTIONS : "))
if num == 1:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,720):
url = ("https://www.insecam.org/en/bycountry/US/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 2:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,232):
url = ("https://www.insecam.org/en/bycountry/JP/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 3:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,159):
url = ("https://www.insecam.org/en/bycountry/IT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 4:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,141):
url = ("https://www.insecam.org/en/bycountry/KR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 5:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,120):
url = ("https://www.insecam.org/en/bycountry/FR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 6:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,107):
url = ("https://www.insecam.org/en/bycountry/DE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 7:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,92):
url = ("https://www.insecam.org/en/bycountry/TW/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 8:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,82):
url = ("https://www.insecam.org/en/bycountry/RU/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 9:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,81):
url = ("https://www.insecam.org/en/bycountry/GB/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 10:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,66):
url = ("https://www.insecam.org/en/bycountry/NL/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 11:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,58):
url = ("https://www.insecam.org/en/bycountry/CZ/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 12:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,54):
url = ("https://www.insecam.org/en/bycountry/TR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 13:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,48):
url = ("https://www.insecam.org/en/bycountry/AT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 14:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,44):
url = ("https://www.insecam.org/en/bycountry/CH/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 15:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,39):
url = ("https://www.insecam.org/en/bycountry/ES/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 16:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,38):
url = ("https://www.insecam.org/en/bycountry/CA/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 17:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,35):
url = ("https://www.insecam.org/en/bycountry/SE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 18:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,31):
url = ("https://www.insecam.org/en/bycountry/IL/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 20:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,30):
url = ("https://www.insecam.org/en/bycountry/PL/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 19:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,22):
url = ("https://www.insecam.org/en/bycountry/IR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 22:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,29):
url = ("https://www.insecam.org/en/bycountry/NO/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 23:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,28):
url = ("https://www.insecam.org/en/bycountry/RO/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 21:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,26):
url = ("https://www.insecam.org/en/bycountry/IN/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 24:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,23):
url = ("https://www.insecam.org/en/bycountry/VN/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 25:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,23):
url = ("https://www.insecam.org/en/bycountry/BE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 26:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,23):
url = ("https://www.insecam.org/en/bycountry/BR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 27:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,21):
url = ("https://www.insecam.org/en/bycountry/BG/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 28:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,16):
url = ("https://www.insecam.org/en/bycountry/ID/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 29:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,16):
url = ("https://www.insecam.org/en/bycountry/DK/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 30:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,13):
url = ("https://www.insecam.org/en/bycountry/AR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 31:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,13):
url = ("https://www.insecam.org/en/bycountry/MX/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 32:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,23):
url = ("https://www.insecam.org/en/bycountry/FI/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 33:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,13):
url = ("https://www.insecam.org/en/bycountry/CN/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 34:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,13):
url = ("https://www.insecam.org/en/bycountry/CL/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 35:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,11):
url = ("https://www.insecam.org/en/bycountry/ZA/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 36:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,11):
url = ("https://www.insecam.org/en/bycountry/SK/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 37:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,11):
url = ("https://www.insecam.org/en/bycountry/HU/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 38:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,11):
url = ("https://www.insecam.org/en/bycountry/IE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 39:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,10):
url = ("https://www.insecam.org/en/bycountry/EG/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 40:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,11):
url = ("https://www.insecam.org/en/bycountry/TH/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 41:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,10):
url = ("https://www.insecam.org/en/bycountry/UA/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 42:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,10):
url = ("https://www.insecam.org/en/bycountry/RS/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 43:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,7):
url = ("https://www.insecam.org/en/bycountry/HK/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 44:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,8):
url = ("https://www.insecam.org/en/bycountry/GR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 45:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,7):
url = ("https://www.insecam.org/en/bycountry/PT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 46:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,6):
url = ("https://www.insecam.org/en/bycountry/LV/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 47:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,7):
url = ("https://www.insecam.org/en/bycountry/SG/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 48:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,7):
url = ("https://www.insecam.org/en/bycountry/IS/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 49:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,6):
url = ("https://www.insecam.org/en/bycountry/MY/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 50:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,6):
url = ("https://www.insecam.org/en/bycountry/CO/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 51:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,6):
url = ("https://www.insecam.org/en/bycountry/TN/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 52:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,6):
url = ("https://www.insecam.org/en/bycountry/EE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 53:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,5):
url = ("https://www.insecam.org/en/bycountry/DO/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 54:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,6):
url = ("https://www.insecam.org/en/bycountry/SI/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 55:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,5):
url = ("https://www.insecam.org/en/bycountry/EC/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 56:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,5):
url = ("https://www.insecam.org/en/bycountry/LT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 57:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PS/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 58:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,5):
url = ("https://www.insecam.org/en/bycountry/NZ/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 59:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/BD/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 60:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PA/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 61:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/MD/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 62:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/NI/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 63:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/MT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 64:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/IT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 65:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/SA/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 66:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/HR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 67:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/CY/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 68:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PK/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 69:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/AE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 70:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/KZ/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 71:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/KW/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 72:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/VE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 73:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/GE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 74:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/ME/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 75:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/SV/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 76:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/LU/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 77:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/CW/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 78:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 79:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/CR/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 80:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/BY/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 81:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/AL/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 82:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/LI/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 83:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/BA/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 84:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PY/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 85:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PH/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 86:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/FO/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 87:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/GT/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 88:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/NP/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 89:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/PE/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 90:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,4):
url = ("https://www.insecam.org/en/bycountry/UY/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
elif num == 91:
print("\n")
try:
headers = {'User-Agent': 'Mozilla/5.0 (X11; Linux i686; rv:68.0) Gecko/20100101 Firefox/68.0'}
for page in range (0,16):
url = ("https://www.insecam.org/en/bycountry/-/?page="+str(page))
res = requests.get(url, headers=headers)
findip = re.findall('http://\d+.\d+.\d+.\d+:\d+', res.text)
count = 0
for _ in findip:
hasil = findip[count]
print ("\033[1;31m",hasil)
count += 1
except:
print (" ")
else:
print(" ")
except KeyboardInterrupt:
print (" ")
|
[
"noreply@github.com"
] |
Once61.noreply@github.com
|
e82b311c44c264672396d4f6b68583127bf3dcc8
|
b700c8cfd4033be5a3081f5af94e8a65796dd04a
|
/plots.py
|
0c4542ad4c859ecee52ec5f03927bd5050e75b5e
|
[] |
no_license
|
ahriley/infall-times-gaia
|
149ca9fbb4d5be0b9547d6e2ef702c9ef64b450e
|
96764b33d1da2d40cc8893d7f43ca1f4a2323808
|
refs/heads/master
| 2021-06-01T22:35:03.219172
| 2018-07-12T16:15:10
| 2018-07-12T16:15:10
| 136,056,587
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,159
|
py
|
import numpy as np
import matplotlib.pyplot as plt
import pandas as pd
from utils import *
from astropy.cosmology import WMAP7
# Rocha plots for ELVIS

# First pass over all ELVIS volumes: collect every subhalo radius so a
# shared colour-scale maximum (max_r) is available for the plots below.
r = np.array([])
for sim in list_of_sims('elvis'):
    halos, subs = load_elvis(sim=sim, processed=True)
    r = np.append(r, subs.r)
max_r = np.max(r)

# Second pass: stack binding energy, infall lookback time, radius and
# radial velocity across the isolated standard-resolution volumes.
bind, z, r, v_r = [np.array([]) for i in range(4)]
for sim in list_of_sims('elvis'):
    # Keep only isolated volumes (names starting 'i') at standard resolution.
    if sim[0] != 'i' or 'HiRes' in sim:
        continue
    try:
        halos, subs = load_elvis(sim=sim, processed=True)
        # NOTE(review): presumably a probability/quality cut on subhaloes —
        # confirm the meaning of the nadler2018 column.
        subs = subs[subs.nadler2018 > 0.5]
        pot = subs.pot_mltr
    except AttributeError:
        # Catalogue lacks the processed columns; skip this volume.
        print(sim+" not included")
        continue
    # Binding energy per unit mass: -(potential) minus kinetic term.
    bind_sim = -pot - 0.5*(subs.v_r.values**2 + subs.v_t.values**2)
    bind = np.append(bind, bind_sim)
    # Convert accretion scale factor to lookback time (infall time).
    z = np.append(z, WMAP7.lookback_time(1/subs.a_acc.values - 1))
    r = np.append(r, subs.r)
    v_r = np.append(v_r, subs.v_r)
    # Dead code kept as a string literal: per-volume versions of the plots.
    """
    plt.scatter(WMAP7.lookback_time(1/subs.a_acc.values - 1)[bind_sim>0], np.log10(bind_sim[bind_sim>0]), s=2.0, c=subs.r[bind_sim>0], cmap='plasma', vmin=0.0, vmax=max_r)
    plt.colorbar().set_label(r'Galactocentric Radius [$kpc$]')
    plt.xlim(0.0, WMAP7.lookback_time(np.inf).value)
    plt.ylim(3.4,5.2)
    plt.yticks([3.5,4.0,4.5,5.0])
    plt.xlabel(r'Infall time [$Gyr$]')
    plt.ylabel(r'log(Binding Energy) [$km^2\ s^{-2}$]');
    plt.savefig('figures/eachvolume/rocha_fig1_'+sim+'.png', bbox_inches='tight')
    plt.close()
    plt.scatter(subs.r[bind_sim>0], subs.v_r[bind_sim>0], s=2.0, c=WMAP7.lookback_time(1/subs.a_acc.values - 1)[bind_sim>0], cmap='plasma')
    plt.colorbar().set_label(r'Infall time [$Gyr$]')
    plt.xlabel(r'Galactocentric Radius [$kpc$]')
    plt.ylabel(r'Radial Velocity [$km/s$]')
    plt.savefig('figures/eachvolume/rocha_fig3_'+sim+'.png', bbox_inches='tight')
    plt.close()
    """

# Stacked plot: infall time vs log binding energy, coloured by radius
# (only bound subhaloes, bind > 0, can take the log).
plt.scatter(z[(bind>0)], np.log10(bind[bind>0]), c=r[bind>0], s=2., cmap='plasma')
plt.ylim(2.5,5.2)
plt.colorbar().set_label(r'Galactocentric Radius [$kpc$]')
plt.xlabel(r'Infall time [$Gyr$]')
plt.ylabel(r'log(Binding Energy) [$km^2\ s^{-2}$]');
plt.savefig('figures/isolated.png', bbox_inches='tight')

# Dead code kept as a string literal: single-halo (VL2) version of the plots.
"""
# plot for single halo
halos, subs = load_vl2(scale=1.0, processed=True)
# subs = subs[subs.nadler2018 > 0.5]
z = WMAP7.lookback_time(1/subs.a_acc.values - 1)
# z = subs.a_acc.values
r = subs.r.values
v_r = subs.v_r
bind = -subs.pot_mltr_1000.values - 0.5*(subs.v_r.values**2 + subs.v_t.values**2)
plt.scatter(z[bind>0], np.log10(bind[bind>0]), s=10.0, c=r[bind>0], cmap='plasma')
plt.colorbar().set_label(r'Galactocentric Radius [$kpc$]')
plt.title('VL2')
plt.ylim(3.4,5.2)
plt.yticks([3.5,4.0,4.5,5.0])
plt.xlabel(r'Infall time [$Gyr$]')
plt.ylabel(r'log(Binding Energy) [$km^2\ s^{-2}$]');
# plt.savefig('figures/rocha_fig1_iScylla_HiRes.png', bbox_inches='tight')
plt.close()
plt.scatter(r[bind>0], v_r[bind>0], s=2.0, c=z[bind>0], cmap='plasma')
plt.colorbar().set_label(r'Infall time [$Gyr$]')
plt.xlabel(r'Galactocentric Radius [$kpc$]')
plt.ylabel(r'Radial Velocity [$km/s$]');
plt.title('iScylla_HiRes')
# plt.savefig('figures/rocha_fig3_iScylla_HiRes.png', bbox_inches='tight')
plt.close()
# """
|
[
"30327239+ahriley@users.noreply.github.com"
] |
30327239+ahriley@users.noreply.github.com
|
06615d6a0ed9cca545e0f513c0da0cc11049404f
|
bc3f5e9272b1142a104390871f86efb97202503b
|
/draiver/tests/DataStreamer/server3.py
|
758310cd263bfcaa6ce23410b294a73b0417ddb7
|
[] |
no_license
|
MarcoSignoretto/drAIver
|
dce358de22e1de03a7435d9388bdfabcf9584f2d
|
7a14b3973a2ca8b57f09eb3ee8a1fedaa5e1cadc
|
refs/heads/master
| 2021-04-06T12:25:40.688394
| 2018-06-16T10:38:06
| 2018-06-16T10:38:06
| 125,176,238
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,104
|
py
|
#!/usr/bin/python
import socket
import cv2
import numpy
def recvall(sock, count):
    """Read exactly *count* bytes from *sock*.

    Returns the accumulated bytes on success, or None if the peer closes
    the connection before *count* bytes have arrived.  With count == 0
    the result is b''.
    """
    chunks = []
    remaining = count
    while remaining:
        piece = sock.recv(remaining)
        if not piece:
            # Connection closed mid-message: signal failure to the caller.
            return None
        chunks.append(piece)
        remaining -= len(piece)
    return b''.join(chunks)
# camera init
# NOTE(review): the capture is opened but never read in this file; frames
# arrive over the socket from the client. Property ids 4/5 are presumably
# frame size — verify against cv2.CAP_PROP_* constants.
camera_left = cv2.VideoCapture()
camera_left.set(4, 640)
camera_left.set(5, 480)
camera_left.open(0)

# socket init: accept a single client that streams stereo JPEG frames.
server_address = ('10.42.0.1', 10000)
sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
sock.bind(server_address)
sock.listen(True)
conn, addr = sock.accept()

while True:
    # Protocol: a 16-byte length header (ASCII digits) precedes each
    # JPEG payload; the left image is sent first, then the right.
    length_left = recvall(conn, 16)
    if length_left is None:
        # Client disconnected: leave the loop instead of crashing on int(None).
        break
    stringData_left = recvall(conn, int(length_left))
    if stringData_left is None:
        break
    # numpy.frombuffer replaces the deprecated numpy.fromstring for raw bytes.
    data_left = numpy.frombuffer(stringData_left, dtype='uint8')
    decimg_left = cv2.imdecode(data_left, 1)

    length_right = recvall(conn, 16)
    if length_right is None:
        break
    stringData_right = recvall(conn, int(length_right))
    if stringData_right is None:
        break
    data_right = numpy.frombuffer(stringData_right, dtype='uint8')
    decimg_right = cv2.imdecode(data_right, 1)

    cv2.imshow('SERVER_LEFT', decimg_left)
    cv2.imshow('SERVER_RIGHT', decimg_right)
    cv2.waitKey(1)

# Cleanup (now reachable once the client disconnects).
cv2.destroyAllWindows()
conn.close()
sock.close()
|
[
"marco.signoretto.dev@gmail.com"
] |
marco.signoretto.dev@gmail.com
|
f1eda5ad518399f11f695d6ba4d08d977131db26
|
c72758161d4da978a0cc0c87d88535fc9ca58ba5
|
/inc/preparation/PrepareBaseRequest.py
|
162f602b36060eddbf619cace00aa35737e10375
|
[] |
no_license
|
Damian89/extended-baserequest-importer
|
5478878aefbfa90d34e8ac880b2e84532c0ef202
|
3f72e51546bb833720f014377284bd966db6275a
|
refs/heads/master
| 2020-04-27T09:03:34.575147
| 2019-08-02T06:32:22
| 2019-08-02T06:32:22
| 174,198,885
| 10
| 7
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 2,654
|
py
|
# -*- coding: utf-8 -*-
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston,
# MA 02110-1301, USA.
#
# Author: Damian Schwyrz
from urllib.parse import urlparse
from inc.Headers import *
class PrepareBaseRequest:
    """Builds one baseline GET request descriptor per configured URL.

    Each descriptor (dict) in ``self.tests`` carries the url, host, port,
    path+query, assembled headers and an empty body.
    """

    def __init__(self, config):
        self.config = config
        self.tests = []
        self.__create_request_data()

    def __create_request_data(self):
        # One request descriptor per configured target site.
        for attacked_site in self.config.urls:
            target = self.__make_url(attacked_site)
            self.__add_test(
                target,
                self.__get_host(target),
                self.__get_port(target),
                self.__get_path_and_query(target),
            )

    def __add_test(self, url, hostname, port, path):
        # Assemble the header set; the order of set() calls is significant
        # only in that later calls may overwrite user-defined values.
        headers = Headers(self.config)
        headers.set("Host", hostname)
        headers.add_user_defined_headers()
        if self.config.cookies != "":
            headers.set("Cookie", self.config.cookies)
        headers.set("Referer", "{}".format(url))
        headers.set("User-Agent", headers.get_random_user_agent())
        headers.set("Content-Type", "text/html")
        self.tests.append({
            'url': url,
            'port': port,
            'method': 'GET',
            'host': hostname,
            'path': path,
            'headers': headers.make(),
            'body': '',
        })

    @staticmethod
    def __make_url(attacked_site):
        # Bare host names get a default http:// scheme and trailing slash.
        if attacked_site.startswith("http"):
            return attacked_site
        return "http://{}/".format(attacked_site)

    @staticmethod
    def __get_path_and_query(url):
        # Re-attach the query string to the path when one is present.
        parsed = urlparse(url)
        if not parsed.query:
            return parsed.path
        return "{}?{}".format(parsed.path, parsed.query)

    @staticmethod
    def __get_host(url):
        return urlparse(url).hostname

    @staticmethod
    def __get_port(url):
        # None when the URL carries no explicit port.
        return urlparse(url).port
|
[
"mail@damianschwyrz.de"
] |
mail@damianschwyrz.de
|
a2f3badba22fbc90f72923fcc33d4c53fcd479b5
|
10ae0467d267b2abbcb0c622d151823d91841f65
|
/Demos and Tests/Student Work - 2010/Dakota.py
|
1484d643a5c742945839bca3462e6bce09c8238b
|
[] |
no_license
|
Panda3D-public-projects-archive/pandacamp
|
06b90f42796bf0734ba7932c927c43508ec381f3
|
1b8950589db10259e078edbdecade5c1b6beee2d
|
refs/heads/master
| 2022-04-27T01:12:46.132224
| 2015-03-13T23:00:19
| 2015-03-13T23:00:19
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,367
|
py
|
from Panda import *
def randomTetra(pics):
    """Build a tetra textured with four shuffled pictures at random vertices."""
    def _rand_point():
        # One random corner; called once per vertex so the order of
        # random11() draws matches the original inline expressions.
        return P3(random11(), random11(), random11())

    deck = shuffle(pics)
    return tetra(deck[0], deck[1], deck[2], deck[3],
                 v1=_rand_point(), v2=_rand_point(),
                 v3=_rand_point(), v4=_rand_point())
# Texture pool drawn from by randomTetra for the tetra faces.
group = ["pics/duck1.jpg", "pics/eric1.jpg", "pics/mike1.jpg", "pics/mike2.jpg", "pics/mike3.jpg", "pics/raft1.jpg",
         "pics/raft2.jpg", "pics/raft3.jpg", "pics/raft4.jpg", "pics/raft5.jpg", "pics/raft6.jpg", "pics/raft7.jpg",
         "pics/raft8.jpg", "pics/raft9.jpg", "pics/rock1.jpg", "pics/rock2.jpg", "pics/rock3.jpg", "pics/rock4.jpg",
         "pics/sea1.jpg", "pics/sea2.jpg", "pics/sea3.jpg", "pics/sea4.jpg"]
# Photo set fed to tags()/react() and displayed by launchPhoto below.
dak = ["pics/d1.jpg", "pics/d2.jpg", "pics/d3.jpg", "pics/d4.jpg", "pics/d.jpg"]
def randomTet(p):
    """Place a randomly textured tetra of size 1.5 at position p,
    tumbling continuously around all three axes."""
    tet = randomTetra(group)
    tet.position = p
    tet.size = 1.5
    tet.hpr = integral(HPR(random11(), random11(), random11()))
# Lay out a 5x5 grid of tetras centred on the origin with random heights.
for i in range(5):
    for j in range(5):
        randomTet(P3(i-2, random11()*2, j-2))

# Cycle the world colour through green -> teal -> red -> purple over time.
world.color = itime(at(color(0,.3, 0)) + to(3, color(0, .3, .3)) + to(3, color(.3, 0, 0)) + to(3, color(.3, 0. ,3)))

# Emit one photo name from `dak` every 3 seconds (starting at t=2).
c = tags(dak, alarm(start = 2, step = 3))

def launchPhoto(m, f):
    # Show photo `f` on a unit square that drifts rightward with local time.
    f = unitSquare(texture = f)
    f.position = P3(-4 + localTime, -3, 0)

react(c, launchPhoto)
# name.hpr = HPR(time*3, 0, 0)
start()
|
[
"ProfessorJohnPeterson@gmail.com"
] |
ProfessorJohnPeterson@gmail.com
|
f2c1ef7bf41e8e509ea29dba4ec11c8ad4e17a5e
|
0d90a0e3174c72aceb4107ca378c52afc7fa7f55
|
/sslproject/sslproject/settings.py
|
4041abe31b3cfa8da348a39382be9e75de4cdc36
|
[] |
no_license
|
alphaWizard/SSLproject
|
c555e2daffee539038558d483b5f0f44c6775cf3
|
c18543316484cb835b93c35c49bfb872de2d47c8
|
refs/heads/master
| 2021-08-22T03:45:54.702602
| 2017-11-29T05:44:47
| 2017-11-29T05:44:47
| 108,448,143
| 0
| 2
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,226
|
py
|
"""
Django settings for sslproject project.

Generated by 'django-admin startproject' using Django 1.11.3.

For more information on this file, see
https://docs.djangoproject.com/en/1.11/topics/settings/

For the full list of settings and their values, see
https://docs.djangoproject.com/en/1.11/ref/settings/
"""

import os

# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))


# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/1.11/howto/deployment/checklist/

# SECURITY WARNING: keep the secret key used in production secret!
# NOTE(review): this key is committed to source control; rotate it and load
# it from the environment before any production deployment.
SECRET_KEY = '4jus(01i7s%6nr*b&((tj#i^qn#^of!^9!vb-*@8#bt7=e^i7-'

# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True

# Empty while DEBUG is True; must list served hostnames in production.
ALLOWED_HOSTS = []


# Application definition

INSTALLED_APPS = [
    'django.contrib.admin',
    'django.contrib.auth',
    'django.contrib.contenttypes',
    'django.contrib.sessions',
    'django.contrib.messages',
    'django.contrib.staticfiles',
    # Project-local app containing the site's views and templates.
    'mywebsite',
]

MIDDLEWARE = [
    'django.middleware.security.SecurityMiddleware',
    'django.contrib.sessions.middleware.SessionMiddleware',
    'django.middleware.common.CommonMiddleware',
    'django.middleware.csrf.CsrfViewMiddleware',
    'django.contrib.auth.middleware.AuthenticationMiddleware',
    'django.contrib.messages.middleware.MessageMiddleware',
    'django.middleware.clickjacking.XFrameOptionsMiddleware',
]

ROOT_URLCONF = 'sslproject.urls'

TEMPLATES = [
    {
        'BACKEND': 'django.template.backends.django.DjangoTemplates',
        'DIRS': [],
        # Templates are discovered inside each installed app's templates/ dir.
        'APP_DIRS': True,
        'OPTIONS': {
            'context_processors': [
                'django.template.context_processors.debug',
                'django.template.context_processors.request',
                'django.contrib.auth.context_processors.auth',
                'django.contrib.messages.context_processors.messages',
            ],
        },
    },
]

WSGI_APPLICATION = 'sslproject.wsgi.application'


# Database
# https://docs.djangoproject.com/en/1.11/ref/settings/#databases

# Development default: a file-backed SQLite database in the project root.
DATABASES = {
    'default': {
        'ENGINE': 'django.db.backends.sqlite3',
        'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
    }
}


# Password validation
# https://docs.djangoproject.com/en/1.11/ref/settings/#auth-password-validators

AUTH_PASSWORD_VALIDATORS = [
    {
        'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
    },
    {
        'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
    },
]


# Internationalization
# https://docs.djangoproject.com/en/1.11/topics/i18n/

LANGUAGE_CODE = 'en-us'

TIME_ZONE = 'UTC'

USE_I18N = True

USE_L10N = True

USE_TZ = True


# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/1.11/howto/static-files/

STATIC_URL = '/static/'

# Named URL patterns in the mywebsite app handle auth redirects.
LOGIN_URL = 'mywebsite:login'
LOGOUT_URL = 'mywebsite:logout'
LOGIN_REDIRECT_URL = 'mywebsite:home'
|
[
"debangshubanerjee1997@gmail.com"
] |
debangshubanerjee1997@gmail.com
|
5b3dbdd973981f53d7e2243cb5ad29122bae8999
|
6f2ee69b2b69877950335936f23f8d584e7711af
|
/src/implementations/helpers/partition.py
|
a2a6bd9976b416318124b09e58107623ac1a9881
|
[
"MIT"
] |
permissive
|
wobedi/algorithms-and-data-structures
|
b5a8fdb27be53ba9fae2a93c9aaa949852bf5ce4
|
2d43ac66fd87881182aed65ec22e7016541e9315
|
refs/heads/master
| 2020-08-24T08:21:17.406623
| 2020-08-04T13:47:28
| 2020-08-04T13:47:28
| 216,792,563
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 841
|
py
|
def three_way_partition(arr: list, lower: int, upper: int) -> (int, int):
    """In-place Dutch-national-flag partition of arr[lower..upper].

    The first element, arr[lower], is taken as the pivot.  After the call,
    values smaller than the pivot occupy arr[lower:lt], values equal to the
    pivot occupy arr[lt:gt + 1], and larger values follow.

    Returns:
        (lt, gt): the inclusive index bounds of the equal-to-pivot region.
    """
    pivot = arr[lower]
    low_end = lower      # next slot for a value smaller than the pivot
    high_end = upper     # next slot for a value larger than the pivot
    cursor = lower
    while cursor <= high_end:
        current = arr[cursor]
        if current < pivot:
            arr[low_end], arr[cursor] = current, arr[low_end]
            low_end += 1
            cursor += 1
        elif current > pivot:
            # Do not advance the cursor: the value swapped in is unexamined
            arr[high_end], arr[cursor] = current, arr[high_end]
            high_end -= 1
        else:
            cursor += 1
    return low_end, high_end
if __name__ == '__main__':
    # Demo: pivot is arr[0] == 4; the three 4s should end up contiguous at
    # indices 3..5, with smaller values before them and larger values after.
    arr = [4, 5, 4, 4, 1, 8, 3, 2, 9, 6]
    lt, gt = three_way_partition(arr, 0, len(arr) - 1)
    print(f'lt: {lt}, gt: {gt}')
    assert lt == 3
    assert gt == 5
|
[
"yanick.steinbeck@gmail.com"
] |
yanick.steinbeck@gmail.com
|
5e5690e04aa90942ebf8fe480955b7c04ff6e383
|
a78f5fd783acad55e97916114e3ccb0d817e8ae3
|
/2021/12/day12.py
|
99b2835aae66c8fe2c4262b65d9ad70308275469
|
[] |
no_license
|
fredrik-aschehoug/AdventOfCode
|
3853c1f947ebd0fbec6232674e258763a99775da
|
f44d15bf1167dcbbcb15c5a317f72e45ba560316
|
refs/heads/master
| 2023-06-22T23:04:40.670487
| 2023-06-13T11:21:08
| 2023-06-13T11:21:08
| 225,464,307
| 2
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 544
|
py
|
from utils import get_network
from PathFinder import PathFinder1, PathFinder2
def main():
    """Read the day-12 puzzle input and print the path counts for both parts."""
    with open("12/input.txt", encoding="UTF-8") as puzzle_file:
        puzzle_lines = puzzle_file.read().splitlines()

    # Part 1: standard cave network traversal
    finder = PathFinder1(get_network(puzzle_lines))
    print("Part 1: ", len(finder.get_distinct_paths()))

    # Part 2: network built with the relaxed revisiting rules
    finder = PathFinder2(get_network(puzzle_lines, part2=True))
    print("Part 2: ", len(finder.get_distinct_paths()))


if __name__ == "__main__":
    main()
|
[
"15358786+fredrik-aschehoug@users.noreply.github.com"
] |
15358786+fredrik-aschehoug@users.noreply.github.com
|
66fe9f443d7e7476ac2947896a309613455c29b4
|
b527e5f05d2431a724b95beaf42e80377e09dd4e
|
/node_modules/mongoose/node_modules/mongodb/node_modules/mongodb-core/node_modules/kerberos/build/config.gypi
|
7b693bc9ed8b7a0476ffe3e59cd6bc66c4783400
|
[
"Apache-2.0",
"MIT"
] |
permissive
|
wolfoxonly/snc_explorer
|
e8ee42d3db5ab5d8032e21bf462ff435abbb5bea
|
c0e7ab6a664407f96203d816eeb25f9fc2f986c1
|
refs/heads/master
| 2020-04-17T11:43:31.325818
| 2019-02-26T08:57:08
| 2019-02-26T08:57:08
| 166,552,818
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 3,267
|
gypi
|
# Do not edit. File was generated by node-gyp's "configure" step
{
"target_defaults": {
"cflags": [],
"default_configuration": "Release",
"defines": [],
"include_dirs": [],
"libraries": []
},
"variables": {
"clang": 0,
"gcc_version": 41,
"host_arch": "x64",
"node_install_npm": "true",
"node_prefix": "/",
"node_shared_cares": "false",
"node_shared_http_parser": "false",
"node_shared_libuv": "false",
"node_shared_openssl": "false",
"node_shared_v8": "false",
"node_shared_zlib": "false",
"node_tag": "",
"node_unsafe_optimizations": 0,
"node_use_dtrace": "false",
"node_use_etw": "false",
"node_use_openssl": "true",
"node_use_perfctr": "false",
"node_use_systemtap": "false",
"openssl_no_asm": 0,
"python": "/home/iojs/bin/python",
"target_arch": "x64",
"v8_enable_gdbjit": 0,
"v8_no_strict_aliasing": 1,
"v8_use_snapshot": "false",
"want_separate_host_toolset": 0,
"nodedir": "/root/.node-gyp/0.10.48",
"copy_dev_lib": "true",
"standalone_static_library": 1,
"save_dev": "",
"browser": "",
"viewer": "man",
"rollback": "true",
"usage": "",
"globalignorefile": "/root/.nvm/v0.10.48/etc/npmignore",
"init_author_url": "",
"maxsockets": "50",
"shell": "/bin/bash",
"parseable": "",
"shrinkwrap": "true",
"init_license": "ISC",
"if_present": "",
"cache_max": "Infinity",
"init_author_email": "",
"sign_git_tag": "",
"cert": "",
"git_tag_version": "true",
"local_address": "",
"long": "",
"fetch_retries": "2",
"npat": "",
"registry": "https://registry.npmjs.org/",
"key": "",
"message": "%s",
"versions": "",
"globalconfig": "/root/.nvm/v0.10.48/etc/npmrc",
"always_auth": "",
"spin": "true",
"cache_lock_retries": "10",
"heading": "npm",
"fetch_retry_mintimeout": "10000",
"proprietary_attribs": "true",
"access": "",
"json": "",
"description": "true",
"engine_strict": "",
"https_proxy": "",
"init_module": "/root/.npm-init.js",
"userconfig": "/root/.npmrc",
"node_version": "0.10.48",
"user": "",
"editor": "vi",
"save": "",
"tag": "latest",
"global": "",
"optional": "true",
"bin_links": "true",
"force": "",
"searchopts": "",
"depth": "Infinity",
"rebuild_bundle": "true",
"searchsort": "name",
"unicode": "true",
"fetch_retry_maxtimeout": "60000",
"ca": "",
"save_prefix": "^",
"strict_ssl": "true",
"tag_version_prefix": "v",
"dev": "",
"fetch_retry_factor": "10",
"group": "",
"save_exact": "",
"cache_lock_stale": "60000",
"version": "",
"cache_min": "10",
"cache": "/root/.npm",
"searchexclude": "",
"color": "true",
"save_optional": "",
"user_agent": "npm/2.15.1 node/v0.10.48 linux x64",
"ignore_scripts": "",
"cache_lock_wait": "10000",
"production": "true",
"save_bundle": "",
"init_version": "1.0.0",
"umask": "0022",
"git": "git",
"init_author_name": "",
"scope": "",
"onload_script": "",
"tmp": "/tmp",
"unsafe_perm": "",
"link": "",
"prefix": "/root/.nvm/v0.10.48"
}
}
|
[
"16358164@qq.com"
] |
16358164@qq.com
|
2294a459f1fdf255465e1c2a3b0a875e3b2dd9a2
|
0ea9136591fbd928716cd6c1159ffc8985de0242
|
/Mongo_Search/old scripts/SearchChannel.py
|
aaffb1ce5e6d28c4e55e6ffe9dc99ebc35c3cdbe
|
[] |
no_license
|
kaayem/Kym
|
0d8b1042fbd98772b58f294aecb122750d00b99b
|
8f99c20757c5505809cacbe5459ba6605ac93e98
|
refs/heads/master
| 2022-11-17T13:29:46.111333
| 2020-07-22T11:55:01
| 2020-07-22T11:55:01
| 277,321,585
| 0
| 0
| null | 2020-07-22T11:55:03
| 2020-07-05T14:45:47
|
Python
|
UTF-8
|
Python
| false
| false
| 1,478
|
py
|
#generate set up checks for python
# check all brands in a market
import pymongo
from pymongo import MongoClient
import pandas as pd
import numpy as np
import os
def db_connect():
    """Verify that the module-level MongoDB connection ``con`` is usable.

    Returns:
        bool: True when the server responds, False otherwise.

    Bug fix: the original referenced the undefined globals ``client`` and
    ``DB_NAME``; the resulting NameError was swallowed by the bare ``except``
    so the check always printed the failure message and returned False.
    """
    try:
        # server_info() forces a round trip and raises if the server is
        # unreachable.
        con.server_info()
        print("Connection Successful!")
        return True
    except Exception:
        print("No, Please check your connection!!!")
        return False
def db_close():
    """Close the module-level MongoDB connection ``con``.

    Bug fix: the original called ``client.close()`` but no ``client`` global
    exists in this script, so calling it raised NameError.
    """
    print ("Connection Getting Closed")
    con.close()
# Module-level MongoDB connection (LAN host); the localhost and mm_pharma
# variants are kept commented for reference.
#con = pymongo.MongoClient("mongodb://127.0.0.1:27017/")
con = pymongo.MongoClient("mongodb://192.168.1.181:27017/")
#db = con['mm_pharma']
db = con['mm_dev']
# NOTE(review): coll is assigned but never used — the query below goes
# through db.attributes directly.
coll = db['attributes']
print("Have we successfully connected to Mongo?")
db_connect()
print(" Please note this will need to be run in python 2")
# Channel name to search for in the attributes collection
CH = input("Please enter the channel you are looking at ")
# Unwind each document's channel list and keep only the entries whose
# standardised value (mstd) matches the requested channel.
mydoc4 = db.attributes.aggregate([{
    '$project' :{'_id':0,'code':1,'source':1,'report':1,'channel':'$channel'}},
    {'$unwind':'$channel'},
    {'$project':{'_id':0,'code':1,'source':1,'report':1,'mstd' : '$channel.value', 'mraw' : '$channel.name'}},
    {'$match':{ 'mstd': {'$in': [CH,]}}}])
# Materialise the cursor before handing it to pandas
doc5 = []
for x in mydoc4:
    doc5.append(x)
# Normalise the results into a fixed-column DataFrame and export to CSV.
df = pd.DataFrame(data =doc5)
index = ['code', 'source', 'report', 'mraw', 'mstd']
df = df.reindex(columns = index)
#print(df)
name= 'Channel finder for '+CH+'.csv'
df.to_csv(name)
print(" CSV of all metrics you are that are mapped has been outputted to", os.getcwd())
|
[
"kaayempatel@wessexinsights.com"
] |
kaayempatel@wessexinsights.com
|
7e0ddf1ed82eb7a872aeed25135c9b4572b437ac
|
a7b722424273b0b9ad00ca0088d129b464c9bb1b
|
/Back-end/store/admin.py
|
7d34d3b73fb34832d5bba84103412c4d01fae0ad
|
[] |
no_license
|
HackRx2-0/ps2_legit_geeks
|
df1f71dcf0b43ef750ceeb9ff6ccdae1b2ce4c9e
|
c19de4403c45fdd5ed132e5c17a8f22c6aea0f46
|
refs/heads/main
| 2023-06-23T02:12:44.494118
| 2021-07-24T09:50:30
| 2021-07-24T09:50:30
| 388,513,416
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 747
|
py
|
from django.contrib import admin
import nested_admin
from .models import Store, ShippingZone, ShippingMethod, BankAccount
class StoreAdmin(nested_admin.NestedModelAdmin):
    # Admin for the Store model; the nested-admin base class is kept so the
    # commented-out nested inline can be re-enabled without changing bases.
    model = Store
    # inlines = [WholesaleProductVariantInline]
class ShippingMethodInlineAdmin(nested_admin.NestedTabularInline):
    # Tabular inline editor for ShippingMethod rows: no blank extra forms
    # (extra=0) and at least one method required (min_num=1).
    model = ShippingMethod
    extra = 0
    min_num = 1
class ShippingZoneAdmin(nested_admin.NestedModelAdmin):
    """Admin for ShippingZone with its shipping methods editable inline."""

    # Bug fix: Django reads the ``inlines`` (plural) attribute; the original
    # ``inline`` attribute was silently ignored, so the inline editor for
    # shipping methods never appeared on the change form.
    inlines = [ShippingMethodInlineAdmin]
class BankAccountAdmin(admin.ModelAdmin):
    # Explicit field ordering for the bank account add/change form.
    fields = ['store', 'holder_name', 'account_number', 'bank_name', 'ifsc', 'account_type']
# Expose the store models in the Django admin. ShippingMethod has no
# standalone registration; it is intended to be edited inline.
admin.site.register(Store, StoreAdmin)
admin.site.register(ShippingZone, ShippingZoneAdmin)
admin.site.register(BankAccount, BankAccountAdmin)
|
[
"181210045@nitdelhi.ac.in"
] |
181210045@nitdelhi.ac.in
|
a86b793d90e6718bbb40e66343ea07b986f34a35
|
9e99a543ac84503729604c3fc967742f741f7d77
|
/todo-list.py
|
9a834c7f1182783a0a02aa028fd6d25d9a09ae5e
|
[] |
no_license
|
emreyeprem/python-class-object
|
17b512a9dfd0ae7a96f9903b6085f33c0e2af599
|
9da06cdc269265a0359dadaed3cc3056b821f6b4
|
refs/heads/master
| 2020-03-29T18:21:12.922481
| 2018-09-25T04:23:31
| 2018-09-25T04:23:31
| null | 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,710
|
py
|
#---------------------- First Try------------------------
# priority_list = []
# task_list = []
# task_and_priority_list = []
#
# while 1==1:
# task_title = input("Enter the task: ").lower()
# task_priority = int(input("Type a number from 0 to 5(0: least important -> 5: highly important): "))
# quit_program = input('Press q to quit or enter to add another task: ').lower()
# task_list.append(task_title)
# priority_list.append(task_priority)
# task_and_priority_list.append("{0} : {1}".format(task_priority,task_title))
# #print(task_and_priority_list)
# #print(task_list)
# final_list = sorted(task_and_priority_list, reverse = True)
# print(final_list)
# if quit_program == 'q':
# break
# -----------------Second try-------------------------------
# Shared registry of tasks: maps task name -> integer priority.
task_list = {}
# NOTE(review): priority_list is never read or written below — it looks like
# leftover state from the commented-out first attempt above.
priority_list = []
class Task:
    """A named task with an integer priority, recorded in the shared
    module-level ``task_list`` dictionary."""

    def __init__(self, name, priority):
        self.name = name
        self.priority = priority
        # Register (or overwrite) this task in the shared registry and echo it
        task_list[self.name] = self.priority
        print(task_list)

    def remove(self):
        """Prompt once for a task name and delete it from task_list.

        Returns:
            The removed priority, or None when the name is not present.

        Bug fix: the original prompted the user once per existing key while
        iterating over (and popping from) the very dict it was iterating,
        which is both confusing and unsafe.
        """
        name = input('Enter the task to remove: ')
        if name in task_list:
            return task_list.pop(name)
        return None

    def sort(self):
        """Print all tasks sorted by priority, highest priority first."""
        sorted_list = sorted(task_list.items(), key=lambda kv: kv[1], reverse=True)
        print(sorted_list)
# Interactive entry loop: keep creating tasks until the user enters 'q'.
while 1==1:
    task = Task(input('Enter task name:'), int(input('Type a number from 0 to 5(0: least important -> 5: highly important): ')))
    if input('Press q to quit or enter to add more task: ') == 'q':
        break
# task.remove()
# Remove one task chosen by the user, show what is left, then print the
# tasks ordered by priority (uses the last Task instance created above).
task.remove()
print(task_list)
task.sort()
#-------------------------------------------------
#-------------------------------------------------
|
[
"emreakurek@MacBook-Air-2.local"
] |
emreakurek@MacBook-Air-2.local
|
a528ab42b50817339d31b862bfcedf826beed8cd
|
8751fdfb52527ae28d2bf9a3d982b9fe12c774b8
|
/homework4.py
|
526128a73f6b56f1e907d7b9f90f77b87f932b54
|
[] |
no_license
|
acheng6845/DataStructuresLab
|
f38f8fe17d71e7a6ad0ba0b46a645f151e91c113
|
65e0d025d4ec6857cf762df4cbadf957ed23b8c7
|
refs/heads/master
| 2021-09-02T07:42:05.616983
| 2017-12-31T15:39:38
| 2017-12-31T15:39:38
| 115,870,979
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 11,925
|
py
|
# ---------------The PList code I posted earlier---------
#Note to Self: Submit assignment immediately after finishing it, so you don't leave it to chance waking up in the middle
#of the night a few days later and a few hours before it's due.
class PList:
    """Positional doubly linked list supporting O(1) reversal (flip),
    O(1) concatenation (+=) and list splitting.

    Every node shares a direction-flag object with its owning list, so
    flipping the list just toggles the meaning of each node's _prev/_next
    links instead of rewiring the whole chain.
    """

    class _Node:
        """Linked-list node whose prev/next accessors honor the list's
        current direction flag."""
        __slots__ = '_data', '_prev', '_next', '_direction'

        def __init__(self, data, prev, next, direction):
            self._data = data
            self._prev = prev
            self._next = next
            self._direction = direction

        def get_next(self):
            # "next" means the raw _next link only while direction is forward
            if self._direction._isRight:
                return self._next
            else:
                return self._prev

        def get_prev(self):
            if self._direction._isRight:
                return self._prev
            else:
                return self._next

        def set_next(self, node):
            if self._direction._isRight:
                self._next = node
            else:
                self._prev = node

        def set_prev(self, node):
            if self._direction._isRight:
                self._prev = node
            else:
                self._next = node

    class Position:
        """Opaque handle to a node, tied to the list state it was created in."""

        def __init__(self, plist, node):
            self._plist = plist
            self._node = node
            # Shared validity token; invalidate() swaps it out, voiding all
            # positions created before a structural change
            self._vbool = plist._vbool

        def data(self):
            return self._node._data

        def __eq__(self, other):
            return type(other) is type(self) and other._node is self._node

        def __ne__(self, other):
            return not (self == other)

    class _ValidationBoolean:
        # Shared token marking whether positions holding it are still valid
        def __init__(self):
            self._isValid = True

    class _FlippedBoolean:
        # Direction holder meaning "reversed" while _isRight is False
        def __init__(self):
            self._isRight = False

    class _NotFlippedBoolean:
        # Direction holder meaning "forward" while _isRight is True
        def __init__(self):
            self._isRight = True

    def _validate(self, p):
        """Return p's node, raising if p is stale, foreign, or deleted."""
        if not p._vbool._isValid:
            raise ValueError("p no longer has a valid PList")
        if not isinstance(p, self.Position):
            raise TypeError("p must be proper Position type")
        if p._plist is not self:
            raise ValueError('p does not belong to this PList')
        if p._node._next is None:
            raise ValueError('p is no longer valid')
        return p._node

    def _make_position(self, node):
        """Wrap node in a Position, or return None for the sentinels."""
        if node is self._head or node is self._tail:
            return None
        else:
            return self.Position(self, node)

    def __init__(self):
        self._flipped = self._FlippedBoolean()
        self._notflipped = self._NotFlippedBoolean()
        self._head = self._Node(None, None, None, self._notflipped)
        self._head._next = self._tail = self._Node(None, self._head, None, self._notflipped)
        self._vbool = self._ValidationBoolean()

    def __len__(self):
        """Return the number of elements (O(n) walk between the sentinels).

        Bug fix: the previous implementation stopped at the first node whose
        data was None, so lists containing None elements reported a short
        length; walking sentinel-to-sentinel counts every real node.
        """
        node = self._head.get_next()
        length = 0
        while node is not self._tail:
            length += 1
            node = node.get_next()
        return length

    def is_empty(self):
        """Return True when the list holds no elements."""
        return len(self) == 0

    def first(self):
        """Return the Position of the first element (None when empty)."""
        return self._make_position(self._head.get_next())

    def last(self):
        """Return the Position of the last element (None when empty)."""
        return self._make_position(self._tail.get_prev())

    def before(self, p):
        """Return the Position preceding p (None at the front)."""
        node = self._validate(p)
        return self._make_position(node.get_prev())

    def after(self, p):
        """Return the Position following p (None at the back)."""
        node = self._validate(p)
        return self._make_position(node.get_next())

    def __iter__(self):
        """Yield the element data from first to last."""
        pos = self.first()
        while pos:
            yield pos.data()
            pos = self.after(pos)

    def _insert_after(self, data, node):
        """Splice a new node holding data directly after node."""
        newNode = self._Node(data, None, None, self._notflipped)
        newNode.set_next(node.get_next())
        newNode.set_prev(node)
        node.get_next().set_prev(newNode)
        node.set_next(newNode)
        return self._make_position(newNode)

    def add_first(self, data):
        """Insert data at the front; return its Position."""
        return self._insert_after(data, self._head)

    def add_last(self, data):
        """Insert data at the back; return its Position."""
        return self._insert_after(data, self._tail.get_prev())

    def add_before(self, p, data):
        """Insert data just before p; return the new Position."""
        node = self._validate(p)
        return self._insert_after(data, node.get_prev())

    def add_after(self, p, data):
        """Insert data just after p; return the new Position."""
        node = self._validate(p)
        return self._insert_after(data, node)

    def delete(self, p):
        """Unlink p's node and return its data.

        NOTE(review): this uses the raw _prev/_next links rather than the
        direction-aware accessors, so its behavior on a flipped list is
        unverified — confirm before deleting from a flipped PList.
        """
        node = self._validate(p)
        data = node._data
        node._prev._next = node._next
        node._next._prev = node._prev
        node._prev = node._next = node._data = None
        return data

    def replace(self, p, data):
        """Replace the data at p, returning the old data."""
        node = self._validate(p)
        olddata = node._data
        node._data = data
        return olddata

    def rev_itr(self):
        """Yield the element data from last to first."""
        pos = self.last()
        while pos:
            yield pos.data()
            pos = self.before(pos)

    def __iadd__(self, other):
        """Append all of other's nodes to self in O(1); other becomes empty.

        All outstanding positions of other are invalidated.
        """
        if len(other):
            if len(self) == 0:
                # Adopt other's orientation when self has no elements
                if other._notflipped._isRight != self._notflipped._isRight:
                    self.flip()
                thisLast = self._head
            else:
                thisLast = self._validate(self.last())
            otherFirst = other._validate(other.first())
            otherLast = other._validate(other.last())
            thisLast.set_next(otherFirst)
            otherFirst.set_prev(thisLast)
            otherLast.set_next(self._tail)
            self._tail.set_prev(otherLast)
            other._head.set_next(other._tail)
            other._tail.set_prev(other._head)
            # Re-point other's direction holders at self's so the moved
            # nodes interpret their links consistently with this list
            if other._notflipped._isRight == self._notflipped._isRight:
                other._notflipped = self._notflipped
                other._flipped = self._flipped
            else:
                other._notflipped = self._flipped
                other._flipped = self._notflipped
            other.invalidate()
        return self

    def split_after(self, p):
        """Move every element after p into a new PList and return it.

        All outstanding positions of self are invalidated.
        """
        returnList = PList()
        node = self._validate(p)
        node.get_next().set_prev(returnList._head)
        returnList._head.set_next(node.get_next())
        returnList._tail.set_prev(self._validate(self.last()))
        self._validate(self.last()).set_next(returnList._tail)
        self._tail.set_prev(node)
        node.set_next(self._tail)
        self.invalidate()
        return returnList

    def split_before(self, p):
        """Move p's element and everything after it into a new PList."""
        node = self._validate(p)
        position = self.before(self._make_position(node))
        if position is not None:
            return self.split_after(position)
        else:
            # p is the first element: hand the whole list over
            returnList = PList()
            returnList += self
            self.invalidate()
            return returnList

    def invalidate(self):
        """Void all outstanding positions by swapping the validity token."""
        self._vbool._isValid = False
        self._vbool = self._ValidationBoolean()

    def flip(self):
        """Reverse the list in O(1) by toggling the shared direction flags."""
        self._flipped._isRight = not self._flipped._isRight
        self._notflipped._isRight = not self._notflipped._isRight
        self._flipped, self._notflipped = self._notflipped, self._flipped
        self._head, self._tail = self._tail, self._head
# ---------------CODE USED TO CHECK TESTS--------------------
def printList(L):
    """Show the contents of L traversed in both directions (debug aid)."""
    forward, backward = list(L), list(L.rev_itr())
    print(" Forward:", forward)
    print(" Backward:", backward)
def checkAnswer(taskno, testno, yours, correct):
    """Report whether `yours` matches `correct` for the given task/test."""
    print("Task:", taskno, " Test:", testno, end=" ")
    if yours != correct:
        print("Wrong: ", yours, " The correct answer is:")
        print(correct)
    else:
        print("Correct: ", yours)
def checkList(taskno, testno, yours, correctforward):
    """Validate both iteration directions of `yours` against correctforward."""
    print("Task:", taskno, " Test:", testno, end=" ")
    got_forward = list(yours)
    got_backward = list(yours.rev_itr())
    want_backward = list(reversed(correctforward))
    if got_forward != correctforward:
        print("Wrong. Your code gives ", got_forward,
              " but the correct answer is:", correctforward)
    elif got_backward != want_backward:
        print("Wrong! Your forward iterator is correct and gives ",
              got_forward,
              " but your reverse iterator gives ",
              got_backward)
    else:
        print("Correct: ", got_forward)
# ------------------------------------------------------
"""
To enable the test code for each task, change the booleans below. When you are
working on one task you may want to disable the others.
"""
testTask1 = True
testTask2 = True
testTask3 = True
testTask4 = True
testTask5 = True
# ------------------------TASK 1-----------------------
if (testTask1):
print("\n------TASK 1------")
L = PList()
for i in range(5):
L.add_first(i)
printList(L) # Demo of the printList function, you may want to use to debug
checkAnswer(1, 1, len(L), 5)
checkAnswer(1, 2, L.is_empty(), False)
checkAnswer(1, 3, len(PList()), 0)
checkAnswer(1, 4, PList().is_empty(), True)
# ------------------------TASK 2-----------------------
if (testTask2):
print("\n------TASK 2------")
L = PList()
for i in range(5):
L.add_first(i)
L2 = PList()
for i in ("a", "b", "c", "d", "e"):
L2.add_first(i)
L += L2
checkList(2, 1, L, [4, 3, 2, 1, 0, 'e', 'd', 'c', 'b', 'a'])
checkList(2, 2, L2, [])
# ------------------------TASK 3-----------------------
if (testTask3):
print("\n------TASK 3------")
L = PList()
for i in [1, 2, "a", "b"]:
L.add_last(i)
L2 = L.split_after(L.after(L.first()))
checkList(3, 1, L, [1, 2])
checkList(3, 2, L2, ['a', 'b'])
L3 = L2.split_before(L2.last())
checkList(3, 3, L2, ['a'])
checkList(3, 4, L3, ['b'])
L4 = L3.split_before(L3.first())
checkList(3, 5, L3, [])
checkList(3, 6, L4, ['b'])
# ------------------------TASK 4-----------------------
if (testTask4):
print("\n------TASK 4------")
L = PList()
for i in range(5):
L.add_first(i)
p = L.last()
L2 = L.split_before(L.before(p))
try:
q = L.before(p)
except ValueError:
print("Task: 4 Test:1 Correctly has an exception")
else:
print("Task: 4 Test:1 Wrong! No exception")
try:
q = L2.before(p)
except ValueError:
print("Task: 4 Test:2 Correctly has an exception")
else:
print("Task: 4 Test:2 Wrong! No exception")
p = L.first()
p2 = L2.first()
try:
p = L.after(p)
p2 = L2.after(p2)
L += L2
p = L.before(p)
except ValueError:
print("Task: 4 Test:3 Wrong! There should be no exception")
else:
print("Task: 4 Test:3 Correct, no exception")
try:
p2 = L2.before(p2)
except ValueError:
print("Task: 4 Test:4 Correctly has an exception")
else:
print("Task: 4 Test:4 Wrong! No exception")
# ------------------------TASK 5-----------------------
if (testTask5):
print("\n------TASK 5------")
L = PList()
for i in range(5):
L.add_first(i)
# checking the basic flip operation
L.flip()
checkList(5, 1, L, [0, 1, 2, 3, 4])
L2 = PList()
for i in ("a", "b", "c", "d", "e"):
L2.add_first(i)
# checking the += works with flip
L += L2
checkList(5, 2, L, [0, 1, 2, 3, 4, 'e', 'd', 'c', 'b', 'a'])
checkList(5, 3, L2, [])
# checking that split works with flip
L = PList()
for i in [1, 2, "a", "b"]:
L.add_last(i)
L.flip()
L2 = L.split_after(L.after(L.first()))
checkList(5, 4, L2, [2, 1])
checkList(5, 5, L, ['b', 'a'])
L3 = L2.split_before(L2.last())
L.flip()
checkList(5, 6, L2, [2])
checkList(5, 7, L3, [1])
L4 = L3.split_before(L3.first())
checkList(5, 8, L3, [])
checkList(5, 9, L4, [1])
# checking the positions move in the right direction always
L = PList()
for i in range(5):
L.add_last(i)
p = L.after(L.first())
checkAnswer(5, 10, (L.before(p).data(), L.after(p).data()), (0, 2))
L.flip()
checkAnswer(5, 11, (L.before(p).data(), L.after(p).data()), (2, 0))
|
[
"acheng6845@gmail.com"
] |
acheng6845@gmail.com
|
e79ce4977b6a49f90c9aaada2b5644b561275441
|
a2170a9fc6355dfd9a2a54e7b7f9de531dc74c9c
|
/pywhoisxml/exceptions.py
|
de667387dc2898d0ed0a1507dd5f50e4c2b7e9f6
|
[] |
no_license
|
VarthanV/pywhoisxml
|
82a826d0c23b9319e1fd75b58bab4bbb7602786e
|
47fb58ace086df5222fbef936fc71bbb4383d0d1
|
refs/heads/master
| 2022-05-27T03:30:46.806937
| 2020-05-01T03:32:07
| 2020-05-01T03:32:07
| 259,951,022
| 0
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 44
|
py
|
class PyWhoisException(Exception):
    """Base exception type raised by the pywhoisxml package."""
    pass
|
[
"vichu@Vishnus-MacBook-Air.local"
] |
vichu@Vishnus-MacBook-Air.local
|
686c9615805e1a5fc4ed701965c783ef17b68d2e
|
3de6a297bdacd268c9aafadc37d0f53d1a77da76
|
/example/strategy_two_graphs.py
|
877f0c12157d9964cba360e559135a7547036ae7
|
[] |
no_license
|
HipGraph/GNNfam
|
bfe7ae9b3f120f7fd06174dc318c086bb4d8e0e0
|
7b6e2bc7f07402696b574625222979b8f57a1a24
|
refs/heads/master
| 2023-04-26T05:12:34.990943
| 2021-05-25T23:38:27
| 2021-05-25T23:38:27
| 357,778,089
| 1
| 0
| null | null | null | null |
UTF-8
|
Python
| false
| false
| 1,150
|
py
|
#!/home/anuj/virtualenvforest/globalvenv/bin/python
import networkx as nx
def main():
    """Write sparsified copies of graph.txt, keeping each node's strongest
    edges at 10%..100% retention levels (one output file per level)."""
    basegraph = nx.Graph()
    with open('graph.txt', 'r') as f:
        for line in f.readlines():
            # Each line: <node_a> <node_b> <edge_weight>
            a, b, c = line.strip().split()
            basegraph.add_edge(int(a), int(b), weight=float(c))
    #basegraph is the networkx graph generated from the original graph.txt file
    for stepval in range(10, 101, 10):
        # Fraction of each node's edges to keep at this level
        step = stepval/100
        all_edges = []
        for node in basegraph.nodes():
            # This node's neighbors sorted by descending edge weight
            edges = [
                (k,v['weight']) for k,v in sorted(
                    basegraph[node].items(), reverse=True,
                    key=lambda item:item[1]['weight']
                )
            ]
            # Keep only the top `step` fraction of this node's edges
            keepedges = edges[:int(len(edges)*step)]
            for edge in keepedges:
                all_edges.append(
                    "{} {} {}\n".format(node, edge[0], edge[1])
                )
            # Unit-weight self-loop so every node appears in the output
            all_edges.append("{} {} {}\n".format(node,node, 1))
        with open("strategy_two_graph_{}_percent.txt".format(stepval), "w") as f:
            f.writelines(all_edges)
if __name__ == '__main__':
    main()
|
[
"abgodase@iu.edu"
] |
abgodase@iu.edu
|
c280ee3b854a4f6043932dbcd3aa1b31846f9e2c
|
47988c4d1e2c07cd2465da204890f481d59dbd4b
|
/src/tests/ftest/util/command_utils_base.py
|
e25429c501973d0d6e453644fdfcba2b0cf4b268
|
[
"BSD-2-Clause",
"BSD-2-Clause-Patent"
] |
permissive
|
dsikich/daos
|
974000a2e9a37c2edc994007f864ab69afe347e3
|
13385f8eb3209dfe9f63772a68a3bb8cadaf2e23
|
refs/heads/master
| 2022-07-07T05:46:07.074084
| 2022-06-29T13:01:52
| 2022-06-29T13:01:52
| 242,208,796
| 0
| 0
|
NOASSERTION
| 2021-12-07T21:17:27
| 2020-02-21T18:50:31
|
C
|
UTF-8
|
Python
| false
| false
| 26,663
|
py
|
#!/usr/bin/python
"""
(C) Copyright 2020-2022 Intel Corporation.
SPDX-License-Identifier: BSD-2-Clause-Patent
"""
from logging import getLogger
import os
import yaml
from exception_utils import CommandFailure
class BasicParameter():
    """A parameter whose value is populated from a test yaml file."""

    def __init__(self, value, default=None, yaml_key=None):
        """Set up a BasicParameter.

        The attribute name this object is assigned to normally doubles as
        the yaml key used to look up its value.  Supply yaml_key when the
        desired yaml key would collide with an existing class attribute
        (e.g. self.log).

        Args:
            value (object): initial value; falls back to default when None
            default (object, optional): default value. Defaults to None.
            yaml_key (str, optional): yaml key name overriding the attribute
                name during yaml lookups. Defaults to None.
        """
        self._value = default if value is None else value
        self._default = default
        self._yaml_key = yaml_key
        self.log = getLogger(__name__)
        # Set whenever the value changes; callers clear it to detect updates
        self.updated = True

    def __str__(self):
        """Return the value as a string ("" when the value is None)."""
        return "" if self.value is None else str(self.value)

    @property
    def value(self):
        """object: the value currently assigned to the parameter."""
        return self._value

    @value.setter
    def value(self, item):
        # Only flag an update when the assignment actually changes the value
        if item != self._value:
            self._value = item
            self.updated = True

    def get_yaml_value(self, name, test, path):
        """Assign this parameter's value from the test's yaml file.

        Args:
            name (str): yaml key name (overridden by yaml_key when set)
            test (Test): avocado Test object used to read the yaml file
            path (str): yaml path under which the key is searched
        """
        key = name if self._yaml_key is None else self._yaml_key
        if hasattr(test, "config") and test.config is not None:
            self.value = test.config.get(key, path, self._default)
        else:
            self.value = test.params.get(key, path, self._default)

    def update(self, value, name=None, append=False):
        """Assign a new value, optionally merging into a list/dict value.

        Args:
            value (object): value to assign or merge
            name (str, optional): name used when logging the update.
                Defaults to None.
            append (bool, optional): when True and the current value is a
                list/dict, extend/update it in place instead of replacing
                it. Defaults to False.
        """
        if not append:
            self.value = value
        elif isinstance(self.value, dict):
            # Merge the provided key/value pairs into the current dict
            self.value.update(value)
            self.updated = True
        elif isinstance(self.value, list):
            # Extend with a list, or append any other single item
            if isinstance(value, list):
                self.value.extend(value)
            else:
                self.value.append(value)
            self.updated = True
        else:
            self.value = value
        if name is not None:
            self.log.debug("Updated param %s => %s", name, self.value)

    def update_default(self, value):
        """Replace the BasicParameter default value.

        Args:
            value (object): new default value
        """
        self._default = value
class FormattedParameter(BasicParameter):
    # pylint: disable=too-few-public-methods
    """A yaml-driven parameter rendered through a command line format string."""

    def __init__(self, str_format, default=None, yaml_key=None):
        """Set up a FormattedParameter.

        As with BasicParameter, the attribute name normally doubles as the
        yaml key; pass yaml_key to decouple the two when the attribute name
        would collide with another class attribute.

        Args:
            str_format (str): format string used to render the value as a
                command line argument
            default (object): default value for the param
            yaml_key (str, optional): alternative yaml key name used when
                assigning the value from a yaml file. Defaults to None.
        """
        super().__init__(default, default)
        self._str_format = str_format
        self._yaml_key = yaml_key

    def __str__(self):
        """Render the parameter as a command line argument string.

        Returns:
            str: the formatted argument, or "" when unset/disabled
        """
        current = self.value
        if isinstance(self._default, bool):
            # Boolean flags render as the bare format string when enabled
            return self._str_format if current else ""
        if current is None:
            return ""
        if isinstance(current, dict):
            # Each key renders with its quoted value inside one argument
            return " ".join(
                self._str_format.format('{} "{}"'.format(key, current[key]))
                for key in current)
        if isinstance(current, (list, tuple)):
            return " ".join(
                self._str_format.format(item) for item in current)
        return self._str_format.format(current)

    def get_yaml_value(self, name, test, path):
        """Assign the value from the test yaml file.

        Args:
            name (str): name of the value in the yaml file - not used when
                yaml_key is set
            test (Test): avocado Test object to use to read the yaml file
            path (str): yaml path where the name is to be found
        """
        key = name if self._yaml_key is None else self._yaml_key
        return super().get_yaml_value(key, test, path)
class LogParameter(FormattedParameter):
    """A test log file parameter anchored to a fixed directory."""

    def __init__(self, directory, str_format, default=None):
        """Set up a LogParameter.

        Args:
            directory (str): fixed location for the log file name specified
                by the yaml file
            str_format (str): format string used to render the value as a
                command line argument
            default (object): default value for the param
        """
        super().__init__(str_format, default)
        self._directory = directory
        self._add_directory()

    def _add_directory(self):
        """Prefix the fixed log directory onto the current file name.

        Yaml values are expected to be bare file names; this re-anchors the
        name under the fixed directory so the full path is used everywhere.
        """
        current = self.value
        if isinstance(current, str):
            self.value = os.path.join(
                self._directory, os.path.basename(current))
        elif current is not None:
            self.log.info(
                "Warning: '%s' not added to '%s' due to incompatible type: %s",
                self._directory, current, type(current))

    def get_yaml_value(self, name, test, path):
        """Assign the value from yaml, then anchor it to the log directory.

        Args:
            name (str): name of the value in the yaml file
            test (Test): avocado Test object to use to read the yaml file
            path (str): yaml path where the name is to be found
        """
        super().get_yaml_value(name, test, path)
        self._add_directory()
        self.log.debug(" Added the directory: %s => %s", name, self.value)

    def update(self, value, name=None, append=False):
        """Update the value, then anchor it to the log directory.

        Args:
            value (object): value to assign
            name (str, optional): name used when logging the update.
                Defaults to None.
            append (bool, optional): append/extend/update the current
                list/dict with the provided value instead of replacing it.
                Defaults to False.
        """
        super().update(value, name, append)
        self._add_directory()
        self.log.debug(" Added the directory: %s => %s", name, self.value)
class ObjectWithParameters():
    """Base class for objects whose parameters are read from yaml files."""

    def __init__(self, namespace):
        """Set up an ObjectWithParameters.

        Args:
            namespace (str): yaml namespace (path to parameters)
        """
        self.namespace = namespace
        self.log = getLogger(__name__)

    def get_attribute_names(self, attr_type=None):
        """List this object's attribute names, optionally filtered by type.

        Args:
            attr_type(object, optional): a single type or tuple of types
                used to filter the attributes. Defaults to None (no filter).

        Returns:
            list: sorted attribute names matching the filter
        """
        names = sorted(self.__dict__.keys())
        if attr_type is None:
            return names
        return [name for name in names
                if isinstance(getattr(self, name), attr_type)]

    def get_param_names(self):
        """List the names of all BasicParameter attributes.

        Note: Override this method to change the order or inclusion of a
        command parameter in the get_params() method.

        Returns:
            list: sorted BasicParameter attribute names
        """
        return self.get_attribute_names(BasicParameter)

    def get_params(self, test):
        """Populate every parameter attribute from the test yaml file.

        Each BasicParameter attribute is assigned the yaml value found
        under this object's namespace using the attribute's own name as the
        key; the parameter default is used when the key is absent.

        Args:
            test (Test): avocado Test object
        """
        for name in self.get_param_names():
            getattr(self, name).get_yaml_value(name, test, self.namespace)

    def update_params(self, **params):
        """Update each of the provided parameter name and value pairs.

        Raises:
            CommandFailure: if a name is not a parameter attribute
        """
        for name, value in params.items():
            try:
                getattr(self, name).update(value, name)
            except AttributeError as error:
                raise CommandFailure(
                    "Unknown parameter: {}".format(name)) from error
class CommandWithParameters(ObjectWithParameters):
    """A class for command with parameters."""

    def __init__(self, namespace, command, path=""):
        """Create a CommandWithParameters object.

        Uses Avocado's utils.process module to run a command str provided.

        Args:
            namespace (str): yaml namespace (path to parameters)
            command (str): string of the command to be executed.
            path (str, optional): path to location of command binary file.
                Defaults to "".
        """
        super().__init__(namespace)
        self._command = command
        self._path = path
        self._pre_command = None

    @property
    def command(self):
        """Get the command without its parameters."""
        return self._command

    @property
    def command_path(self):
        """Get the path used for the command."""
        return self._path

    def __str__(self):
        """Assemble the full command line from the command and its parameters.

        Returns:
            str: the command with all the defined parameters
        """
        # Collect the string form of every parameter that has a value set
        arguments = []
        for param_name in self.get_str_param_names():
            text = str(getattr(self, param_name))
            if text != "":
                arguments.append(text)

        # Prefix with any pre-command and join the command with its path
        pieces = []
        if self._pre_command is not None:
            pieces.append(self._pre_command)
        pieces.append(os.path.join(self._path, self._command))

        return " ".join(pieces + arguments)

    def get_str_param_names(self):
        """Get a sorted list of the names of the command attributes.

        Returns:
            list: a list of class attribute names used to define parameters
                for the command.
        """
        return self.get_param_names()
class YamlParameters(ObjectWithParameters):
    """A class of parameters used to create a yaml file."""

    def __init__(self, namespace, filename=None, title=None, other_params=None):
        """Create a YamlParameters object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            filename (str): the yaml file to generate with the parameters
            title (str, optional): namespace under which to place the
                parameters when creating the yaml file. Defaults to None.
            other_params (YamlParameters, optional): yaml parameters to
                include with these yaml parameters. Defaults to None.
        """
        super().__init__(namespace)
        self.filename = filename
        self.title = title
        self.other_params = other_params

    def get_params(self, test):
        """Get values for the yaml parameters from the test yaml file.

        Args:
            test (Test): avocado Test object
        """
        # Parameters defined directly on this object
        super().get_params(test)
        # Parameters defined by the nested parameter object, if any
        if self.other_params is not None:
            self.other_params.get_params(test)

    def get_yaml_data(self):
        """Convert the parameters into a dictionary to use to write a yaml file.

        Returns:
            dict: a dictionary of parameter name keys and values
        """
        # Start from the nested parameters' data when available
        yaml_data = {}
        if (self.other_params is not None
                and hasattr(self.other_params, "get_yaml_data")):
            yaml_data = self.other_params.get_yaml_data()

        # Only parameters with an assigned value are written out
        for name in self.get_param_names():
            param_value = getattr(self, name).value
            if param_value is not None:
                yaml_data[name] = param_value

        if self.title is None:
            return yaml_data
        return {self.title: yaml_data}

    def is_yaml_data_updated(self):
        """Determine if any of the yaml file parameters have been updated.

        Returns:
            bool: whether or not a yaml file parameter has been updated
        """
        # Check the nested parameters first, then stop at the first local
        # parameter flagged as updated
        if (self.other_params is not None
                and hasattr(self.other_params, "is_yaml_data_updated")
                and self.other_params.is_yaml_data_updated()):
            return True
        return any(
            getattr(self, name).updated for name in self.get_param_names())

    def reset_yaml_data_updated(self):
        """Reset each yaml file parameter updated state to False."""
        if (self.other_params is not None
                and hasattr(self.other_params, "reset_yaml_data_updated")):
            self.other_params.reset_yaml_data_updated()
        for name in self.get_param_names():
            getattr(self, name).updated = False

    def create_yaml(self, filename=None):
        """Create a yaml file from the parameter values.

        A yaml file will only be created if at least one of its parameter
        values has been updated (BasicParameter.updated = True).

        Args:
            filename (str, optional): the yaml file to generate with the
                parameters. Defaults to None, which uses self.filename.

        Raises:
            CommandFailure: if there is an error creating the yaml file

        Returns:
            bool: whether or not an updated yaml file was created
        """
        if not self.is_yaml_data_updated():
            return False

        # At least one parameter changed; write out a fresh yaml file
        if filename is None:
            filename = self.filename
        yaml_data = self.get_yaml_data()
        self.log.info("Writing yaml configuration file %s", filename)
        try:
            with open(filename, 'w') as write_file:
                yaml.dump(yaml_data, write_file, default_flow_style=False)
        except Exception as error:
            raise CommandFailure(
                "Error writing the yaml file {}: {}".format(
                    filename, error)) from error
        self.reset_yaml_data_updated()
        return True

    def set_value(self, name, value):
        """Set the value for a specified attribute name.

        Args:
            name (str): name of the attribute for which to set the value
            value (object): the value to set

        Returns:
            bool: if the attribute name was found and the value was set
        """
        attribute = getattr(self, name, None)
        if attribute is None:
            # Not defined locally; fall back to the nested parameters
            if self.other_params is not None:
                return self.other_params.set_value(name, value)
            return False
        if hasattr(attribute, "update"):
            # BasicParameter-style attribute; use its update method
            attribute.update(value, name)
        else:
            # Plain attribute; assign it directly
            setattr(self, name, value)
            self.log.debug("Updated param %s => %s", name, value)
        return True

    def get_value(self, name):
        """Get the value of the specified attribute name.

        Args:
            name (str): name of the attribute from which to get the value

        Returns:
            object: the object's value referenced by the attribute name
        """
        attribute = getattr(self, name, None)
        if attribute is not None:
            # BasicParameter-style attributes expose their data via .value
            return attribute.value if hasattr(attribute, "value") else attribute
        if self.other_params is not None:
            return self.other_params.get_value(name)
        return None
class TransportCredentials(YamlParameters):
    """Transport credentials listing certificates for secure communication."""

    def __init__(self, namespace, title, log_dir):
        """Initialize a TransportConfig object.

        Args:
            namespace (str): yaml namespace (path to parameters)
            title (str, optional): namespace under which to place the
                parameters when creating the yaml file. Defaults to None.
            log_dir (str): directory in which to place the CA certificate
        """
        super().__init__(namespace, None, title)

        # Insecure mode defaults to enabled unless DAOS_INSECURE_MODE is set
        # to something other than a case-insensitive "true"
        insecure_setting = str(os.environ.get("DAOS_INSECURE_MODE", True))
        self.allow_insecure = BasicParameter(
            None, insecure_setting.lower() == "true")
        self.ca_cert = LogParameter(log_dir, None, "daosCA.crt")

    def get_yaml_data(self):
        """Convert the parameters into a dictionary to use to write a yaml file.

        Returns:
            dict: a dictionary of parameter name keys and values
        """
        yaml_data = super().get_yaml_data()

        # Always include allow_insecure, even when its value is None and
        # would otherwise be omitted by the base class
        if self.title is not None:
            yaml_data[self.title]["allow_insecure"] = self.allow_insecure.value
        else:
            yaml_data["allow_insecure"] = self.allow_insecure.value

        return yaml_data

    def get_certificate_data(self, name_list):
        """Get certificate data by name_list.

        Args:
            name_list (list): list of certificate attribute names.

        Returns:
            dict: a dictionary mapping each certificate directory name to the
                list of certificate file names within it.
        """
        data = {}
        # Certificate files are only relevant when running in secure mode
        if not self.allow_insecure.value:
            for attr_name in name_list:
                cert_path = getattr(self, attr_name).value
                if isinstance(cert_path, str):
                    dir_name, file_name = os.path.split(cert_path)
                    data.setdefault(dir_name, []).append(file_name)
        return data
class CommonConfig(YamlParameters):
    """Defines common daos_agent and daos_server configuration file parameters.

    Includes:
        - the daos system name (name)
        - a list of access point nodes (access_points)
        - the default port number (port)
        - the transport credentials
    """

    def __init__(self, name, transport):
        """Initialize a CommonConfig object.

        Args:
            name (str): default value for the name configuration parameter
            transport (TransportCredentials): transport credentials
        """
        super().__init__("/run/common_config/*", None, None, transport)

        # Common configuration parameters:
        #
        #   - name: <str>, e.g. "daos_server"
        #       Name associated with the DAOS system.
        #
        #   - access_points: <list>, e.g. ["hostname1:10001"]
        #       Hosts may be listed with or without a port; the default port
        #       below is assumed when omitted.  Defaults to "localhost" for
        #       local testing.
        #
        #   - port: <int>, e.g. 10001
        #       Default port number with which to bind the daos_server.  Also
        #       used when connecting to access points listed as bare host
        #       names.
        self.name = BasicParameter(None, name)
        self.access_points = BasicParameter(None, ["localhost"])
        self.port = BasicParameter(None, 10001)
class EnvironmentVariables(dict):
    """Dictionary of environment variable keys and values."""

    def copy(self):
        """Return a shallow copy of this object.

        Returns:
            EnvironmentVariables: a copy of this object
        """
        return EnvironmentVariables(self)

    def get_list(self):
        """Get a list of environment variable assignments.

        Entries whose value is None yield just the variable name.

        Returns:
            list: a list of environment variable assignment (key=value)
                strings
        """
        assignments = []
        for key, value in self.items():
            if value is None:
                assignments.append(key)
            else:
                assignments.append("{}={}".format(key, value))
        return assignments

    def get_export_str(self, separator=";"):
        """Get the command to export all of the environment variables.

        Args:
            separator (str, optional): export command separator.
                Defaults to ";".

        Returns:
            str: a string of export commands for each environment variable,
                terminated by the separator; empty when there are no entries
        """
        exports = separator.join(
            "export {}".format(item) for item in self.get_list())
        if not exports:
            return ""
        return exports + separator
class PositionalParameter(BasicParameter):
    """Parameter that defines position.

    Used to support positional parameters for dmg and daos.
    """

    def __init__(self, position, default=None):
        """Create a PositionalParameter object.

        Args:
            position (int): argument position/order
            default (object, optional): default value for the param. Defaults
                to None.
        """
        # The default also serves as the initial value for a positional
        # parameter
        super().__init__(default, default)
        self._position = position

    @property
    def position(self):
        """Position property that defines the position of the parameter."""
        return self._position

    def __lt__(self, other):
        """Order before parameters with a larger position."""
        return self._position < other.position

    def __gt__(self, other):
        """Order after parameters with a smaller position."""
        return self._position > other.position

    def __eq__(self, other):
        """Two positional parameters compare equal when positions match."""
        return self._position == other.position

    def __hash__(self):
        """Hash by position.

        This is used in CommandWithPositionalParameters.get_attribute_names()
        where we use this object as the key for a dictionary.
        """
        return self._position
class CommandWithPositionalParameters(CommandWithParameters):
    """Command that uses positional parameters.

    Used to support positional parameters for dmg and daos.
    """

    def get_attribute_names(self, attr_type=None):
        """Get a sorted list of the names of the attr_type attributes.

        The list has the ordered positional parameters first, then
        non-positional parameters.

        Args:
            attr_type(object, optional): A single object type or tuple of
                object types used to filter class attributes by their type.
                Defaults to None, which selects every attribute.

        Returns:
            list: a list of class attribute names used to define parameters
        """
        positional = {}
        non_positional = []
        for name in sorted(list(self.__dict__)):
            attr = getattr(self, name)
            # Fix: honor the documented attr_type=None default (select all
            # attributes), matching the parent class; previously
            # isinstance(attr, None) raised a TypeError.
            if attr_type is None or isinstance(attr, attr_type):
                if hasattr(attr, "position"):
                    # PositionalParameter objects hash/sort by position
                    positional[attr] = name
                else:
                    non_positional.append(name)
        return [positional[key] for key in sorted(positional)] + non_positional
|
[
"noreply@github.com"
] |
dsikich.noreply@github.com
|
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.